[
  {
    "path": ".cargo/config.toml",
    "content": "[build]\nrustflags = [\"--cfg\", \"hax\"]\n"
  },
  {
    "path": ".docker/Dockerfile",
    "content": "# This Dockerfile should be run from the root directory of this repo\n# e.g. `docker build -f .docker/Dockerfile .` from the parent directory\n\nFROM nixpkgs/nix-flakes\n\n# See issue #71\nRUN if [ \"$(uname)\" = Darwin ]; then \\\n    echo \"filter-syscalls = false\" >> /etc/nix/nix.conf; \\\n    fi\n\n# Prepare the sources\nCOPY . /hax-sources\nRUN cd /hax-sources && git init && git add .\n\n# Use cache to speed up install\nENV PATH=\"$PATH:/root/.nix-profile/bin\"\nRUN nix-env -iA cachix -f https://cachix.org/api/v1/install\nRUN cachix use hacspec\n\n# Install\nRUN nix profile install /hax-sources\n"
  },
  {
    "path": ".dockerignore",
    "content": ".git\n.gitignore\n**/target\n**/_build\ndebug"
  },
  {
    "path": ".envrc",
    "content": "watch_file rust-toolchain.toml\nuse flake\n"
  },
  {
    "path": ".github/assets/change-padding.sh",
    "content": "#!/usr/bin/env bash\n# set padding so that logos are centered when rendered by GH\n\nset -euo pipefail\nX=\"${1:?Usage: $0 <new Y value>}\"\n\nfind . -type f -name '*.svg' -exec sd 'id=\"__topPaddingWrapper\" transform=\"translate\\(0, \\d+\\)\"' \"id=\\\"__topPaddingWrapper\\\" transform=\\\"translate(0, ${1})\\\"\" {} +\n"
  },
  {
    "path": ".github/workflows/bertie.yml",
    "content": "name: Extract Bertie\n\non:\n  pull_request:\n  merge_group:\n  workflow_dispatch:\n  push:\n    branches: [main]\n\nenv:\n  CARGO_TERM_COLOR: always\n\njobs:\n  extract-bertie:\n    if: ${{ github.event_name == 'workflow_dispatch' || github.event_name == 'merge_group' }}\n    runs-on: \"ubuntu-latest\"\n\n    steps:\n      - name: ⤵ Clone Bertie repository\n        uses: actions/checkout@v4\n        with:\n          repository: cryspen/bertie\n\n      - uses: actions/checkout@v4\n        with:\n          path: hax\n\n      - uses: DeterminateSystems/nix-installer-action@main\n      - name: ⤵ Install hax\n        run: |\n          nix profile install ./hax\n\n      - name: 🏃 Extract fstar\n        run: ./hax-driver.py extract-fstar\n"
  },
  {
    "path": ".github/workflows/changelog.yml",
    "content": "name: Check Changelog Update\n\non:\n  pull_request:\n    types: [opened, synchronize, reopened, edited]\n    if: github.actor != 'github-merge-queue[bot]'\n\njobs:\n  check-changelog:\n    if: github.actor != 'github-merge-queue[bot]'\n    runs-on: ubuntu-latest\n\n    steps:\n      - name: Check for [skip changelog] tag in PR body\n        id: skip_check\n        uses: actions/github-script@v7\n        with:\n          script: |\n            const body = context.payload.pull_request.body || \"\";\n            core.debug(body);\n            if (body.includes('[skip changelog]')) {\n              core.notice(\"Skipping changelog check because [skip changelog] was found in PR body.\");\n              core.setOutput(\"skip\", \"true\");\n            } else {\n              core.setOutput(\"skip\", \"false\");\n            }\n\n      - name: Checkout full git history\n        if: steps.skip_check.outputs.skip == 'false'\n        uses: actions/checkout@v4\n        with:\n          fetch-depth: 0\n\n      - name: Fetch base branch\n        if: steps.skip_check.outputs.skip == 'false'\n        run: git fetch origin ${{ github.base_ref }}\n\n      - name: Check if CHANGELOG.md was updated\n        if: steps.skip_check.outputs.skip == 'false'\n        id: updated\n        run: |\n          git diff --name-only origin/${{ github.base_ref }} HEAD > changed_files.txt\n          echo \"::group::Changed files\"\n          cat changed_files.txt\n          echo \"::endgroup::\"\n\n          if ! 
grep -q 'CHANGELOG.md' changed_files.txt; then\n            {\n              echo '**Missing `CHANGELOG.md` entry**'\n              echo ''\n              echo 'Please do one of the following:'\n              echo '- Add relevant changes to `CHANGELOG.md`'\n              echo '- Or add `[skip changelog]` to the pull request body'\n              echo ''\n              echo 'Once done, re-run this workflow by clicking **\"Re-run jobs\"**.'\n              echo ''\n              cat CONTRIBUTING.md | awk '/^### Changelog$/{f=1;next} /^##?#? /&&f{exit} f' | sed 's/^###\\s*//'\n            } > error-message\n            cat error-message >> $GITHUB_STEP_SUMMARY\n            exit 1\n          fi\n\n      - name: Fail with markdown error\n        if: failure()\n        uses: actions/github-script@v7\n        with:\n          script: |\n            const msg = require('fs').readFileSync('error-message', 'utf8');\n            core.setFailed(msg);\n"
  },
  {
    "path": ".github/workflows/clippy_rust_engine.yml",
    "content": "name: Linting for the Rust engine\n\non:\n  pull_request:\n  merge_group:\n  workflow_dispatch:\n  push:\n    branches: [main]\n\njobs:\n  clippy:\n    name: clippy\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v4\n    - name: Install clippy\n      run: |\n        rustup component add clippy\n    - name: Run clippy\n      run: |\n        cargo clippy -p hax-rust-engine -- -D warnings --no-deps\n"
  },
  {
    "path": ".github/workflows/extract_and_run_coq.yml",
    "content": "name: Extract and Run - Coq\n\non: [pull_request]\njobs:\n  build:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v4\n      - uses: DeterminateSystems/nix-installer-action@main\n\n      - name: ⤵ Install hax\n        run: |\n          nix build .\\#check-coq-coverage"
  },
  {
    "path": ".github/workflows/flake_lock.yml",
    "content": "name: Make sure flake.lock is up-to-date\n\non:\n  pull_request:\n  merge_group:\n  workflow_dispatch:\n\njobs:\n  flake_lock_up_to_date:\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v4\n    - uses: DeterminateSystems/nix-installer-action@main\n    - name: Lock flake\n      run: nix flake lock\n    - name: Diff `flake.lock`\n      run: git diff --exit-code flake.lock\n"
  },
  {
    "path": ".github/workflows/format.yml",
    "content": "name: Ensure formatting\n\non:\n  pull_request:\n  merge_group:\n  workflow_dispatch:\n  push:\n    branches: [main]\n\njobs:\n  ocamlformat:\n    name: ocamlformat\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v4\n    - uses: DeterminateSystems/nix-installer-action@main\n    - name: Run OCaml formatter\n      run: |\n        nix shell ..#ocamlformat -c \\\n          ocamlformat --check $(find . -name '*.ml')\n      working-directory: engine\n  rustfmt:\n    name: rustfmt\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v4\n    - name: Run Rust formatter\n      run: |\n        cargo fmt --check\n"
  },
  {
    "path": ".github/workflows/gh_pages.yml",
    "content": "name: Deploy to GH Pages\n\non:\n  workflow_dispatch:\n  push:\n    branches: [main]\n\njobs:\n  # Build job\n  build:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/checkout@v4\n      - uses: DeterminateSystems/nix-installer-action@main\n      - name: Build documentation\n        run: nix build .#docs\n      - name: Upload static files as artifact\n        id: deployment\n        uses: actions/upload-pages-artifact@v3\n        with:\n          path: result/\n  # Deploy job\n  deploy:\n    needs: build\n    permissions:\n      pages: write      # to deploy to Pages\n      id-token: write   # to verify the deployment originates from an appropriate source\n\n    # Deploy to the github-pages environment\n    environment:\n      name: github-pages\n      url: ${{ steps.deployment.outputs.page_url }}\n\n    # Specify runner + deployment step\n    runs-on: ubuntu-latest\n    steps:\n      - name: Deploy to GitHub Pages\n        id: deployment\n        uses: actions/deploy-pages@v4 # or specific \"vX.X.X\" version tag for this action\n"
  },
  {
    "path": ".github/workflows/install_and_test.yml",
    "content": "name: Install & test\n\non:\n  pull_request:\n  merge_group:\n  workflow_dispatch:\n  push:\n    branches: [main]\n\njobs:\n  tests:\n    name: nix-action\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v4\n    - uses: DeterminateSystems/nix-installer-action@main\n    - uses: cachix/cachix-action@v15\n      with:\n        name: hax\n        skipPush: true\n        extraPullNames: fstar-nix-versions, z3-nix-versions\n\n    - name: Build\n      run: nix build -L\n\n    - name: Install the toolchain\n      run: |\n        nix profile install nixpkgs#yq\n        nix profile install .#rustc\n        nix profile install .\n\n    - name: Ensure readme coherency\n      run: |\n        nix build .#check-readme-coherency -L\n\n    - name: Test the toolchain\n      run: |\n        nix build .#check-toolchain -L\n\n    - name: Try to extract Rust By Examples\n      run: |\n        nix build .#rust-by-example-hax-extraction -L\n\n    - name: Test the examples\n      run: |\n        cd examples\n        nix develop ..#ci-examples --command make clean\n        nix develop ..#ci-examples --command make\n\n    - name: Checkout specifications\n      uses: actions/checkout@v4\n      with:\n        repository: 'hacspec/specs'\n        path: specs\n\n    - name: Push to Cachix\n      if: ${{ github.event_name == 'workflow_dispatch'  || github.event_name == 'merge_group'  }}\n      env:\n        CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}\n      run: |\n        nix-store -qR --include-outputs $(nix build .# --json | jq -r '.[].outputs | to_entries[].value') \\\n          | cachix push hax\n"
  },
  {
    "path": ".github/workflows/licenses.yml",
    "content": "name: Check licenses\n\non:\n  pull_request:\n  merge_group:\n  workflow_dispatch:\n  push:\n    branches: [main]\n\njobs:\n  tests:\n    name: nix-action\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v4\n    - uses: extractions/setup-just@v1\n    - name: Set-up OCaml\n      uses: ocaml/setup-ocaml@v3\n      with:\n        ocaml-compiler: 5\n    - uses: actions-rust-lang/setup-rust-toolchain@v1\n      with:\n        toolchain: stable\n    - name: Install cargo-deny\n      run: cargo install cargo-deny\n    - name: Install cargo-deny\n      run: cargo install toml2json\n    - name: Check the licenses\n      run: just check-licenses\n\n"
  },
  {
    "path": ".github/workflows/mldsa.yml",
    "content": "name: Extract and lax-check libcrux ML-DSA\n\non:\n  schedule:\n    - cron: '0 0 * * *'\n  workflow_dispatch:\n\nenv:\n  CARGO_TERM_COLOR: always\n\njobs:\n  extract-and-lax-mldsa:\n    runs-on: \"ubuntu-latest\"\n\n    steps:\n      - name: ⤵ Clone Libcrux repository\n        uses: actions/checkout@v4\n        with:\n          repository: cryspen/libcrux\n          path: libcrux\n\n      - uses: actions/checkout@v4\n        with:\n          path: hax\n\n      - name: Use local hax-lib\n        working-directory: libcrux\n        run: |\n          cargo remove hax-lib -v -p libcrux-ml-dsa\n          cargo add hax-lib --path \"../hax/hax-lib\" -v -p libcrux-ml-dsa\n\n      - uses: DeterminateSystems/nix-installer-action@main\n      - name: Set up Cachix\n        uses: cachix/cachix-action@v15\n        with:\n          name: fstar-nix-versions\n          push: false\n    \n      - name: ⤵ Install hax\n        run: |\n          nix profile install ./hax\n\n      - name: ⤵ Install FStar\n        run: nix profile install github:FStarLang/FStar/v2025.03.25\n\n      - name: 🏃 Extract ML-DSA crate\n        working-directory: libcrux/libcrux-ml-dsa\n        run: ./hax.py extract\n  \n      - name: 🏃 Lax ML-DSA crate\n        working-directory: libcrux/libcrux-ml-dsa\n        run: |\n          env FSTAR_HOME=${{ github.workspace }}/fstar \\\n              HAX_HOME=${{ github.workspace }}/hax \\\n              PATH=\"${PATH}:${{ github.workspace }}/fstar/bin\" \\\n              ./hax.py prove --admit\n"
  },
  {
    "path": ".github/workflows/mlkem.yml",
    "content": "name: Extract and TC ML-Kem\n\non:\n  pull_request:\n  merge_group:\n  workflow_dispatch:\n  push:\n    branches: [main]\n\nenv:\n  CARGO_TERM_COLOR: always\n\njobs:\n  extract-mlkem:\n    if: ${{ github.event_name == 'workflow_dispatch' || github.event_name == 'merge_group' }}\n    runs-on: \"ubuntu-latest\"\n\n    steps:\n      - name: ⤵ Extract libcrux version from PR body\n        id: extract_version\n        uses: actions/github-script@v7\n        with:\n          result-encoding: string\n          script: |\n            let extractLibcruxRef = body => body.match(/libcrux-ref:\\s*([a-zA-Z0-9._\\/-]+)/)?.[1];\n            const refMap = new Map();\n            if (context.eventName === 'pull_request') {\n              const ref = extractLibcruxRef(context.payload.pull_request?.body || '') ?? 'main';\n              core.notice(`Using libcrux ref: ${resolved}`);\n              return resolved;\n            } else if (context.eventName === 'merge_group') {\n              const query = 'query {repository(owner:\"cryspen\", name:\"hax\") {mergeQueue {entries(first:100) {nodes {pullRequest {body, number}}}}}}';\n              const result = await github.graphql(query);\n              const mergeQueuePRs = result.repository.mergeQueue.entries.nodes;\n              for (const entry of mergeQueuePRs) {\n                const pr = entry.pullRequest\n                core.notice(\"Found merge queue PR:\", pr);\n                const ref = extractLibcruxRef(pr.body);\n                ref && refMap.set(pr.number, ref);\n              }\n              if (refMap.size === 0) {\n                core.notice('No libcrux-ref specified, defaulting to \"main\"');\n                return 'main';\n              }\n              const uniqueRefs = new Set(refMap.values());\n              if (uniqueRefs.size > 1) {\n                let errorMessage = 'Error: Multiple different libcrux refs detected:\\n';\n                for (const [prNum, ref] of refMap.entries())\n     
             errorMessage += `- PR #${prNum}: ${ref}\\n`;\n                core.setFailed(errorMessage);\n                return;\n              }\n\n              const [ref] = uniqueRefs;\n              core.notice(`Using libcrux ref: ${ref}`);\n              return ref;\n            }\n            core.warning(`Unsupported event type: ${context.eventName}, default to main`);\n            return 'main';\n\n      - name: ⤵ Clone Libcrux repository\n        uses: actions/checkout@v4\n        with:\n          repository: cryspen/libcrux\n          path: libcrux\n          ref: ${{ steps.extract_version.outputs.result }}\n\n      - uses: actions/checkout@v4\n        with:\n          path: hax\n\n      - name: Use local hax-lib\n        working-directory: libcrux\n        run: |\n          cargo remove hax-lib -v -p libcrux-ml-kem\n          cargo add hax-lib --path \"../hax/hax-lib\" -v -p libcrux-ml-kem\n\n      - uses: DeterminateSystems/nix-installer-action@main\n      - name: Set up Cachix\n        uses: cachix/cachix-action@v15\n        with:\n          name: fstar-nix-versions\n          push: false\n    \n      - name: ⤵ Install hax\n        run: |\n          nix profile install ./hax\n\n      - name: ⤵ Install FStar\n        run: nix profile install github:FStarLang/FStar/v2025.02.17\n\n      - name: 🏃 Extract ML-KEM crate\n        working-directory: libcrux/libcrux-ml-kem\n        run: ./hax.py extract\n  \n      - name: 🏃 Lax ML-KEM crate\n        working-directory: libcrux/libcrux-ml-kem\n        run: |\n          env FSTAR_HOME=${{ github.workspace }}/fstar \\\n              HAX_HOME=${{ github.workspace }}/hax \\\n              PATH=\"${PATH}:${{ github.workspace }}/fstar/bin\" \\\n              ./hax.py prove --admit\n"
  },
  {
    "path": ".github/workflows/playwright-docs.yml",
    "content": "name: Playwright Docs Tests\non:\n  schedule:\n    - cron: '0 0 * * *'\n  workflow_dispatch:\njobs:\n  test:\n    timeout-minutes: 45\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v5\n    - uses: DeterminateSystems/determinate-nix-action@v3\n    - uses: actions/setup-node@v5\n      with:\n        node-version: lts/*\n    - name: Install dependencies\n      working-directory: docs/.test\n      run: npm ci\n    - name: Install Playwright Browsers\n      working-directory: docs/.test\n      run: npx playwright install --with-deps\n    - name: Replace version with commit hash\n      run: sed -i \"s/const HAX_PLAYGROUND_FORCED_VERSION = false;/const HAX_PLAYGROUND_FORCED_VERSION = \\\"${GITHUB_SHA}\\\";/\" hax_playground.js\n      working-directory: docs/javascripts\n    - name: Playground warmup and build docs\n      run: |\n        set -euo pipefail\n\n        nix build .#docs & pid1=$!\n        curl -sS \"https://hax-playground.cryspen.com/query/$GITHUB_SHA/fstar\" \\\n          -X POST \\\n          -H 'User-Agent: bot' \\\n          -H 'Accept: application/json' \\\n          -H 'Content-Type: application/json' \\\n          --data-raw '[[\"src/lib.rs\",\"fn f(){}\"]]' & pid2=$!\n\n        wait $pid1 || exit_code1=$?\n        wait $pid2 || exit_code2=$?\n\n        exit ${exit_code1:-0} || exit ${exit_code2:-0}\n    - name: Run Playwright tests\n      working-directory: docs/.test\n      run: npx playwright test --reporter github,html --trace on \n    - uses: actions/upload-artifact@v4\n      if: ${{ !cancelled() }}\n      with:\n        name: playwright-report\n        path: docs/.test/playwright-report/\n        retention-days: 30\n"
  },
  {
    "path": ".github/workflows/release.yml",
    "content": "name: Release binaries for hax-engine\n\non:\n  push:\n    tags:\n      - '*'\n\njobs:\n  release-js:\n    if: startsWith(github.ref, 'refs/tags/')\n    runs-on: ubuntu-latest\n    permissions:\n      contents: write\n    steps:\n    - uses: actions/checkout@v4\n    - uses: DeterminateSystems/nix-installer-action@main\n    - name: Build JS\n      run: nix build .#hax-engine.passthru.js -L -o hax-engine.js\n\n    - run: |\n        tar --dereference -czf hacspec_js.tar.gz hax-engine.js\n\n    - name: Release\n      uses: softprops/action-gh-release@v1\n      with:\n        files: hacspec_js.tar.gz\n\n  release:\n    if: startsWith(github.ref, 'refs/tags/')\n    strategy:\n      fail-fast: false\n      matrix:\n        os:\n          - macos-latest\n          - ubuntu-latest\n          # - windows-latest (See #4)\n        ocaml-compiler: [4.14.x]\n        \n    runs-on: ${{ matrix.os }}\n    permissions:\n      contents: write\n    steps:\n      - name: Checkout code\n        uses: actions/checkout@v4\n\n      - uses: ocaml/setup-ocaml@v2\n        with:\n          ocaml-compiler: ${{ matrix.ocaml-compiler }}\n\n      - uses: dtolnay/rust-toolchain@1.70\n          \n      - run: cargo install --path cli/driver && cargo install --path cli/subcommands\n\n      - run: opam install . --deps-only\n        working-directory: engine\n        \n      - run: opam exec -- dune build\n        working-directory: engine\n\n      - run: |\n          cp engine/_build/default/bin/native_driver.exe  hax-engine\n          tar -czf hacspec_${{ matrix.os }}.tar.gz hax-engine\n        \n      - name: Release\n        uses: softprops/action-gh-release@v1\n        with:\n          files: hacspec_${{ matrix.os }}.tar.gz\n\n          \n"
  },
  {
    "path": ".github/workflows/rustc-coverage-tests.yml",
    "content": "name: Rustc coverage tests with negative results, and snapshots verification\n\non:\n    pull_request:\n    merge_group:\n    workflow_dispatch:\n    push:\n        branches: [main]\n\njobs:\n    rustc-coverage-tests:\n      runs-on: ubuntu-latest\n  \n      steps:\n        - uses: actions/checkout@v4\n  \n        - if: runner.environment == 'github-hosted'\n          uses: DeterminateSystems/nix-installer-action@main\n        - name: Set up Cachix\n          uses: cachix/cachix-action@v15\n          with:\n            name: fstar-nix-versions\n            push: false\n      \n        - name: ⤵ Install hax\n          run: |\n            nix profile install .\n            nix profile install nixpkgs#rustup\n  \n        - name: ⤵ Install FStar\n          run: nix profile install github:FStarLang/FStar/v2025.02.17\n\n        - name: Set up Python\n          uses: actions/setup-python@v5\n          with:\n            python-version: '3.x'\n\n        - name: Install Python dependencies\n          uses: py-actions/py-dependency-install@v4\n          with:\n            path: rustc-coverage-tests/requirements.txt\n  \n        - name: Run tests with negative checking\n          working-directory: rustc-coverage-tests\n          run: |\n            FSTAR_HOME=~/.nix-profile python3 run-coverage-tests.py all --with-negative --check-stability\n"
  },
  {
    "path": ".github/workflows/stale.yml",
    "content": "name: 'Triage stale issues and PRs'\non:\n  schedule:\n    - cron: '00 00 * * 4'\n  workflow_dispatch:\n\njobs:\n  stale:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: actions/stale@v9\n        with:\n          stale-issue-message: \"This issue has been marked as stale due to a lack of activity for 60 days. If you believe this issue is still relevant, please provide an update or comment to keep it open. Otherwise, it will be closed in 7 days.\"\n          stale-pr-message: \"This PR has been marked as stale due to a lack of activity for 60 days. If you believe this pull request is still relevant, please provide an update or comment to keep it open. Otherwise, it will be closed in 7 days.\"\n          stale-issue-label: 'stale'\n          exempt-issue-labels: 'keep-open'\n          stale-pr-label: 'stale'\n          exempt-pr-labels: 'keep-open'\n          days-before-stale: 60\n          days-before-close: 7\n          close-issue-message: \"This issue has been closed due to a lack of activity since being marked as stale. If you believe this issue is still relevant, please reopen it with an update or comment.\"\n          close-pr-message: \"This PR has been closed due to a lack of activity since being marked as stale. If you believe this pull request is still relevant, please reopen it with an update or comment.\"\n"
  },
  {
    "path": ".github/workflows/test.yml",
    "content": "name: Test Workspace\n\non:\n  push:\n    branches: [main]\n  pull_request:\n  merge_group:\n  workflow_dispatch:\n\nconcurrency:\n  group: ${{ github.workflow }}-${{ github.ref }}\n  cancel-in-progress: true\n\njobs:\n  test-workspace:\n    strategy:\n      fail-fast: false\n      matrix:\n        os:\n          - macos-latest\n          - ubuntu-latest\n          - windows-latest\n\n    runs-on: ${{ matrix.os }}\n\n    steps:\n      - uses: actions/checkout@v4\n      - uses: Swatinem/rust-cache@v2\n\n      - name: Test\n        run: cargo test --workspace --exclude hax-engine-names-extract --exclude hax-rust-engine --verbose\n\n      - name: Test `hax-frontend-exporter` with feature `rustc` off\n        run: cargo check -p hax-frontend-exporter --no-default-features --verbose\n\n  no-std-lib:\n    runs-on: ubuntu-latest\n    steps:\n      - uses: dtolnay/rust-toolchain@master\n        with:\n          toolchain: stable\n          targets: thumbv7em-none-eabi\n      - uses: actions/checkout@v4\n      - uses: Swatinem/rust-cache@v2\n\n      - name: Build no-std\n        run: |\n          rustup target add thumbv7em-none-eabi\n          cargo build -p hax-lib --target thumbv7em-none-eabi\n"
  },
  {
    "path": ".github/workflows/test_installs.yml",
    "content": "name: Test installations\n\non:\n  pull_request:\n  merge_group:\n  workflow_dispatch:\n  push:\n    branches: [main]\n    \njobs:\n  docker:\n    if: ${{ github.event_name == 'workflow_dispatch'  || github.event_name == 'merge_group'  }}\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v4\n    - run: docker build -f .docker/Dockerfile . -t hax\n  setup_sh:\n    if: ${{ github.event_name == 'workflow_dispatch'  || github.event_name == 'merge_group'   }}\n    strategy:\n      matrix:\n        os:\n          - ubuntu-latest\n          - ubuntu-22.04\n          - macos-latest\n          - macos-15-intel\n    runs-on: ${{ matrix.os }}\n    steps:\n    - uses: actions/checkout@v4\n    - if: runner.os == 'macOS'\n      run: brew install opam nodejs rustup-init jq\n    - if: runner.os == 'Linux'\n      run: sudo apt-get update\n    - if: runner.os == 'Linux'\n      run: sudo apt-get install -y opam nodejs jq\n    - run: curl --proto '=https' --tlsv1.3 https://sh.rustup.rs -sSf | sh -s -- -y\n    - run: opam init --bare -y && opam switch create -y 4.14.1\n    - name: Run `setup.sh`\n      run: |\n        export OPAMERRLOGLEN=0\n        ./setup.sh\n    - run: cargo hax --version\n    - name: Test an extraction\n      run: |\n        cd examples/chacha20\n        eval $(opam env)\n        cargo hax into fstar\n  setup_sh_status:\n    if: |\n      always() &&\n      github.event_name ==  'workflow_dispatch'  ||github.event_name ==  'merge_group'  \n    needs: setup_sh\n    runs-on: ubuntu-latest\n    steps:\n      - name: Successful\n        if: ${{ !(contains(needs.*.result, 'failure')) }}\n        run: exit 0\n      - name: Failing\n        if: ${{ contains(needs.*.result, 'failure') }}\n        run: exit 1\n"
  },
  {
    "path": ".github/workflows/this-month-in-hax.yml",
    "content": "name: Generate This Month in hax\n\non:\n  workflow_dispatch:\n  schedule:\n    - cron: '0 4 1 * *'\n\njobs:\n  generate:\n    permissions:\n      issues: write\n      contents: write\n    if: github.repository == 'cryspen/hax'\n    runs-on: ubuntu-latest\n    steps:\n      - name: Checkout repo\n        uses: actions/checkout@v5\n\n      - name: Run script and capture output\n        id: run_script\n        run: |\n          bash .utils/this-month-in-hax-skeleton.sh\n        env:\n          GH_TOKEN: ${{ github.token }}\n\n      - name: Commit changes\n        run: |\n          git config user.name \"github-actions[bot]\"\n          git config user.email \"41898282+github-actions[bot]@users.noreply.github.com\"\n          git checkout -b $(cat this-month-in-hax-branch)\n          git add docs/blog\n          git commit -m \"chore(blog): set up a skeleton for 'This Month in hax'\"\n          git push --force origin $(cat this-month-in-hax-branch)\n\n      - uses: JasonEtco/create-an-issue@v2\n        env:\n          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n        with:\n          filename: this-month-in-hax-issue.yml\n"
  },
  {
    "path": ".gitignore",
    "content": "debug/\ntarget/\n**/*.rs.bk\n**/*.profraw\nnode_modules\nTODO.org\n.direnv\n_build\nresult\n.DS_Store\n.depend\n.cache\n.lake\nproof-libs/fstar/rust_primitives/#*#\n**/proofs/*/extraction/*\n!**/proofs/*/extraction/Makefile\n!**/proofs/*/extraction/*.diff\n!**/proofs/fstar/extraction/*.fst\n!**/proofs/coq/extraction/*.v\n!**/proofs/lean/extraction/lakefile.toml\n!**/proofs/rust/extraction/Cargo.toml\n"
  },
  {
    "path": ".utils/jq_utils.jq",
    "content": "# Removes a field from an object at any depth\ndef remove_field(field):\n    walk(if type == \"object\" and has(field) then del(.[field]) else . end);\n\n# Remove `table_id` indirections whenever a value is found\ndef thir_drop_table_id_nodes:\n    walk(if type == \"object\" and has(\"cache_id\") and has(\"value\") and .value then .value else . end);\n\n# Prints a THIR def_id as a string, useful for searching\ndef thir_str_of_def_id_contents:\n  (\n      [.krate]\n    + [\n          .path.[]\n        | try (.disambiguator as $d | .data | . as $data | keys | .[0] | $data[.] + (if $d > 0 then \"#\" + $d else \"\" end))\n        | select(type == \"string\")]\n  ) | join(\"::\");\n\n# Prints all THIR def_ids\ndef thir_str_of_def_ids:\n     thir_drop_table_id_nodes | walk(\n          # if type == \"object\" and has(\"contents\") and (.contents | type) == \"object\" and .contents | has(\"krate\") and .contents | has(\"path\") then\n          if try(. as $o | ($o.contents.krate | type == \"string\") and ($o.contents.path | type == \"array\")) catch false then\n            .contents | thir_str_of_def_id_contents\n          else\n            .\n          end);\n"
  },
  {
    "path": ".utils/rebuild.sh",
    "content": "#!/usr/bin/env bash\n\n# This is a small script to rebuild Hax (the Rust CLI & frontend and\n# OCaml engine) quickly.\n\n# Options:\n#  - the flag `--online` allow Cargo to look for updates on the internet;\n#  - the environment variable `DUNEJOBS` limits the number of jobs `dune`\n#    is allowed to spawn in parallel while building.\n\nset -euo pipefail\n\nOFFLINE_FLAG=\"--offline\"\nif [[ \"${1:-}\" == \"--online\" ]]; then\n    OFFLINE_FLAG=\"\"\n    shift 1\nfi\n\nTARGETS=\"${1:-rust ocaml}\"\nDUNEJOBS=${DUNEJOBS:-} # required since `set -u`\n\nYELLOW=43\nGREEN=42\nRED=41\nBLACK=90\nstatus () { echo -e \"\\033[1m[rebuild script] \\033[30m\\033[$1m$2\\033[0m\"; }\n\ncd_rootwise () {\n    cd $(git rev-parse --show-toplevel)/$1\n}\n\nrust () {\n    cd_rootwise \"cli\"\n    for i in driver subcommands ../engine/names/extract ../rust-engine; do\n        CURRENT=\"rust/$i\"\n        cargo install --locked --quiet $OFFLINE_FLAG --debug --path $i\n    done\n}\n\nocaml () {\n    cd_rootwise \"engine\"\n    CURRENT=\"ocaml\"\n    dune build $([ -z $DUNEJOBS ] || echo \"-j $DUNEJOBS\")\n    CURRENT=\"ocaml/install\"\n\n    # Small hack for those that are not using [opam] at all: by\n    # default install OCaml binaries in `~/.cargo` (which is supposed\n    # to be in PATH anyway).\n    INSTALL_PREFIX=\"${OPAM_SWITCH_PREFIX:-${DUNE_INSTALL_PREFIX:-$HOME/.cargo}}\"\n    dune install --profile dev --prefix $INSTALL_PREFIX\n\n    if ( command -v \"which\" && command -v \"sort\" && command -v \"wc\" ) >/dev/null; then\n        case $(which -a hax-engine | sort -u | wc -l) in\n            0) status $YELLOW 'Warning: cannot detect `hax-engine` in PATH';;\n            1) :;;\n            *) status $YELLOW 'Warning: multiple `hax-engine` detected in PATH. Maybe you installed Hax with OPAM (i.e. via `setup.sh`)? 
Please uninstall it, otherwise you might use a stale engine!';;\n        esac\n    else\n        status $YELLOW 'Warning: cannot run sanity checks because `which`, `sort` or `wc` commands are not available. Please install them.'\n    fi\n}\n\non_exit () {\n    if [ $? -ne 0 ]; then\n        status $RED \"ERR: $CURRENT\";\n    fi\n}\ntrap on_exit                EXIT ERR\ntrap \"status $RED 'SIGINT'\" SIGINT\n\nCURRENT=\"none\"\nstarted() { [ -z ${QUIET+x} ] && status $BLACK \"$1 build started\" || true; }\nif [[ \"$TARGETS\" == *rust* ]]; then\n    started rust\n    rust\n    status $GREEN \"rust succeed\"\nfi\nif [[ \"$TARGETS\" == *ml* ]]; then\n    started ocaml\n    ocaml\n    status $GREEN \"ocaml succeed\"\nfi\n\n"
  },
  {
    "path": ".utils/rust-by-example.js",
    "content": "// This script expects Rust By Example to be in current directory\n// (clone the repo https://github.com/rust-lang/rust-by-example, `cd` into it, and run `node rust-by-examples.js`)\n\nconst fs = require('fs');\nconst SRC_DIR = 'src';\n\n// Lists all markdown files under `SRC_DIR`\nfunction getMarkdownFiles() {\n    return fs.readdirSync(SRC_DIR, { recursive: true })\n        .filter(path => path.endsWith('.md'));\n}\n\n// Code blocks from a file of given path\nfunction extractCodeBlocks(path) {\n    let contents = fs.readFileSync(SRC_DIR + '/' + path).toString();\n    let blocks = contents\n        .split(/^```/m)\n        .filter((_, i) => i % 2 == 1)\n        .map(s => {\n            let lines = s.split('\\n');\n            let modifiers = lines[0].split(',').map(x => x.trim()).filter(x => x);\n            let contents = lines.slice(1).join('\\n');\n            return {modifiers, contents};\n        })\n        .filter(x => x.modifiers.includes('rust'));\n    let name = path.replace(/[.]md$/, '').split('/').join('_');\n    return {name, blocks};\n}\n\nlet code = getMarkdownFiles()\n    .map(extractCodeBlocks)\n    .filter(({blocks}) => blocks.length);\n\n// Strips the comments of a rust snippet\nlet stripComments = rust_snippet => rust_snippet.replace(/[/][/]+.*/mg, '');\n\n// Given a Rust snippet, returns `true` whenever we detect a top-level\n// `let` binding: this means we need to wrap the snippet in a function.\nlet isDirectLet = rust_snippet => stripComments(rust_snippet).trim().startsWith('let ');\n\n// Wraps a Rust snippet inside a function\nlet protectSnippet = rust_snippet => `fn wrapper_fn() { let _ = {${rust_snippet}}; }`;\n\nfunction codeBlocksToModules(code_blocks) {\n    let denylist = [\n        /unsafe_asm \\d+/\n    ];\n    let modules = {};\n\n    for(let {name, blocks} of code_blocks) {\n        let mod_section = `section_${name}`;\n        modules[mod_section] = {};\n        let nth = 0;\n        for(let {modifiers, contents} 
of blocks) {\n            nth += 1;\n            if(['edition2015', 'compile_fail', 'ignore'].some(m => modifiers.includes(m))) {\n                continue;\n            }\n            let id = `section_${name} ${nth}`;\n            // Remove top-level assertions\n            contents = contents.replace(/^# assert.*\\n?/mg, '');\n            // Strip `# ` (the mdbook marker to hide a line)\n            contents = contents.replace(/^# /mg, '');\n            // Whenever we detect a `let`\n            if(isDirectLet(contents))\n                contents = protectSnippet(contents);\n            if(denylist.some(re => id.match(re)))\n                continue;\n            let mod_snippet = `snippet_${nth}`;\n            // Replace `crate::` by a full path to the current module\n            contents = contents.replace(/crate::/g, 'crate::' + mod_section + '::' + mod_snippet + '::');\n            modules[mod_section][mod_snippet] = `// modifiers: ${modifiers.join(', ')}\\n` + contents;\n        }\n    }\n\n    return modules;\n}\n\nlet modules = codeBlocksToModules(code);\n\nlet OUTPUT_CRATE = 'rust-by-examples-crate';\nfs.rmSync(OUTPUT_CRATE, { recursive: true, force: true });\nfs.mkdirSync(OUTPUT_CRATE, { recursive: true });\nconst { execSync } = require('child_process');\nexecSync(\"cargo init --lib\", { cwd: OUTPUT_CRATE });\n\nlet OUTPUT_CRATE_SRC = OUTPUT_CRATE + '/src/';\nfs.rmSync(OUTPUT_CRATE_SRC, { recursive: true, force: true });\nlet root_mod = '#![allow(unused)]';\nfor(let mod_name in modules) {\n    let submodules = modules[mod_name];\n    fs.mkdirSync(OUTPUT_CRATE_SRC + mod_name, { recursive: true });\n    let mod_contents = '';\n    for (let submod_name in submodules) {\n        let contents = submodules[submod_name];\n        fs.writeFileSync(OUTPUT_CRATE_SRC + mod_name + '/' + submod_name + '.rs', contents);\n        mod_contents += 'pub mod ' + submod_name + ';\\n';\n    }\nfs.writeFileSync(OUTPUT_CRATE_SRC + mod_name + '.rs', mod_contents);\n    
root_mod += 'pub mod ' + mod_name + ';\\n';\n}\nfs.writeFileSync(OUTPUT_CRATE_SRC + '/lib.rs', root_mod);\n\n\n// A list of [<module_name>, [<snippet_number>]] that are known not to be processed by hax\nlet cargo_hax_denylist = [\n    ['error_iter_result', [3]],\n    ['error_multiple_error_types_boxing_errors', [1]], // uses dyn\n    ['error_multiple_error_types_reenter_question_mark', [2]], // uses dyn\n    ['error_multiple_error_types_wrap_error', [1]], // uses dyn\n    ['error_option_unwrap_defaults', [3,4]],\n    ['flow_control_for', [1,2,3,5]],\n    ['flow_control_if_let', [3]],\n    ['flow_control_let_else', [1,2]], // Let else panics, bug #1460\n    ['flow_control_loop_nested', [1]],\n    ['flow_control_loop_return', [1]],\n    ['flow_control_loop', [1]],\n    ['flow_control_match_binding', [1,2]],\n    ['flow_control_match_destructuring_destructure_pointers', [1]],\n    ['flow_control_match_destructuring_destructure_slice', [1]],\n    ['flow_control_match_destructuring_destructure_tuple', [1]], // .. 
pattern, bug #1462\n    ['flow_control_match', [1]],\n    ['flow_control_while_let', [1,2]],\n    ['fn_closures_capture', [1]],\n    ['fn_closures_input_parameters', [1]],\n    ['fn', [1]],\n    ['hello_print_fmt', [1]],\n    ['generics_bounds_testcase_empty', [1]], // Marker traits, bug #1221\n    ['macros_dry', [1]],\n    ['scope_borrow_alias', [1]],\n    ['scope_borrow_ref', [1]],\n    ['scope_move_mut', [1]],\n    ['scope_raii', [1]],\n    ['std_arc', [1]],\n    ['std_hash', [1]],\n    ['std_misc_arg_matching', [1]],\n    ['std_misc_channels', [1]],\n    ['std_misc_file_read_lines', [3]],\n    ['std_misc_threads', [1]],\n    ['std_misc_threads_testcase_mapreduce', [1]],\n    ['std_str', [1]],\n    ['trait_iter', [1]],\n    ['trait', [1]],\n    ['trait_dyn', [1]], // uses dyn\n    ['trait_supertraits', [1]], // uses dyn\n    ['unsafe', [1,2]],\n].map(([module, snippets]) => snippets.map(n => `section_${module}::snippet_${n}`)).flat();\n\nlet include_clause = cargo_hax_denylist.map(path => `-*::${path}::**`).join(' ');\n\nexecSync(`cargo hax into -i '${include_clause}' fstar`, { cwd: OUTPUT_CRATE, stdio: 'inherit' });\n"
  },
  {
    "path": ".utils/this-month-in-hax-skeleton.sh",
    "content": "#!/usr/bin/env bash\n# This script creates a skeleton blog post for the \"This Month in hax\" blog series.\n# It writes a new markdown file, and outputs a PR body.\n# This script is a helper for the github action workflow \"this-month-in-hax.yml\".\n\nset -e\n\n# Go to the folder of blog posts\ncd $(git rev-parse --show-toplevel)/docs/blog/posts/this-month-in-hax\n\n# By default, use `cryspen/hax`, and the month and year from two weeks ago\nrepo=\"--repo cryspen/hax\"\nmonth=$(date -d \"14 days ago\" +'%m')\nyear=$(date -d \"14 days ago\" +'%Y')\n\n# Set date formatting to English\nexport LC_ALL=C\n\n# Parse command line arguments.\nall_args=(\"$@\")\nwhile [ $# -gt 0 ]; do\n    case \"$1\" in\n    -r | --repo) repo=\"--repo $2\"; shift ;;\n    -m | --month) month=$2; shift ;;\n    -y | --year) year=$2; shift ;;\n    --author) author=$2; shift ;;\n    esac\n    shift\ndone\n\nreport() {\n    # Calculate the first day of the month\n    start=$(date -u -d \"$year-$month-01\" +\"%Y-%m-%dT%H:%M:%SZ\")\n\n    # Get the last day of the month\n    end=$(date -u -d \"$year-$month-01 + 1 month - 1 day\" +\"%Y-%m-%dT%H:%M:%SZ\")\n\n    # Get all merged PRs with number, title, and description\n    pr_data=$(\n        gh pr list $repo --state merged --limit 1000 \\\n            --json number,title,url,author,mergedAt \\\n            --jq \"map(select(.mergedAt >= \\\"$start\\\" and .mergedAt <= \\\"$end\\\" and .author.login != \\\"app/dependabot\\\")) | .[] | {number, title, url, author}\" | jq -s\n    )\n\n    echo \"In $(date -d \"$year-$month-01\" +\"%B\"), we successfully merged **$(echo \"$pr_data\" | jq -r 'length') pull requests**!\"\n    echo \"\"\n    echo \"<DESCRIPTION>\"\n    echo \"\"\n    echo \"### Full list of PRs\"\n\n    # Extract markdown list with jq\n    echo \"$pr_data\" | jq -r '.[] | . 
| \"* \\\\#\\(.number): [\\(.title)](\\(.url))\"'\n\n    echo \"\"\n    echo \"### Contributors\"\n    # Extract markdown list of authors with jq\n    echo \"$pr_data\" | jq -r 'map(.author.login) | unique | .[] | \"* [@\\(.)](https://github.com/\\(.))\"'\n}\n\n# Available authors, and their GH handles\nauthors_and_handles() {\n    sort -u <<AUTHORS | sed '/^[[:space:]]*$/d'\nmaxime:maximebuyse\nclement:clementblaudeau\nalex:abentkamp\nAUTHORS\n}\nauthors() {\n    authors_and_handles | cut -d: -f1\n}\nhandle_of() {\n    authors_and_handles | grep \"^$1:\" | cut -d: -f2\n}\n\nfind_last_blog_authors() {\n    N=$(authors_and_handles | wc -l)\n    N=$((N - 1))\n    ls -t1 | head -n$N | xargs awk '/^authors:/,/^---/{ if ($0 ~ /^  - /) { sub(/^  - /, \"\"); print } }' | sort -u\n}\n\npick_author() {\n    diff <(authors) <(find_last_blog_authors) | grep '^< ' | cut -d' ' -f2 | shuf -n1\n}\n\nauthor=$(pick_author)\n\nBLOG_POST_FILE=\"$year-$month.md\"\n\n\ncat << HEADER > $BLOG_POST_FILE\n---\nauthors:\n  - $author\ntitle: \"This Month in Hax: $(date -d \"$year-$month-15\" +\"%B %Y\")\"\ndate: $(date +\"%Y-%m-%d\")\n---\n\nHEADER\nreport >> $BLOG_POST_FILE\n\nBLOG_POST=\"$(cat $BLOG_POST_FILE)\"\n\n# Go to root\ncd $(git rev-parse --show-toplevel)\nBRANCH=\"this-month-in-hax-blog-post-$year-$month\"\necho $BRANCH > this-month-in-hax-branch\n\n# Write the issue template, assigning the author's handle\ncat <<MESSAGE > this-month-in-hax-issue.yml\n---\ntitle: Write This Month in Hax\nassignees: $(handle_of $author)\n---\n\nThis is an auto-generated issue for the \"This Month in hax\" blog series.\n\nBranch [\\`$BRANCH\\`](https://github.com/cryspen/hax/tree/$BRANCH) has been created with the following template:\n\\`\\`\\`md\n$(echo \"$BLOG_POST\")\n\\`\\`\\`\n\nIt is a skeleton blog post with the list of PRs pushed in $(date -d \"$year-$month-01\" +\"%B %Y\") and a list of contributors.\n\nSuggested person to pick this draft PR: @$(handle_of $author)\n\n## Action Items\n - [ ] Write the blog article\n - [ 
] Release a new version of hax\n    - [ ] Follow \\`PUBLISHING.md\\`\n    - [ ] Create Github release\nMESSAGE\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "# Changelog\n\nAll notable changes to this project will be documented in this file.\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),\nand this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n\n## [Unreleased]\n\nChanges to the Rust Engine:\n - Rename `GenericConstraint::Type` to `TypeClass` and `::Projection` to `Equality` (#1996)\n - Remove `BinOp` resugaring (#1950)\n - Apply resugarings to linked items (pre/post conditions) (#1961)\n - Add new import_thir implemented in Rust and using `FullDef`, activated with `--experimental-full-def` (#1967)\n\nChanges to the engine:\n - Omit type aliases whose body has unresolvable trait bounds instead of crashing (#2014)\n - Report let-chains (`if let .. && let ..`) as a soft error instead of panicking (#2014)\n\nChanges to the frontend:\n - Fix support for ellipsis: add wildcard for every field (based on type info\n   rather than number of subpatterns) (#2001)\n - Fix panic on constants of type `&[&T]` (e.g. 
`&[&str]`) caused by a wrong type for the synthesized array length (#2014)\n\nChanges to cargo-hax:\n\nChanges to hax-lib:\n - Lean lib: use Rust core models (#1865)\n - Lean lib: specs for negation (#1891)\n - Lean lib: Add casting for all integer type pairs (#1837)\n - Lean lib: bump lean to v4.28.0-rc1 (#1900)\n - Lean lib: Extract more core models (#1919)\n - Lean lib: Separate symbolic and bit-blasting specs (#1933)\n - Lean lib: Communicate user-generated specs to mvcgen (#1937)\n - Lean lib: Rust primitives for prop (#1942)\n - Lean lib: For-loops for all unsigned integers (#1951)\n - Lean lib: Upgrade to Lean v4.29.0-rc1 (#1962)\n - Lean lib: Add support for Int128 and UInt128 while waiting for upstream in Lean (#1968)\n - Lean lib: Refactor `RustM` as `ExceptT Error Option` (#1994)\n - Lean lib: Add Repr instance for tuples (#2000)\n - Lean lib: Make the proof of `RustM.toBVRustM_bind` compatible with Lean 4.29.0 (#2005)\n\nChanges to the Lean backend:\n - Add `hax_zify` and `hax_construct_pure` tactics (#1888)\n - Add support for opaque `impl`s (#1887)\n - Fix support for associated constants in trait impls (#1906)\n - Gather definitions in namespaces, shortening names (#1901)\n - Add support for associated types with constraints and inheritance (#1909)\n - Fix bug with monadic wrapping of trait constants (#1929)\n - Add type annotation for cast_op (#1925)\n - Add attributes for pureEnsures/pureRequires (#1931)\n - Extract correct `PhantomData` structure (#1932)\n - Standardize generated Lean naming to lowercase namespaces (#1914)\n - Fix associated constants with default values (#1941)\n - New default proof for the Lean backend & proof method attribute (#1938)\n - Prettier proof_mode annotations (#1943)\n - Detect recursive functions and mark them partial_fixpoint (#1946)\n - Add more binops (#1963)\n - Add a resugaring for ellipsis patterns (#2002)\n\nMiscellaneous:\n - Fix Nix development shell: add an `fstar` devShell providing F* and the\n   required 
environment variables (#1972)\n\n## 0.3.6\n\nChanges to the Rust Engine:\n - Add a rejection phase for interleaving of expressions and statements not\n   supported by the Lean do-notation syntax (#1739).\n - Add a phase to handle the monadic encoding: it explicitly introduces two new\n   Hax primitives `pure` (to wrap values as monadic computations) and `lift` (to\n   lift monadic computations into values) (#1746)\n - Add a mechanism to lookup pre- and post-conditions (#1805)\n - Add a proper Rust backend (#1898)\n\nChanges to the frontend:\n - Update the pin of rustc (#1765)\n - Miscellaneous changes related to Charon (#1765)\n\nChange to cargo-hax:\n\nChanges to hax-lib:\n - Add Lean core models for options, results, default (#1747)\n - F* lib: improved while loops support, additions of some specific arithmetic operations and fixed `TryInto` for integer types (#1742)\n - Lean lib: use macros for int operations (#1795)\n - Lean lib: add new setup for `bv_decide` (#1828)\n - Lean lib: base specs on mathematical integers (#1829)\n - Lean lib: represent `usize` via a copy of `UInt64` (#1829)\n - Lean lib: Add support for while loops (#1857, #1863)\n - Core models: integers, arrays, iterators, full replacement of the F* proof-lib (#1898)\n\nChanges to the Lean backend:\n - Support for constants with arbitrary computation (#1738)\n - Add support for base-expressions of structs (#1736)\n - Use the explicit monadic phase to insert `pure` and `←` only on demand, and\n   not introduce extra `do` block (#1746)\n - Rename `Result` monad to `RustM` to avoid confusion with Rust `Result` type (#1768)\n - Add support for shift-left (#1785)\n - Add support for default methods of traits (#1777)\n - Add support for floats (#1784)\n - Add support for pattern matching on constant literals (#1789)\n - Add support for binding subpatterns in match constructs (#1790)\n - Add error when using patterns in function parameters (#1792)\n - Add grind annotations for various lemmas in the Lean 
library (#1802)\n - Add support for constant parameters to functions and traits (#1797)\n - Add support for associated types with equality constraints (#1806)\n - Make trait-level arguments explicit for all trait functions, adding them as\n   extra parameters (#1803)\n - Add generation of specs from requires/ensures-annotations (#1815)\n - Add support for nonliteral array sizes (#1826)\n - Add `hax_lib::lean::proof` attribute (#1831)\n - Add support for `#[hax_lib::opaque]` (#1846)\n - Turn rejection phase into a transformation phase (#1840)\n - Fix string escaping (#1834)\n\nMiscellaneous:\n- Reserve extraction folder for auto-generated files in Lean examples (#1754)\n- Add `lean_adc` example to the Lean examples section, demonstrating tactics introduced in PR(#1933)\n\n## 0.3.5\n\nChanges to the Rust Engine:\n - The module `names` now produces `ExplicitDefId`s instead of `DefId`s (#1648)\n - Add a resugaring `FunctionsToConstants` (#1559)\n - Drop the tuple nodes of the AST, add resugaring node for tuples (#1662)\n - Add support for enums and structs to the Lean backend (type definitions,\n   expressions, pattern-matching) (#1623)\n - Update name rendering infrastructure in the Lean backend (#1623, #1624)\n - Printers now emit proper diagnostics (PR #1669)\n - Global identifiers are now interned (#1689)\n - Global identifiers are encapsulated properly, and provide easy destructuring as tuple identifiers (#1693)\n - Add support for `trait` and `impl` in the Lean backend (#1679): trait definitions, trait bounds\n   on functions, impl definitions. The typeclass resolution in the generated code is left implicit\n   (relies on Lean). Limited support for associated types. 
No support for default implementations.\n - Refactor of the printing infrastructure: lowers the boilerplate, get rid of most lifetimes annotation, add proper contextual span support (#1735)\n\nChanges to the frontend:\n- Add an explicit `Self: Trait` clause to trait methods and consts (#1559)\n- Fix `ImplExpr::Builtin` that had some type errors (#1559)\n- Improve the translation of `Drop` information (#1559)\n- Add variance information to type parameters (#1559)\n- Cleanup the `State` infrastructure a little bit (#1559)\n- Add information about the metadata to use in unsize coercions (#1559)\n- Resolve `dyn Trait` predicates (#1559)\n- Many improvements to `FullDef` (#1559)\n- Add infrastructure to get a monomorphized `FullDef`; this is used in charon to monomorphize a crate graph (#1559)\n- Fix a regression affecting projection predicates (#1678)\n\nChange to cargo-hax:\n- Improve the caching of rustc when using `cargo hax` commands (#1719)\n- Add hidden commands and flags to explicitly manipulate `haxmeta` files (#1722)\n\nChanges to hax-lib:\n- New behavior for `hax_lib::include`: it now forces inclusion when in contradiction with `-i` flag.\n- hax-lib requires edition 2021 instead of 2024 (#1726)\n- Improved `VecDeque` model in F* proof lib (#1728)\n- Split the Lean library into several files, update to lean 4.23.0 (#1696)\n\nChanges to the Lean backend:\n- Improve support for functionalized loops (#1695)\n- Improve error messages, having each error (coming from the Lean backend) point to a specific github issue (#1717).\n\nMiscellaneous:\n - A lean tutorial has been added to the hax website (#1626)\n - Add end-to-end tests for the website (#1690)\n - Diagnostics reporting were improved (#1692)\n\n## 0.3.4\n\nThe release of `0.3.3` got troubles because of the new Rust Engine crates.\nThis release is mostly empty.\n\n## 0.3.3\n\nChanges to the frontend:\n - A field `visibility` was added to HIR items (#1643)\n\nRust Engine:\n - A Lean backend was introduced 
(#1593, #1591, #1590, #1607)\n - The Rust engine was improved (#1624, #1603, #1600, #1585)\n - The F* backend has been improved (#1587, #1585)\n\n## 0.3.2\n\nChanges to the frontend:\n - Provide the `FnOnce` shim for closures (#1477)\n - Update pin of rustc (#1482)\n - Add `Ty::FnDef` (splitting `FnPtr` and `FnDef`) (#1487)\n - Regroup generic and trait arguments in a struct `ItemRef` (#1514)\n - Support trait aliases in `FullDef` (#1494)\n - Separate `{Add,Sub,Mul}Unchecked` and `{Add,Sub,Mul}` (#1513)\n - Our pin to rustc was updated (#1534)\n\nChanges to the engine:\n - introduce an experimental Rust engine (#1501, #1502, #1504, #1505, #1518)\n\nChanges to `hax-lib`:\n - Support hax octal and binary literals in the `int!` macro\n - F*: additions of integer function implementations (#1520)\n - F*: change the definition of the `Clone` typeclass (#1552)\n\n\n## 0.3.1 (2025-05-26)\n\nChanges to `hax-lib`:\n- Bug fix with PartialOrd in f* lib: [#1473](https://github.com/cryspen/hax/pull/1473)\n- Move `proof-libs` into `hax-lib` to allow dependencies using crates.io\n\n## 0.3.0 (2025-05-16)\n\nChanges to `hax-lib`:\n- Support for SMT patterns in lemmas: [#1428](https://github.com/cryspen/hax/pull/1428)\n- While loop invariants and termination (`loop_decreases`): [#1375](https://github.com/cryspen/hax/pull/1375)\n- Removal of deprecated dependencies: [#1385](https://github.com/cryspen/hax/pull/1385) and [#1394](https://github.com/cryspen/hax/pull/1394)\n- Support for mathematical integers and logical propositions has been strengthened: [#1372](https://github.com/cryspen/hax/pull/1372), [#1352](https://github.com/cryspen/hax/pull/1352), [#1351](https://github.com/cryspen/hax/pull/1351)\n- `hax_lib::BACKEND::replace_body`: [#1321](https://github.com/cryspen/hax/pull/1321)\n- `hax_lib::decreases`: [#1342](https://github.com/cryspen/hax/pull/1342)\n\n## 0.2.0 (2024-01-20)\n - Initial release\n"
  },
  {
    "path": "CI.md",
    "content": "# Continuous Integration (CI)\n\n## Github Actions\n - [`add_to_project`](./.github/workflows/add_to_project.yml): each\n   time an issue or a PR is opened, this action adds it to the project\n   [https://github.com/orgs/hacspec/projects/1](https://github.com/orgs/hacspec/projects/1).\n - [`release`](./.github/workflows/release.yml): whenever a tagged\n   commit is pushed, this action builds the Linux binary, MacOS\n   binary and JS of `hax-engine`, and uploads them to a new GitHub\n   release.\n - [`format`](./.github/workflows/format.yml): ensures formatting for\n   Rust and OCaml files.\n - [`specs`](./.github/workflows/specs.yml): compiles the toolchain\n   (using Nix) and runs it on (for now) a selection of the examples\n   provided by [hacspec/specs](https://github.com/hacspec/specs). For\n   now this only tests the extraction of the specifications to Coq and\n   FStar, we do not run Coq or FStar on the extractions.\n - [`test_installs`](./.github/workflows/test_installs.yml): compiles\n   the toolchain on two versions of Ubuntu and two versions of MacOS\n   using `apt` or `homebrew` and the `setup.sh` script;\n - [`engine-js-build`](./.github/workflows/engine_js_build.yml): tests\n   the build of the JS version of the engine.\n \n## Merge queue\nAdditional actions are triggered on pull requests in the merge queue. They are\nfound in [`test_installs`](./.github/workflows/test_installs.yml).\n"
  },
  {
    "path": "CODEOWNERS",
    "content": "* @cryspen/hax\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Engineering & Contributing Guidelines\n\nThe following is a set of guidelines for contributing to this repository.\nThese are mostly guidelines, not rules.\nUse your best judgement, and feel free to propose changes to this document in a pull request.\nThe processes described here are not to pester you but to increase and maintain code quality.\n\n## Working with this repository\n\nWe use issues to organise and prioritise work items.\n\n**Assignee meaning in issues:** The assignee is the person responsible for following up on the issue (making sure it eventually gets addressed). It is usually (but not necessarily) the one working on it.\n\nAfter picking up an issue, create a branch.\nThere can be any number of branches and pull requests for one issue.\nBut make sure that each issue is clearly linked to the pull request.\nThere must be one pull request that closes the issue.\nIf there are multiple PRs for an issue, make sure this is clear in the pull request.\n\n## Pull Requests\n\nWe use the GitHub-based PR workflow.\nWhen starting to work on an issue, create a branch and a corresponding pull request that fixes the issue.\nThe changeset in a pull request must not be larger than 1000 lines (with some exceptions for test snapshots or generated code).\nIf an issue needs more work than that, split it into multiple pull requests.\n\nAfter submitting the pull request, verify that all [status checks](https://help.github.com/articles/about-status-checks/) are passing before asking for review.\n\nWhile the prerequisites above must be satisfied prior to having your pull request reviewed, the reviewer(s) may ask you to complete additional design work, tests, or other changes before your pull request can be ultimately accepted.\n\n### PR & Commit Guidelines\n\n- Split out mass-changes or mechanical changes into a separate PR from the substantive changes.\n- Separate commits into conceptually-separate pieces for review purposes (even if you then later collapse them 
into a single changeset to merge), if technically possible.\n- Address all comments from previous reviews (either by fixing as requested, or explaining why you haven't) before requesting another review.\n- If your request only relates to part of the changes, say so clearly.\n\n### Force pushing\n\nIt is fine to force-push either (1) before asking for a review or (2) after PR approval, just before merging. Otherwise, in between two reviews, please do not force-push.\n\n### Regressions\n\nWhen a PR introduces a regression, a fix should be submitted in a\nwindow of 2 days, otherwise the PR will be reverted.\n\n## Rules for the OCaml code\n - Never use the OCaml standard library, always use [`base`](https://v3.ocaml.org/p/base/latest/doc/index.html), [`core`](https://v3.ocaml.org/p/core/latest/doc/index.html) or [`stdlib`](https://v3.ocaml.org/p/stdlib/latest/doc/index.html) instead.\n - Avoid non-total functions (e.g. all the `_exn` functions in `base`).\n - Try to avoid exceptions, if possible.\n - Never use `==`, which is the physical equality, and almost never what you want.\n\n### Changelog\nOur changelog format is based on https://keepachangelog.com/.\nPlease add an entry in a subsection (`Added`, `Changed`, `Deprecated`, `Removed`, `Fixed` -- see https://keepachangelog.com/en/1.0.0/#how) for each notable change.\n\nPlease prefix with `engine:`, `frontend:` or similar.\n\n#### Should I add an entry to `CHANGELOG.md`?\n\n**Include in CHANGELOG.md:**\n - New features and enhancements\n - Bug fixes\n - Breaking changes\n - Security patches\n - Major documentation updates\n - Dependency updates that affect users\n\n**Do not include:**\n - Code refactoring with no user impact\n - Minor doc fixes (typos, grammar)\n - CI/CD or tooling changes with no external effect\n - Linting, formatting, or style-only commits\n - Reverts or fixup commits\n - Dependency bumps with no behavioral impact\n\n**Rule of thumb:**\nIf a user (developer or customer) wouldn’t notice or need to 
know, leave it out.\n\n## Styleguides\n\n### Optional Title Prefixes for Issues\nTo help quickly convey the focus of an issue, we sometimes add a short prefix in square brackets at the start of the title: `[prefix] Issue short title`. This is optional; you can just use it if the issue has a clear direction or goal.\n\nKeep it short and intuitive, think of it as a lightweight hint, not a strict taxonomy or replacements for labels or milestones.\n\nUse it when it helps. Leave it out when it doesn’t.\n\n### Git Commit Messages\n\n- Use the present tense\n- Use the imperative mood\n- Limit the first line to 80 characters\n- Don't end the first line of the commit message with a period\n- Reference issues and pull requests liberally after the first line\n- If the patch is of nontrivial size, point to the important comments in the non-first lines of the commit message.\n\n### Styleguide\n\nUse `rustfmt` for every Rust code and `ocamlformat` for every OCaml\ncode. From the command line, run `cargo fmt` in the root of hax and\n`dune fmt` in `engine`.\n\n### Documentation Styleguide\n\nUse [rustdoc](https://doc.rust-lang.org/rustdoc/index.html) comments\non Rust files and functions. Use\n[`odoc`](https://ocaml.github.io/odoc/) comments on OCaml files. It is\nmandatory on public functions and encouraged on internal functions.\n\n\n## Reviews\n\nAs a reviewer always keep in mind the following principles\n\n- Reviewing code is more valuable than writing code as it results in higher overall project activity. If you find you can't write code any more due to prioritizing reviews over coding, let's talk.\n- You should respond to a review request within one working day of getting it, either with a review, a deadline by which you promise to do the review, or a polite refusal. 
If you think a patch is lower priority than your other work, communicate that.\n\n### Review Guidelines\n\n- Check that the issue is assigned and linked.\n- Commit title and message make sense and say what is being changed.\n- Check that the PR applies cleanly on the target branch.\n- Check new files for license and administrative issues.\n- Check out code changes\n  - Run automated tests\n  - Manually verify changes if possible\n- Code review\n  - Does the change address the issue at hand?\n  - Is the code well documented?\n  - Do you understand the code changes?\n    - If not, add a comment. The PR can't be accepted in this stage.\n  - Is the public API changed?\n    - Are the changes well documented for consumers?\n    - Do the changes break backwards compatibility?\n    - Is the new API sensible/needed?\n  - Is the code maintainable after these changes?\n  - Are there any security issues with these changes?\n  - Are all code changes tested?\n  - Do the changes affect performance?\n  - Look at the interdiff for second and subsequent reviews.\n- Ask if more information is needed to understand and judge the changes.\n\n## AI guidelines\n\nUsing AI tools to generate code for Hax is accepted under the following conditions:\n- The PR should clearly state that AI has been used and say for which parts of the code, tests, or documentation.\n- The author should also explain the methodology: how AI has been used and how the result has been tested.\n- Any AI generated content should be carefully reviewed by the author of the PR (before the reviewer).\n"
  },
  {
    "path": "Cargo.toml",
    "content": "[workspace]\nmembers = [\n     \"frontend/exporter\",\n     \"frontend/exporter/options\",\n     \"cli/subcommands\",\n     \"cli/driver\",\n     \"test-harness\",\n     \"hax-lib\",\n     \"hax-lib/macros\",\n     \"hax-lib/macros/types\",\n     \"hax-lib-protocol\",\n     \"hax-lib-protocol-macros\",\n     \"hax-bounded-integers\",\n     \"engine/names\",\n     \"engine/names/extract\",\n     \"hax-types\",\n     \"rust-engine\",\n     \"rust-engine/macros\",\n]\nexclude = [\"tests\", \"rustc-coverage-tests\", \"rust-engine/tests\", \"hax-lib/core-models\"]\ndefault-members = [\n     \"frontend/exporter\",\n     \"frontend/exporter/options\",\n     \"cli/subcommands\",\n     \"cli/driver\",\n     \"test-harness\",\n     \"hax-lib\",\n     \"hax-lib/macros\",\n     \"hax-lib/macros/types\",\n     \"hax-lib-protocol\",\n     \"hax-lib-protocol-macros\",\n     \"engine/names\",\n]\nresolver = \"2\"\n\n[workspace.package]\nversion = \"0.3.6\"\nauthors = [\"hax Authors\"]\nlicense = \"Apache-2.0\"\nhomepage = \"https://github.com/hacspec/hax\"\nedition = \"2024\"\nrepository = \"https://github.com/hacspec/hax\"\nreadme = \"README.md\"\n\n[workspace.dependencies]\nitertools = \"0.11.0\"\nschemars = \"0.8\"\nwhich = \"4.4\"\nserde = { version = \"1.0\", features = [\"derive\", \"rc\"] }\nserde_json = \"1.0\"\nclap = { version = \"4.0\", features = [\"derive\"] }\nsyn = { version = \"1.0.107\", features = [\n     \"derive\",\n     \"printing\",\n     \"extra-traits\",\n     \"parsing\",\n     \"full\",\n] }\ntracing = { version = \"0.1\", features = [\n     \"max_level_trace\",\n     \"release_max_level_trace\",\n] }\ntracing-subscriber = { version = \"0.3\", features = [\n     \"env-filter\",\n     \"std\",\n     \"fmt\",\n] }\ntracing-tree = \"^0.2\"\nquote = \"1.0.32\"\nproc-macro2 = \"1.0.66\"\ncargo_metadata = \"0.15\"\ncolored = \"2\"\nannotate-snippets = \"0.11\"\n\n# Crates in this repository\nhax-frontend-exporter = { path = 
\"frontend/exporter\", version = \"=0.3.6\", default-features = false }\nhax-adt-into = { path = \"frontend/exporter/adt-into\", version = \"=0.3.6\" }\nhax-frontend-exporter-options = { path = \"frontend/exporter/options\", version = \"=0.3.6\" }\nhax-lib-macros = { path = \"hax-lib/macros\", version = \"=0.3.6\" }\nhax-lib-macros-types = { path = \"hax-lib/macros/types\", version = \"=0.3.6\" }\nhax-lib = { path = \"hax-lib\", version = \"=0.3.6\" }\nhax-engine-names = { path = \"engine/names\", version = \"=0.3.6\" }\nhax-types = { path = \"hax-types\", version = \"=0.3.6\" }\nhax-rust-engine = { path = \"rust-engine\", version = \"=0.3.6\" }\nhax-rust-engine-macros = { path = \"rust-engine/macros\", version = \"=0.3.6\" }\n\n[workspace.metadata.release]\nowners = [\"github:cryspen:tools\"]\n"
  },
  {
    "path": "LICENSE",
    "content": "\n                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. 
However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   Copyright 2023 Hax Authors\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License."
  },
  {
    "path": "PUBLISHING.md",
    "content": "# Publishing\n\n## OCaml\n\nThere is only the package `hax-engine`, that includes a binary and a\nnumber of libraries.\n\nWe have no particular release procedure for the engine: we don't plan\non publishing it to opam.\n\n## Rust\n\nThis repository is divided into several crates, some to be published,\nsome not. All crates should start with the `hax-` prefix, but\n`cargo-hax` which is the entrypoint to the cargo `hax` subcommand.\n\nHere is the list of the crates in this repository (excluding `tests`\nand `examples`):\n\n- `hax-test-harness` **(doesn't need to be published)**\n\n### cargo-hax\n\n1. `hax-frontend-exporter-options` (`frontend/exporter/options `)\n2. `hax-adt-into` (`frontend/exporter/adt-into`)\n3. `hax-frontend-exporter` (`frontend/exporter`)\n4. `hax-types` (`hax-types`)\n5. `hax-subcommands` (binaries) (`cli/subcommands`)\n   - `cargo-hax`\n   - `hax-export-json-schemas`\n   - `hax-pretty-print-diagnostics`\n\n- `hax-driver`\n\n### hax-lib\n\nWe publish the following crates that are helper libraries to be used\nfor hax code:\n\n1. `hax-lib-macros-types`\n2. `hax-lib-macros`\n3. `hax-lib`\n\n### Supporting crates for the engine\nThe crate listed below are used only by the OCaml build of the\nengine. Those should not be published on `crate.io`.\n\n1. `cargo-hax-engine-names`\n2. `cargo-hax-engine-names-extract`\n\n## Procedure\n 1. Move the contents of `CHANGELOG.md` under the `[Unreleased]` section to a new section named following the target version. Commit this change.\n 2. Bump the version number with `cargo release LEVEL --workspace --no-publish --no-tag --execute` (`cargo release --help` for more details on `LEVEL`, `cargo install cargo-release` if you don't already have this package). This will bump the version of every Rust crate, but also the version in `engine/dune-project`. This will also regenerate `engine/hax-engine.opam`. Note this will *not* publish the crate.\n 3. PR the change\n 4. 
when the PR is merged in main, checkout `main` and run `cargo release --workspace --execute`\n\nNote: for now, we are not publishing to Opam. Instead, let's just advertise the following for installation:\n```bash\nopam pin hax-engine https://github.com/hacspec/hax.git#the-release-tag\nopam install hax-engine\n```\n\n## Notes\n`cargo release` reads the `Cargo.toml` of each crates of the workspace.\nSome creates are excluded from releasing: in their `Cargo.toml` manifest, they have `package.metadata.release.release` set to `false`.\n\nAlso, `cli/subcommands/Cargo.toml` specifies pre-release replacements for the engine: the version of the engine is bumped automatically by `cargo release`.\n"
  },
  {
    "path": "README.md",
    "content": "<p align=\"center\">\n  <img src=\"logo.svg\"/>\n</p>\n\n<p align=\"center\">\n  <a href=\"https://hacspec.zulipchat.com/\"><img src=\"https://img.shields.io/badge/Zulip-50ADFF?logo=Zulip&logoColor=white\" alt=\"Zulip\"></a>\n  <a href=\"https://hax-playground.cryspen.com\"><img src=\"https://img.shields.io/badge/try-Playground-1f6feb\" alt=\"Playground\"></a>\n  <a href=\"https://hax.cryspen.com\"><img src=\"https://img.shields.io/badge/docs-Website-brightgreen\" alt=\"Website\"></a>\n  <a href=\"https://hax.cryspen.com/blog\"><img src=\"https://img.shields.io/badge/Blog-9b59b6\" alt=\"Blog\"></a>\n  <a href=\"LICENSE\"><img src=\"https://img.shields.io/badge/license-Apache--2.0-blue.svg\" alt=\"License: Apache-2.0\"></a>\n</p>\n\n# Hax\n\nhax is a tool for high assurance translations of a large subset of\nRust into formal languages such as [F\\*](https://www.fstar-lang.org/) or [Rocq](https://rocq-prover.org/).\n\n<p align=\"center\">\n    <a href=\"https://hax-playground.cryspen.com/#fstar+tc/latest-main/gist=5252f86237adbca7fdeb7a8fea0b1648\">\n    Try out hax online now!\n    </a>\n</p>\n\n### Supported Backends\n\n<table align=\"center\">\n  <tr>\n    <td align=\"center\" colspan=\"3\">\n      General purpose proof assistants\n    </td>\n    <td align=\"center\" colspan=\"2\">\n      Cryptography & protocols\n    </td>\n  </tr>\n  <tr>\n    <td align=\"center\">\n      <a href=\"https://www.fstar-lang.org/\">\n        F*\n        <!-- <picture>\n          <source srcset=\".github/assets/fstar-dark.png\" media=\"(prefers-color-scheme: dark)\">\n          <source srcset=\".github/assets/fstar-light.png\" media=\"(prefers-color-scheme: light)\">\n          <img src=\".github/assets/fstar-light.png\" height=\"40\" alt=\"F*\">\n        </picture> -->\n      </a>\n    </td>\n    <td align=\"center\">\n      <a href=\"https://rocq-prover.org/\">\n        <picture>\n          <source srcset=\".github/assets/rocq-dark.svg\" 
media=\"(prefers-color-scheme: dark)\">\n          <source srcset=\".github/assets/rocq-light.svg\" media=\"(prefers-color-scheme: light)\">\n          <img src=\".github/assets/rocq-light.svg\" height=\"18\" alt=\"Rocq\">\n        </picture>\n      </a>\n    </td>\n    <td align=\"center\" style=\"vertical-align: center; \">\n      <a href=\"https://lean-lang.org/\">\n        <picture>\n          <source srcset=\".github/assets/lean-dark.svg\" media=\"(prefers-color-scheme: dark)\">\n          <source srcset=\".github/assets/lean-light.svg\" media=\"(prefers-color-scheme: light)\">\n          <img src=\".github/assets/lean-light.svg\" height=\"18\" alt=\"Lean\">\n        </picture>\n      </a>\n    </td>\n    <td align=\"center\">\n      <a href=\"https://github.com/SSProve/ssprove\">\n        <picture>\n          <source srcset=\".github/assets/ssprove-dark.svg\" media=\"(prefers-color-scheme: dark)\">\n          <source srcset=\".github/assets/ssprove-light.svg\" media=\"(prefers-color-scheme: light)\">\n          <img src=\".github/assets/ssprove-light.svg\" height=\"18\" alt=\"SSProve\">\n        </picture>\n      </a>\n    </td>\n    <td align=\"center\">\n      <a href=\"https://proverif.inria.fr/\">\n        <b>ProVerif</b>\n      </a>\n    </td>\n  </tr>\n  <tr>\n    <!-- 🟢🟡🟠🔴 -->\n    <td align=\"center\"><sub>🟢 stable</sub></td>\n    <td align=\"center\"><sub>🟡 partial</sub></td>\n    <td align=\"center\"><sub>🚀 active dev.</sub></td>\n    <td align=\"center\"><sub>🟡 partial</sub></td>\n    <td align=\"center\"><sub>🟠 PoC</sub></td>\n  </tr>\n</table>\n\n## Learn more\n\nHere are some resources for learning more about hax:\n\n - [Manual](https://hax.cryspen.com/manual/index.html) (work in progress)\n    + Quick start: [F*](https://hax.cryspen.com/manual/fstar/quick_start/), [Lean](https://hax.cryspen.com/manual/lean/quick_start/)\n    + Tutorial: [F*](https://hax.cryspen.com/manual/fstar/tutorial/), [Lean](https://hax.cryspen.com/manual/lean/tutorial/)\n 
- [Examples](./examples/): the [examples directory](./examples/) contains\n   a set of examples that show what hax can do for you.\n - Other [specifications](https://github.com/hacspec/specs) of cryptographic protocols.\n\nQuestions? Join us on [Zulip](https://hacspec.zulipchat.com/) or open a [GitHub Discussion](https://github.com/cryspen/hax/discussions). For bugs, file an [Issue](https://github.com/cryspen/hax/issues).\n\n## Usage\nHax is a cargo subcommand. \nThe command `cargo hax` accepts the following subcommands:\n * **`into`** (`cargo hax into BACKEND`): translate a Rust crate to the backend `BACKEND` (e.g. `fstar`, `coq`, `lean`).\n * **`json`** (`cargo hax json`): extract the typed AST of your crate as a JSON file.\n \nNote:\n * `BACKEND` can be `fstar`, `lean`, `coq`, `easycrypt` or `pro-verif`. `cargo hax into --help`\n   gives the full list of supported backends.\n * The subcommands `cargo hax`, `cargo hax into` and `cargo hax into\n   <BACKEND>` takes options. For instance, you can `cargo hax into\n   fstar --z3rlimit 100`. Use `--help` on those subcommands to list\n   all options.\n\n## Installation\n<details>\n  <summary><b>Manual installation</b></summary>\n\n1. Make sure to have the following installed on your system:\n\n- [`opam`](https://opam.ocaml.org/) (`opam switch create 5.1.1`)\n- [`rustup`](https://rustup.rs/)\n- [`nodejs`](https://nodejs.org/)\n- [`jq`](https://jqlang.github.io/jq/)\n\n2. Clone this repo: `git clone git@github.com:cryspen/hax.git && cd hax`\n3. Run the [setup.sh](./setup.sh) script: `./setup.sh`.\n4. 
Run `cargo-hax --help`\n\n</details>\n\n<details>\n  <summary><b>Nix</b></summary>\n\n This should work on [Linux](https://nixos.org/download.html#nix-install-linux), [MacOS](https://nixos.org/download.html#nix-install-macos) and [Windows](https://nixos.org/download.html#nix-install-windows).\n\n<details>\n  <summary><b>Prerequisites:</b> <a href=\"https://nixos.org/\">Nix package\nmanager</a> <i>(with <a href=\"https://nixos.wiki/wiki/Flakes\">flakes</a> enabled)</i></summary>\n\n  - Either using the [Determinate Nix Installer](https://github.com/DeterminateSystems/nix-installer), with the following bash one-liner:\n    ```bash\n    curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | sh -s -- install\n    ```\n  - or following [those steps](https://github.com/mschwaig/howto-install-nix-with-flake-support).\n\n</details>\n\n+ **Run hax on a crate directly** to get F\\*/Coq/Lean/... (assuming you are in the crate's folder):\n   - `nix run github:hacspec/hax -- into fstar` extracts F*.\n\n+ **Install hax**:  `nix profile install github:hacspec/hax`, then run `cargo hax --help` anywhere\n+ **Note**: in any of the Nix commands above, replace `github:hacspec/hax` by `./dir` to compile a local checkout of hax that lives in `./some-dir`\n+ **Setup binary cache**: [using Cachix](https://app.cachix.org/cache/hax), just `cachix use hax`\n\n</details>\n\n<details>\n  <summary><b>Using Docker</b></summary>\n\n1. Clone this repo: `git clone git@github.com:hacspec/hax.git && cd hax`\n3. Build the docker image: `docker build -f .docker/Dockerfile . -t hax`\n4. Get a shell: `docker run -it --rm -v /some/dir/with/a/crate:/work hax bash`\n5. 
You can now run `cargo-hax --help` (notice here we use `cargo-hax` instead of `cargo hax`)\n\nNote: Please make sure that `$HOME/.cargo/bin` is in your `$PATH`, as\nthat is where `setup.sh` will install hax.\n\n</details>\n\n## Supported Subset of the Rust Language\n\nHax intends to support full Rust, with the one exception, promoting a functional style: mutable references (aka `&mut T`) on return types or when aliasing (see https://github.com/hacspec/hax/issues/420) are forbidden.\n\nEach unsupported Rust feature is documented as an issue labeled [`unsupported-rust`](https://github.com/hacspec/hax/issues?q=is%3Aissue+is%3Aopen+label%3Aunsupported-rust). When the issue is labeled [`wontfix-v1`](https://github.com/hacspec/hax/issues?q=is%3Aissue+is%3Aopen+label%3Aunsupported-rust+label%3Awontfix%2Cwontfix-v1), that means we don't plan on supporting that feature soon.\n\nQuicklinks:\n - [🔨 Rejected rust we want to support](https://github.com/hacspec/hax/issues?q=is%3Aissue+is%3Aopen+label%3Aunsupported-rust+-label%3Awontfix%2Cwontfix-v1);\n - [💭 Rejected rust we don't plan to support in v1](https://github.com/hacspec/hax/issues?q=is%3Aissue+is%3Aopen+label%3Aunsupported-rust+label%3Awontfix%2Cwontfix-v1).\n\n## Hacking on Hax\nThe documentation of the internal crate of hax and its engine can be\nfound [here for the engine](https://hax.cryspen.com/engine/index.html)\nand [here for the frontend](https://hax.cryspen.com/frontend/index.html).\n\n### Edit the sources (Nix)\n\nJust clone & `cd` into the repo, then run `nix develop .`.\nYou can also just use [direnv](https://github.com/nix-community/nix-direnv), with [editor integration](https://github.com/direnv/direnv/wiki#editor-integration).\n\n### Structure of this repository\n\n- `rust-frontend/`: Rust library that hooks in the rust compiler and\n  extract its internal typed abstract syntax tree\n  [**THIR**](https://rustc-dev-guide.rust-lang.org/thir.html) as JSON.\n- `engine/`: the simplification and elaboration 
engine that translates programs\n  from the Rust language to various backends (see `engine/backends/`). Written\n  in OCaml.\n- `rust-engine/`: an on-going rewrite of our engine from OCaml to Rust.\n- `cli/`: the `hax` subcommand for Cargo.\n\n### Compiling, formatting, and more\nWe use the [`just` command runner](https://just.systems/). If you use\nNix, the dev shell provides it automatically, if you don't use Nix,\nplease [install `just`](https://just.systems/man/en/packages.html) on\nyour system.\n\nAnywhere within the repository, you can build and install in PATH (1)\nthe Rust parts with `just rust`, (2) the OCaml parts with `just ocaml`\nor (3) both with `just build`. More commands (e.g. `just fmt` to\nformat) are available, please run `just` or `just --list` to get all\nthe commands.\n\n## Publications & Other material\n\n* [📕 Tech report](https://hal.inria.fr/hal-03176482)\n* [📕 HACSpec: A gateway to high-assurance cryptography](https://github.com/hacspec/hacspec/blob/master/rwc2023-abstract.pdf)\n* [📕 Original hacspec paper](https://www.franziskuskiefer.de/publications/hacspec-ssr18-paper.pdf)\n\n### Secondary literature, using hacspec:\n* [📕 Last yard](https://eprint.iacr.org/2023/185)\n* [📕 A Verified Pipeline from a Specification Language to Optimized, Safe Rust](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl22-final61.pdf) at [CoqPL'22](https://popl22.sigplan.org/details/CoqPL-2022-papers/5/A-Verified-Pipeline-from-a-Specification-Language-to-Optimized-Safe-Rust)\n* [📕 Hax - Enabling High Assurance Cryptographic Software](https://github.com/hacspec/hacspec.github.io/blob/master/RustVerify24.pdf) at [RustVerify24](https://sites.google.com/view/rustverify2024)\n* [📕 A formal security analysis of Blockchain voting](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl24-paper8-2.pdf) at [CoqPL'24](https://popl24.sigplan.org/details/CoqPL-2024-papers/8/A-formal-security-analysis-of-Blockchain-voting)\n* [📕 Specifying Smart Contract 
with Hax and ConCert](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl24-paper9-13.pdf) at [CoqPL'24](https://popl24.sigplan.org/details/CoqPL-2024-papers/9/Specifying-Smart-Contract-with-Hax-and-ConCert)\n\n## Contributing\n\nBefore starting any work please join the [Zulip chat][chat-link], start a [discussion on Github](https://github.com/hacspec/hax/discussions), or file an [issue](https://github.com/hacspec/hax/issues) to discuss your contribution.\n\n\n[chat-link]: https://hacspec.zulipchat.com\n\n## Acknowledgements\n\n[Zulip] graciously provides the hacspec & hax community with a \"Zulip Cloud Standard\" tier.\n\n\n[Zulip]: https://zulip.com/\n"
  },
  {
    "path": "cli/default.nix",
    "content": "{ craneLib, stdenv, makeWrapper, lib, rustc, rustc-docs, gcc, hax-engine\n, doCheck ? true, libz, libiconv }:\nlet\n  pname = \"hax\";\n  is-webapp-static-asset = path:\n    builtins.match \".*(script[.]js|index[.]html)\" path != null;\n  buildInputs = lib.optionals stdenv.isDarwin [ libiconv libz.dev ];\n  binaries = [ hax hax-engine.bin rustc gcc hax_rust_engine ] ++ buildInputs;\n  commonArgs = {\n    version = \"0.0.1\";\n    src = lib.cleanSourceWith {\n      src = craneLib.path ./..;\n      filter = path: type:\n        (builtins.isNull\n        (builtins.match \".*/(tests|examples|docs|proof-libs)/.*\" path)\n        && builtins.isNull (builtins.match \".*[.](md|svg)\" path)\n        && (craneLib.filterCargoSources path type\n          || is-webapp-static-asset path))\n        || !(builtins.isNull (builtins.match \".*/renamings\" path));\n    };\n    inherit buildInputs doCheck;\n    doNotRemoveReferencesToRustToolchain = true;\n  } // (if doCheck then {\n    # [cargo test] builds independent workspaces. 
Each time another\n    # workspace is added, it's corresponding lockfile should be added\n    # in the [cargoLockList] list below.\n    cargoVendorDir = craneLib.vendorMultipleCargoDeps {\n      cargoLockList = [ ../Cargo.lock ../tests/Cargo.lock ];\n    };\n  } else\n    { });\n  # hax dependencies (without hax itself)\n  cargoArtifacts = craneLib.buildDepsOnly (commonArgs // { pname = pname; });\n  # hax with cargo artifact for incremental compilation\n  hax_with_artifacts = craneLib.buildPackage (commonArgs // {\n    inherit cargoArtifacts pname;\n    doInstallCargoArtifacts = true;\n  });\n  # hax without cargo artifacts: only binaries\n  hax = stdenv.mkDerivation {\n    name = hax_with_artifacts.name;\n    unpackPhase = \"true\";\n    buildPhase = \"true\";\n    installPhase = ''\n      mkdir -p $out\n      cp -r ${hax_with_artifacts}/bin $out/bin\n    '';\n  };\n  hax_rust_engine = craneLib.buildPackage (commonArgs // {\n    inherit cargoArtifacts;\n    buildInputs = buildInputs ++ [ makeWrapper ];\n    pname = \"hax-rust-engine\";\n    cargoExtraArgs = \"--manifest-path rust-engine/Cargo.toml --locked\";\n  });\n  docs = craneLib.cargoDoc (commonArgs // {\n    # preBuildPhases = [ \"addRustcDocs\" ];\n    cargoDocExtraArgs = \"--document-private-items\";\n    # addRustcDocs = ''\n    #   mkdir -p target/doc\n    #   cp --no-preserve=mode -rf ${rustc-docs}/share/doc/rust/html/rustc/* target/doc/\n    # '';\n    inherit cargoArtifacts pname;\n  });\n  tests = craneLib.buildPackage (commonArgs // {\n    inherit cargoArtifacts;\n    pname = \"hax-tests\";\n    doCheck = true;\n    CI = \"true\";\n    cargoBuildCommand = \"true\";\n    checkPhaseCargoCommand = ''\n      SNAPS_DIR=test-harness/src/snapshots && rmdir \"$SNAPS_DIR\"\n      TESTS_DIR=tests                      && rmdir \"$TESTS_DIR\"\n\n      ln -s ${../test-harness/src/snapshots}        \"$SNAPS_DIR\"\n      cp -r --no-preserve=mode   ${../tests}        \"$TESTS_DIR\"\n\n      cargo test --test 
toolchain --profile release\n    '';\n    buildInputs = binaries;\n    CARGO_TESTS_ASSUME_BUILT = \"yes\";\n  });\nin stdenv.mkDerivation {\n  name = hax.name;\n  buildInputs = [ makeWrapper ];\n  phases = [ \"installPhase\" ];\n  installPhase = ''\n    mkdir -p $out/bin\n    makeWrapper ${hax}/bin/cargo-hax $out/bin/cargo-hax \\\n      --prefix PATH : ${lib.makeBinPath binaries} \\\n      ${\n        lib.optionalString stdenv.isDarwin ''\n          --prefix RUSTFLAGS : \"-C link-arg=-L${libiconv}/lib\" \\\n          --suffix DYLD_LIBRARY_PATH : ${lib.makeLibraryPath [ libz rustc ]}\n        ''\n      }\n  '';\n  meta.mainProgram = \"cargo-hax\";\n  passthru = {\n    unwrapped = hax;\n    hax-engine-names-extract = craneLib.buildPackage (commonArgs // {\n      pname = \"hax_engine_names_extract\";\n      cargoLock = ../Cargo.lock;\n      cargoToml = ../engine/names/extract/Cargo.toml;\n      cargoArtifacts = hax_with_artifacts;\n      nativeBuildInputs = [ hax_with_artifacts ];\n      postUnpack = ''\n        cd $sourceRoot/engine/names/extract\n        sourceRoot=\".\"\n      '';\n    });\n    inherit docs tests;\n  };\n}\n"
  },
  {
    "path": "cli/driver/Cargo.toml",
    "content": "[package]\nname = \"hax-driver\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\ndescription = \"The custom rustc driver used by hax.\"\n\n[package.metadata.rust-analyzer]\nrustc_private = true\n\n[[bin]]\npath = \"src/driver.rs\"\nname = \"driver-hax-frontend-exporter\"\n\n[dependencies]\nserde.workspace = true\nserde_json.workspace = true\nclap.workspace = true\ncolored.workspace = true\nhax-frontend-exporter = {workspace = true, features = [\"rustc\"]}\nhax-types = {workspace = true, features = [\"rustc\"]}\nhax-frontend-exporter-options.workspace = true\nhax-lib-macros-types.workspace = true\nitertools.workspace = true\ntracing.workspace = true\ntracing-subscriber.workspace = true\ntracing-tree.workspace = true\n"
  },
  {
    "path": "cli/driver/src/callbacks_wrapper.rs",
    "content": "use hax_types::cli_options::{ENV_VAR_OPTIONS_FRONTEND, ExporterOptions};\n\nuse rustc_ast::Crate;\nuse rustc_driver::{Callbacks, Compilation};\nuse rustc_interface::interface;\nuse rustc_middle::ty::TyCtxt;\nuse rustc_span::symbol::Symbol;\n\n/// Wraps a [Callbacks] structure, and injects some cache-related\n/// configuration in the `config` phase of rustc\npub struct CallbacksWrapper<'a> {\n    pub sub: &'a mut (dyn Callbacks + Send + 'a),\n    pub options: ExporterOptions,\n}\nimpl<'a> Callbacks for CallbacksWrapper<'a> {\n    fn config(&mut self, config: &mut interface::Config) {\n        let options = self.options.clone();\n        config.psess_created = Some(Box::new(move |parse_sess| {\n            // Silence the \"unexpected cfg\" lints.\n            parse_sess.check_config.exhaustive_names = false;\n            let depinfo = parse_sess.env_depinfo.get_mut();\n            depinfo.insert((\n                Symbol::intern(ENV_VAR_OPTIONS_FRONTEND),\n                Some(Symbol::intern(&serde_json::to_string(&options).unwrap())),\n            ));\n            depinfo.insert((\n                Symbol::intern(\"HAX_CARGO_CACHE_KEY\"),\n                std::env::var(\"HAX_CARGO_CACHE_KEY\")\n                    .ok()\n                    .as_deref()\n                    .map(Symbol::intern),\n            ));\n        }));\n        self.sub.config(config)\n    }\n    fn after_crate_root_parsing<'tcx>(\n        &mut self,\n        compiler: &interface::Compiler,\n        krate: &mut Crate,\n    ) -> Compilation {\n        self.sub.after_crate_root_parsing(compiler, krate)\n    }\n    fn after_expansion<'tcx>(\n        &mut self,\n        compiler: &interface::Compiler,\n        tcx: TyCtxt<'tcx>,\n    ) -> Compilation {\n        self.sub.after_expansion(compiler, tcx)\n    }\n    fn after_analysis<'tcx>(\n        &mut self,\n        compiler: &interface::Compiler,\n        tcx: TyCtxt<'tcx>,\n    ) -> Compilation {\n        
self.sub.after_analysis(compiler, tcx)\n    }\n}\n"
  },
  {
    "path": "cli/driver/src/driver.rs",
    "content": "#![feature(rustc_private)]\n#![feature(box_patterns)]\n#![feature(trait_alias)]\n#![allow(unused_imports)]\n#![allow(unused_variables)]\n#![allow(unreachable_code)]\n#![allow(dead_code)]\n#![feature(macro_metavar_expr)]\n#![feature(internal_output_capture)]\n\nextern crate rustc_ast;\nextern crate rustc_borrowck;\nextern crate rustc_data_structures;\nextern crate rustc_driver;\nextern crate rustc_errors;\nextern crate rustc_feature;\nextern crate rustc_hashes;\nextern crate rustc_hir;\nextern crate rustc_hir_analysis;\nextern crate rustc_hir_id;\nextern crate rustc_index;\nextern crate rustc_interface;\nextern crate rustc_middle;\nextern crate rustc_mir_build;\nextern crate rustc_session;\nextern crate rustc_span;\nextern crate rustc_target;\nextern crate rustc_type_ir;\n\nmod exporter;\n\nuse std::collections::HashSet;\n\nuse exporter::ExtractionCallbacks;\n\nmod callbacks_wrapper;\nmod features;\nuse callbacks_wrapper::*;\nuse features::*;\n\nuse hax_types::cli_options::{ENV_VAR_OPTIONS_FRONTEND, ExporterOptions};\n\nuse rustc_driver::{Callbacks, Compilation};\nuse rustc_interface::interface;\nuse rustc_span::symbol::Symbol;\n\nfn rustc_sysroot() -> String {\n    std::process::Command::new(\"rustc\")\n        .args([\"--print\", \"sysroot\"])\n        .output()\n        .ok()\n        .and_then(|out| String::from_utf8(out.stdout).ok())\n        .map(|s| s.trim().to_string())\n        .unwrap()\n}\n\nfn setup_logging() {\n    use tracing_subscriber::prelude::*;\n    let enable_colors = {\n        /* Respect [never] in [RUST_LOG_STYLE] */\n        !std::env::var(\"RUST_LOG_STYLE\").is_ok_and(|style| style == \"never\")\n    };\n    let subscriber = tracing_subscriber::Registry::default()\n        .with(tracing_subscriber::EnvFilter::from_default_env())\n        .with(\n            tracing_tree::HierarchicalLayer::new(2)\n                .with_ansi(enable_colors)\n                .with_indent_lines(true),\n        );\n    
tracing::subscriber::set_global_default(subscriber).unwrap();\n}\n\nconst HAX_VANILLA_RUSTC: &str = \"HAX_VANILLA_RUSTC\";\n\nfn main() {\n    setup_logging();\n\n    let options: ExporterOptions = serde_json::from_str(\n        &std::env::var(ENV_VAR_OPTIONS_FRONTEND).unwrap_or_else(|_| {\n            panic!(\n                \"Cannot find environment variable {}\",\n                ENV_VAR_OPTIONS_FRONTEND\n            )\n        }),\n    )\n    .unwrap_or_else(|_| {\n        panic!(\n            \"Invalid value for the environment variable {}\",\n            ENV_VAR_OPTIONS_FRONTEND\n        )\n    });\n\n    let mut rustc_args: Vec<String> = std::env::args().skip(1).collect();\n    // add [--sysroot] if not present\n    if !rustc_args.iter().any(|arg| arg.starts_with(\"--sysroot\")) {\n        rustc_args.extend(vec![\"--sysroot\".into(), rustc_sysroot()])\n    };\n\n    // When `HAX_FEATURES_DETECTION_MODE` is set, we just detect\n    // features for the current crate, output them in JSON on stderr\n    // and exit immediately\n    if std::env::var(\"HAX_FEATURES_DETECTION_MODE\").is_ok() {\n        use std::io::BufWriter;\n        return serde_json::to_writer(\n            BufWriter::new(std::io::stderr()),\n            &Features::detect(&options, &rustc_args),\n        )\n        .unwrap();\n    }\n\n    let (vanilla_rustc, vanilla_rustc_never) = {\n        let vanilla_rustc = std::env::var(HAX_VANILLA_RUSTC);\n        let vanilla_rustc_never = vanilla_rustc == Ok(\"never\".into());\n        (\n            !vanilla_rustc_never && vanilla_rustc.is_ok(),\n            vanilla_rustc_never,\n        )\n    };\n\n    // fetch the correct callback structure given the command, and\n    // coerce options\n    let is_primary_package = std::env::var(\"CARGO_PRIMARY_PACKAGE\").is_ok();\n    let is_build_script = std::env::var(\"CARGO_CRATE_NAME\") == Ok(\"build_script_build\".to_string()); // FIXME: is there a more robust way to do this?\n    let translate_package 
=\n        !vanilla_rustc && !is_build_script && (options.deps || is_primary_package);\n    let mut callbacks: Box<dyn Callbacks + Send> = if translate_package {\n        Box::new(exporter::ExtractionCallbacks {\n            body_kinds: options.body_kinds.clone(),\n            experimental_full_def: options.experimental_full_def,\n        })\n    } else {\n        struct CallbacksNoop;\n        impl Callbacks for CallbacksNoop {}\n        Box::new(CallbacksNoop)\n    };\n\n    if translate_package {\n        // We want to enable certain features, but only if the crate\n        // itself doesn't enable those\n        let features = Features {\n            adt_const_params: false,    // not useful for now\n            generic_const_exprs: false, // not useful for now\n            register_tool: true,\n            registered_tools: HashSet::from_iter(vec![hax_lib_macros_types::HAX_TOOL.into()]),\n            auto_traits: true,\n            negative_impls: true,\n        } - Features::detect_forking();\n        rustc_args = [rustc_args[0].clone()]\n            .into_iter()\n            .chain([\n                \"--cfg\".into(),\n                hax_lib_macros_types::HAX_CFG_OPTION_NAME.into(),\n            ])\n            .chain(match &options.backend {\n                Some(backend) => vec![\"--cfg\".into(), format!(\"hax_backend_{backend}\")],\n                None => vec![],\n            })\n            .chain(features.into_iter().map(|s| format!(\"-Zcrate-attr={}\", s)))\n            .chain(rustc_args[1..].iter().cloned())\n            .collect();\n    };\n\n    let mut callbacks = CallbacksWrapper {\n        sub: &mut *callbacks,\n        options: {\n            let mut options = options.clone();\n            options.force_cargo_build = if translate_package {\n                options.force_cargo_build\n            } else {\n                hax_types::cli_options::ForceCargoBuild::default()\n            };\n            options\n        },\n    };\n\n    let 
exit_code = rustc_driver::catch_with_exit_code({\n        let rustc_args = rustc_args.clone();\n        move || rustc_driver::run_compiler(&rustc_args, &mut callbacks)\n    });\n\n    std::process::exit(\n        if !vanilla_rustc_never && translate_package && exit_code == 0 {\n            // When the hax translation is successful, we need to re-run\n            // rustc. Indeed, hax translation doesn't actually build a\n            // package: no `rlib` will be written on disk.\n            self::vanilla_rustc()\n        } else {\n            exit_code\n        },\n    )\n}\n\n/// Re-run rustc without doing any hax translation. This ensures a\n/// `rlib` is produced (when the crate compiles correctly).\nfn vanilla_rustc() -> i32 {\n    use std::process::{Command, Stdio};\n    let output = Command::new(std::env::args().next().unwrap())\n        .args(std::env::args().skip(1))\n        .env(HAX_VANILLA_RUSTC, \"1\")\n        .stdout(Stdio::piped())\n        .stderr(Stdio::piped())\n        .spawn()\n        .unwrap()\n        .wait_with_output()\n        .unwrap();\n    if output.status.success() {\n        0\n    } else {\n        let stdout = &std::str::from_utf8(&output.stdout).unwrap();\n        let stderr = &std::str::from_utf8(&output.stderr).unwrap();\n        println!(\"{stdout}\");\n        eprintln!(\"{stderr}\");\n        output.status.code().unwrap_or(1)\n    }\n}\n"
  },
  {
    "path": "cli/driver/src/exporter.rs",
    "content": "use hax_frontend_exporter::SInto;\nuse hax_frontend_exporter::state::LocalContextS;\nuse hax_types::cli_options::PathOrDash;\nuse hax_types::driver_api::Items;\nuse rustc_driver::{Callbacks, Compilation};\nuse rustc_interface::interface;\nuse rustc_interface::interface::Compiler;\nuse rustc_middle::middle::region::Scope;\nuse rustc_middle::ty::TyCtxt;\nuse rustc_middle::{\n    thir,\n    thir::{Block, BlockId, Expr, ExprId, ExprKind, Pat, PatKind, Stmt, StmtId, StmtKind, Thir},\n};\nuse rustc_span::symbol::Symbol;\nuse serde::Serialize;\nuse std::cell::RefCell;\nuse std::collections::{HashMap, HashSet};\nuse std::rc::Rc;\n\n/// Browse a crate and translate every item\n#[tracing::instrument(skip_all)]\nfn export_crate<'tcx, Body: hax_frontend_exporter::IsBody>(\n    options: &hax_frontend_exporter_options::Options,\n    tcx: TyCtxt<'tcx>,\n    experimental_full_def: bool,\n) -> (\n    Vec<rustc_span::Span>,\n    Vec<hax_frontend_exporter::DefId>,\n    Vec<(\n        hax_frontend_exporter::DefId,\n        hax_frontend_exporter::ImplInfos,\n    )>,\n    Items<Body>,\n    hax_frontend_exporter::id_table::Table,\n) {\n    use hax_frontend_exporter::WithGlobalCacheExt;\n    let state = hax_frontend_exporter::state::State::new(tcx, options.clone());\n\n    let result = if experimental_full_def {\n        let owners = tcx.hir_crate_items(()).owners();\n        Items::FullDef(\n            owners\n                .map(|owner_id| {\n                    owner_id\n                        .to_def_id()\n                        .sinto(&state)\n                        .full_def(&state)\n                        .as_ref()\n                        .clone()\n                })\n                .collect(),\n        )\n    } else {\n        Items::Legacy(\n            tcx.hir_free_items()\n                .map(|id| tcx.hir_item(id).sinto(&state))\n                .collect(),\n        )\n    };\n    let impl_infos = 
hax_frontend_exporter::impl_def_ids_to_impled_types_and_bounds(&state)\n        .into_iter()\n        .collect();\n    let exported_spans = state.with_global_cache(|cache| cache.spans.keys().copied().collect());\n    let exported_def_ids = state.with_global_cache(|cache| {\n        cache\n            .per_item\n            .values()\n            .filter_map(|per_item_cache| per_item_cache.def_id.clone())\n            .collect()\n    });\n    let cache_map = state.with_global_cache(|cache| cache.id_table_session.table().clone());\n\n    (\n        exported_spans,\n        exported_def_ids,\n        impl_infos,\n        result,\n        cache_map,\n    )\n}\n\n/// Callback for extraction\n#[derive(Debug, Clone, Serialize)]\npub(crate) struct ExtractionCallbacks {\n    pub body_kinds: Vec<hax_types::cli_options::ExportBodyKind>,\n    pub experimental_full_def: bool,\n}\n\nimpl From<ExtractionCallbacks> for hax_frontend_exporter_options::Options {\n    fn from(opts: ExtractionCallbacks) -> hax_frontend_exporter_options::Options {\n        hax_frontend_exporter_options::Options {\n            inline_anon_consts: true,\n            bounds_options: hax_frontend_exporter_options::BoundsOptions {\n                resolve_destruct: false,\n                prune_sized: true,\n            },\n            item_ref_use_concrete_impl: false,\n        }\n    }\n}\n\nimpl Callbacks for ExtractionCallbacks {\n    fn config(&mut self, config: &mut rustc_interface::interface::Config) {\n        config.override_queries = Some(|_sess, providers| {\n            hax_frontend_exporter::override_queries_store_body(providers);\n        });\n    }\n    fn after_expansion<'tcx>(&mut self, compiler: &Compiler, tcx: TyCtxt<'tcx>) -> Compilation {\n        use std::ops::{Deref, DerefMut};\n\n        use hax_frontend_exporter::ThirBody;\n        use hax_types::cli_options::Command;\n        use rustc_session::config::CrateType;\n        use serde::{Deserialize, Serialize};\n        use 
std::fs::File;\n        use std::io::BufWriter;\n\n        use std::path::PathBuf;\n\n        let opts = &compiler.sess.opts;\n        let externs: Vec<_> = opts\n            .externs\n            .iter()\n            .flat_map(|(_, ext)| match &ext.location {\n                rustc_session::config::ExternLocation::ExactPaths(set) => set\n                    .iter()\n                    .map(|cp| cp.canonicalized())\n                    .collect::<Vec<_>>()\n                    .into_iter(),\n                _ => vec![].into_iter(),\n            })\n            .map(|path| path.with_extension(\"haxmeta\"))\n            .collect();\n\n        let cg_metadata = opts.cg.metadata[0].clone();\n        let crate_name = opts.crate_name.clone().unwrap();\n\n        let output_dir = compiler.sess.io.output_dir.clone().unwrap();\n        let haxmeta_path = output_dir.join(format!(\"{crate_name}-{cg_metadata}.haxmeta\",));\n\n        let mut file = BufWriter::new(File::create(&haxmeta_path).unwrap());\n\n        use hax_types::driver_api::{HaxMeta, with_kind_type};\n        with_kind_type!(\n            self.body_kinds.clone(),\n            <Body>|| {\n                let (spans, def_ids, impl_infos, items, cache_map) =\n                    export_crate(&self.clone().into(), tcx, self.experimental_full_def);\n                let files: HashSet<PathBuf> =\n                    match &items {\n                        Items::Legacy(items) => HashSet::from_iter(items\n                        .iter()\n                        .flat_map(|item| item.span.filename.to_path().map(|path| path.to_path_buf()))),\n                        Items::FullDef(items) => HashSet::from_iter(items\n                            .iter()\n                            .flat_map(|item| item.span.filename.to_path().map(|path| path.to_path_buf()))),\n                    }\n\n                ;\n                let haxmeta: HaxMeta<Body> = HaxMeta {\n                    crate_name,\n                    
cg_metadata,\n                    externs,\n                    impl_infos,\n                    items,\n                    comments: files.into_iter()\n                        .flat_map(|path|hax_frontend_exporter::comments::comments_of_file(path).ok())\n                        .flatten()\n                        .collect(),\n                    def_ids,\n                    hax_version: hax_types::HAX_VERSION.into(),\n                };\n                haxmeta.write(&mut file, cache_map);\n            }\n        );\n\n        let manifest_dir = std::env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n        let manifest_dir = std::path::Path::new(&manifest_dir);\n\n        let data = hax_types::driver_api::EmitHaxMetaMessage {\n            manifest_dir: Some(manifest_dir.to_path_buf()),\n            working_dir: Some(\n                opts.working_dir\n                    .to_path(rustc_span::FileNameDisplayPreference::Local)\n                    .to_path_buf(),\n            ),\n            path: haxmeta_path,\n        };\n        eprintln!(\n            \"{}{}\",\n            hax_types::driver_api::HAX_DRIVER_STDERR_PREFIX,\n            &serde_json::to_string(&hax_types::driver_api::HaxDriverMessage::EmitHaxMeta(data))\n                .unwrap()\n        );\n\n        Compilation::Stop\n    }\n}\n"
  },
  {
    "path": "cli/driver/src/features.rs",
    "content": "use std::collections::HashSet;\n\nuse rustc_driver::{Callbacks, Compilation};\nuse rustc_interface::interface;\nuse rustc_middle::ty::TyCtxt;\nuse rustc_span::symbol::Symbol;\n\nuse crate::callbacks_wrapper::CallbacksWrapper;\n\nuse serde::{Deserialize, Serialize};\n\n/// A subset of `rustc_feature::Features` that is relevant to us\n#[derive(Debug, Clone, Serialize, Deserialize)]\npub struct Features {\n    pub adt_const_params: bool,\n    pub generic_const_exprs: bool,\n    pub register_tool: bool,\n    pub auto_traits: bool,\n    pub negative_impls: bool,\n    pub registered_tools: HashSet<String>,\n}\n\nimpl From<&rustc_feature::Features> for Features {\n    fn from(rfeatures: &rustc_feature::Features) -> Self {\n        Features {\n            adt_const_params: rfeatures.adt_const_params(),\n            generic_const_exprs: rfeatures.generic_const_exprs(),\n            register_tool: rfeatures.register_tool(),\n            auto_traits: rfeatures.auto_traits(),\n            negative_impls: rfeatures.negative_impls(),\n            registered_tools: HashSet::new(),\n        }\n    }\n}\n\nimpl core::ops::Sub for Features {\n    type Output = Self;\n    fn sub(self, rhs: Self) -> Self {\n        fn sub(x: bool, y: bool) -> bool {\n            x & !y\n        }\n        Features {\n            adt_const_params: sub(self.adt_const_params, rhs.adt_const_params),\n            generic_const_exprs: sub(self.generic_const_exprs, rhs.generic_const_exprs),\n            register_tool: sub(self.register_tool, rhs.register_tool),\n            auto_traits: sub(self.auto_traits, rhs.auto_traits),\n            negative_impls: sub(self.negative_impls, rhs.negative_impls),\n            registered_tools: self\n                .registered_tools\n                .difference(&rhs.registered_tools)\n                .cloned()\n                .collect(),\n        }\n    }\n}\n\nimpl Default for Features {\n    fn default() -> Self {\n        
(&rustc_feature::Features::default()).into()\n    }\n}\n\nimpl Features {\n    pub fn into_iter(&self) -> impl Iterator<Item = String> {\n        [\n            self.adt_const_params.then_some(\"adt_const_params\"),\n            self.generic_const_exprs.then_some(\"generic_const_exprs\"),\n            self.register_tool.then_some(\"register_tool\"),\n        ]\n        .into_iter()\n        .flatten()\n        .map(|s| format!(\"feature({})\", s))\n        .chain(\n            self.registered_tools\n                .clone()\n                .into_iter()\n                .map(|tool| format!(\"register_tool({})\", tool)),\n        )\n    }\n    /// Runs Rustc with a driver that only collects which unstable\n    /// Rustc features are enabled\n    pub fn detect(\n        options: &hax_types::cli_options::ExporterOptions,\n        rustc_args: &Vec<String>,\n    ) -> Self {\n        struct CollectFeatures {\n            features: Features,\n        }\n        impl Callbacks for CollectFeatures {\n            fn after_expansion<'tcx>(\n                &mut self,\n                compiler: &interface::Compiler,\n                tcx: TyCtxt<'tcx>,\n            ) -> Compilation {\n                self.features = tcx.features().into();\n                self.features.registered_tools = tcx\n                    .registered_tools(())\n                    .iter()\n                    .map(|x| x.name.to_ident_string())\n                    .collect();\n                rustc_driver::Compilation::Stop\n            }\n        }\n        let mut callbacks = CollectFeatures {\n            features: Features::default(),\n        };\n        let exit_code = rustc_driver::catch_with_exit_code(|| {\n            rustc_driver::run_compiler(\n                rustc_args,\n                &mut CallbacksWrapper {\n                    sub: &mut callbacks,\n                    options: options.clone(),\n                },\n            )\n        });\n        if exit_code != 0 {\n            
std::process::exit(exit_code);\n        }\n        callbacks.features.clone()\n    }\n\n    /// Just like `detect`, but wraps the call in a subprocess so that\n    /// we can capture `stdout` and `stderr`: we don't want the user to\n    /// see error messages from Rustc twice, or Cargo to have to parse\n    /// Rustc messages twice.\n    pub fn detect_forking() -> Self {\n        use std::process::{Command, Stdio};\n        let output = Command::new(std::env::args().next().unwrap())\n            .args(std::env::args().skip(1))\n            .env(\"HAX_FEATURES_DETECTION_MODE\", \"1\")\n            .stdout(Stdio::piped())\n            .stderr(Stdio::piped())\n            .spawn()\n            .unwrap()\n            .wait_with_output()\n            .unwrap();\n        let stderr = &std::str::from_utf8(&output.stderr).unwrap();\n        serde_json::from_str(stderr).unwrap_or_else(|e| {\n            eprintln!(\"{}\", stderr);\n            tracing::error!(\"rustc emitted an error, aborting hax custom driver.\");\n            std::process::exit(1);\n        })\n    }\n}\n"
  },
  {
    "path": "cli/subcommands/Cargo.toml",
    "content": "[package]\nname = \"cargo-hax\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\nbuild = \"build.rs\"\ndescription = \"The high assurance translation toolchain\"\n\n[[bin]]\npath = \"src/cargo_hax.rs\"\nname = \"cargo-hax\"\n\n[[bin]]\npath = \"src/json_schema.rs\"\nname = \"hax-export-json-schemas\"\n\n[dependencies]\nserde.workspace = true\nserde_json.workspace = true\nschemars.workspace = true\nitertools.workspace = true\nclap.workspace = true\npaste = \"1.0.11\"\nhax-frontend-exporter.workspace = true\nhax-frontend-exporter-options.workspace = true\nhax-types.workspace = true\npath-clean = \"1.0.1\"\ntempfile = \"3.8\"\nwhich.workspace = true\nversion_check = \"0.9\"\nrustup-toolchain = \"0.1\"\ncolored.workspace = true\nis-terminal = \"0.4.9\"\ntiny_http = \"0.12\"\ninquire = \"0.6\"\nannotate-snippets.workspace = true\nserde-jsonlines = \"0.5.0\"\nprettyplease = \"0.2.20\"\nsyn = { version = \"2.*\", features = [\"full\"] }\ncargo_metadata.workspace = true\nextension-traits = \"1.0.1\"\n\n[build-dependencies]\nserde.workspace = true\nserde_json.workspace = true\nhax-types.workspace = true\nhax-rust-engine.workspace = true\nschemars.workspace = true\nhax-frontend-exporter.workspace = true\nhax-lib-macros-types = { workspace = true, features = [\"schemars\"] }\nversion_check = \"0.9\"\ntoml = \"0.8\"\n\n[package.metadata.release]\npre-release-hook = [\n    \"dune\",\n    \"build\",\n    \"--root\",\n    \"../../engine\",\n    \"hax-engine.opam\",\n]\n\n[[package.metadata.release.pre-release-replacements]]\nfile = \"../../engine/dune-project\"\nsearch = \"version [a-z0-9\\\\.-]+\"\nreplace = \"version {{version}}\"\nprerelease = true\n"
  },
  {
    "path": "cli/subcommands/build.rs",
    "content": "fn rustc_version_env_var() {\n    let (_version, channel, date) = version_check::triple().unwrap();\n    println!(\"cargo:rustc-env=HAX_RUSTC_VERSION={channel}-{date}\");\n\n    let rust_toolchain_file = include_str!(\"rust-toolchain.toml\")\n        .parse::<toml::Table>()\n        .unwrap();\n    println!(\n        \"cargo:rustc-env=HAX_TOOLCHAIN={}\",\n        rust_toolchain_file[\"toolchain\"][\"channel\"]\n            .as_str()\n            .expect(\"Could not find key [toolchain.channel] in [rust-toolchain.toml]\")\n    );\n}\n\nfn json_schema_static_asset() {\n    let mut schema = schemars::schema_for!((\n        hax_frontend_exporter::Item<hax_frontend_exporter::ThirBody>,\n        hax_types::cli_options::Options,\n        hax_types::diagnostics::Diagnostics,\n        hax_types::engine_api::EngineOptions,\n        hax_types::engine_api::Output,\n        hax_types::engine_api::WithDefIds<hax_frontend_exporter::ThirBody>,\n        hax_types::engine_api::protocol::FromEngine,\n        hax_types::engine_api::protocol::ToEngine,\n        hax_lib_macros_types::AttrPayload,\n        hax_rust_engine::ocaml_engine::Query,\n        hax_rust_engine::ocaml_engine::Response,\n    ));\n    schema.schema.metadata.get_or_insert_default().id = Some(hax_types::HAX_VERSION.into());\n    serde_json::to_writer(\n        std::fs::File::create(format!(\"{}/schema.json\", std::env::var(\"OUT_DIR\").unwrap()))\n            .unwrap(),\n        &schema,\n    )\n    .unwrap();\n}\n\nfn git_dirty_env_var() {\n    println!(\"cargo:rerun-if-env-changed=HAX_GIT_IS_DIRTY\");\n    let dirty = {\n        use std::process::Command;\n        let _ = Command::new(\"git\")\n            .args([\"update-index\", \"-q\", \"--refresh\"])\n            .status();\n        !Command::new(\"git\")\n            .args([\"diff-index\", \"--quiet\", \"HEAD\", \"--\"])\n            .status()\n            .map(|status| status.success())\n            .unwrap_or(true)\n    };\n    
println!(\"cargo:rustc-env=HAX_GIT_IS_DIRTY={}\", dirty);\n}\n\nfn main() {\n    rustc_version_env_var();\n    json_schema_static_asset();\n    git_dirty_env_var();\n}\n"
  },
  {
    "path": "cli/subcommands/src/cargo_hax.rs",
    "content": "#![feature(rustc_private)]\nuse annotate_snippets::{Level, Renderer};\nuse clap::Parser;\nuse colored::Colorize;\nuse hax_types::cli_options::*;\nuse hax_types::driver_api::*;\nuse hax_types::engine_api::*;\nuse is_terminal::IsTerminal;\nuse serde_jsonlines::BufReadExt;\nuse std::collections::HashMap;\nuse std::fs;\nuse std::io::BufRead;\nuse std::io::Write;\nuse std::path::PathBuf;\nuse std::process;\n\nmod engine_debug_webapp;\nuse hax_frontend_exporter::id_table;\n\n/// Return a toolchain argument to pass to `cargo`: when the correct nightly is\n/// already present, this is None, otherwise we (1) ensure `rustup` is available\n/// (2) install the nightly (3) return the toolchain\nfn toolchain() -> Option<&'static str> {\n    let current_rustc_version = version_check::triple()\n        .map(|(_, channel, date)| format!(\"{channel}-{date}\"))\n        .unwrap_or(\"unknown\".into());\n    if env!(\"HAX_RUSTC_VERSION\") != current_rustc_version {\n        const TOOLCHAIN: &str = env!(\"HAX_TOOLCHAIN\");\n        // ensure rustup is available\n        which::which(\"rustup\").ok().unwrap_or_else(|| {\n            println!(\"Error: {} was not found, but toolchain {} is required while the current toolchain is {}\\n\\nExiting.\", \"rustup\".bold(), TOOLCHAIN.bold(), current_rustc_version.bold());\n            std::process::exit(1)\n        });\n        // make sure the toolchain is installed\n        rustup_toolchain::install(TOOLCHAIN).unwrap();\n        // return the correct toolchain\n        Some(TOOLCHAIN)\n    } else {\n        None\n    }\n}\n\n/// [`get_args`] is a wrapper of `std::env::args` that strips a possible\n/// cargo subcommand. 
This allows\n/// both with `cargo BINARY args...` and `cargo-BINARY args...`.\npub fn get_args(subcommand: &str) -> Vec<String> {\n    let mut args: Vec<_> = std::env::args().collect();\n    if args.get(1) == Some(&subcommand.to_string()) {\n        // we face a call `cargo [subcommand]`: we need to get rid of the first argument\n        args = args.into_iter().skip(1).collect();\n    }\n    args\n}\n\n/// Our custom rustc driver will *not* be run in a proper terminal,\n/// thus logs would appear uncolored. When no `RUST_LOG_STYLE` env. var.\n/// is set, [`rust_log_style`] checks whether the `cargo hax` command was\n/// run inside a terminal. If it was inside a terminal,\n/// [`rust_log_style`] returns `\"always\"`, which is the usual default\n/// behavior. Otherwise we return `\"never\"`. When [`RUST_LOG_STYLE`] is\n/// set, we just return its value.\nconst RUST_LOG_STYLE: &str = \"RUST_LOG_STYLE\";\nfn rust_log_style() -> String {\n    std::env::var(RUST_LOG_STYLE).unwrap_or_else(|_| {\n        if std::io::stderr().is_terminal() {\n            \"always\".to_string()\n        } else {\n            \"never\".to_string()\n        }\n    })\n}\n\n/// We set `cfg(hax)` so that client crates can include dependencies\n/// or cfg-gate pieces of code.\nconst RUSTFLAGS: &str = \"RUSTFLAGS\";\nfn rustflags() -> String {\n    let rustflags = std::env::var(RUSTFLAGS).unwrap_or(\"\".into());\n    [rustflags, \"--cfg hax\".into()].join(\" \")\n}\n\nconst ENGINE_BINARY_NAME: &str = \"hax-engine\";\nconst ENGINE_BINARY_NOT_FOUND: &str = \"The binary [hax-engine] was not found in your [PATH].\";\n\n/// Dynamically looks for binary [ENGINE_BINARY_NAME].  First, we\n/// check whether [HAX_ENGINE_BINARY] is set, and use that if it\n/// is. Then, we try to find [ENGINE_BINARY_NAME] in PATH. 
If not\n/// found, detect whether nodejs is available, download the JS-compiled\n/// engine and use it.\n#[allow(unused_variables, unreachable_code)]\nfn find_hax_engine(message_format: MessageFormat) -> process::Command {\n    use which::which;\n\n    std::env::var(\"HAX_ENGINE_BINARY\")\n        .ok()\n        .map(process::Command::new)\n        .or_else(|| which(ENGINE_BINARY_NAME).ok().map(process::Command::new))\n        .or_else(|| {\n            which(\"node\").ok().and_then(|_| {\n                if let Ok(true) = inquire::Confirm::new(&format!(\n                    \"{} Should I try to download it from GitHub?\",\n                    ENGINE_BINARY_NOT_FOUND,\n                ))\n                .with_default(true)\n                .prompt()\n                {\n                    let cmd = process::Command::new(\"node\");\n                    let engine_js_path: String =\n                        panic!(\"TODO: Downloading from GitHub is not supported yet.\");\n                    cmd.arg(engine_js_path);\n                    Some(cmd)\n                } else {\n                    None\n                }\n            })\n        })\n        .unwrap_or_else(|| {\n            fn is_opam_setup_correctly() -> bool {\n                std::env::var(\"OPAM_SWITCH_PREFIX\").is_ok()\n            }\n            HaxMessage::EngineNotFound {\n                is_opam_setup_correctly: is_opam_setup_correctly(),\n            }\n            .report(message_format, None);\n            std::process::exit(2);\n        })\n}\n\nconst RUST_ENGINE_BINARY_NAME: &str = \"hax-rust-engine\";\nconst RUST_ENGINE_BINARY_NOT_FOUND: &str =\n    \"The binary [hax-rust-engine] was not found in your [PATH].\";\n\n#[allow(unused_variables, unreachable_code)]\nfn find_rust_hax_engine(message_format: MessageFormat) -> process::Command {\n    use which::which;\n\n    std::env::var(\"HAX_RUST_ENGINE_BINARY\")\n        .ok()\n        .map(process::Command::new)\n        .or_else(|| {\n          
  which(RUST_ENGINE_BINARY_NAME)\n                .ok()\n                .map(process::Command::new)\n        })\n        .expect(RUST_ENGINE_BINARY_NOT_FOUND)\n}\n\nuse hax_types::diagnostics::message::HaxMessage;\nuse hax_types::diagnostics::report::ReportCtx;\n\n#[extension_traits::extension(trait ExtHaxMessage)]\nimpl HaxMessage {\n    fn report(self, message_format: MessageFormat, mut rctx: Option<&mut ReportCtx>) {\n        if let (Some(r), HaxMessage::Diagnostic { diagnostic, .. }) = (rctx.as_mut(), &self)\n            && r.seen_already(diagnostic.clone())\n        {\n            return;\n        }\n        match message_format {\n            MessageFormat::Json => println!(\"{}\", serde_json::to_string(&self).unwrap()),\n            MessageFormat::Human => self.report_styled(rctx),\n        }\n    }\n    fn report_styled(self, rctx: Option<&mut ReportCtx>) {\n        let renderer = Renderer::styled();\n        match self {\n            Self::Diagnostic {\n                diagnostic,\n                working_dir,\n            } => {\n                let mut _rctx = None;\n                let rctx = rctx.unwrap_or_else(|| _rctx.get_or_insert(ReportCtx::default()));\n                diagnostic.with_message(\n                    rctx,\n                    working_dir.as_ref().map(PathBuf::as_path),\n                    Level::Error,\n                    |msg| eprintln!(\"{}\", renderer.render(msg)),\n                );\n            }\n            Self::EngineNotFound {\n                is_opam_setup_correctly,\n            } => {\n                use colored::Colorize;\n                let message = format!(\"hax: {}\\n{}\\n\\n{} {}\\n\",\n                      &ENGINE_BINARY_NOT_FOUND,\n                      \"Please make sure the engine is installed and is in PATH!\",\n                      \"Hint: With OPAM, `eval $(opam env)` is necessary for OPAM binaries to be in PATH: make sure to run `eval $(opam env)` before running `cargo hax`.\".bright_black(),\n     
                 format!(\"(diagnostics: {})\", if is_opam_setup_correctly { \"opam seems okay ✓\" } else {\"opam seems not okay ❌\"}).bright_black()\n            );\n                let message = Level::Error.title(&message);\n                eprintln!(\"{}\", renderer.render(message))\n            }\n            Self::ProducedFile { mut path, wrote } => {\n                // Make path relative if possible\n                if let Ok(current_dir) = std::env::current_dir() {\n                    if let Ok(relative) = path.strip_prefix(current_dir) {\n                        path = PathBuf::from(\".\").join(relative).to_path_buf();\n                    }\n                }\n                let title = if wrote {\n                    format!(\"hax: wrote file {}\", path.display())\n                } else {\n                    format!(\"hax: unchanged file {}\", path.display())\n                };\n                eprintln!(\"{}\", renderer.render(Level::Info.title(&title)))\n            }\n            Self::HaxEngineFailure { exit_code } => {\n                let title = format!(\n                    \"hax: {} exited with non-zero code {}\",\n                    ENGINE_BINARY_NAME, exit_code,\n                );\n                eprintln!(\"{}\", renderer.render(Level::Error.title(&title)));\n            }\n            Self::ProfilingData(data) => {\n                fn format_with_dot(shift: u32, n: u64) -> String {\n                    let factor = 10u64.pow(shift);\n                    format!(\"{}.{}\", n / factor, n % factor)\n                }\n                let title = format!(\n                    \"hax[profiling]: {}: {}ms, memory={}, {} item{}{}\",\n                    data.context,\n                    format_with_dot(6, data.time_ns),\n                    data.memory,\n                    data.quantity,\n                    if data.quantity > 1 { \"s\" } else { \"\" },\n                    if data.errored {\n                        \" (note: this 
failed!)\"\n                    } else {\n                        \"\"\n                    }\n                );\n                eprintln!(\"{}\", renderer.render(Level::Info.title(&title)));\n            }\n            Self::Stats { errors_per_item } => {\n                let success_items = errors_per_item.iter().filter(|(_, n)| *n == 0).count();\n                let total = errors_per_item.len();\n                let title = format!(\n                    \"hax: {}/{} items were successfully translated ({}% success rate)\",\n                    success_items,\n                    total,\n                    (success_items * 100) / total\n                );\n                eprintln!(\"{}\", renderer.render(Level::Info.title(&title)));\n            }\n            Self::CargoBuildFailure => {\n                let title =\n                    \"hax: running `cargo build` was not successful, continuing anyway.\".to_string();\n                eprintln!(\"{}\", renderer.render(Level::Warning.title(&title)));\n            }\n            Self::WarnExperimentalBackend { backend } => {\n                let title = format!(\n                    \"hax: Experimental backend \\\"{}\\\" is work in progress.\",\n                    backend\n                );\n                eprintln!(\"{}\", renderer.render(Level::Warning.title(&title)));\n            }\n        }\n    }\n}\n\n/// Runs `hax-engine`\nfn run_engine(\n    haxmeta: HaxMeta<hax_frontend_exporter::ThirBody>,\n    id_table: id_table::Table,\n    working_dir: Option<PathBuf>,\n    manifest_dir: Option<PathBuf>,\n    backend: &BackendOptions<()>,\n    message_format: MessageFormat,\n) -> bool {\n    let engine_options = EngineOptions {\n        hax_version: haxmeta.hax_version,\n        backend: backend.clone(),\n        input: haxmeta.items,\n        impl_infos: haxmeta.impl_infos,\n    };\n    let mut hax_engine_command = match &engine_options.backend.backend {\n        Backend::Coq | Backend::Ssprove | 
Backend::Easycrypt | Backend::ProVerif(_) => {\n            find_hax_engine(message_format)\n        }\n        Backend::Fstar(_) if matches!(&engine_options.input, Items::Legacy(_)) => {\n            find_hax_engine(message_format)\n        }\n        _ => find_rust_hax_engine(message_format),\n    };\n    let mut engine_subprocess = hax_engine_command\n        .stdin(std::process::Stdio::piped())\n        .stdout(std::process::Stdio::piped())\n        .spawn()\n        .inspect_err(|e| {\n            if let std::io::ErrorKind::NotFound = e.kind() {\n                panic!(\n                    \"The binary [{}] was not found in your [PATH].\",\n                    ENGINE_BINARY_NAME\n                )\n            }\n        })\n        .unwrap();\n\n    let mut error = false;\n    let mut output = Output {\n        diagnostics: vec![],\n        files: vec![],\n        debug_json: vec![],\n    };\n    {\n        let mut rctx = hax_types::diagnostics::report::ReportCtx::default();\n        let mut stdin = std::io::BufWriter::new(\n            engine_subprocess\n                .stdin\n                .as_mut()\n                .expect(\"Could not write on stdin\"),\n        );\n\n        macro_rules! 
send {\n            ($value:expr) => {\n                serde_json::to_writer(&mut stdin, $value).unwrap();\n                stdin.write_all(b\"\\n\").unwrap();\n                stdin.flush().unwrap();\n            };\n        }\n\n        id_table::WithTable::run(id_table, engine_options, |with_table| {\n            send!(with_table);\n        });\n\n        let out_dir = backend.output_dir.clone().unwrap_or({\n            let relative_path: PathBuf = [\n                \"proofs\",\n                format!(\"{}\", backend.backend).as_str(),\n                \"extraction\",\n            ]\n            .iter()\n            .collect();\n            manifest_dir\n                .map(|manifest_dir| manifest_dir.join(&relative_path))\n                .unwrap_or(relative_path)\n        });\n\n        let stdout = std::io::BufReader::new(engine_subprocess.stdout.take().unwrap());\n        let mut errors_per_item: HashMap<_, usize> = HashMap::new();\n        for msg in stdout.json_lines() {\n            let msg = msg.expect(\n                \"Hax engine sent an invalid json value. 
\\\n            This might be caused by debug messages on stdout, \\\n            which is reserved for JSON communication with cargo-hax\",\n            );\n            use protocol::*;\n            match msg {\n                FromEngine::Exit => break,\n                FromEngine::Diagnostic(diagnostic) => {\n                    error = true;\n                    if backend.dry_run {\n                        output.diagnostics.push(diagnostic.clone())\n                    }\n                    if let Some(owner_id) = &diagnostic.owner_id {\n                        *errors_per_item.entry(owner_id.clone()).or_default() += 1;\n                    }\n                    HaxMessage::Diagnostic {\n                        diagnostic,\n                        working_dir: working_dir.clone(),\n                    }\n                    .report(message_format, Some(&mut rctx));\n                }\n                FromEngine::File(file) => {\n                    if backend.dry_run {\n                        output.files.push(file)\n                    } else {\n                        let path = out_dir.join(&file.path);\n                        std::fs::create_dir_all(path.parent().unwrap()).unwrap();\n                        let mut wrote = false;\n                        if fs::read_to_string(&path).as_ref().ok() != Some(&file.contents) {\n                            std::fs::write(&path, file.contents).unwrap();\n                            wrote = true;\n                        }\n                        if let Some(mut sourcemap) = file.sourcemap.clone() {\n                            sourcemap.sourcesContent = sourcemap\n                                .sources\n                                .iter()\n                                .map(PathBuf::from)\n                                .map(|path| {\n                                    if let Some(working_dir) = working_dir.as_ref()\n                                        && path.is_relative()\n                   
                 {\n                                        working_dir.join(path).to_path_buf()\n                                    } else {\n                                        path\n                                    }\n                                })\n                                .map(|path| fs::read_to_string(path).ok())\n                                .collect();\n                            let f = std::fs::File::create(path.with_file_name(format!(\n                                \"{}.map\",\n                                path.file_name().unwrap().to_string_lossy()\n                            )))\n                            .unwrap();\n                            serde_json::to_writer(std::io::BufWriter::new(f), &sourcemap).unwrap()\n                        }\n                        HaxMessage::ProducedFile { path, wrote }.report(message_format, None)\n                    }\n                }\n                FromEngine::DebugString(debug) => output.debug_json.push(debug),\n                FromEngine::PrettyPrintDiagnostic(diag) => {\n                    send!(&ToEngine::PrettyPrintedDiagnostic(format!(\"{}\", diag)));\n                }\n                FromEngine::PrettyPrintRust(code) => {\n                    let code = match syn::parse_file(&code) {\n                        Ok(file) => match std::panic::catch_unwind(|| prettyplease::unparse(&file))\n                        {\n                            Ok(pp) => Ok(pp),\n                            Err(err) => Err(format!(\"prettyplease panicked with: {:#?}\", err)),\n                        },\n                        Err(err) => Err(format!(\"{}\", err)),\n                    };\n                    send!(&ToEngine::PrettyPrintedRust(code));\n                }\n                FromEngine::ProfilingData(profiling_data) => {\n                    HaxMessage::ProfilingData(profiling_data).report(message_format, None)\n                }\n                FromEngine::ItemProcessed(items) 
=> {\n                    for item in items {\n                        errors_per_item.insert(item, 0);\n                    }\n                }\n                FromEngine::Ping => {\n                    send!(&ToEngine::Pong);\n                }\n            }\n        }\n        if backend.stats {\n            HaxMessage::Stats {\n                errors_per_item: errors_per_item.into_iter().collect(),\n            }\n            .report(message_format, None)\n        }\n        drop(stdin);\n    }\n\n    let exit_status = engine_subprocess.wait().unwrap();\n    if !exit_status.success() {\n        HaxMessage::HaxEngineFailure {\n            exit_code: exit_status.code().unwrap_or(-1),\n        }\n        .report(message_format, None);\n        std::process::exit(1);\n    }\n\n    if backend.dry_run {\n        serde_json::to_writer(std::io::BufWriter::new(std::io::stdout()), &output).unwrap()\n    }\n    if !output.debug_json.is_empty() {\n        use DebugEngineMode;\n        let debug_json = &format!(\"[{}]\", output.debug_json.join(\",\"));\n        match &backend.debug_engine {\n            Some(DebugEngineMode::Interactive) => {\n                eprintln!(\"----------------------------------------------\");\n                eprintln!(\"----------------------------------------------\");\n                eprintln!(\"----------------------------------------------\");\n                eprintln!(\"-- Engine debug mode. Press CTRL+C to exit. 
--\");\n                eprintln!(\"----------------------------------------------\");\n                eprintln!(\"----------------------------------------------\");\n                eprintln!(\"----------------------------------------------\");\n                engine_debug_webapp::run(|| debug_json.clone())\n            }\n            Some(DebugEngineMode::File(file)) if !backend.dry_run => {\n                let mut file = file.open_or_stdout();\n                write!(file, \"{debug_json}\").unwrap()\n            }\n            _ => (),\n        }\n    }\n\n    error\n}\n\n/// Uses `cargo metadata` to compute a derived target directory.\nfn target_dir(suffix: &str) -> PathBuf {\n    let metadata = cargo_metadata::MetadataCommand::new().exec().unwrap();\n    let mut dir = metadata.target_directory;\n    dir.push(suffix);\n    dir.into()\n}\n\n/// Gets hax version: if hax is being compiled from a dirty git repo,\n/// then this function taints the hax version with the hash of the\n/// current executable. This makes sure cargo doesn't cache across\n/// different versions of hax, for more information see\n/// https://github.com/hacspec/hax/issues/801.\nfn get_hax_version() -> String {\n    let mut version = hax_types::HAX_VERSION.to_string();\n    if env!(\"HAX_GIT_IS_DIRTY\") == \"true\" {\n        version += &std::env::current_exe()\n            .ok()\n            .and_then(|exe_path| std::fs::read(exe_path).ok())\n            .map(|contents| {\n                use std::hash::{DefaultHasher, Hash, Hasher};\n                let mut s = DefaultHasher::new();\n                contents.hash(&mut s);\n                format!(\"hash-exe-{}\", s.finish())\n            })\n            .expect(\"Expect read path\")\n    }\n\n    version\n}\n\n/// Returns the path to the custom rustc driver used by cargo-hax.\n///\n/// This function retrieves the path of the current executable (i.e. 
`cargo-hax`), determines its\n/// parent directory, and then appends the driver executable name `\"driver-hax-frontend-exporter\"` to it.\n/// This path is used to locate the custom rustc driver that computes `haxmeta` files.\nfn get_hax_rustc_driver_path() -> PathBuf {\n    std::env::current_exe()\n        .expect(\"Could not get the current executable path for `cargo-hax`.\")\n        .parent().expect(\"The executable `cargo-hax` is supposed to be a file, which is supposed to have a parent folder.\")\n        .join(\"driver-hax-frontend-exporter\")\n}\n\n/// Calls `cargo` with a custom driver which computes `haxmeta` files\n/// in `TARGET`. One `haxmeta` file is produced by crate. Each\n/// `haxmeta` file contains the full AST of one crate.\nfn compute_haxmeta_files(options: &Options) -> (Vec<EmitHaxMetaMessage>, i32) {\n    let frontend_options = ExporterOptions::from(options);\n    let mut cmd = {\n        let mut cmd = process::Command::new(\"cargo\");\n        if let Some(toolchain) = toolchain() {\n            cmd.env(\"RUSTUP_TOOLCHAIN\", toolchain);\n        }\n        cmd.args([\"check\".into()].iter().chain(options.cargo_flags.iter()));\n        const COLOR_FLAG: &str = \"--color\";\n        let explicit_color_flag = options.cargo_flags.iter().any(|flag| flag == COLOR_FLAG);\n        if !explicit_color_flag && std::io::stderr().is_terminal() {\n            cmd.args([COLOR_FLAG, \"always\"]);\n        }\n        const MSG_FMT_FLAG: &str = \"--message-format\";\n        let explicit_msg_fmt_flag = options.cargo_flags.iter().any(|flag| flag == MSG_FMT_FLAG);\n        if !explicit_msg_fmt_flag && options.message_format == MessageFormat::Json {\n            cmd.args([MSG_FMT_FLAG, \"json\"]);\n        }\n        cmd.stderr(std::process::Stdio::piped());\n        if !options.no_custom_target_directory {\n            cmd.env(\"CARGO_TARGET_DIR\", target_dir(\"hax\"));\n        };\n        cmd.env(\"RUSTC_WORKSPACE_WRAPPER\", get_hax_rustc_driver_path())\n       
     .env(RUST_LOG_STYLE, rust_log_style())\n            .env(RUSTFLAGS, rustflags())\n            .env(\"HAX_CARGO_CACHE_KEY\", get_hax_version())\n            .env(\n                ENV_VAR_OPTIONS_FRONTEND,\n                serde_json::to_string(&frontend_options)\n                    .expect(\"Options could not be converted to a JSON string\"),\n            );\n        cmd\n    };\n\n    let mut child = cmd.spawn().unwrap();\n    let haxmeta_files = {\n        let mut haxmeta_files = vec![];\n        let stderr = child.stderr.take().unwrap();\n        let stderr = std::io::BufReader::new(stderr);\n        for line in std::io::BufReader::new(stderr).lines() {\n            if let Ok(line) = line {\n                if let Some(msg) = line.strip_prefix(HAX_DRIVER_STDERR_PREFIX) {\n                    use HaxDriverMessage;\n                    let msg = serde_json::from_str(msg).unwrap();\n                    match msg {\n                        HaxDriverMessage::EmitHaxMeta(data) => haxmeta_files.push(data),\n                    }\n                } else {\n                    eprintln!(\"{}\", line);\n                }\n            }\n        }\n        haxmeta_files\n    };\n\n    let status = child\n        .wait()\n        .expect(\"`driver-hax-frontend-exporter`: could not start?\");\n\n    let exit_code = if !status.success() {\n        HaxMessage::CargoBuildFailure.report(options.message_format, None);\n        status.code().unwrap_or(254)\n    } else {\n        0\n    };\n\n    (haxmeta_files, exit_code)\n}\n\n/// Run the command given by the user\nfn run_command(options: &Options, haxmeta_files: Vec<EmitHaxMetaMessage>) -> bool {\n    match options.command.clone() {\n        Command::JSON {\n            output_file,\n            kind,\n            include_extra,\n            use_ids,\n            ..\n        } => {\n            with_kind_type!(kind, <Body>|| {\n                for EmitHaxMetaMessage { path, .. 
} in haxmeta_files {\n                    let (haxmeta, id_table): (HaxMeta<Body>, _) = HaxMeta::read(fs::File::open(&path).unwrap());\n                    let dest = output_file.open_or_stdout();\n\n                    (if include_extra {\n                        let data = WithDefIds {\n                            def_ids: haxmeta.def_ids,\n                            impl_infos: haxmeta.impl_infos,\n                            items: haxmeta.items,\n                            comments: haxmeta.comments,\n                        };\n                        if use_ids {\n                            id_table::WithTable::run(id_table, data, |with_table| {\n                                serde_json::to_writer(dest, with_table)\n                            })\n                        } else {\n                            serde_json::to_writer(dest, &data)\n                        }\n                    } else {\n                        if use_ids {\n                            id_table::WithTable::run(id_table, haxmeta.items, |with_table| {\n                                serde_json::to_writer(dest, with_table)\n                            })\n                        } else {\n                            serde_json::to_writer(dest, &haxmeta.items)\n                        }\n                    })\n                        .unwrap()\n\n                }\n            });\n            false\n        }\n        Command::Backend(backend) => {\n            use Backend;\n            use hax_frontend_exporter::ThirBody as Body;\n\n            if matches!(backend.backend, Backend::Easycrypt | Backend::ProVerif(..)) {\n                HaxMessage::WarnExperimentalBackend {\n                    backend: backend.backend.clone(),\n                }\n                .report(options.message_format, None);\n            }\n\n            let mut error = false;\n            for EmitHaxMetaMessage {\n                working_dir,\n                manifest_dir,\n                path,\n   
         } in haxmeta_files\n            {\n                let (mut haxmeta, id_table): (HaxMeta<Body>, _) =\n                    HaxMeta::read(fs::File::open(&path).unwrap());\n\n                if let Some(root_module) = &backend.prune_haxmeta {\n                    use hax_frontend_exporter::{DefPathItem, DisambiguatedDefPathItem, IsBody};\n\n                    /// Remove every item from an `HaxMeta` whose path is not `*::<root_module>::**`, where `root_module` is a string.\n                    fn prune_haxmeta<B: IsBody>(haxmeta: &mut HaxMeta<B>, root_module: &str) {\n                        match &mut haxmeta.items {\n                            Items::Legacy(items) => {\n                                items.retain(|item| match &item.owner_id.path[..] {\n                                    [] => true,\n                                    [\n                                        DisambiguatedDefPathItem {\n                                            data: DefPathItem::TypeNs(s),\n                                            disambiguator: 0,\n                                        },\n                                        ..,\n                                    ] => s == root_module,\n                                    _ => false,\n                                })\n                            }\n                            Items::FullDef(items) => {\n                                items.retain(|item| match &item.this.contents().def_id.path[..] 
{\n                                    [] => true,\n                                    [\n                                        DisambiguatedDefPathItem {\n                                            data: DefPathItem::TypeNs(s),\n                                            disambiguator: 0,\n                                        },\n                                        ..,\n                                    ] => s == root_module,\n                                    _ => false,\n                                })\n                            }\n                        };\n                    }\n                    prune_haxmeta(&mut haxmeta, root_module.as_str())\n                }\n\n                error = error\n                    || run_engine(\n                        haxmeta,\n                        id_table,\n                        working_dir,\n                        manifest_dir,\n                        &backend,\n                        options.message_format,\n                    );\n            }\n            error\n        }\n        Command::Serialize { .. } => {\n            for EmitHaxMetaMessage { path, .. } in haxmeta_files {\n                HaxMessage::ProducedFile { path, wrote: true }.report(options.message_format, None);\n            }\n            false\n        }\n    }\n}\n\nfn main() {\n    let args: Vec<String> = get_args(\"hax\");\n    let mut options = match &args[..] 
{\n        [_, kw] if kw == \"__json\" => {\n            serde_json::from_str(&std::env::var(ENV_VAR_OPTIONS_FULL).unwrap_or_else(|_| {\n                panic!(\n                    \"Cannot find environnement variable {}\",\n                    ENV_VAR_OPTIONS_FULL\n                )\n            }))\n            .unwrap_or_else(|_| {\n                panic!(\n                    \"Invalid value for the environnement variable {}\",\n                    ENV_VAR_OPTIONS_FULL\n                )\n            })\n        }\n        _ => Options::parse_from(args.iter()),\n    };\n    options.normalize_paths();\n\n    let (haxmeta_files, exit_code) = options\n        .haxmeta\n        .clone()\n        .map(|path| {\n            (\n                vec![EmitHaxMetaMessage {\n                    working_dir: None,\n                    manifest_dir: None,\n                    path,\n                }],\n                0,\n            )\n        })\n        .unwrap_or_else(|| compute_haxmeta_files(&options));\n    let error = run_command(&options, haxmeta_files);\n\n    std::process::exit(if exit_code == 0 && error {\n        1\n    } else {\n        exit_code\n    })\n}\n"
  },
  {
    "path": "cli/subcommands/src/engine_debug_webapp/README.md",
    "content": "This folder implements a small webapp designed for viewing how a rust\ncrate is translated by the engine, step-by-step.\n\nThe engine works by phases. First, it receives a tweaked version of\nRust's internal typed representation. On this representation, the\nengine then applies sequentially a certain number of phases. Each\nphase transports your code from a representation to another, by\nperforming some translation or rewriting.\n\nThis webapp allows you to display a rust code before and after each\nphase.\n\n### How to\nWhen running `cargo hax into BACKEND`, pass the option\n`--debug-engine` (or `-d`) to the subcommand `into`. This will spawn a\nsmall webserver with the webapp.\n\n"
  },
  {
    "path": "cli/subcommands/src/engine_debug_webapp/mod.rs",
    "content": "use tiny_http::{Header, Response, Server};\n\nfn get_server() -> Server {\n    let mut port = std::env::var_os(\"HAX_DEBUGGER_PORT\")\n        .and_then(|s| s.into_string().ok())\n        .and_then(|s| s.parse::<u32>().ok())\n        .unwrap_or(8000);\n    loop {\n        if let Ok(server) = Server::http(format!(\"0.0.0.0:{}\", port)) {\n            eprintln!(\"Hax webapp is available on http://localhost:{:?}\", port);\n            return server;\n        }\n        std::thread::sleep(std::time::Duration::from_millis(300));\n        eprintln!(\"Could not listen to port {:?}, trying another\", port);\n        port += 1;\n    }\n}\n\npub fn run(get_json: impl Fn() -> String) {\n    let server = get_server();\n    let ct_html = Header::from_bytes(&b\"Content-Type\"[..], &b\"text/html\"[..]).unwrap();\n    let ct_js = Header::from_bytes(&b\"Content-Type\"[..], &b\"text/javascript\"[..]).unwrap();\n    let ct_utf8 = Header::from_bytes(&b\"charset\"[..], &b\"utf-8\"[..]).unwrap();\n    for request in server.incoming_requests() {\n        let response = match request.url() {\n            \"/\" => Response::from_string(include_str!(\"static/index.html\"))\n                .with_header(ct_html.clone())\n                .with_header(ct_utf8.clone()),\n            \"/script.js\" => Response::from_string(include_str!(\"static/script.js\"))\n                .with_header(ct_js.clone())\n                .with_header(ct_utf8.clone()),\n            path if path.starts_with(\"/debug-hax-engine.json\") => {\n                Response::from_string(get_json()).with_header(ct_utf8.clone())\n            }\n            _ => Response::from_string(\"Unknown route\".to_string()).with_status_code(404),\n        };\n        let _ = request.respond(response);\n    }\n}\n"
  },
  {
    "path": "cli/subcommands/src/engine_debug_webapp/static/index.html",
    "content": "<meta charset=\"UTF-8\">\n<script src=\"//unpkg.com/prismjs@1.29.0/components/prism-core.min.js\"></script>\n<script src=\"//unpkg.com/prismjs@1.29.0/plugins/autoloader/prism-autoloader.min.js\"></script>\n<link href=\"//unpkg.com/prismjs@1.29.0/themes/prism.min.css\" rel=\"stylesheet\" />\n<link rel=\"preconnect\" href=\"https://fonts.googleapis.com\">\n<link rel=\"preconnect\" href=\"https://fonts.gstatic.com\" crossorigin>\n<link href=\"https://fonts.googleapis.com/css2?family=Roboto:wght@100;300&display=swap\" rel=\"stylesheet\">\n<script src=\"//unpkg.com/prismjs@1.29.0/components/prism-rust.min.js\"></script>\n<script src=\"//unpkg.com/prismjs@1.29.0/components/prism-json.min.js\"></script>\n\n<style>\n  .header.inactive:hover { color: #444!important; }\n  .header {\n      white-space: nowrap!important;\n  }\n  header {\n      padding-bottom: 10px;\n  }\n  code span.active { background-color: #FFFF00; }\n  code span.in-range { background-color: #FFFFDD; }\n  code {\n      line-height: 1.2!important;\n      font-size: 80%!important;\n      white-space: pre-wrap!important;\n  }\n  dialog {\n      z-index: 100;\n  }\n  .json-viewer div.v {\n      padding-left: 10px;\n  }\n  .json-viewer .i {\n      display: inline-block;\n  }\n  .json-viewer code {\n      display: inline-block;\n      font-size: 100%!important;\n  }\n  .json-viewer .constructor {\n      color: blue;\n      \n  }\n  .json-viewer .pathchunk {\n      color: green;\n  }\n  .json-viewer .pathsep {\n      color: green; opacity: 0.6;\n      letter-spacing: -3px;\n      position: relative;\n      left: -1px;\n  }\n  .json-viewer ul {\n      list-style-type: none;\n      padding: 0px;\n      padding-left: 8px;\n      margin: 0;\n  }\n  body {\n      font-family: 'Roboto', sans-serif;\n  }\n  #help {\n      position: absolute;\n      top: 0;\n      left: 0;\n      margin: 1em;\n  }\n  #help-contents {\n      display: inline-block;\n      opacity: 0.3;\n      background: #16a085;\n      
color: white;\n      width: 1.1em;\n      height: 1.1em;\n      padding: 0.2em;\n      text-align: center;\n      border-radius: 50%;\n  }\n  #help-contents:hover {\n      opacity: 1;\n  }\n  #help dialog {\n      max-width: 400px;\n      text-align: justify;\n  }\n</style>\n<body>\n  <div id='help'>\n    <div onclick='document.querySelector(\"#help > dialog\").showModal()' id='help-contents'>?</div>\n    <dialog>\n      <h3>Help</h3>\n      The screen divides in two:\n      <ul>\n        <li>on the upper part, there is the list of the different phases that were applied;</li>\n        <li>on the lower part, the \"rustish\" code of the selected phase is displayed.</li>\n      </ul>\n      <p>\n        You can click on a phase name in the upper part or press the <i>left</i> and <i>right</i> (or <i>p</i> and <i>n</i>) keys on your keyboard to display the rust code of another phase. Press <i>r</i> to refresh.\n      </p>\n      <p>\n        On the lower part, you can click on any chunk of code to show a dialog displaying its AST representation. Then, you can browse the AST and open/collapse AST nodes by clicking on them.\n      </p>\n\n      \n      <button onclick='document.querySelector(\"#help > dialog\").close()'>Ok</button>\n    </dialog>\n  </div>\n  <div id='app'>\n  </div>\n</body>\n<script src=\"script.js\" charset=\"UTF-8\"></script>\n"
  },
  {
    "path": "cli/subcommands/src/engine_debug_webapp/static/script.js",
    "content": "/*\n  This webapp is written in vanilla JS as two pure components: `json` and `phases_viewer`.\n  */\n\n// Make a DOM node\nlet mk = (kind, body = [], classes = []) => {\n    let e = document.createElement(kind);\n    classes.forEach(cl => e.classList.add(cl));\n    if (typeof body == 'string') {\n        e.innerText = body;\n    } else if (body instanceof Array) {\n        body.forEach(sub => e.appendChild(sub));\n    } else if (body instanceof HTMLElement) {\n        e.appendChild(body);\n    } else {\n        console.error('wrong type for body', body);\n    }\n    return e;\n};\n\nfunction findNode(o, search){\n    let h = o => o instanceof Object ? (search(o) ? o : Object.values(o).map(h).find(x => x)) : null;\n    return h(o);\n}\nlet is_span = o => o instanceof Object && \"data\" in o && \"id\" in o;\n\nlet spanned = span_id => o  => Object.values(o).some(o => is_span(o) && o[\"id\"] === span_id);\n\nlet rewrite = f => o => f(\n    o instanceof Array\n        ? o.map(rewrite(f))\n        : (o instanceof Object ? Object.fromEntries(Object.entries(o).map(([k, v]) => [k, rewrite(f)(v)])) : o)\n);\nlet loc_to_string = ({col, line}) => `${line}:${col}`;\nlet filename_to_string = name =>\n    ((name instanceof Array && name[0] == 'Real' && name[1]?.[0] =='LocalPath') ?\n     name?.[1]?.[1] : null) || JSON.stringify(name);\nlet span_data_to_string = ({filename, lo, hi}) => `<${filename_to_string(filename)} ${loc_to_string(lo)}→${loc_to_string(hi)}>`;\nlet span_to_string = ({id, data}) => data.length ? 
data.map(span_data_to_string).join('∪') : '<dummy>';\nlet clean = rewrite(o => {\n    if(!(o instanceof Object))\n        return o;\n    if (is_span(o))\n        return span_to_string(o);\n    return o;\n});\n\nfunction json(json) {\n    let o = JSON.parse(JSON.stringify(json));\n    let root = mk('div', [], ['json-viewer']);\n    let state = {\n        open: new Map(),\n        default_open: false,\n    };\n    function render_all() {\n        root.replaceChildren(render(o, []));\n        let expand_button = mk('button', state.default_open ? '🡒🡐' : '🡘', ['expand-all']);\n        expand_button.style = `\n            position: absolute;\n            top: 1px;\n            right: 1px;\n            padding: 0 3px;\n            margin: 0;\n            line-height: 0;\n            height: 16px;\n        `;\n        expand_button.onclick = () => {\n            state.default_open = !state.default_open;\n            render_all();\n        };\n        root.prepend(expand_button);\n    }\n    let key_of_path = path => JSON.stringify(path);\n    let set_open = (path, v) => state.open.set(key_of_path(path), v);\n    let is_open = (path, def = path.length < 6) => {\n        let b = state.open.get(key_of_path(path));\n        return b === undefined ? 
(state.default_open || def) : b;\n    };\n    let swap = (path, def) => {\n        set_open(path, !is_open(path, def), false);\n        render_all();\n    };\n    let is_constructor = o => {\n        if (o instanceof Array && (o.length == 2 || o.length == 1)) {\n            let [constructor, arg] = o;\n            if(typeof constructor == 'string' && constructor[0] == constructor[0].toUpperCase()) {\n                return true;\n            }\n        }\n        return false;\n    };\n    let is_simple = o => {\n        if (o instanceof Object) {\n            if (is_constructor(o)) {\n                return o[1] === undefined;\n            }\n            return false;\n        }\n        return true;\n    };\n    function render(o, path, add_comma = true) {\n        function as_code(o) {\n            let code = mk('code');\n            code.innerHTML = Prism.highlight(JSON.stringify(o, null, 4), Prism.languages.json, 'json');\n            return add_comma ? mk('span', [code, mk('span', ',')]) : code;\n        }\n        if (o instanceof Object) {\n            if (is_constructor(o)) {\n                \n                let [constructor, arg] = o;\n                let cdiv = mk('span', constructor + (arg === undefined ? '' : ' '), ['constructor']);\n                if (constructor == \"Concrete\" && \"crate\" in arg && \"path\" in arg) {\n                    let {crate, path} = arg;\n                    return mk('span', [\n                        ...[crate, ...path].map((chunk, i) => [...(i > 0 ? [mk('span', '::', ['pathsep'])] : []), mk('span', chunk, ['pathchunk'])]).flat(),\n                        add_comma ? [mk('span', ',')] : []\n                    ].flat());\n                }\n                let contents = arg === undefined ? 
[] : render(arg, path, false);\n                if(arg !== undefined && is_constructor(arg))\n                    contents = mk('span', [mk('span', '('), contents, mk('span', ')')]);\n                \n                let self_path = [...path, []];\n                let elide = mk('span', '…');\n                elide.onclick = () => swap(self_path);\n                let open = arg === undefined || is_open(self_path);\n                cdiv.onclick = () => swap(self_path);\n                return mk('span', [\n                    cdiv,\n                    open ? contents : elide,\n                    add_comma ? [mk('span', ',')] : []\n                ].flat());\n            }\n            if (o instanceof Array) {\n                let self_path = [...path, []];\n                let open = is_open(self_path);\n                let bracket = mk('code', '[');\n                bracket.onclick = () => swap(self_path);\n                let elide = mk('span', '…');\n                elide.onclick = () => swap(self_path);\n                return mk('span', [\n                    bracket,\n                    open ? mk('ul', o.map((v, i) => {\n                        let new_path = [...path, i];\n                        let simple_val = is_simple(v);\n                        let open = simple_val || is_open(new_path);\n                        return [mk('li', render(v, new_path), ['v'])];\n                    }).flat()) : elide,\n                    mk('code', ']'),\n                    ...(add_comma ? 
[mk('span', ',')] : [])\n                ]);\n            }\n            \n            return mk('span', [\n                mk('code', '{'),\n                mk('ul', Object.entries(o).map(([k, v]) => {\n                    let new_path = [...path, k];\n                    let simple_val = is_simple(v);\n                    let open = simple_val || is_open(new_path);\n                    let elide = mk('span', '…');\n                    elide.onclick = () => swap(new_path, open);\n                    let contents = mk((simple_val || !open) ? 'span' : 'span', open ? render(v, new_path) : [elide, mk('span', ',')], ['v']);\n                    let key = mk('span', [\n                        mk('span', k+': '),\n                    ].flat(), ['k']);\n                    key.onclick = () => swap(new_path);\n                    return [mk('li', [\n                        key,\n                        contents\n                    ], ['p'])];\n                }).flat(), ['o']),\n                mk('code', '}'),\n                ...(add_comma ? [mk('span', ',')] : [])\n            ]);\n        } else if (typeof o == \"string\" && o.length > 20) {\n            let new_path = [...path, 'v'];\n            let code = as_code(is_open(new_path, false) ? 
o : o.slice(0, 20)+'…');\n            code.onclick = () => swap(new_path, false);\n            return code;\n        } else {\n            return as_code(o);\n        }\n    };\n    render_all();\n    return root;\n}\n\nconst SEED = Date.now();\nasync function phases_viewer(state = {index: 0, ast_focus: null, seed: SEED}) {\n    let data = await (await fetch('debug-hax-engine.json?seed='+state.seed)).json();\n    if (!data[state.index] && state.index != 0) {\n        return phases_viewer({...state, index: 0});\n    };\n    let current = null;\n    let s = '';\n    let header = mk('header');\n    for(let i in data) {\n        let o = data[i];\n        let w = 100;\n        let active = state.index == i;\n        let self = mk('div', o.name.toLowerCase().replace(/reject_not_in_/g, 'rej ~').replace(/_/g, ' '), ['header', active ? 'active' : 'inactive']);\n        self.style = `width: ${w}px; font-variant: small-caps;\n                      position: relative; top: ${w}px; left: 10px;\n                      transform-origin: 0% 50%; transform: rotate(-40deg);\n                      color: ${active ? 
'black' : 'gray'}; user-select: none;`;\n        self.onclick = () => phases_viewer({...state, index: i, ast_focus: null});\n        let container = mk('div', self, []);\n        container.style = `display: inline-block; width: 18px; height: ${w}px;`;\n        if(active){\n            current = o;\n        }\n        header.appendChild(container);\n    }\n    let last_item = null;\n    let codes = [current.rustish].map(({string, map}) => {\n        let src = string;\n        let code = mk('code', [], ['language-rust']);\n        code.innerHTML = Prism.highlight(src, Prism.languages.rust, 'rust');\n\n        [...code.childNodes]\n            .filter(o => o.nodeType === Node.TEXT_NODE)\n            .forEach(o => {\n                let n = mk('span');\n                n.textContent = o.textContent;\n                code.replaceChild(n, o);\n            });\n\n        let mappings = map.slice(0).reverse();\n        let stack = [...code.childNodes].reverse();\n\n        let highlighted = null;\n        let maybe = [];\n        \n        while(stack.length) {\n            let node = stack.pop();\n            let [len, id, s] = mappings.pop();\n            let text = node.textContent;\n            if (len > text.length) {\n                mappings.push([len - text.length, id, s.slice(text.length)]);\n            } else if (len < text.length) {\n                let after = node.cloneNode();\n                let left = text.slice(0, len);\n                let right = text.slice(len);\n                src = right + src;\n                after.textContent = right;\n                node.textContent = left;\n                node.after(after);\n                stack.push(after);\n            }\n            let active = state.ast_focus === id && text.trim();\n            node.onclick = ev => {\n                phases_viewer({...state, ast_focus: id});\n                ev.stopPropagation();\n            };\n            if (active) {\n                highlighted = highlighted || 
[];\n                highlighted.push(...maybe);\n                maybe = [];\n                active && node.classList.add('active');\n                last_item = node;\n            } else if (highlighted) {\n                maybe.push(node);\n            }\n        }\n\n        (highlighted||[]).map(o => o.classList.add('in-range'));\n        \n        return code;\n    });\n    let pre = mk('pre', codes);\n    let main = mk('main', [header, pre]);\n    if(last_item) {\n        let ast = clean(findNode(current.items, spanned(state.ast_focus)));\n        let dialog = mk('dialog', json(ast));\n        dialog.setAttribute('open', true);\n        dialog.onclick = ev => {\n            ev.stopPropagation();\n        };\n        main.onclick = ev => phases_viewer({...state, ast_focus: null});\n        last_item.after(dialog);\n    }\n    let app_root = document.querySelector('#app');\n    app_root.childNodes.forEach(old => old.remove());\n    app_root.appendChild(main);\n    document.body.onkeydown = (e) => {\n        let key = ({'ArrowRight': 'n', 'ArrowLeft': 'p'})[e.key] || e.key;\n        (({\n            'n': () => phases_viewer({...state, index: state.index + 1, ast_focus: null}),\n            'p': () => phases_viewer({...state, index: state.index ? state.index - 1 : data.length - 1, ast_focus: null}),\n            'r': () => phases_viewer({...state, seed: Date.now(), ast_focus: null}),\n        })[key] || Function)();\n    };\n}\nphases_viewer();\n"
  },
  {
    "path": "cli/subcommands/src/json_schema.rs",
    "content": "const JSON_SCHEMA: &str = include_str!(concat!(env!(\"OUT_DIR\"), \"/schema.json\"));\n\nfn main() {\n    println!(\"{}\", JSON_SCHEMA);\n}\n"
  },
  {
    "path": "deny.toml",
    "content": "[licenses]\nunused-allowed-license = \"allow\"\nallow = [\n    \"Apache-2.0\",\n    \"MIT\",\n    \"Unicode-DFS-2016\",\n    \"MPL-2.0\",\n    # Licences used in the OCaml dependencies in the engine\n    \"BSD-3-Clause\",\n    \"LGPL-2.1\",\n    \"LGPL-2.0\",\n    \"ISC\",\n]\n"
  },
  {
    "path": "dependabot.yml",
    "content": "version: 2\nupdates:\n- package-ecosystem: \"cargo\"\n  directory: \"/\"\n  schedule:\n    interval: \"weekly\"\n- package-ecosystem: \"github-actions\"\n  directory: \"/\"\n  schedule:\n    interval: \"weekly\"\n"
  },
  {
    "path": "docs/.test/.gitignore",
    "content": "test-results/\ngenerated.json\nplaywright-report\n"
  },
  {
    "path": "docs/.test/global-setup.ts",
    "content": "// Global setup: writes `generated.json`, the list of pages\nimport { BrowserContext, chromium, expect, FullConfig } from '@playwright/test';\nimport fs from 'fs';\n\nexport type CrawlPage = { url: string; has_playground: boolean, links: string[] };\nconst DOCS_HOST = 'localhost:8000';\n\nconst skip_url = (s: string) => s.includes(\"/livereload\");\n\n/// Run jobs in parallel.\n/// `job` runs a new job, returns true if more jobs are to be run.\nasync function parallel(\n    job: () => Promise<boolean>,\n    maxJobs = 10\n): Promise<void> {\n    const workers: { promise?: Promise<void>, free: boolean }[] = (new Array(maxJobs)).fill(0).map(_ => ({ free: true }));\n\n    let spawn = (self: { promise?: Promise<void>, free: boolean }) => {\n        self.promise = (async () => { self.free = false; let cont = await job(); self.free = true; cont && control() })();\n    };\n    let control = () => workers.filter(w => w.free).forEach(spawn);\n    control();\n\n    let active_workers: Promise<void>[] = [];\n    do {\n        active_workers = workers.filter(w => !w.free).map(w => w.promise).filter(p => p !== undefined);\n        await Promise.all(active_workers);\n    } while (active_workers.length > 0)\n}\n\n/// Crawl the documentation\nconst crawl = async (baseURL: string, context: BrowserContext): Promise<CrawlPage[]> => {\n    if (!baseURL) throw new Error('Base URL not configured.');\n\n    const pages: CrawlPage[] = [];\n    const visited = new Set<string>();\n    const queue: string[] = [new URL('/', baseURL).toString()];\n\n    await parallel(async () => {\n        const url = queue.shift();\n        if (url === undefined || visited.has(url))\n            return false;\n        visited.add(url);\n\n        const page = await context.newPage();\n\n        const res = await page.goto(url, { waitUntil: 'domcontentloaded' });\n        await page.waitForLoadState('networkidle').catch(() => { });\n        const status = res?.status() ?? 
0;\n        expect(status, `Failed to GET ${url}`).toBeGreaterThanOrEqual(200);\n        expect(status, `Failed to GET ${url}`).toBeLessThan(400);\n\n        const has_playground = (await page.content()).includes('md-hax-playground');\n        const links = await page.$$eval('a[href]', as => as.map(a => (a as HTMLAnchorElement).getAttribute('href')!));\n\n        pages.push({ url, has_playground, links });\n\n        for (const href of links) {\n            if (!href || href.startsWith('mailto:') || href.startsWith('tel:') || href.startsWith('javascript:')) continue;\n\n            const absolute = new URL(href, url);\n            const sameHost = absolute.host === DOCS_HOST;\n            if (!sameHost) continue;\n            absolute.hash = '';\n            const absStr = absolute.toString();\n            if (!visited.has(absStr) && !skip_url(absStr) && !queue.includes(absStr)) queue.push(absStr);\n        }\n\n        page.close();\n        return true;\n    });\n\n    return pages;\n}\n\nasync function globalSetup(config: FullConfig) {\n    const browser = await chromium.launch();\n    let PAGES = await crawl('http://localhost:8000', await browser.newContext());\n    await browser.close();\n    fs.writeFileSync('generated.json', JSON.stringify(PAGES, null, 2), 'utf-8');\n}\nexport default globalSetup;\n"
  },
  {
    "path": "docs/.test/package.json",
    "content": "{\n    \"name\": \"docs-ci-checks\",\n    \"private\": true,\n    \"type\": \"module\",\n    \"scripts\": {\n        \"test\": \"playwright test --reporter=list\"\n    },\n    \"devDependencies\": {\n        \"@playwright/test\": \"^1.56.1\",\n        \"http-server\": \"^14.1.1\"\n    }\n}\n"
  },
  {
    "path": "docs/.test/playwright.config.ts",
    "content": "import { defineConfig } from '@playwright/test';\n\nexport default defineConfig({\n    globalSetup: './global-setup.ts',\n    timeout: 600_000,\n    expect: { timeout: 300_000 },\n    reporter: [['list']],\n    use: {\n        baseURL: 'http://localhost:8000',\n        serviceWorkers: 'block',\n        trace: 'on-first-retry',\n    },\n    webServer: {\n        command: 'nix run ../..#serve-docs',\n        port: 8000,\n        reuseExistingServer: !process.env.CI,\n        timeout: 5 * 60 * 1000,\n        stderr: 'ignore',\n        stdout: 'ignore',\n    }\n});\n"
  },
  {
    "path": "docs/.test/tests/docs.spec.ts",
    "content": "import { test, expect, request, BrowserContext, Page } from '@playwright/test';\nimport { CrawlPage } from '../global-setup';\nimport fs from 'fs';\n\nconst cssEscape = (s: string) =>\n    s.replace(/^[0-9-]|[^a-zA-Z0-9_-]/g, (ch, idx) => {\n        const code = ch.codePointAt(0)!.toString(16).toUpperCase();\n        return `\\\\${code} `;\n    });\n\nconst PAGES = JSON.parse(fs.readFileSync('generated.json', 'utf-8')) as CrawlPage[];\n\n\nasync function tryNavigateTo(page: Page, url: string) {\n    const response = await page.request.get(url, {\n        headers: {\n            accept: \"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\"\n        }\n    });\n    const status = response.status();\n    const contentType = response.headers()['content-type'] || '';\n\n    if (!contentType.includes('text/html'))\n        return { status, html: false };\n\n    try {\n        await page.goto(url, { waitUntil: 'domcontentloaded' });\n        return { status, html: true };\n    } catch (e) {\n        return { status, html: false };\n    }\n}\n\n// test.describe('Documentation consistency checks', () => {\nlet run_tests = () => {\n    let tried = new Set();\n\n    let links_origins: Map<string, Set<string>> = new Map();\n    let links: Set<string> = new Set();\n    for (let page of PAGES) {\n        for (let link of page.links) {\n            let absolute_link = (new URL(link, page.url)).toString();\n            links.add(absolute_link);\n            links_origins.has(absolute_link) || links_origins.set(absolute_link, new Set());\n            links_origins.get(absolute_link)!.add(page.url);\n        }\n    }\n\n\n    for (let link of links) {\n        if (link.includes(\"hax-playground.cryspen.com/\") || link.includes(\"#__codelineno\"))\n            continue;\n        test('Check if link is live: ' + link, (async ({ page, baseURL }, testInfo) => {\n            await testInfo.attach('Parent pages', {\n                body: 
[...(links_origins.get(link) || new Set())].join('\\n'),\n                contentType: 'text/plain',\n            });\n            let other_page = await page.context().newPage();\n            let { status, html } = await tryNavigateTo(other_page, link.toString());\n            let anti_bot_codes = [401, 403, 429, 451, 999].includes(status);\n            expect(anti_bot_codes || (status >= 200 && status < 300)).toBeTruthy()\n\n            let hash = (new URL(link)).hash?.replace(/^#/, '');\n            if (hash && !link.includes(\"\"))\n                await test.step(\"Try detection of fragment `\" + hash + '`', async () => {\n                    let el = other_page.locator('[id=\"' + cssEscape(hash) + '\"]');\n                    if (await el.count() === 0)\n                        console.warn('⚠️ Could not find anchor in a page ', link);\n                });\n        }));\n    }\n\n    for (let p of PAGES) {\n        if (!p.has_playground)\n            continue;\n        test('Test playgrounds in `' + p.url + '`', async ({ page, baseURL }, testInfo) => {\n            await page.goto(p.url, { waitUntil: 'domcontentloaded' });\n            const playableLocators = page.locator('.playable:has(.md-hax-playground .fa-check)');\n            const count = await playableLocators.count();\n\n            for (let i = 0; i < count; i++) {\n                await test.step(`Try playground #${i}`, async () => {\n                    const playable = playableLocators.nth(i);\n                    const contents = await playable.locator(\".cm-content\").first().innerText();\n                    await testInfo.attach('Code snippet contents', {\n                        body: contents,\n                        contentType: 'text/plain',\n                    });\n\n                    const checkBtn = playable.locator('.md-hax-playground .fa-check');\n                    await checkBtn.first().click();\n\n                    let classes = '';\n                    let hasSuccess = 
false;\n                    let hasFailure = false;\n\n                    for (let i = 0; i < 60; i++) {\n                        classes = (await playable.getAttribute('class')) || '';\n                        hasSuccess = classes.includes('state-success');\n                        hasFailure = classes.includes('state-failure');\n                        if (hasSuccess || hasFailure)\n                            break;\n                        await new Promise(r => setTimeout(r, 1000));\n                    }\n\n                    expect(hasSuccess || hasFailure, \"At least class `state-success` or `state-failure` should have been attached; none detected.\").toBeTruthy();\n\n                    const expectFailure = classes.includes('expect-failure');\n\n                    if (expectFailure) {\n                        expect(hasFailure, '`.state-failure` should be set (the snippet is tagged with a class `expect-failure`), but `.state-success` was detected').toBeTruthy();\n                    } else {\n                        expect(hasSuccess, '`.state-success` should be set, but `.state-failure` was detected').toBeTruthy();\n                    }\n                });\n            }\n        });\n    }\n};\n\nrun_tests();\n"
  },
  {
    "path": "docs/RFCs/.nav.yml",
    "content": "hide: true\n"
  },
  {
    "path": "docs/RFCs/0000-template.md",
    "content": "---\n    title: 0000 template\n    tags:\n        - Accepted\n---\n\n```\n---\ntags: Draft | Proposed | Rejected | Accepted | Superseded | Deprecated\n---\n```\n\n| Authors       | :material-account: Franziskus Kiefer                        |\n| :------------ | :---------------------------------------------------------- |\n| Last update   | :material-calendar: Jan 1 2025                              |\n| Extends       | :material-directions-fork: [xxxx-other](./0000-template.md) |\n| Superseded by | :material-cancel: [xxxx-other](./0000-template.md)          |\n| Based on      | :material-forward: [xxxx-other](./0000-template.md)         |\n\n## Context\n\nWhat is the issue that we're seeing that is motivating this decision or change?\n\n## Assumptions\n\nAnything that could cause problems if untrue now or later\n\n## Decision\n\nWhat is the change that we're proposing and/or doing?\n\n### Considered Options\n\nWhat other options are there and why didn’t you pick them?\n\n## Risks\n\nAnything that could cause malfunction, delay, or other negative impacts\n\n## Consequences\n\nWhat becomes easier or more difficult to do because of this change?\n\n## Additional Information\n\nProvide additional evidence/confidence for the decision outcome Links to other decisions and resources might here appear as well.\n"
  },
  {
    "path": "docs/RFCs/index.md",
    "content": "# RFCs\n\n\n"
  },
  {
    "path": "docs/blog/.authors.yml",
    "content": "authors:\n  franziskus:\n    name: Franziskus Kiefer\n    description: Creator\n    avatar: /blog/avatars/franziskus.jpg\n  lucas:\n    name: Lucas Franceschino\n    description: Creator\n    avatar: /blog/avatars/lucas.jpg\n  clement:\n    name: Clement Blaudeau\n    description: Engineer\n    avatar: /blog/avatars/clement.jpg\n  maxime:\n    name: Maxime Buyse\n    description: Engineer\n    avatar: /blog/avatars/maxime.png\n  alex:\n    name: Alexander Bentkamp\n    description: Engineer\n    avatar: /blog/avatars/alex.jpg"
  },
  {
    "path": "docs/blog/index.md",
    "content": "---\nweight: 4\n---\n\n# Blog\n\nThe hax blog.\nHere you find announcement, development news, and more.\n"
  },
  {
    "path": "docs/blog/posts/announce-v0.1.md",
    "content": "---\nauthors:\n  - franziskus\n  - lucas\ntitle: \"A new chapter\"\ndate: 2025-01-21\n---\n\n# Hax Takes Flight: Announcing Our First Release and New Home at Cryspen!\n\nWe're thrilled to announce that hax is entering a new era of stability and\ngrowth with the launch of our new website, a fresh start at Cryspen,\nand our first official release,\n[v0.1.0](https://github.com/cryspen/hax/releases/tag/cargo-hax-v0.1.0)!\n\nAfter an intense period of research and development, hax is transitioning to a\nmore stable phase.\nTo support this evolution, we've moved the repository to its new home within the\nCryspen GitHub organization.\nThis change streamlines our processes and clarifies project ownership while\nmaintaining hax's open-source nature.\nCryspen is responsible for driving hax forward, but we enthusiastically\nwelcome contributions from the community, and continue working closely with\nthe team of existing contributors!\n\nThis move also marks our shift to a release-driven development model,\nculminating in our first official release, v0.1.0.\nWhile we anticipate some breaking changes in the lead-up to v1.0, detailed\nrelease notes will clearly outline any backward compatibility issues.\n\n### The state of hax\n\nHax currently boasts three actively used backends: ([F\\*](https://fstar-lang.org/),\n[Rocq](https://rocq-prover.org/) and [SSProve](https://github.com/SSProve/ssprove)).\nWhile Cryspen primarily focuses on the F\\* backend, [Bas Spitters](https://www.au.dk/en/spitters@cs.au.dk)\nand his team at the University of Aarhus are actively developing and utilizing\nthe Rocq and SSProve backends. 
Cryspen also supports an experimental backend for\n[ProVerif](https://bblanche.gitlabpages.inria.fr/proverif/).\n\nWith this initial release, hax can process a significant subset of Rust code.\nBoth the frontend, which extracts a JSON AST from the Rust compiler, and the\nengine, which lowers the code to the backends, have undergone major\nimprovements and stabilization throughout 2024.\n\nOur new website provides a central hub for all things hax.\nUsers can explore the [manual](../../manual/index.md), experiment with the\ninteractive [hax playground](https://hax-playground.cryspen.com/),\nand delve into a diverse collection of [examples](https://github.com/cryspen/hax/tree/main/examples)\nshowcasing hax's capabilities.\n\nWe will work on improving the manual and developer documentation over the next\nfew months.\n\n#### Hax in Action\n\nOver the past year, hax has proven its versatility in various projects:\n\n- [Verifying Bertie](https://cryspen.com/post/hax-pv/): A TLS 1.3 implementation, verified with the ProVerif backend\n- [Verifying ML-KEM](https://cryspen.com/post/ml-kem-verification): A post quantum cryptographic algorithm verified with the F\\* backend\n- [Verifying Smart Contracts](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl24-paper9-13.pdf): Leveraging the Rocq backend for enhanced security verification.\n\n#### The Road Ahead\n\nWhile hax can handle a substantial portion of Rust code, certain limitations\nremain.\nFeatures like Generic Associated Types (GATs), some Rust nightly features, specific\nloop and pattern structures, and a range of mutations are not yet supported.\n\n??? 
hint \"Detailed list of unsupported features\"\n    Here's some content.\n\n    **GATs**\n\n    Support for Generic Associated Types (GATs) in the frontend is under consideration\n    ([Issue #915](https://github.com/cryspen/hax/issues/915))\n\n    **Rust nightly features**\n\n    A full list of unsupported Rust nightly features can be found with the [unsupported-rust label](https://github.com/cryspen/hax/issues?q=is%3Aissue%20state%3Aopen%20nightly%20label%3Aunsupported-rust).\n\n    **Pattern**\n\n    Some expressive Rust patterns are not supported yet in the hax engine.\n    For example, [range patterns](https://github.com/cryspen/hax/issues/925) such as\n    `0..12`, [`as` patterns](https://github.com/cryspen/hax/issues/833) such as `x @ Option(_)` or [array or slice patterns](https://github.com/cryspen/hax/issues/804) such as `[head, ..tail]` are not supported.\n\n    **Mutation**\n\n    - Mutations inside closures are not supported ([Issue #1060](https://github.com/cryspen/hax/issues/1060))\n    - Re-borrowing mutable refferences is not allowed ([Issue #420](https://github.com/cryspen/hax/issues/420))\n    - Implicit reborrowing of mutable references is not supported ([Issue #419](https://github.com/cryspen/hax/issues/419))\n    - User-defined functions cannot return `&mut`s ([Issue #418](https://github.com/cryspen/hax/issues/418))\n    - Calling `&mut`-returning functions is not allowed in general ([Issue #418](https://github.com/cryspen/hax/issues/418), [Issue #494](https://github.com/cryspen/hax/issues/494) and [Issue #491](https://github.com/cryspen/hax/issues/491))\n    - Enum variants cannot be mutated ([Issue #493](https://github.com/cryspen/hax/issues/493))\n\n    **Loops**\n\n    - Unconditional loops `loop {...}` ([Issue #124](https://github.com/cryspen/hax/issues/124))\n    - While let `while let .. = .. 
{}` ([Issue #113](https://github.com/cryspen/hax/issues/113))\n    - Loops without side effect ([Issue #405](https://github.com/cryspen/hax/issues/405))\n\n    **`const` inline blocks**\n\n    Inline `const` blocks are not supported yet.\n    [Issue #923](https://github.com/cryspen/hax/issues/923)\n\n### Parting Thoughts\n\nThis is an exciting time for hax!\nWith our new home at Cryspen, a dedicated release model, and a growing community,\nwe're confident that hax will continue to mature and empower developers to build\nsecure and reliable software.\n\nWe encourage you to explore the new hax website, dive into the documentation,\nand experiment with the playground.\nJoin us on this journey!\nContribute to the project, share your feedback, and help us shape the future of\nRust verification.\n"
  },
  {
    "path": "docs/blog/posts/hax-for-everyone.md",
    "content": "---\nauthors:\n  - maxime\ntitle: \"Hax for everyone\"\ndate: 2025-02-25\n---\n\n# Trying to make hax usable in more contexts\nThe hax toolchain has been successfully used to formally verify our cryptographic implementations for [ML-KEM](https://cryspen.com/post/ml-kem-verification/),[Bertie](https://cryspen.com/post/hax-pv/) and more. All these projects are developed with formal verification (using hax) in mind, and use a limited subset of Rust features.\nHowever, hax is under constant development and the improvements we bring are targeted at making it more usable. With these improvements we want to bring hax to a new kind of projects that don’t have restrictions on the Rust patterns they use. We want hax to be usable in this context with minimal modifications to the code (ideally no modification at all). An example of such a project is the verification of [sandwich](https://github.com/sandbox-quantum/sandwich), a high-level cryptographic library built by [SandboxAQ](https://cryspen.com/post/hax-sandbox/). This project revealed the weaknesses of hax in this context which brought us to implement some improvements that will be presented in this blog post.\n## Challenges\nThe projects that use hax from the beginning can limit themselves to the subset of Rust supported by hax. Applying hax to a pre-existing project means that it may use various Rust features that are probably not supported yet in hax. The challenge is then to identify which features to prioritize for support in hax (and adding support is yet another challenge), and which features have no short-term plan for support. 
For the latter we need to abstract out the code (if it is not relevant for proofs) or rewrite it (when possible; ideally we try to avoid this).\nHaving external users encourages us even more to make hax an easily-usable and well-documented tool.\n## Frontend improvements\nThe hax frontend is mostly relying on rustc and cargo to extract intermediary representations of a Rust crate. It is supposed to produce a result for any Rust crate (restrictions on the available Rust features come later in the toolchain). However the information given by rustc is sometimes partial or lacks some parts that are needed for our translations. A crucial example of this is trait resolution as we need to know the trait derivation that is used by each call of a trait method. This is a part of the hax frontend that has proven tricky and still had many bugs a few months ago. At that time, launching it on a somehow complicated crate had big chances of resulting in a crash. As part of our effort to improve the usability of hax, many of these bugs have now been fixed (in collaboration with our colleagues at Inria). This is a big step forward, since even for a project that looks small and simple, we need to handle all of its dependencies which are usually more problematic.\n\nAccording to our tests on the top 500 crates (by number of downloads on crates.io), hax frontend succeeds without crashing or timing out on more than 99%. However we are still looking for a better way to measure the coverage of the Rust features, and identifying the situations where we can still improve.\n## Recursive Bundles\nRust code is organized in modules, where modules can be seen as a namespacing system. When translating modules to our backends (F*, Coq, ProVerif) we need to generate the corresponding module-like abstraction in the backend, which typically works quite differently. In particular our backends require the module dependency graph to be acyclic while Rust has no such restriction. 
It is quite common in Rust to make use of this and create cyclic dependencies between modules which means it is necessary for us to have a solution for this problem.\nHere is an example (you can open it in the hax playground to check the code hax generates out of it):\n```rust\n\npub struct Error();\n\nmod private {\n\tpub(crate) fn f() -> Result<(), super::Error> {\n    \tOk(())\n\t}\n}\n\npub fn user_f() -> Result<(), Error> {\n\tprivate::f()\n}\n```\n[Open this code snippet in the hax playground](https://hax-playground.cryspen.com/#fstar/b7fe08cccd/gist=fcb9cb9854c69ee6e2788648a380ff79)\n\nIn this example there is a dependency between the top level module and the `private` module. Our solution to break these cycles is simply to put the content of the cyclic modules in a single module (that we call bundle), and then re-exposing the items in their original modules.\nThis solution is not perfect because it changes the architecture of the generated code compared to the original code, and it could be improved by minimizing the content of the bundles (choosing a set of definitions to break the cycle instead of the full content of the modules). But so far it has proven very useful as it removes a big limitation on the Rust we support.\n## Opaque items\nLarge projects usually contain code that we don’t support yet but we still want to reason about the rest of the project and have an abstract model (axiomatization) for the parts that we don’t support. We need to control which parts we want to fully extract and which parts we extract only as opaque items. The command-line options offered by the hax toolchain provide a solution to this, but they only allow to choose at the model level, which is inconvenient for large projects. To make this more practical we added another way to specify inside the source with the attribute `hax_lib::opaque` makes an item axiomatized. 
There is still the problem of complicated `-i` flags which will be solved in the future by having the corresponding information in configuration files.\n## Control flow rewriting without monads including inside loops\nTranslating imperative code to functional backends for verification implies some handling of side effects and transformation of control flow. A classic solution for this is to have a monadic encoding state which results in generated code that can be hard to read (and to reason about). This is the solution that was implemented (with some bugs) in hax but we decided to replace it with a solution without monads. The code we produce is simpler to read, but the main limitation is that there is code duplication which in some cases can lead to an extracted code that is exponentially bigger than the source.\n\nHere is a simple example of this:\n```rust\nfn f() -> i32{\n\tif true {\n    \tif true {\n        \treturn 1\n    \t}\n\t}\n\t3\n}\n```\n[Open this code snippet in the hax playground](https://hax-playground.cryspen.com/#fstar/b7fe08cccd/gist=078ca6da8dad17541533bb5a0724784b)\n\nThe F* code extracted from this example is the following:\n```ocaml\nlet f (_: Prims.unit) : i32 =\n if true\n then if true then mk_i32 1 else mk_i32 3\n else mk_i32 3\n```\nHere the semantics is preserved, but adding the `else` branches results in a duplication of the return value `3`.\nOur idea to improve in the future is to revive the monadic version, but use it only if the duplication is too big. \nSupport for control flow (`return`, `break` and `continue`) in loops has been added as well. In hax, loops are translated as a functional fold in which the accumulator keeps track of the modification of the environment done by the effectful operations in the source. This extension relies on a monadic encoding of the loop result, that is passed in the accumulator to deal with the specific cases of `return`, `break` and `continue`. 
\n## Items sorting\nA quality of life feature that we have been lacking for a long time is trying to respect, as much as possible, the same order of items in the generated code compared to the source. We need to modify the order because (as for modules), Rust allows items to be defined in any order, while our backends need items to be defined after the other items they depend on (except for mutual recursion). We rely on a graph topological sort to ensure this property, and now use a modified version of the stable topological sort provided by ocamlgraph, which produces an order that respects the dependencies, but in the absence of constraints tries respects the order of the source.\n## Conclusion\nBringing hax to a new kind of project revealed the gap needed for it to be usable, but thanks to our active work, we have made great progress towards this goal. Even though there is still much more to do, this has allowed us to get results in these new applications of hax (stay tuned for more details about that!).\n"
  },
  {
    "path": "docs/blog/posts/lucas-departure.md",
    "content": "---\nauthors:\n  - lucas\ntitle: \"My Departure from hax and Cryspen\"\ndate: 2026-01-14\n---\n\nToday, I want to share an update on both my professional path and my role in\nhax. I decided to leave Cryspen, and as a result, I will also be stepping away\nfrom hax.\n\n## Looking Back\n\nBack in September 2023, while I was working at Inria, I started working with\nKarthikeyan on [Hacspec](https://github.com/hacspec/hacspec). Hacspec was\na domain-specific language embedded in Rust's syntax, aimed at cryptography\nspecification and verification. It relied on the surface AST (abstract syntax\ntree) of the Rust compiler (rustc). Using such an early representation in the\ncompiler pipeline gave us very limited information: no types, no name resolution\n-- essentially just syntax.\n\nBoth technically and in terms of intent, Hacspec had limitations. In December\n2023, we decided to take a fresh start and build a new tool from the ground\nup: hax.\n\nDesigning and implementing hax has been a fun adventure. I had the constraint to\nwrite the \"compiler\" part of hax in OCaml. That led me to design hax in two main\nparts:\n\n - **The frontend**: hooks into rustc and dumps enhanced ASTs, inlining a large\n   amount of semantic information about Rust programs. The frontend produces a\n   comprehensive, complete, and easy-to-consume AST that other tools can build\n   upon. It grew a lot, notably thanks to our collaboration with Inria (for\n   Charon and Aeneas), and especially thanks to\n   [Nadrieril](https://github.com/Nadrieril), with whom it has been a great\n   pleasure and a lot of fun to work.\n\n - **The engine**: an OCaml binary that reads our frontend's Rust AST, applies a\n   sequence of translation phases, and finally outputs F*, Coq, etc.\n\nFor a full year at Inria and then two years at Cryspen, I was the main developer\nof hax. 
Throughout this time, I greatly enjoyed working with Karthik; we\ndiscussed many aspects of hax countless times: its design, its applications, the\nworkflows, and more. Those were great conversations, essential to the\ndevelopment of hax.\n\nLeading the development of hax was a great and intense experience. I had to engineer a\npretty large piece of software, design interesting semantic compiler passes,\nbuild debugging tools, do DevOps work, build a playground, and more. I also\nlearned how complicated human interactions can be.\n\n## Working at Cryspen\n\nDuring my time at Cryspen, the proofs and tools team grew a lot. When I arrived,\nit was Karthik and me. Then [Maxime](https://cryspen.com/post/welcome_maxime/)\njoined towards the end of my first year (in August 2024). In May the next year\n[Clément](https://cryspen.com/post/welcome_clement/) arrived, and very recently,\nin November 2025, [Alex](https://cryspen.com/post/welcome_alex/) arrived. I\nreally enjoyed working with everyone in the proofs and tools team at Cryspen!\n\nBeyond the proofs and tools team, it was also great to work with others at Cryspen:\nJan, Jonas, Clara.\n\n## The Future\n\nAfter three years working on hax, I decided it was time for me to leave. Hax is\na bit my baby, so that was a very hard decision to make.\n\nThat said, the rest of the proofs and tools team at Cryspen will continue\nmaintaining, improving, and applying hax to cool real-world Rust projects! They\nare already working on the new Lean backend, on better libraries, and on very\nexciting applications!\n\nI'm proud of what hax has become, and I hope it will have a bright future! If\nhax speaks to you, consider following the project, trying it out, or\ncontributing.\n"
  },
  {
    "path": "docs/blog/posts/reworking-names/reworking-names.md",
    "content": "---\nauthors:\n  - lucas\ntitle: \"Redesigning Global Identifiers in hax\"\ndate: 2025-04-01\n---\n\n# Redesigning Global Identifiers in hax\n\nA careful treatment of identifiers lies at the heart of all code analysis frameworks, and we hope our experience here proves useful to others.\n\nIn Rust, global identifier serves to uniquely locate uniquely an item: for instance `::serde::ser::Serialize` designates the `Serialize` trait from the Serde library. In constrat, local identifiers are relative, limited to the scope in which they are declared.\n\n## Global Identifiers from the Rust Compiler\n\nInitially, hax assumed that all identifiers originated exclusively from Rust. While this assumption held in the early stages, it was eventually challenged as the system grew[^1]. As hax evolved, new requirements emerged, prompting the engine to generate identifiers internally:\n\n- **Trait pre- and post-conditions:** in hax, these are explicitly represented as concrete methods within typeclasses. Conversely, in Rust, these conditions exist only as anonymous standalone functions.\n- **Explicit enum cast operations:** enum casts are primitive operations in Rust, but hax treats these casts as specialized operations, assigning distinct identifiers to them.\n- **Cross-module mutually recursive item bundles:** these bundles[^2] are internally introduced by hax, necessitating the generation of unique identifiers to prevent naming conflicts.\n\n[^1]: See [PR #935](https://github.com/cryspen/hax/pull/935), [PR #211](https://github.com/cryspen/hax/pull/211) or [PR #571](https://github.com/cryspen/hax/pull/571) for examples of such new features.\n[^2]: Rust supports cross-module mutual recursion without enforcing declaration order, an uncommon feature among programming languages. In contrast, most of our backends require some form of forward declaration. 
To bridge this gap and accommodate Rust’s permissive namespacing, we group related items into bundles and reorder them to eliminate cross-module recursion.\n\nMoreover, the previous identifier system lacked detailed metadata, such as the type of identifier (struct, function, type, etc.), complicating identifier rendering for backend tools.\n\n## Issues with the Previous Design {#issues-with-previous-design}\n\nInitially, identifiers were represented using slightly modified Rust `DefId`s accompanied by minimal metadata indicating the identifier's kind. This approach presumed that hax would never alter these `DefId`s but merely use those directly produced by the Rust compiler.\n\nThis assumption was quickly challenged. The need to prefix or suffix identifiers emerged early, but the introduction of new internal modules completely disrupted the assumption. Identifiers had to be relocated across modules, representing a significant departure from the original design.\n\nAs the API for manipulating identifiers grew increasingly permissive and transparent, the foundational assumption—that `DefId`s were unique, consistent, and Rust-generated—was entirely undermined. In consequence, rendering names for the backends became a complicated, error-prone process. This resulted in numerous bugs in identifier rendering in backend outputs, leading to at least 16 documented issues ([#1135](https://github.com/cryspen/hax/issues/1135)).\n\nAs an example, the rendering process made distinguishing the two functions `c` very difficult in the following snippet of code. This resulted in a bug (see [\\#1136](https://github.com/cryspen/hax/issues/1136)) where hax would extract F\\* code with two functions both named `c` in the same module `Mycrate.A.B`!\n```rust\nmod a {\n    mod b {\n        fn c() { ... }\n    }\n}\nfn a() {\n    mod b {\n        fn c() { ... 
}\n    }\n}\n```\n\n## Our New Approach\n\nThe frontend has been enhanced to explicitly indicate the kind of each identifier, clarifying whether it represents a function, an associated type, a constant, etc. Additionally, it now provides detailed parent information, making the origin of identifiers more transparent. Alongside these improvements, we have redesigned our internal engine's identifier representation, introducing a layered structure where each layer addresses a distinct aspect.\n\n1. **Raw Rust Identifiers:** using Rust's `DefId` type, generated from Rust to OCaml, with minor normalization to address potential duplicate references. These identifiers are immutable and cannot be arbitrarily created or altered.\n\n2. **Explicit_def_id:** addresses Rust's ambiguity between a struct constructor and the type itself, explicitly distinguishing identifiers belonging to types from those belonging to values, enhancing clarity for backend translation.\n\n3. **Concrete_ident:** built upon `Explicit_def_id`, this layer adds capabilities for generating fresh module names or adding hygienic suffixes. It ensures identifier uniqueness and declares constraints clearly when creating new names or namespaces.\n\n### Simplified Identifier Views\n\nRust's namespace structure is highly flexible, allowing various forms of nesting, such as types within functions, functions within constants, and more.\n\nBroadly, there are two kinds of nesting in Rust. Consider the following snippet:\n\n```rust\nmod a {\n    impl MyTrait for MyType {\n        fn assoc_fn() {\n            struct LocalStruct {\n                field: u8,\n            };\n        }\n   }\n}\n```\n\nIn this example, the user has intentionally placed `LocalStruct` within the method `assoc_fn`, which itself resides inside the module `a`. 
This is an instance of **user-driven nesting**, where the developer freely organizes elements within the code for clarity, convenience, or structural preference.\n\nAt the same time, we observe another form of nesting: `field` is contained within `LocalStruct`, and `assoc_fn` is enclosed within the `impl` block implementing `MyTrait` for `MyType`. This represents **hierarchical nesting**, which is dictated by the Rust language itself. Unlike user-driven nesting, hierarchical relationships are inherent to Rust's type system: a field **must** belong to a struct or an enum variant, and a method **must** exist within an impl block.\n\nThe following diagram shows how these hierarchical relationships are structured.\n\n![](name-example.excalidraw.png)\n\nDistinguishing between these two types of nesting is crucial when rendering names. Hierarchical nesting often requires special handling in backends due to its structural constraints, whereas user-driven nesting primarily serves readability and organization.\n\nTo manage this effectively, we introduced a hierarchical view for identifiers. Instead of handling Rust's deeply nested identifier paths as-is, we transform them into structured, relational representations. 
This approach simplifies backend processing, minimizes namespace conflicts, and ensures better compatibility with backend language constraints.\n\nLooking back at our [`a::b::c` example](./reworking-names.md#issues-with-previous-design), this hierarchical view makes the problem very easy, since modules and functions are user nesting.\n\n## Conclusion: Say Goodbye to Naming Issues (Almost)!\n\nThis comprehensive redesign of identifier representation and handling has resolved most previously identified naming issues and significantly enhanced the expressiveness and robustness of backend identifier rendering in hax.\n\nCheck out the pull request [#1199](https://github.com/cryspen/hax/pull/1199) on the GitHub repository of hax for more details!\n\nWe are confident that this enhanced representation is sufficiently robust and flexible to accommodate future developments and evolving project requirements.\n"
  },
  {
    "path": "docs/blog/posts/rust-gcd-1.md",
    "content": "---\nauthors:\n  - alex\ntitle: \"Verifying a real world Rust crate\"\ndate: 2025-12-08\n---\n\n# Verifying a real world Rust crate\n\nIn this post,\nwe are going to use hax and F\\* to verify a small real world Rust crate.\nThen, we will try other verification tools (Kani, Verus, Aeneas) to the same thing.\nThe Rust crate [gcd](https://crates.io/crates/gcd) by Corey Farwell that we are going to verify implements\nfunctions to compute the greatest common divisor of two integers.\nWe will focus on proving termination and panic freedom for now.\nIn a future post, we will look at functional correctness.\n\nWe have forked the repository [here](https://github.com/cryspen/rust-gcd).\nThe results of this tutorial can be found in different branches of this fork:\n\n* [hax_fstar](https://github.com/cryspen/rust-gcd/tree/hax_fstar)\n* [kani](https://github.com/cryspen/rust-gcd/tree/kani)\n* [aeneas](https://github.com/cryspen/rust-gcd/tree/aeneas)\n* [verus](https://github.com/cryspen/rust-gcd/tree/verus)\n\n## Preparation\n\nFirst, install Hax and F\\*:\n\n* [Install Hax](https://github.com/cryspen/hax?tab=readme-ov-file#installation) (We are using commit `0334b38`, so after `git clone git@github.com:cryspen/hax.git && cd hax`, run `git checkout 0334b38`)\n\n* [Install F\\*](https://github.com/FStarLang/FStar/blob/master/INSTALL.md)\n\nTo get started, we clone the repo of the Rust crate and switch to the\ncommit that we use in this post (`8fb3a59`):\n```\ngit clone git@github.com:frewsxcv/rust-gcd.git && cd rust-gcd\ngit checkout 8fb3a59\n```\n\nWe add hax-lib as a dependency, which will allow us to make annotations in the Rust code:\n```\ncargo add --git https://github.com/hacspec/hax hax-lib --rev 0334b38\n```\n\n## Extraction\n\nNow we can attempt to translate the Rust code into F\\* code, which we will later verify. 
Our Rust crate implements two variants to compute the greatest common divisor, the euclidean algorithm and the binary algorithm, each in various variants for different integer types. To start simple, we will focus on\nthe `u8` variant of the euclidean algorithm first. The following command instructs hax to extract only the function `gcd::euclid_u8` and its dependencies.\n```\ncargo hax into -i '-** +gcd::euclid_u8' fstar\n```\nThis creates a new file `proofs/fstar/extraction/Gcd.fst`, which contains a translation of our Rust crate in F\\*. To help F\\* find the correct dependencies, we download [this Makefile](https://gist.githubusercontent.com/W95Psp/4c304132a1f85c5af4e4959dd6b356c3/raw/a54aec2538c625eb525281106ff73ea96f7b96dc/Makefile)\nand put it into `proofs/fstar/extraction/`.\n\nBefore we instruct F\\* to start proving anything, we first check that all dependencies can be found:\n```\nOTHERFLAGS=\"--lax\" make -C proofs/fstar/extraction/\n```\nThis yields some harmless warnings and eventually:\n```\nAll verification conditions discharged successfully\n```\nThis means that all dependencies are available and we can start proving things.\n\nThe Makefile we are using helps us to cache the results of the F\\* verification, but this cache has the dangerous flaw that it does not invalidate when removing the `--lax` flag we used above. 
So we should delete the cache now:\n```\nrm -rf .fstar-cache\n```\n\n## Panic freedom of Euclidean GCD\n\nBy default, without us specifying anything, hax's F\\* backend will attempt to prove that the Rust program terminates and does not panic:\n```\nmake -C proofs/fstar/extraction/\n```\nThe proof attempt fails with the following error:\n```\n* Error 19 at Gcd.fst(26,10-26,14):\n  - Subtyping check failed\n  - Expected type\n      o:\n      (Rust_primitives.Integers.u8 & Rust_primitives.Integers.u8)\n        { (let _, _ = o in\n            true) /\\\n          (let _, _ = o in\n            Rust_primitives.Hax.Int.from_machine (Rust_primitives.Integers.mk_u32\n                  0)\n            <:\n            Hax_lib.Int.t_Int) <\n          (let _, _ = temp_0_ in\n            Rust_primitives.Hax.Int.from_machine (Rust_primitives.Integers.mk_u32\n                  0)\n            <:\n            Hax_lib.Int.t_Int) }\n    got type Rust_primitives.Integers.u8 & Rust_primitives.Integers.u8\n```\nTo prove that a while-loop terminates, F\\* requires a measure that decreases with every loop iteration. By default, the measure is simply the number 0, which always fails and results in errors resembling the one above. We need to find a better expression that decreases with every loop iteration. The relevant while-loop is the following:\n```rust\nwhile b != 0 {\n    let temp = a;\n    a = b;\n    b = temp;\n\n    b %= a;\n}\n```\nIn each iteration, the variables `a` and `b` get swapped and `b` is then set to `b % a`. If we focus only on what is happening to `b` here, we observe that `b` is set to `a % b` over the course of one iteration. 
Since `a % b` is always smaller than `b`, `b` is decreasing with every iteration, and we can set it as our termination measure:
In our case, we get lots of errors of this kind:\n```\nerror: [HAX0001] something is not implemented yet.This is discussed in issue https://github.com/hacspec/hax/issues/933.\nPlease upvote or comment this issue if you see this error message.\nUnhandled loop kind\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `FunctionalizeLoops`.\n\n  --> src/lib.rs:45:13\n   |\n45 | /             loop {\n46 | |                 v >>= v.trailing_zeros();\n...  |\n58 | |                 if v == 0 { break; }\n59 | |             }\n   | |_____________^\n   |\n```\nThis is because the `loop`-construct cannot be translated. As a first (temporary) fix, we replace `loop` in `src/lib.rs` by `while true`.\n\nWe run extraction again:\n```\ncargo hax into -i '-**' fstar\n```\nNow it succeeds. We verify:\n```\nmake -C proofs/fstar/extraction/\n```\nThis yields a couple of harmless warnings and one error:\n```\nError 72 at Gcd.fst(29,38-29,61):\n  - Identifier impl_u8__trailing_zeros not found in module Core_models.Num\n```\nThis is happening because the function `trailing_zeros` is missing in hax's F\\* library. 
We can add it locally to our project by creating a file named `Core_models.Num.fsti` in `proofs/fstar/extraction`, and inserting the following code:\n```fstar\nmodule Core_models.Num\nopen Rust_primitives\n \nval trailing_zeros: #t:inttype -> int_t t -> (n:u32{v n >= 0 /\\ v n <= bits t})\n\nunfold let impl_u8__trailing_zeros (n:u8) = trailing_zeros n\nunfold let impl_u16__trailing_zeros (n:u16) = trailing_zeros n\nunfold let impl_u32__trailing_zeros (n:u32) = trailing_zeros n\nunfold let impl_u64__trailing_zeros (n:u64) = trailing_zeros n\nunfold let impl_u128__trailing_zeros (n:u128) = trailing_zeros n\nunfold let impl_usize__trailing_zeros (n:usize) = trailing_zeros n\n```\nThis code tells F\\* about the `trailing_zeros` functions and their signature for all unsigned integer types.\n\nRunning verification again, we get the next error:\n```\n* Error 72 at Gcd.fst(19,28-19,41):\n  - Identifier while_loop_cf not found in module Rust_primitives.Hax\n```\nThis is another missing function in hax's F\\* libraries ([issue #1204](https://github.com/cryspen/hax/issues/1204)).\nWe can avoid it by refactoring the Rust code such that it avoids `break`s in while-loops.\nHere is the problematic while-loop:\n```rust\npub const fn $binary(mut u: $T, mut v: $T) -> $T\n{\n    if u == 0 { return v; }\n    if v == 0 { return u; }\n\n    let shift = (u | v).trailing_zeros();\n    u >>= shift;\n    v >>= shift;\n    u >>= u.trailing_zeros();\n\n    while true {\n        v >>= v.trailing_zeros();\n\n        if u > v {\n            let temp = u;\n            u = v;\n            v = temp;\n        }\n\n        v -= u;\n\n        if v == 0 { break; }\n    }\n\n    u << shift\n}\n```\nSo how can we get rid of the `break`? 
We will have to modify the Rust code a little.\nWe will try to move the line `if v == 0 { break; }` further up.\nSince `v - u == 0` if and only if `u == v`, we can check for that\nbefore the assignment `v -= u`:\n```rust\nwhile true {\n    v >>= v.trailing_zeros();\n\n    if u > v {\n        let temp = u;\n        u = v;\n        v = temp;\n    }\n\n    if u == v { break; }\n\n    v -= u;\n}\n```\nMoreover, for the condition `u == v` it does not matter\nwhether `u` and `v` are swapped. So we can also move the check before the swapping:\n```rust\nwhile true {\n    v >>= v.trailing_zeros();\n\n    if u == v { break; }\n\n    if u > v {\n        let temp = u;\n        u = v;\n        v = temp;\n    }\n\n    v -= u;\n}\n```\nSince the loop-condition is always true, we can do the\nassignment `v >>= v.trailing_zeros();` just as well at the end of every iteration instead of the beginning of each iteration\nif we perform it one additional time before the loop starts:\n```rust\nv >>= v.trailing_zeros();\nwhile true {\n    if u == v { break; }\n\n    if u > v {\n        let temp = u;\n        u = v;\n        v = temp;\n    }\n\n    v -= u;\n    v >>= v.trailing_zeros();\n}\n```\nFinally, we can move the line `if u == v { break; }` into the loop's condition:\n```rust\nv >>= v.trailing_zeros();\nwhile u != v {\n\n    if u > v {\n        let temp = u;\n        u = v;\n        v = temp;\n    }\n\n    v -= u;\n    v >>= v.trailing_zeros();\n}\n```\nExtracting and running F\\* now yields:\n```\n* Error 19 at Gcd.fst(15,23-15,28):\n  - Subtyping check failed\n  - Expected type\n      b:\n      Rust_primitives.Integers.int_t Rust_primitives.Integers.U32\n        { Rust_primitives.Integers.v b >= 0 /\\\n          Rust_primitives.Integers.v b <\n          Rust_primitives.Integers.bits Rust_primitives.Integers.U8 }\n    got type Rust_primitives.Integers.u32\n```\nThis error occurs because the F\\* specification of the `>>`-function expects its right-hand argument to be smaller than the 
total number of bits of the employed integer type. This is already the case in our code, but F\\* is not able to figure out that the value `shift` is indeed small enough.\n\nMost right-shifts in our code are by the number of trailing zeros of the given integer. That number of zeros can in principle be equal to the number of bits of the integer (which would be to large for `>>`), but only if the integer is `0`. So we can help F\\* to figure out that everything is okay by adding the following lemma to `Core_models.Num.fsti`:\n\n```fstar\nval trailing_zeros_lt_bits #t (a: int_t t):\n    Lemma (requires (v a <> 0))\n          (ensures (v (trailing_zeros a) < bits t))\n          [SMTPat (trailing_zeros a)]\n```\nThe lemma states that the number of trailing zeros is smaller than the total number of bits whenever the integer is nonzero.\nThe `SMTPat`-annotation tells F\\* that this lemma should be considered whenever a problem contains the `trailing_zeros` function.\n\nHowever, there are also two occurrences of `>>` where we shift `u` and `v` by `(u | v).trailing_zeros()`. For these, we need the following additional lemmas:\n```fstar\nval trailing_zeros_band_le_left #t (a b : int_t t):\n    Lemma (v (trailing_zeros (a |. b)) <= v (trailing_zeros a))\n          [SMTPat (trailing_zeros (a |. b))]\n\nval trailing_zeros_band_le_right #t (a b : int_t t):\n    Lemma (v (trailing_zeros (a |. b)) <= v (trailing_zeros b))\n          [SMTPat (trailing_zeros (a |. b))]\n```\nThese lemmas state that the trailing zeros of `a |. b` will always be at most as many as the trailing zeros of `a`, and similarly for `b`. Via `SMTPat`, we tell F\\* to use this lemma when it encounters expressions of the form `trailing_zeros (a |. 
b)`.\n\nSince our first lemma applies only when the integer is nonzero, we also need to enable F\\* to know that our integers do not become zero by shifting:\n```fstar\nval shift_right_trailing_zeros_nonzero #t (a: int_t t) (b : u32):\n    Lemma (requires (v a <> 0) && (v b <= v (trailing_zeros a)))\n          (ensures (v (shift_right a b) <> 0))\n          [SMTPat (shift_right a b)]\n```\n\nThis resolves the error around `>>`.\n\nFinally, we need to add a termination measure to the while-loop.\nThis is the loop:\n```rust\nwhile u != v {\n\n    if u > v {\n        let temp = u;\n        u = v;\n        v = temp;\n    }\n\n    v -= u;\n    v >>= v.trailing_zeros();\n}\n```\n\nHere is a summary of what the while loop is doing: It subtracts the smaller number among `u` and `v` from the larger one among them.\nThen, it removes any trailing zeros from the result.\nSo in each iteration, as long as both numbers are nonzero, the larger one of the two numbers will definitely get smaller, and the other one will remain the same.\nTherefore, we will use the larger number among `u` and `v` as our termination measure:\n```rust\nwhile u != v {\n    hax_lib::loop_decreases!(if v < u { u } else { v });\n\n    if u > v {\n        let temp = u;\n        u = v;\n        v = temp;\n    }\n\n    v -= u;\n    v >>= v.trailing_zeros();\n}\n```\nSince subtracting `0` does not decrease the number,\nit is cruicial that `v` and `u` do not become `0`. 
We annotate the loop with this invariant to make F\\* aware of this:\n```rust\nwhile u != v {\n    hax_lib::loop_decreases!(if v < u { u } else { v });\n    hax_lib::loop_invariant!(v != 0 && u != 0);\n\n    if u > v {\n        let temp = u;\n        u = v;\n        v = temp;\n    }\n\n    v -= u;\n    v >>= v.trailing_zeros();\n}\n```\nWe also need to make F\\* aware that `v >>= v.trailing_zeros();` cannot\nincrease `v` with an additional lemma:\n```fstar\nval shift_right_trailing_zeros_le #t (a: int_t t):\n    Lemma (requires (v a <> 0))\n          (ensures (v (shift_right a (trailing_zeros a)) <= v a))\n          [SMTPat (shift_right a (trailing_zeros a))]\n```\nWe extract and reverify:\n```\n[CHECK] Gcd.fst \nVerified module: Gcd\nAll verification conditions discharged successfully\n```\nYay, we made it! Also the binary implementation always terminates and never panics.\n\n## Verification using other tools\n\nFor comparison, we verify panic freedom and termination\nusing other tools as well.\n\n### Kani\n\nFirst, [install Kani](https://model-checking.github.io/kani/install-guide.html). We will use version 0.66.0.\n\nTo verify the function `$euclid` using Kani,\nwe need to add another function that implements the verification. Since `$euclid` is part of a macro `gcd_impl`\nthat duplicates it for various bit-lengths,\nwe add a third identifier `$check_euclid:ident` to that macro,\nand provide the following identifiers:\n```rust\ngcd_impl! {\n    (u8) binary_u8 euclid_u8 check_euclid_u8,\n    (u16) binary_u16 euclid_u16 check_euclid_u16,\n    (u32) binary_u32 euclid_u32 check_euclid_u32,\n    (u64) binary_u64 euclid_u64 check_euclid_u64,\n    (u128) binary_u128 euclid_u128 check_euclid_u128,\n    (usize) binary_usize euclid_usize check_euclid_usize\n}\n```\nNow we can implement the verification function `check_gcd` inside the macro. 
Here is a first draft:\n```rust\n#[kani::proof]\n#[cfg(kani)]\nfn $check_gcd() {\n    let x: $T = kani::any();\n    let y: $T = kani::any();\n\n    $euclid(x, y);\n}\n```\nThe first annotation tells Kani to run this verification function, and the second annotation tells the normal Rust compiler to ignore this function. The expression `kani::any()` tells Kani to test all possible integer values for `x` and `y`.\n\nNow we run Kani:\n```\nkani ./src/lib.rs \n```\nUnfortunatlely, Kani does not terminate because the loop in `$euclid` is potentially unbounded.\n\n#### Unwinding bound\n\nWe need to specify an upper bound on how often we want Kani to unwind the loop, using the `kani::unwind` annotation. When setting such an upper bound, we also need to limit the variables `x` and `y` to values that will make the number of loop iterations stay below the given bound, using `kani::assume`. Here are some numbers that work okay:\n```rust\n#[kani::proof]\n#[cfg(kani)]\n#[kani::unwind(15)]\nfn $check_euclid() {\n    let limit: u128 = 200;\n    let x: $T = kani::any();\n    let y: $T = kani::any();\n    kani::assume((x as u128) < limit);\n    kani::assume((y as u128) < limit);\n\n    let res = $euclid(x, y);\n}\n```\nNow we run `kani ./src/lib.rs` again:\n```\nComplete - 12 successfully verified harnesses, 0 failures, 12 total.\n```\nThe binary version can be verified in the exact same way by adding an analogous verification function `$check_binary` to the macro.\n\n#### Loop contracts\nHowever, there is yet another option to verify loops in Kani, without the need to limit the verified input values: loop contracts.\n\nTo use them, we first need to add the following\nannotations at the top of our file:\n```rust\n#![feature(stmt_expr_attributes)]\n#![feature(proc_macro_hygiene)]\n```\n\nNow we can use the annotation `kani::loop_invariant` to annotate our loop with an invariant. 
Since we only want to show panic-freedom here, simply using `true` works as an invariant for the loop in `$euclid`:
{\n\n}\n```\nNow we can try to run Verus:\n```\nverus src/lib.rs --crate-type=lib\n```\nWe get:\n```\nerror: loop must have a decreases clause\n```\nFrom our discussion above, we know that the variable `b` decreases in the loop. Let's tell Verus about that:\n```rust\nwhile b != 0 decreases b {\n    let temp = a;\n    a = b;\n    b = temp;\n\n    b %= a;\n}\n```\nRunning Verus again now yields:\n```\nverification results:: 12 verified, 0 errors\n```\nThe `$euclid` function terminates and is panic-free!\n\nNext, we wrap the `$binary` function into `verus! { ... }` as well.\nRunning Verus now results in an error:\n```\nerror: `core::num::impl&%11::trailing_zeros` is not supported\n```\nThe `trailing_zeros` function is present in Verus's library, but only for certain bit sizes. We could add the missing functions, but to simplify things, let's simply comment out the large bit sizes:\n```rust\ngcd_impl! {\n    (u8) binary_u8 euclid_u8,\n    (u16) binary_u16 euclid_u16,\n    (u32) binary_u32 euclid_u32,\n    (u64) binary_u64 euclid_u64//,\n    // (u128) binary_u128 euclid_u128,\n    // (usize) binary_usize euclid_usize\n}\n```\nand\n```rust\ngcd_impl_nonzero! 
{\n    (NonZeroU8) binary_nonzero_u8/binary_u8 euclid_nonzero_u8/euclid_u8,\n    (NonZeroU16) binary_nonzero_u16/binary_u16 euclid_nonzero_u16/euclid_u16,\n    (NonZeroU32) binary_nonzero_u32/binary_u32 euclid_nonzero_u32/euclid_u32,\n    (NonZeroU64) binary_nonzero_u64/binary_u64 euclid_nonzero_u64/euclid_u64//,\n    // (NonZeroU128) binary_nonzero_u128/binary_u128 euclid_nonzero_u128/euclid_u128,\n    // (NonZeroUsize) binary_nonzero_usize/binary_usize euclid_nonzero_usize/euclid_usize\n}\n```\nThe next error that we get is:\n```\nerror: loop must have a decreases clause\n```\nLet us reuse the same measure as we have used for Hax:\n```rust\nloop\n    decreases if v < u { u } else { v }\n{\n    v >>= v.trailing_zeros();\n\n    if u > v {\n        let temp = u;\n        u = v;\n        v = temp;\n    }\n\n    v -= u; // here v >= u\n\n    if v == 0 { break; }\n}\n```\n\nOur next error is:\n```\nerror: possible bit shift underflow/overflow\n   --> src/lib.rs:45:13\n    |\n45  |               u >>= shift;\n```\nWe can make our lives easier by simply commenting\nout the two lines\n```rust\nu >>= shift;\nv >>= shift;\n```\nNote that this does not change the function's behavior.\nThe following line and the first line of the loop\nwill shift `v` and `u` by all trailing zeros anyway.\nThere is no need to shift them by their common trailing\nzeros before that.\n\nWith those lines commented out, we now get:\n```\nerror: decreases not satisfied at end of loop\n```\nOur measure does actually decrease, but Verus is unable to prove it. First, we need to add a loop invariant\nthat `u` and `v` are nonzero. 
If one of them was zero, then subtracting one from the other would not make the measure decrease.\n```rust\nloop\n    invariant_except_break u != 0 && v != 0\n    decreases if v < u { u } else { v }\n{   \n```\nAlso, Verus has trouble figuring out\nthat the line\n```\nv >>= v.trailing_zeros();\n```\nwill never make `v` larger and will never cause `v` to become `0`.\nWe can add the following assumptions to\nfix this temporarily:\n```\nassume(v != 0 ==> v >> v.trailing_zeros() != 0);\nassume(forall |i: u8| v >> i <= v);\nv >>= v.trailing_zeros();\n```\nThe next error we get is:\n```\nerror: invariant not satisfied before loop\n   --> src/lib.rs:50:40\n    |\n50  |                   invariant_except_break u != 0 && v != 0\n```\nThis is because Verus cannot see that\n```\nu >>= u.trailing_zeros();\n```\ncannot make `u` become zero.\nWe can fix this temporarily using another assumption:\n```rust\nassume(u != 0 ==> u >> u.trailing_zeros() != 0);\nu >>= u.trailing_zeros();\n```\nThe only remaining error is:\n```\nerror: possible bit shift underflow/overflow\n   --> src/lib.rs:71:13\n    |\n71  |               u << shift\n```\nThis error occurs because shift could in principle be equal to the full number of bits of `u` when `u | v` is zero.\nAdding the following assumption above the definition of `shift`, helps Verus figure out that this cannot happen:\n```rust\nassume(u != 0 && v != 0 ==> u | v != 0);\nlet shift = (u | v).trailing_zeros();\n```\nNow verification succeeds:\n```\n$ verus src/lib.rs --crate-type=lib\nverification results:: 16 verified, 0 errors\n```\nHowever, this confirms termination\nand panic freedom only up to the assumptions we have inserted using `assume`.\n\nLet's try to prove them.\nThe `bit_vector` tactic can prove some of them:\n```\nassert(u != 0 && v != 0 ==> u | v != 0) by (bit_vector);\n```\nand\n```\nassert(forall |i: u8| v >> i <= v) by (bit_vector);\n```\nWe can use these lines to replace the corresponding `assume`s.\n\nThe remaining two 
`assume`s are harder to prove.\nWe will simply add them as an axiom by adding the following\nfunction to our `gcd_impl` macro:\n```\n#[verifier::external_body]\nproof fn $trailing_zeros_axiom(x: $T) \n    ensures x != 0 ==> x >> #[trigger] x.trailing_zeros() != 0\n{}\n```\nThen, we can replace\nthe two remaining assumes by\n```\nproof! { $trailing_zeros_axiom(u); }\n```\nand\n```\nproof! { $trailing_zeros_axiom(v); }\n```\n\n*What's to love:* Verus allows us to work directly with the Rust code!\n\n### Aeneas\n\n[Install Aeneas](https://github.com/AeneasVerif/aeneas?tab=readme-ov-file#installation--build).\nWe use commit `f2fbd655` here.\n\n[Install Lean](https://lean-lang.org/install/). \n\nWe will use the following `Makefile` to make Aeneas extract Lean code from our crate:\n```\nCHARON_HOME\t?= $(dir $(abspath $(lastword $(MAKEFILE_LIST))))/../charon\nAENEAS_HOME\t?= $(dir $(abspath $(lastword $(MAKEFILE_LIST))))/../aeneas\n\nCHARON_EXE = $(CHARON_HOME)/bin/charon\nAENEAS_EXE = $(AENEAS_HOME)/bin/aeneas\n\nAENEAS_OPTIONS ?=\n\n.PHONY: extract\nextract: gcd.llbc\n\t$(AENEAS_EXE) -backend lean gcd.llbc -split-files -dest proofs/Gcd $(AENEAS_OPTIONS)\n\ngcd.llbc: $(wildcard */*.rs)\n\tRUSTFLAGS=\"--cfg eurydice\" $(CHARON_EXE) cargo --preset=aeneas --start-from crate::euclid_u8 --start-from crate::binary_u8\n```\nSave this under the name `Makefile` and run `make`. Note that we specify the options `--start-from crate::euclid_u8 --start-from crate::binary_u8`, which will extract specifically the functions `euclid_u8` and `binary_u8` into Lean. 
Running `make` produces a couple of Lean files in the directory `proofs/Gcd`.\n\nWe create a new Lean project around these files:\n```\ncd proofs\nlake +v4.24.0 init Gcd lib\n```\n\nAdd the following lines to `lakefile.toml` to\nadd the Aeneas Lean library as a dependency, adjusting the path as needed:\n```toml\n[[require]]\nname = \"aeneas\"\npath = \"../../aeneas/backends/lean\"\n```\nThen run\n```\nlake update\n```\nto update the dependencies. This will download `mathlib`,\na dependency of Aeneas, which may take a while.\n\nAeneas created a file called `FunsExternal_Template.lean`\nbecause the `trailing_zeros` function is not part of the Aeneas library. Rename this template file to `FunsExternal.lean`. We could write a precise definition of this function here, but for now, we just define it as `sorry`, which is a placeholder for a missing definition.\nReplace the line\n```lean\naxiom core.num.U8.trailing_zeros : U8 → Result U32\n```\nby\n```lean\ndef core.num.U8.trailing_zeros : U8 → Result U32 := sorry\n```\nNow in the root file of our Lean project, `Gcd.lean`, add the import\n```\nimport Gcd.Funs\n```\nNow we run\n```\nlake build\n```\nto ensure that our Lean code typechecks:\n```\nwarning: Gcd/FunsExternal.lean:15:4: declaration uses 'sorry'\nBuild completed successfully (1500 jobs).\n```\n\nLet's have a look at what the Lean translations of our Rust functions look like.\nOpen the file `Funs.lean` in VSCode (with the Lean extension installed). 
You may see a lot of red in the editor, which will go away by pressing `Restart file`.\nThe file contains four definitions: The functions `binary_u8` and `euclid_u8` themselves, and for each of them a function representing the contained loop, which has become a recursive function in Lean.\n\nThe `euclid_u8` function for example looks as follows:\n```lean\n/- [gcd::euclid_u8]: loop 0:\n   Source: 'src/lib.rs', lines 75:12-82:13 -/\ndef euclid_u8_loop (a : U8) (b : U8) : Result U8 := do\n  if b != 0#u8\n  then let b1 ← a % b\n       euclid_u8_loop b b1\n  else ok a\npartial_fixpoint\n\n/- [gcd::euclid_u8]:\n   Source: 'src/lib.rs', lines 65:8-85:9 -/\ndef euclid_u8 (a : U8) (b : U8) : Result U8 := do\n  let (a1, b1) ← if a > b\n                   then ok (a, b)\n                   else ok (b, a)\n  euclid_u8_loop a1 b1\n```\n\n#### Euclidean GCD\n\nNow we can start proving. Open the file `Gcd.lean`.\nLet us verify termination and panic-freedom of `euclid_u8`.\nThis can be expressed in Lean as follows:\n```\ntheorem euclid_u8_spec (a b : U8) :\n    ∃ y, euclid_u8 a b = ok y := by sorry\n```\nHere, the `sorry` stands for a missing proof.\nA typical Aeneas proof looks like this:\n```\ntheorem euclid_u8_spec (a b : U8) :\n    ∃ y, euclid_u8 a b = ok y := by\n  unfold euclid_u8\n  progress*\n```\nUnfortunately, this proof does not quite work yet.\nWe get the error:\n```\nunsolved goals\ncase isTrue\na b : U8\nh✝ : a > b\n⊢ ∃ y, euclid_u8_loop a b = ok y\n\ncase isFalse\na b : U8\nh✝ : ¬a > b\n⊢ ∃ y, euclid_u8_loop b a = ok y\n```\nThe problem is that the `progress*` tactic does\nnot know the specification of the `euclid_u8_loop` function.\nLet's create a separate theorem about that function.\nPut the following code above the theorem that we just wrote:\n```lean\n@[progress]\ntheorem euclid_loop_u8_spec (a b : U8) :\n    ∃ y, euclid_u8_loop a b = ok y := by sorry\n```\nThis theorem states that `euclid_u8_loop` terminates and does not panic, for now without proof 
(`sorry`).\nNote that after adding this theorem, the error on the theorem below has disappeared.\nThe annotation `@[progress]` informs the `progress*` tactic about this specification and it can be used in the proof of `euclid_u8_spec`.\n\nNow we need to replace the `sorry` with an actual proof.\nLet's try the same idea:\n```lean\n@[progress]\ntheorem euclid_loop_u8_spec (a b : U8) :\n    ∃ y, euclid_u8_loop a b = ok y := by \n  unfold euclid_u8_loop\n  progress*\n```\n\nWe get an error:\n```\nfail to show termination for\n  gcd.euclid_loop_u8_spec\n```\n\nFrom our discussion above, we know that `b` is a variable that decreases in this recursive function. We can tell Lean about this as follows:\n```lean\n@[progress]\ntheorem euclid_loop_u8_spec (a b : U8) :\n    ∃ y, euclid_u8_loop a b = ok y := by \n  unfold euclid_u8_loop\n  progress*\ntermination_by b.val\ndecreasing_by scalar_decr_tac\n```\n\nNow all errors have disappeared and there are little check marks in the margin. That means `euclid_u8` really terminates and is panic-free!\n\n#### Binary GCD\n\nNow, let's try to verify the binary version as well.\nThe Lean translation looks like this:\n```lean\n/- [gcd::binary_u8]: loop 0:\n   Source: 'src/lib.rs', lines 45:12-59:13 -/\ndef binary_u8_loop (u : U8) (v : U8) : Result U8 := do\n  let i ← core.num.U8.trailing_zeros v\n  let v1 ← v >>> i\n  let (u1, v2) ← if u > v1\n                   then ok (v1, u)\n                   else ok (u, v1)\n  let v3 ← v2 - u1\n  if v3 = 0#u8\n  then ok u1\n  else binary_u8_loop u1 v3\npartial_fixpoint\n\n/- [gcd::binary_u8]:\n   Source: 'src/lib.rs', lines 35:8-62:9 -/\ndef binary_u8 (u : U8) (v : U8) : Result U8 := do\n  if u = 0#u8\n  then ok v\n  else\n    if v = 0#u8\n    then ok u\n    else\n      let i ← (↑(u ||| v) : Result U8)\n      let shift ← core.num.U8.trailing_zeros i\n      let u1 ← u >>> shift\n      let v1 ← v >>> shift\n      let i1 ← core.num.U8.trailing_zeros u1\n      let u2 ← u1 >>> i1\n      let u3 ← 
binary_u8_loop u2 v1\n      u3 <<< shift\n```\nWe use the same approach as for `euclid_u8`, adding the following code to `Gcd.lean`:\n```lean\ntheorem binary_u8_spec (a b : U8) :\n    ∃ y, binary_u8 a b = ok y := by\n  unfold binary_u8\n  progress*\n```\nWe get the following error:\n```\nunsolved goals\na b : U8\nh✝¹ : ¬a = 0#u8\nh✝ : ¬b = 0#u8\ni : U8\n_ : [> let i ← ↑(a ||| b) <]\ni_post_1 : ↑i = ↑(a ||| b)\ni_post_2 : i.bv = a.bv ||| b.bv\n⊢ ∃ y,\n  (do\n      let shift ← core.num.U8.trailing_zeros i\n      let u1 ← a >>> shift\n      let v1 ← b >>> shift\n      let i1 ← core.num.U8.trailing_zeros u1\n      let u2 ← u1 >>> i1\n      let u3 ← binary_u8_loop u2 v1\n      u3 <<< shift) =\n    ok y\n```\nThe `progress*` tactic gets stuck at `core.num.U8.trailing_zeros` because there is no specification about this function.\nLet's provide one, for instance directly above `binary_u8_spec`:\n```lean\n@[progress]\ntheorem trailing_zeros_spec (v : U8) (hv : v ≠ 0#u8):\n  ∃ y, core.num.U8.trailing_zeros v = .ok y ∧ y < 8#u32 := sorry\n```\nHere, we have added the fact that `trailing_zeros` will be less than the bit length when the input is nonzero\nsince we have seen above that this is crucial for verification of binary GCD.\n\nNext, we get the error:\n```\nunsolved goals\ncase hv\na b : U8\nh✝¹ : ¬a = 0#u8\nh✝ : ¬b = 0#u8\ni : U8\n_ : [> let i ← ↑(a ||| b) <]\ni_post_1 : ↑i = ↑(a ||| b)\ni_post_2 : i.bv = a.bv ||| b.bv\n⊢ i ≠ 0#u8\n```\nThe tactic gets stuck because there is no specification saying that bitwise or (`|||`) will not yield zero when the inputs are nonzero.\nLet's add that:\n```lean\n@[progress]\ntheorem bor_spec (u v : U8) (hu : u ≠ 0#u8) (hv : v ≠ 0#u8) :\n  ∃ y, (↑(u ||| v) : Result U8) = .ok y ∧\n    y ≠ 0#u8 := sorry\n```\n\nThe next error is:\n```\nunsolved goals\ncase hv\na b : U8\nh✝¹ : ¬a = 0#u8\nh✝ : ¬b = 0#u8\ni : U8\n_✝² : [> let i ← ↑(a ||| b) <]\ni_post : i ≠ 0#u8\nshift : U32\n_✝¹ : [> let shift ← core.num.U8.trailing_zeros i <]\nshift_post : 
shift < 8#u32\nu1 : U8\n_✝ : [> let u1 ← a >>> shift <]\nu1_post_1 : ↑u1 = ↑a >>> ↑shift\nu1_post_2 : u1.bv = a.bv >>> ↑shift\nv1 : U8\n_ : [> let v1 ← b >>> shift <]\nv1_post_1 : ↑v1 = ↑b >>> ↑shift\nv1_post_2 : v1.bv = b.bv >>> ↑shift\n⊢ u1 ≠ 0#u8\n```\n\nHere, we need to tell Lean that right shifting by the number of trailing zeros (or less) will not turn a nonzero number into zero.\nHere is a first attempt to state that:\n```lean\n@[progress]\ntheorem shift_right_spec (u : U8) (v : U32) (hu : u ≠ 0#u8) (hv : v ≤ core.num.U8.trailing_zeros u):\n  ∃ y, u >>> v = .ok y ∧ y ≠ 0#u8 := sorry\n```\nUnfortunately, this does not work because `core.num.U8.trailing_zeros` lives in the `Result` monad, i.e., its type is `U8 → Result U32`, not `U8 → U32`.\nTo get around this issue, we define another function `trailing_zeros`:\n```lean\ndef trailing_zeros : U8 → U32 := sorry\n```\nSince implementing it is beyond the scope of this blog post, we use the placeholder `sorry`.\nNow, we extend our specification of `core.num.U8.trailing_zeros` to state that it will always return the same result as prescribed by our new `trailing_zeros` function:\n```lean\n@[progress]\ntheorem trailing_zeros_spec (v : U8) (hv : v ≠ 0#u8):\n  ∃ y, core.num.U8.trailing_zeros v = .ok y ∧ y < 8#u32 ∧ y = trailing_zeros v := sorry\n```\nThen we can fix the specification of right-shift using our new function:\n```lean\n@[progress]\ntheorem shift_right_spec (u : U8) (v : U32) (hu : u ≠ 0#u8) (hv : v ≤ trailing_zeros u):\n  ∃ y, u >>> v = .ok y ∧ y ≠ 0#u8 := sorry\n```\nThe next error is this:\n```\nunsolved goals\ncase hv\na b : U8\nh✝¹ : ¬a = 0#u8\nh✝ : ¬b = 0#u8\ni : U8\n_✝ : [> let i ← ↑(a ||| b) <]\ni_post : i ≠ 0#u8\nshift : U32\n_ : [> let shift ← core.num.U8.trailing_zeros i <]\nshift_post_1 : shift < 8#u32\nshift_post_2 : shift = trailing_zeros i\n⊢ shift ≤ trailing_zeros a\n```\nWhat's missing here is that bitwise or (`|||`) will never yield more trailing zeros than either of the inputs.\nWe can edit 
the specification of bitwise or to fix that:\n```lean\n@[progress]\ntheorem bor_spec (u v : U8) (hu : u ≠ 0#u8) (hv : v ≠ 0#u8) :\n  ∃ y, (↑(u ||| v) : Result U8) = .ok y ∧\n    trailing_zeros y ≤ trailing_zeros u ∧\n    trailing_zeros y ≤ trailing_zeros v ∧\n    y ≠ 0#u8 := sorry\n```\nNext, the tactic gets stuck on:\n```\n⊢ ∃ y,\n  (do\n      let u3 ← binary_u8_loop u2 v1\n      u3 <<< shift) =\n    ok y\n```\nThis is because we don't have a specification for `binary_u8_loop` yet. Let's add one:\n```lean\n@[progress]\ntheorem binary_u8_loop_spec (a b : U8) :\n    ∃ y, binary_u8_loop a b = ok y := by\n  unfold binary_u8_loop\n  progress*\ntermination_by max a.val b.val\ndecreasing_by all_goals scalar_decr_tac\n```\nSince the proof is recursive, we need to provide a measure for termination. We'll use the maximum of `a` and `b`, just like we have done above.\nWe add `all_goals` because `decreasing_by` has to discharge two goals here.\nThis still fails because we are missing two more things:\nFirst, we need to extend our specification of right-shift to state that it will make the input smaller:\n```\n@[progress]\ntheorem shift_right_spec (u : U8) (v : U32) (hu : u ≠ 0#u8) (hv : v ≤ trailing_zeros u):\n  ∃ y, u >>> v = .ok y ∧ y ≠ 0#u8 ∧ y ≤ u := sorry\n```\nSecond, we need to add what corresponds to a loop invariant in the specification of `binary_u8_loop`:\n```lean\n@[progress]\ntheorem binary_u8_loop_spec (a b : U8) (ha : a ≠ 0#u8) (hb : b ≠ 0#u8) :\n    ∃ y, binary_u8_loop a b = ok y ∧ y ≠ 0#u8 := by\n  unfold binary_u8_loop\n  progress*\ntermination_by max a.val b.val\ndecreasing_by all_goals scalar_decr_tac\n```\nNo more errors! So Aeneas, too, agrees that `binary_u8` terminates and does not panic.\n\n*What's to love:* Aeneas leaves our source code completely untouched!\n"
  },
  {
    "path": "docs/blog/posts/rust-gcd-2.md",
    "content": "---\nauthors:\n  - alex\ntitle: \"Verifying a real world Rust crate\"\ndate: 2026-01-19\n---\n\n# Verifying a while loop in Hax/Lean\n\nIn our last blog post, the dog that didn't bark was Hax/Lean. It was missing because we did not have support for while loops then. Now, we support them and we will demonstrate it here.\n\nYou can find the results of this tutorial\non [https://github.com/cryspen/rust-gcd/tree/hax_lean1](https://github.com/cryspen/rust-gcd/tree/hax_lean1) on the branch `hax_lean1`.\n\n## Preparation\n\nFirst, we need to install Hax and Lean:\n\n* [Hax](https://github.com/cryspen/hax?tab=readme-ov-file#installation)\n(We are using commit `d1365d4`, so after `git clone git@github.com:cryspen/hax.git && cd hax`, run `git checkout d1365d4`)\n\n* [Lean](https://lean-lang.org/install/)\n\nAgain, we will use the [gcd Rust crate](https://github.com/frewsxcv/rust-gcd) as an example:\n```\ngit clone git@github.com:frewsxcv/rust-gcd.git && cd rust-gcd\ngit checkout 8fb3a59\n```\nWe add hax-lib as a dependency, which will allow us to make annotations in the Rust code:\n```\ncargo add --git https://github.com/hacspec/hax hax-lib --rev d1365d4\n```\n\n## Extraction\n\nNow we are ready to translate the Rust code into Lean code. 
We will limit ourselves to\nthe `euclid_u16` function here:\n```\ncargo hax into -i '-** +gcd::euclid_u16' lean\n```\nThis will create a new file `proofs/lean/extraction/Gcd.lean` containing the Lean version of\nthe extracted function.\n\nFor Lean to find the required dependencies, \nwe must add the following two files in `proofs/lean`:\n\n\n`lean-toolchain`:\n```\nleanprover/lean4:v4.23.0\n```\n\n`lakefile.toml`:\n```\nname = \"Gcd\"\nversion = \"0.1.0\"\ndefaultTargets = [\"Gcd\"]\n\n[[lean_lib]]\nname = \"Gcd\"\nroots = [\"extraction.Gcd\"]\n\n[[require]]\nname = \"Hax\"\npath = \"../../../hax/hax-lib/proof-libs/lean\"\n```\nMake sure that the path above points to the subdirectory `hax-lib/proof-libs/lean` of the repository that you checked out during the installation of Hax (i.e., `git@github.com:cryspen/hax.git` on commit `d1365d4`). The path can be relative to the `lakefile.toml` file or absolute.\n\nNow we can run Lean on the extracted code.\n```\n(cd proofs/lean && lake build)\n```\nIt should take a moment and then say:\n```\nBuild completed successfully (35 jobs).\n```\nSo is this already verified? No, currently, we need to add a pre- or post-condition to\na function to make Hax generate a specification that we can prove correct. 
(This will likely change in the near future.)\n\n## Verification\n\nWe can add the following `hax_lib::ensures` annotation \nabove the definition of `$euclid`\nto say that we want to prove termination and panic-freedom:\n```\n#[hax_lib::ensures(|_| true)]\npub const fn $euclid(a: $T, b: $T) -> $T\n{\n    ...\n}\n```\nWe run Hax and Lean again:\n```\ncargo hax into -i '-** +gcd::euclid_u16' lean\n(cd proofs/lean && lake build)\n```\nNow, we get lots of `unsolved goals` errors.\nWe can open the `Gcd.lean` file to get a better impression of what is going on.\nThe file contains a definition of `Gcd.euclid_u16`, which is the Lean version of our `euclid_u16` function and which compiles without error.\nBelow, we have a definition of `Gcd.euclid_u16.spec`,\nwhich contains the specification of the function and an attempted proof of correctness.\nIt should have a red squiggly underline on the `contract` proof,\nindicating that the error occurs there.\nThe default proof `by mvcgen[Gcd.euclid_u16] <;> try grind` fails.\n\nIf we click just behind `mvcgen[Gcd.euclid_u16]`, we can see the verification conditions\nthat Lean's `mvcgen` tactic generated in Lean's infoview.\nIt shows a list of four goals.\nThe second and the fourth goal end in:\n```\nToNat.toNat 0 < ToNat.toNat 0\n```\nSo this says that the `u16` value `0`, converted to a natural number, is smaller than itself.\nThis is simply wrong and will be impossible to prove.\nThese verification conditions are coming from the default termination measure associated with while loops, which is constant `0` by default. 
We will have to provide a better measure to prove termination, using the `hax_lib::loop_decreases` annotation.\nFrom our last blog post, we know that `b` is a useful measure for this loop:\n```\nwhile b != 0 {\n    hax_lib::loop_decreases!(b);\n    // mem::swap(&mut a, &mut b);\n    let temp = a;\n    a = b;\n    b = temp;\n\n    b %= a;\n}\n```\nAfter running Hax and Lean again, the default proof still fails,\nbut the generated verification conditions can now be proved with some manual effort.\nAfter developing the proof in Lean, we can copy the proof into the Rust file\nso that it does not get overwritten when reextracting the code:\n```\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof(\"by\n    mvcgen[Gcd.euclid_u16]\n    · expose_names\n      intro\n      simp_all [a_1]\n    · expose_names\n      simp only [ToNat.toNat, h_4]\n      apply Nat.mod_lt\n      grind\n    · expose_names\n      intro\n      simp_all [a_1]\n    · expose_names\n      simp only [ToNat.toNat, h_4]\n      apply Nat.mod_lt\n      grind\")]\npub const fn $euclid(a: $T, b: $T) -> $T\n{\n    ...\n}\n```\n(Be careful with the indentation here! Lean is white-space sensitive!)\n\nAfter running Hax again, `lake build` now says:\n```\nBuild completed successfully (35 jobs).\n```\nYay!\n\nWe are working on better automation for proofs like this one and on better coverage of the Rust core library, e.g., to be able to verify the binary gcd implementation in this crate as well."
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2025-01.md",
    "content": "---\nauthors:\n  - lucas\ntitle: \"This Month in Hax: January 2025\"\ndate: 2025-02-10\n---\n\n\nThis blog post continues our ongoing series introduced in the [previous blog of\nhax](https://hacspec.org/blog/tags/this-month-in-hax/), a monthly collection of\nhighlights showcasing key developments in hax and its ecosystem.\n\nThis month, we merged **31 pull requests** and celebrated a major milestone by\nreleasing the first official version of hax:\n[v0.1.0](https://github.com/cryspen/hax/releases/tag/cargo-hax-v0.1.0). If you\nhaven’t already, be sure to check out [our blog post](../announce-v0.1.md) for\nmore details on this release!\n\nWe tackled a variety of bug fixes and engine improvements. One significant\nachievement was resolving a long-standing issue related to the inconsistent\npreservation of declaration orders between Rust and the extractions. This\nproblem [was finally fixed](https://github.com/cryspen/hax/pull/1247). 🎉\n\nAdditionally, we merged [a comprehensive\noverhaul](https://github.com/cryspen/hax/pull/1199) of how identifiers are\ntreated and represented within the engine. This rework allowed us to fix nearly\nten related issues, making the system more robust and efficient.\n\nIn the F\\* backend, we transitioned away from using\n[HACL\\*](https://github.com/hacl-star/hacl-star) machine integers. Instead, we\nnow rely on a [thin wrapper](https://github.com/cryspen/hax/pull/1238) over\nF\\*'s native mathematical integers. 
Unlike HACL\\*'s opaque machine integers,\nthis new representation allows us to use F\\*'s normalizer freely, offering a\ncleaner and more lightweight solution.\n\nStay tuned for more updates in the coming months!\n\n### Full list of PRs\n\n* \\#1278: [ci(gha): drop magic-nix-cache action because of EOL](https://github.com/cryspen/hax/pull/1278)\n* \\#1277: [fix(mkdocs): use codemirror instead of ace, re-setup on page reload](https://github.com/cryspen/hax/pull/1277)\n* \\#1275: [Create CODEOWNERS](https://github.com/cryspen/hax/pull/1275)\n* \\#1273: [Various F* core lib additions.](https://github.com/cryspen/hax/pull/1273)\n* \\#1267: [fix(hax-lib/macros): handle correctly `&mut Self` arguments in `ensures`](https://github.com/cryspen/hax/pull/1267)\n* \\#1265: [Fix announce-v0.1.md](https://github.com/cryspen/hax/pull/1265)\n* \\#1263: [updatge readme and docs](https://github.com/cryspen/hax/pull/1263)\n* \\#1261: [Update website landing page](https://github.com/cryspen/hax/pull/1261)\n* \\#1260: [chore(deps): bump hashbrown from 0.15.0 to 0.15.2](https://github.com/cryspen/hax/pull/1260)\n* \\#1259: [changelog: initialize](https://github.com/cryspen/hax/pull/1259)\n* \\#1258: [Delete frontend/exporter/json-visualizer directory](https://github.com/cryspen/hax/pull/1258)\n* \\#1247: [Stable topological sort using original order.](https://github.com/cryspen/hax/pull/1247)\n* \\#1245: [Release hax v0.1.0](https://github.com/cryspen/hax/pull/1245)\n* \\#1241: [hax v0.1 blog post](https://github.com/cryspen/hax/pull/1241)\n* \\#1238: [Transparent integers](https://github.com/cryspen/hax/pull/1238)\n* \\#1237: [Fix order of `Call` trait clauses](https://github.com/cryspen/hax/pull/1237)\n* \\#1236: [Add more info to `ImplExprAtom::Builtin`](https://github.com/cryspen/hax/pull/1236)\n* \\#1230: [fix(engine) Propagate return rewrite to avoid crash in side_effect_utils](https://github.com/cryspen/hax/pull/1230)\n* \\#1229: [fix(engine) Add type arguments for associated 
constants.](https://github.com/cryspen/hax/pull/1229)\n* \\#1228: [fix(engine) Use ocamlgraph fork to fix missing rec bug.](https://github.com/cryspen/hax/pull/1228)\n* \\#1225: [Hax home page using mkdocs](https://github.com/cryspen/hax/pull/1225)\n* \\#1223: [fix(engine) Attempt to fix double return bug.](https://github.com/cryspen/hax/pull/1223)\n* \\#1222: [Make predicate handling a bit more consistent](https://github.com/cryspen/hax/pull/1222)\n* \\#1220: [Visit trait goals to rename impl expr they may contain.](https://github.com/cryspen/hax/pull/1220)\n* \\#1216: [Update README.md: `unsafe` is OK to use](https://github.com/cryspen/hax/pull/1216)\n* \\#1215: [Fix generics handling for function calls](https://github.com/cryspen/hax/pull/1215)\n* \\#1212: [fix(CI) Update F* version to fix mlkem CI job ](https://github.com/cryspen/hax/pull/1212)\n* \\#1206: [fix(engine) Make sub-parts of `Quote` visited by visitors](https://github.com/cryspen/hax/pull/1206)\n* \\#1199: [Engine: rework global name representation](https://github.com/cryspen/hax/pull/1199)\n* \\#1075: [Move trait methods in cyclic dependencies bundling.](https://github.com/cryspen/hax/pull/1075)\n* \\#1066: [Add EBNF for AST to book](https://github.com/cryspen/hax/pull/1066)\n\n### Contributors\n* [@Nadrieril](https://github.com/Nadrieril)\n* [@W95Psp](https://github.com/W95Psp)\n* [@app/dependabot](https://github.com/dependabot)\n* [@cmester0](https://github.com/cmester0)\n* [@franziskuskiefer](https://github.com/franziskuskiefer)\n* [@karthikbhargavan](https://github.com/karthikbhargavan)\n* [@maximebuyse](https://github.com/maximebuyse)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2025-02.md",
    "content": "---\nauthors:\n  - lucas\ntitle: \"This Month in Hax: February 2025\"\ndate: 2025-03-05\n---\n\nIn February, we merged **23 pull requests**!\n\nThe MIR translation of the frontend was improved by\n[@Nadrieril](https://github.com/Nadrieril): some bugs were fixed, and our\nhandling of constants have been improved and is now more robust. \n\nOne of the major updates this month was the introduction of a new\n[`Prop` abstraction](https://github.com/cryspen/hax/pull/1301) in `hax-lib`,\nwhich enhances expressiveness in property-based reasoning within the Hax\nengine. With `Prop`, it is now possible to write non-computable properties that leverage universal quantifiers.\n\nWe also made significant progress in the engine, including fixing issues\nrelated to [`continue` handling in loops](https://github.com/cryspen/hax/pull/1296) \nand ensuring proper naming and disambiguation in bundled components \n([#1280](https://github.com/cryspen/hax/pull/1280), [#1286](https://github.com/cryspen/hax/pull/1286)).\n\nWe also tackled improvements in the F\\* backend, such as fixing trait\ninheritance in `rand-core` ([#1322](https://github.com/cryspen/hax/pull/1322)) and \nexpanding the core library ([#1292](https://github.com/cryspen/hax/pull/1292)).\n\nStay tuned for more updates in the coming months!\n\n### Full list of PRs\n\n* \\#1325: [mkdocs: add Maxime description](https://github.com/cryspen/hax/pull/1325)\n* \\#1322: [Proof libs (F*): fix trait inheritance in rand-core](https://github.com/cryspen/hax/pull/1322)\n* \\#1320: ['hax for everyone' blog post.](https://github.com/cryspen/hax/pull/1320)\n* \\#1319: [Translate less data in MIR](https://github.com/cryspen/hax/pull/1319)\n* \\#1318: [ Not all evaluated MIR constants are byte strings](https://github.com/cryspen/hax/pull/1318)\n* \\#1317: [Avoid an ICE by matching on type earlier](https://github.com/cryspen/hax/pull/1317)\n* \\#1312: [full_def: no need to normalize clauses eagerly 
anymore](https://github.com/cryspen/hax/pull/1312)\n* \\#1309: [full_def: group generic and predicates into a common struct](https://github.com/cryspen/hax/pull/1309)\n* \\#1307: [update website landing page](https://github.com/cryspen/hax/pull/1307)\n* \\#1306: [init(docs/blog): this month in hax: January](https://github.com/cryspen/hax/pull/1306)\n* \\#1305: [fix(engine) Fix question marks simplification with deref/borrow.](https://github.com/cryspen/hax/pull/1305)\n* \\#1304: [feat(manual): hax-playground integration: use latest `main`](https://github.com/cryspen/hax/pull/1304)\n* \\#1303: [fix(engine) Fix return inside closure.](https://github.com/cryspen/hax/pull/1303)\n* \\#1302: [Engine: fix implicit representation for enums](https://github.com/cryspen/hax/pull/1302)\n* \\#1301: [`hax-lib`: introduce a `Prop` abstraction](https://github.com/cryspen/hax/pull/1301)\n* \\#1296: [fix(engine) Fix loops with `continue` and no `return`/`break`](https://github.com/cryspen/hax/pull/1296)\n* \\#1293: [fix(engine) Add const parameter for assoc const of parametric impl.](https://github.com/cryspen/hax/pull/1293)\n* \\#1292: [Additions and corrections in F* core lib.](https://github.com/cryspen/hax/pull/1292)\n* \\#1286: [fix(engine) Fix naming bundle regression](https://github.com/cryspen/hax/pull/1286)\n* \\#1284: [fix(engine) Make sure origins are renamed in bundles.](https://github.com/cryspen/hax/pull/1284)\n* \\#1282: [Update CI dependencies](https://github.com/cryspen/hax/pull/1282)\n* \\#1281: [Library additions for ML-DSA verification](https://github.com/cryspen/hax/pull/1281)\n* \\#1280: [fix(engine) Add default case for disambiguation of bundle element names](https://github.com/cryspen/hax/pull/1280)\n\n### Contributors\n* [@Nadrieril](https://github.com/Nadrieril)\n* [@W95Psp](https://github.com/W95Psp)\n* [@franziskuskiefer](https://github.com/franziskuskiefer)\n* [@karthikbhargavan](https://github.com/karthikbhargavan)\n* 
[@maximebuyse](https://github.com/maximebuyse)\n\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2025-03.md",
    "content": "---\nauthors:\n  - lucas\ntitle: \"This Month in Hax: March 2025\"\ndate: 2025-04-01\n---\n\nIn March, we successfully merged **32 pull requests**!\n\nThanks [@Nadrieril](https://github.com/Nadrieril), who helped move `hax` forward by pinning it to a more recent nightly version of the Rust compiler ([#1380](https://github.com/cryspen/hax/pull/1380)). Nadrieril also continued work on the frontend. Trait resolution is now more robust, especially in the presence of closures ([#1376](https://github.com/cryspen/hax/pull/1376)), and our handling of constants has seen significant improvements, with refinements introduced in both [#1367](https://github.com/cryspen/hax/pull/1367) and [#1337](https://github.com/cryspen/hax/pull/1337).\n\nOutside of the frontend, we also focused on enhancements and fixes within `hax-lib` and the engine. Notably, support for mathematical integers and logical propositions has been strengthened, making reasoning more precise and expressive ([#1372](https://github.com/cryspen/hax/pull/1372), [#1352](https://github.com/cryspen/hax/pull/1352), [#1351](https://github.com/cryspen/hax/pull/1351)). Additionally, we resolved several issues related to the use of `self` in contracts, improving overall stability and correctness in those scenarios.\n\nMarch also brought new capabilities to `hax-lib`. The newly introduced `decreases` attribute makes it possible to express termination arguments directly in Rust, giving users better control over termination checking. 
Furthermore, the addition of the `<backend>::replace_body` family of attributes allows developers to substitute the body of a Rust function with backend-specific code, offering a powerful mechanism for fine-tuned extraction when needed.\n\nStay tuned for more updates next month!\n\n### Full list of PRs\n\n* \\#1380: [Update the rustc pin](https://github.com/cryspen/hax/pull/1380)\n* \\#1377: [Stop depending on ocamlgraph fork.](https://github.com/cryspen/hax/pull/1377)\n* \\#1376: [Correctly handle impl exprs for closures](https://github.com/cryspen/hax/pull/1376)\n* \\#1373: [simd types](https://github.com/cryspen/hax/pull/1373)\n* \\#1372: [`hax-lib`: `Int` improvements and fixes](https://github.com/cryspen/hax/pull/1372)\n* \\#1367: [Remove `ConstantExt` and its `translate_uneval` machinery](https://github.com/cryspen/hax/pull/1367)\n* \\#1363: [fix: update flake.lock](https://github.com/cryspen/hax/pull/1363)\n* \\#1361: [Various fstar core additions, mostly for iterators.](https://github.com/cryspen/hax/pull/1361)\n* \\#1357: [fix(hax-lib): allow `future(self)`](https://github.com/cryspen/hax/pull/1357)\n* \\#1356: [feat(proof-libs): add missing definitions](https://github.com/cryspen/hax/pull/1356)\n* \\#1355: [fix(engine/fstar-backend): drop spurious precondition on `Lemma`s](https://github.com/cryspen/hax/pull/1355)\n* \\#1354: [fix(hax-lib/dummy): intro `int!`](https://github.com/cryspen/hax/pull/1354)\n* \\#1353: [fix(proof-libs/F*): fix name `f_TryInto`](https://github.com/cryspen/hax/pull/1353)\n* \\#1352: [hax-lib: prop: allow equality on every type](https://github.com/cryspen/hax/pull/1352)\n* \\#1351: [fix(hax-lib/assume): fixes assume and assert_prop](https://github.com/cryspen/hax/pull/1351)\n* \\#1350: [fix(engine) Avoid replacing 'let rec' in interfaces.](https://github.com/cryspen/hax/pull/1350)\n* \\#1349: [fix(engine/fstar backend): subst self_ to self](https://github.com/cryspen/hax/pull/1349)\n* \\#1348: [Hax shouldn't distinguish the `If` 
case in MIR](https://github.com/cryspen/hax/pull/1348)\n* \\#1345: [Engine: import static items (but mutable ones), reject asm blocks](https://github.com/cryspen/hax/pull/1345)\n* \\#1342: [feat(hax-lib): add support for `decreases` clauses in F*](https://github.com/cryspen/hax/pull/1342)\n* \\#1339: [Bertie libs](https://github.com/cryspen/hax/pull/1339)\n* \\#1338: [Don't error on built-in associated types](https://github.com/cryspen/hax/pull/1338)\n* \\#1337: [Translate MIR constants using the const-eval interpreter](https://github.com/cryspen/hax/pull/1337)\n* \\#1336: [F* typeclass for `core::ops::BitXor`](https://github.com/cryspen/hax/pull/1336)\n* \\#1333: [feat(engine/names): extend name policy expressivity](https://github.com/cryspen/hax/pull/1333)\n* \\#1332: [fix(engine/gen-printer): fixes #1294](https://github.com/cryspen/hax/pull/1332)\n* \\#1331: [ci(nix): use F* bin cache in mlkem.yml](https://github.com/cryspen/hax/pull/1331)\n* \\#1330: [This month in hax 02-25 + release 0.2.0](https://github.com/cryspen/hax/pull/1330)\n* \\#1329: [fix(engine) Allow implementing arithmetic traits.](https://github.com/cryspen/hax/pull/1329)\n* \\#1328: [fix(setup.sh): rustup 1.28](https://github.com/cryspen/hax/pull/1328)\n* \\#1327: [fix(nix): MacOS: add rustc and libz dylib to `DYLD_LIBRARY_PATH`](https://github.com/cryspen/hax/pull/1327)\n* \\#1323: [Add more facts to logand_lemma](https://github.com/cryspen/hax/pull/1323)\n* \\#1321: [Introduce `hax_lib::BACKEND::replace_body` attribute](https://github.com/cryspen/hax/pull/1321)\n\n### Contributors\n* [@Nadrieril](https://github.com/Nadrieril)\n* [@W95Psp](https://github.com/W95Psp)\n* [@jschneider-bensch](https://github.com/jschneider-bensch)\n* [@karthikbhargavan](https://github.com/karthikbhargavan)\n* [@mamonet](https://github.com/mamonet)\n* [@maximebuyse](https://github.com/maximebuyse)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2025-04.md",
    "content": "---\nauthors:\n  - maxime\ntitle: \"This Month in Hax: April 2025\"\ndate: 2025-05-05\n---\n\nIn April, we successfully merged **38 pull requests**!\n\nThanks [@Nadrieril](https://github.com/Nadrieril), for pinning a more recent nightly version of the Rust compiler ([#1391](https://github.com/cryspen/hax/pull/1391)). Nadrieril also continued making the frontend more robust and complete with work on constants ([#1402](https://github.com/cryspen/hax/pull/1402), [#1420](https://github.com/cryspen/hax/pull/1420), [#1429](https://github.com/cryspen/hax/pull/1429)) and item's children ([#1412](https://github.com/cryspen/hax/pull/1412)).\n\n[@W95Psp](https://github.com/W95Psp) worked on `hax-lib` with improved support for writing F\\* lemmas in Rust ([#1428](https://github.com/cryspen/hax/pull/1428)), and fstar post-processing with tactics ([#1437](https://github.com/cryspen/hax/pull/1437)).\n\nI worked on while loops which now support invariants and variants (to prove termination) in [#1375](https://github.com/cryspen/hax/pull/1375).\n\nWe also worked on various improvements like removing deprecated dependencies used by hax-lib ([#1385](https://github.com/cryspen/hax/pull/1385) and [#1394](https://github.com/cryspen/hax/pull/1394)), some ProVerif backend workarounds by [@jschneider-bensch](https://github.com/jschneider-bensch) ([#1360](https://github.com/cryspen/hax/pull/1360), [#1401](https://github.com/cryspen/hax/pull/1401) and [#1406](https://github.com/cryspen/hax/pull/1406)), and multiple F\\* core lib additions.\n\nStay tuned for more updates next month!\n\n### Full list of PRs\n\n* \\#1437: [feat(hax_lib/macros): F*: add `postprocess_with`](https://github.com/cryspen/hax/pull/1437)\n* \\#1436: [Silence unused inputs in lemmas](https://github.com/cryspen/hax/pull/1436)\n* \\#1435: [Add `t_Debug` instance for `u128`](https://github.com/cryspen/hax/pull/1435)\n* \\#1432: [Add Instances of `Core.Fmt.t_Debug` for `Prims.bool` and 
pairs](https://github.com/cryspen/hax/pull/1432)\n* \\#1430: [Fix range loops for empty ranges.](https://github.com/cryspen/hax/pull/1430)\n* \\#1429: [Translate evaluated closure constants](https://github.com/cryspen/hax/pull/1429)\n* \\#1428: [feat(hax-lib&backend): F*: support for SMT patterns](https://github.com/cryspen/hax/pull/1428)\n* \\#1427: [feat(proof-libs): add `impl_i32__wrapping_sub`](https://github.com/cryspen/hax/pull/1427)\n* \\#1422: [Barrett example tutorial](https://github.com/cryspen/hax/pull/1422)\n* \\#1420: [Add a fake `DefId` for promoted constants](https://github.com/cryspen/hax/pull/1420)\n* \\#1417: [Add `arg_count` to MIR bodies](https://github.com/cryspen/hax/pull/1417)\n* \\#1416: [fix(engine) Fix name clashes for functions defined in impl methods.](https://github.com/cryspen/hax/pull/1416)\n* \\#1415: [fix(proof-libs): give a computable definition to `>>`](https://github.com/cryspen/hax/pull/1415)\n* \\#1414: [Use `ConstantExprKind::Todo` more](https://github.com/cryspen/hax/pull/1414)\n* \\#1413: [feat(justfile): `just expand`: always use nightly](https://github.com/cryspen/hax/pull/1413)\n* \\#1412: [full_def: Add helper to explore an item's children](https://github.com/cryspen/hax/pull/1412)\n* \\#1410: [Typeclass for`BitAnd`; Instantiations for `Prims.bool`](https://github.com/cryspen/hax/pull/1410)\n* \\#1409: [Libs needed for Bertie](https://github.com/cryspen/hax/pull/1409)\n* \\#1408: [feat(fstar/proof-libs): add a lemma for simplifying double casts](https://github.com/cryspen/hax/pull/1408)\n* \\#1406: [[ProVerif] Match arm type error workaround](https://github.com/cryspen/hax/pull/1406)\n* \\#1404: [feat(backends/fstar): make `unfold` the opaque proxy functions](https://github.com/cryspen/hax/pull/1404)\n* \\#1402: [Improve support for getting constant bodies](https://github.com/cryspen/hax/pull/1402)\n* \\#1401: [[ProVerif] Match arm type workaround](https://github.com/cryspen/hax/pull/1401)\n* \\#1395: [Put 
macro_metavar_expr_concat feature under hax cfg.](https://github.com/cryspen/hax/pull/1395)\n* \\#1394: [Replace `paste` by `with_builtin_macros`.](https://github.com/cryspen/hax/pull/1394)\n* \\#1393: [Tell crane to keep references to the rust toolchain](https://github.com/cryspen/hax/pull/1393)\n* \\#1391: [Update the rustc pin](https://github.com/cryspen/hax/pull/1391)\n* \\#1390: [Revert #1377](https://github.com/cryspen/hax/pull/1390)\n* \\#1389: [Cut ASTs printed in errors when they are too long.](https://github.com/cryspen/hax/pull/1389)\n* \\#1388: [Remove AST printing in import_thir errors.](https://github.com/cryspen/hax/pull/1388)\n* \\#1385: [Switch to proc-macro-error2 because original is unmaintained.](https://github.com/cryspen/hax/pull/1385)\n* \\#1384: [Remove deprecated macro parsing infrastructure](https://github.com/cryspen/hax/pull/1384)\n* \\#1381: [feat(docs/blog): this month in hax 03 2025](https://github.com/cryspen/hax/pull/1381)\n* \\#1375: [Add invariants for while loops.](https://github.com/cryspen/hax/pull/1375)\n* \\#1368: [feat(blog): add blog post about the rework of names](https://github.com/cryspen/hax/pull/1368)\n* \\#1360: [[PV] Generate consistent field accessor names](https://github.com/cryspen/hax/pull/1360)\n* \\#1340: [Add logor_disjoint to Rust_primitives.Integers](https://github.com/cryspen/hax/pull/1340)\n* \\#808: [Fix dependencies bounded integers](https://github.com/cryspen/hax/pull/808)\n\n### Contributors\n* [@N1ark](https://github.com/N1ark)\n* [@Nadrieril](https://github.com/Nadrieril)\n* [@W95Psp](https://github.com/W95Psp)\n* [@jschneider-bensch](https://github.com/jschneider-bensch)\n* [@karthikbhargavan](https://github.com/karthikbhargavan)\n* [@mamonet](https://github.com/mamonet)\n* [@maximebuyse](https://github.com/maximebuyse)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2025-05.md",
    "content": "---\nauthors:\n  - maxime\ntitle: \"This Month in Hax: May 2025\"\ndate: 2025-05-05\n---\n\nIn May, we successfully merged **19 pull requests**!\n\n[@Nadrieril](https://github.com/Nadrieril) helped making the frontend more robust and complete with work on impl exprs ([#1431](https://github.com/cryspen/hax/pull/1431)), MIR extraction ([#1444](https://github.com/cryspen/hax/pull/1444), [#1457](https://github.com/cryspen/hax/pull/1457)) and `FnOnce` ([#1477](https://github.com/cryspen/hax/pull/1477)).\n\n[@W95Psp](https://github.com/W95Psp) worked on `hax-lib` with improved support for writing F* lemmas in Rust ([#1456](https://github.com/cryspen/hax/pull/1456)).\n\n[@cmester0](https://github.com/cmester0) improved the Coq and SSProve backends ([#1426](https://github.com/cryspen/hax/pull/1426) and [#1108](https://github.com/cryspen/hax/pull/1108))\n\nApart from that, we contributed multiple F* [`core` library](https://doc.rust-lang.org/stable/core/) additions.\n\nStay tuned for more updates next month!\n\n### Full list of PRs\n\n* \\#1481: [Update owners metadata](https://github.com/cryspen/hax/pull/1481)\n* \\#1477: [Provide the `FnOnce` shim for closures](https://github.com/cryspen/hax/pull/1477)\n* \\#1476: [Release 0.3.1](https://github.com/cryspen/hax/pull/1476)\n* \\#1473: [fix(proof-libs) Remove fields that shouldn't be in PartialOrd.](https://github.com/cryspen/hax/pull/1473)\n* \\#1471: [fix(engine) Add InlineConst in concrete_idents.](https://github.com/cryspen/hax/pull/1471)\n* \\#1465: [Release 0.3.0](https://github.com/cryspen/hax/pull/1465)\n* \\#1458: [feat(proof-libs): add `rem_euclid` for every int types](https://github.com/cryspen/hax/pull/1458)\n* \\#1457: [Simplify MIR place translation](https://github.com/cryspen/hax/pull/1457)\n* \\#1456: [Fix unused in lemmas](https://github.com/cryspen/hax/pull/1456)\n* \\#1455: [feat(proof-libs): F*: implement some wrapping operations on i64](https://github.com/cryspen/hax/pull/1455)\n* 
\\#1454: [fix(engine/nix): pin ocamlgraph, waiting for https://github.com/NixOS/nixpkgs/pull/397883](https://github.com/cryspen/hax/pull/1454)\n* \\#1451: [fix(engine): naming: items under closures](https://github.com/cryspen/hax/pull/1451)\n* \\#1445: [Add interfaces to fstar core and rust_primitives](https://github.com/cryspen/hax/pull/1445)\n* \\#1444: [Add missing unwind information in MIR](https://github.com/cryspen/hax/pull/1444)\n* \\#1439: [Upstream evit changes up to Feb 21](https://github.com/cryspen/hax/pull/1439)\n* \\#1438: [This month in hax April 2025.](https://github.com/cryspen/hax/pull/1438)\n* \\#1431: [Consistently translate impl exprs for parent items](https://github.com/cryspen/hax/pull/1431)\n* \\#1426: [Bertie ssprove](https://github.com/cryspen/hax/pull/1426)\n* \\#1108: [Coq small fixes](https://github.com/cryspen/hax/pull/1108)\n\n### Contributors\n* [@Nadrieril](https://github.com/Nadrieril)\n* [@W95Psp](https://github.com/W95Psp)\n* [@clementblaudeau](https://github.com/clementblaudeau)\n* [@cmester0](https://github.com/cmester0)\n* [@franziskuskiefer](https://github.com/franziskuskiefer)\n* [@maximebuyse](https://github.com/maximebuyse)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2025-06.md",
    "content": "---\nauthors:\n  - maxime\ntitle: \"This Month in Hax: June 2025\"\ndate: 2025-06-08\n---\n\nIn June, we successfully merged **21 pull requests**!\n\n[@Nadrieril](https://github.com/Nadrieril) and [@N1ark](https://github.com/N1ark) continued the improvements on the frontend side with the addition of unchecked arithmetic operators ([#1513](https://github.com/cryspen/hax/pull/1513)), regrouping generic and trait arguments in a struct ([#1514](https://github.com/cryspen/hax/pull/1514)), support of trait aliases in `full_def` ([#1494](https://github.com/cryspen/hax/pull/1494)), addition of `Ty::FnDef` ([#1487](https://github.com/cryspen/hax/pull/1487)), drop calls resolution ([#1467](https://github.com/cryspen/hax/pull/1467)) and more.\n\n[@W95Psp](https://github.com/W95Psp), [@clementblaudeau](https://github.com/clementblaudeau) and myself worked on adding infrastructure for writing backends and compilation phases for hax in Rust (instead of OCaml). We now have a Rust version of the hax AST and we can convert back and forth from the OCaml version (which should allow us to incrementally replace OCaml phases by Rust phases). We also offer utilities for printing this AST when implementing backends. 
Our plan for the next months is to use this for the new backends we will add, and experiment with Rust phases.\n\nStay tuned for more updates next month!\n\n### Full list of PRs\n\n* \\#1517: [Update charon.yml: add `workflow_dispatch`](https://github.com/cryspen/hax/pull/1517)\n* \\#1514: [Regroup generic and trait arguments in a struct](https://github.com/cryspen/hax/pull/1514)\n* \\#1513: [Separate `{Add,Sub,Mul}Unchecked`](https://github.com/cryspen/hax/pull/1513)\n* \\#1510: [Fix following merges changing the frontend AST](https://github.com/cryspen/hax/pull/1510)\n* \\#1507: [Rust Engine: rename rust printer to rust engine](https://github.com/cryspen/hax/pull/1507)\n* \\#1506: [Rust engine: Add spans to the Rust AST.](https://github.com/cryspen/hax/pull/1506)\n* \\#1505: [Rust Engine: OCaml bridge for the AST (OCaml AST -> Rust AST)](https://github.com/cryspen/hax/pull/1505)\n* \\#1504: [Rust Engine: transport the Rust AST to OCaml](https://github.com/cryspen/hax/pull/1504)\n* \\#1502: [Upstream: Rust engine ast](https://github.com/cryspen/hax/pull/1502)\n* \\#1501: [Upstream evit changes up to May 19th](https://github.com/cryspen/hax/pull/1501)\n* \\#1499: [docs: Escape \"*\" in \"F*\" from Markdown](https://github.com/cryspen/hax/pull/1499)\n* \\#1494: [full_def: support trait aliases](https://github.com/cryspen/hax/pull/1494)\n* \\#1492: [sha256 example typecheck in f*](https://github.com/cryspen/hax/pull/1492)\n* \\#1491: [This month in hax May 2025.](https://github.com/cryspen/hax/pull/1491)\n* \\#1490: [proof-lib/fstar Add an actual instance for ordering of bound integers](https://github.com/cryspen/hax/pull/1490)\n* \\#1487: [Add `Ty::FnDef`](https://github.com/cryspen/hax/pull/1487)\n* \\#1485: [Fix detection of trait associated constants](https://github.com/cryspen/hax/pull/1485)\n* \\#1482: [Update rustc pin](https://github.com/cryspen/hax/pull/1482)\n* \\#1480: [Upstream evit changes up to April 25](https://github.com/cryspen/hax/pull/1480)\n* 
\\#1470: [Add enum coverage test for coq](https://github.com/cryspen/hax/pull/1470)\n* \\#1467: [Resolve Drop calls](https://github.com/cryspen/hax/pull/1467)\n\n### Contributors\n* [@N1ark](https://github.com/N1ark)\n* [@Nadrieril](https://github.com/Nadrieril)\n* [@W95Psp](https://github.com/W95Psp)\n* [@chrysn](https://github.com/chrysn)\n* [@clementblaudeau](https://github.com/clementblaudeau)\n* [@cmester0](https://github.com/cmester0)\n* [@karthikbhargavan](https://github.com/karthikbhargavan)\n* [@maximebuyse](https://github.com/maximebuyse)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2025-07.md",
    "content": "---\nauthors:\n  - maxime\ntitle: \"This Month in Hax: July 2025\"\ndate: 2025-07-07\n---\n\nIn July, we successfully merged **32 pull requests**!\n\n[@Nadrieril](https://github.com/Nadrieril) made sure that we use a recent version of rustc (PR [\\#1534](https://github.com/cryspen/hax/pull/1534)) and made new improvements to trait resolution in the frontend (PR [\\#1522](https://github.com/cryspen/hax/pull/1522)).\n\nWe continued efforts to improve the usability of hax with several F\\* core lib additions, and improvements to the CI.\n\nMore importantly, [@W95Psp](https://github.com/W95Psp), [@clementblaudeau](https://github.com/clementblaudeau) and myself worked on improvements to the new hax engine implemented in Rust (PR [\\#1508](https://github.com/cryspen/hax/pull/1508), [\\#1518](https://github.com/cryspen/hax/pull/1518), [\\#1525](https://github.com/cryspen/hax/pull/1525) and [\\#1526](https://github.com/cryspen/hax/pull/1526)).\n\nFinally, let's celebrate the arrival of our new backend for Lean (PR [\\#1509](https://github.com/cryspen/hax/pull/1509))! [@clementblaudeau](https://github.com/clementblaudeau) is taking the lead on this project. This backend is implemented in Rust using our new infrastructure. It is still under active development and many improvements will come in the next couple of months. 
\n\nStay tuned for more updates next month!\n\n### Full list of PRs\n\n* \\#1587: [Fix inconsistent field naming in marker traits.](https://github.com/cryspen/hax/pull/1587)\n* \\#1583: [fix(fstar-backend) Add hint for type class resolution with inheritance](https://github.com/cryspen/hax/pull/1583)\n* \\#1581: [Regen code](https://github.com/cryspen/hax/pull/1581)\n* \\#1576: [feat(ci/mlkem): make job work with merge queues](https://github.com/cryspen/hax/pull/1576)\n* \\#1575: [fix(ci/mlkem): clone twice](https://github.com/cryspen/hax/pull/1575)\n* \\#1574: [feat(ci): mlkem job: use specific libcrux revision](https://github.com/cryspen/hax/pull/1574)\n* \\#1572: [fix(lib) Fix _super hashes for Cmp traits.](https://github.com/cryspen/hax/pull/1572)\n* \\#1570: [Frontend: Fix regression \\#1566](https://github.com/cryspen/hax/pull/1570)\n* \\#1562: [Local hax lib for ml kem ci](https://github.com/cryspen/hax/pull/1562)\n* \\#1561: [Check rust formatting with 'cargo fmt'.](https://github.com/cryspen/hax/pull/1561)\n* \\#1558: [chore(ci/gh actions): `ubuntu-20.04` -> `ubuntu-22.04`](https://github.com/cryspen/hax/pull/1558)\n* \\#1557: [Fix typo in properties.md](https://github.com/cryspen/hax/pull/1557)\n* \\#1556: [Nix: un-pin ocamlgraph](https://github.com/cryspen/hax/pull/1556)\n* \\#1552: [fix Core.Clone](https://github.com/cryspen/hax/pull/1552)\n* \\#1549: [Delete .github/workflows/engine_js_build.yml](https://github.com/cryspen/hax/pull/1549)\n* \\#1548: [Fix type of u64 rotate left](https://github.com/cryspen/hax/pull/1548)\n* \\#1546: [This month in hax June 2025.](https://github.com/cryspen/hax/pull/1546)\n* \\#1545: [fix(nix): make `nix run` work on darwin](https://github.com/cryspen/hax/pull/1545)\n* \\#1544: [fix(setup.sh): install Rust engine](https://github.com/cryspen/hax/pull/1544)\n* \\#1540: [chore(rengine/lean): fix warnings](https://github.com/cryspen/hax/pull/1540)\n* \\#1535: [Fix typo: frontent -> 
frontend](https://github.com/cryspen/hax/pull/1535)\n* \\#1534: [Update rustc to latest nightly](https://github.com/cryspen/hax/pull/1534)\n* \\#1533: [feat(hax-lib): `int!`: support hex, octal and binary literals](https://github.com/cryspen/hax/pull/1533)\n* \\#1526: [Rust Engine: expose common `DefId`s in the Rust engine](https://github.com/cryspen/hax/pull/1526)\n* \\#1525: [Rust Engine: allow multiple backends to implement the `Pretty` trait](https://github.com/cryspen/hax/pull/1525)\n* \\#1523: [Release 0.3.2](https://github.com/cryspen/hax/pull/1523)\n* \\#1522: [Don't erase inner binders in trait resolution](https://github.com/cryspen/hax/pull/1522)\n* \\#1520: [Addition of integer function implementations in Core.Num.fsti, along with generic functions in Rust_primitives.Integer.fsti to support them.](https://github.com/cryspen/hax/pull/1520)\n* \\#1518: [Rust Engine: intro. resugared AST fragment](https://github.com/cryspen/hax/pull/1518)\n* \\#1509: [Lean backend [Part 1/3]](https://github.com/cryspen/hax/pull/1509)\n* \\#1508: [Rust Engine: turn it into a hax-frontend compatible engine](https://github.com/cryspen/hax/pull/1508)\n* \\#1466: [Proof lib/fstar support more rbe](https://github.com/cryspen/hax/pull/1466)\n\n### Contributors\n* [@Coda-Coda](https://github.com/Coda-Coda)\n* [@Nadrieril](https://github.com/Nadrieril)\n* [@Parrot7483](https://github.com/Parrot7483)\n* [@W95Psp](https://github.com/W95Psp)\n* [@clementblaudeau](https://github.com/clementblaudeau)\n* [@cmester0](https://github.com/cmester0)\n* [@franziskuskiefer](https://github.com/franziskuskiefer)\n* [@maximebuyse](https://github.com/maximebuyse)\n* [@satiscugcat](https://github.com/satiscugcat)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2025-08.md",
    "content": "---\nauthors:\n  - lucas\ntitle: \"This Month in Hax: August 2025\"\ndate: 2025-09-02\n---\n\nIn August, we successfully merged **17 pull requests**!\n\nThis month, we continued the effort of building out the new hax engine in Rust, with a focus on creating a robust infrastructure for backend development. We introduced a generic [`Backend` trait](https://github.com/cryspen/hax/pull/1603) and a new [printing infrastructure](https://github.com/cryspen/hax/pull/1600), which will simplify the process of creating new backends. We also improved how [global identifiers](https://github.com/cryspen/hax/pull/1624) are handled and added [visitors](https://github.com/cryspen/hax/pull/1585).\n\nBuilding on this new infrastructure, the Lean backend saw significant progress. This month, we merged [the first Lean proofs](https://github.com/cryspen/hax/pull/1590) and a set of [Lean examples](https://github.com/cryspen/hax/pull/1593). The Lean printer was also [updated](https://github.com/cryspen/hax/pull/1607) to leverage the latest improvements in the Rust engine.\n\nStay tuned for more updates next month!\n\n### Full list of PRs\n* \\#1624: [Rust Engine: global identifiers: add view and rendering](https://github.com/cryspen/hax/pull/1624)\n* \\#1613: [ci(rengine): clippy: deny lints](https://github.com/cryspen/hax/pull/1613)\n* \\#1612: [misc(rengine): stop pinning `derive-generic-visitor` with a git branch](https://github.com/cryspen/hax/pull/1612)\n* \\#1609: [feat(nix/ci): run examples outside of the sandbox](https://github.com/cryspen/hax/pull/1609)\n* \\#1608: [Rust Engine: improve debug utility `show-json`](https://github.com/cryspen/hax/pull/1608)\n* \\#1607: [Update Lean printer to new infrastructure](https://github.com/cryspen/hax/pull/1607)\n* \\#1605: [rename explicit_panic](https://github.com/cryspen/hax/pull/1605)\n* \\#1603: [Rust Engine: add a `Backend` trait](https://github.com/cryspen/hax/pull/1603)\n* \\#1600: [Rust Engine: print 
infrastructure](https://github.com/cryspen/hax/pull/1600)\n* \\#1597: [Fixes this month in hax](https://github.com/cryspen/hax/pull/1597)\n* \\#1596: [This month in hax July 2025.](https://github.com/cryspen/hax/pull/1596)\n* \\#1594: [Update publications.md](https://github.com/cryspen/hax/pull/1594)\n* \\#1593: [Lean backend [M2] - 2/3 - Examples](https://github.com/cryspen/hax/pull/1593)\n* \\#1592: [Upstream changes from evit up to June 11th 2025.](https://github.com/cryspen/hax/pull/1592)\n* \\#1590: [Lean backend [M2] - 1/3 - First proofs](https://github.com/cryspen/hax/pull/1590)\n* \\#1588: [fix(engine) Export traits defined in bundles.](https://github.com/cryspen/hax/pull/1588)\n* \\#1585: [Visitors using `derive-generic-visitor`](https://github.com/cryspen/hax/pull/1585)\n\n### Contributors\n* [@Parrot7483](https://github.com/Parrot7483)\n* [@W95Psp](https://github.com/W95Psp)\n* [@clementblaudeau](https://github.com/clementblaudeau)\n* [@franziskuskiefer](https://github.com/franziskuskiefer)\n* [@maximebuyse](https://github.com/maximebuyse)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2025-09.md",
    "content": "---\nauthors:\n  - clement\ntitle: \"This Month in Hax: September 2025\"\ndate: 2025-10-01\n---\n\nIn September, we successfully merged **44 pull requests**!\n\nAfter a summer slow down, we focused on improving the new rust-written engine of\nHax (that progressively replaces the OCaml one), along with its flagship backend\n(Lean), while consolidating the documentation and tutorial. We released a [new\nversion of hax](https://github.com/cryspen/hax/pull/1647) 🎉\n\nIn the rust-engine, a lot of work went into the treatment of identifiers\n([#1648](https://github.com/cryspen/hax/pull/1648),\n[#1689](https://github.com/cryspen/hax/pull/1689),\n[#1693](https://github.com/cryspen/hax/pull/1693)).\n\nWe improved the caching ([#1701](https://github.com/cryspen/hax/pull/1701),\n[#1719](https://github.com/cryspen/hax/pull/1719)), and the control over\nextraction [with attributes](https://github.com/cryspen/hax/pull/1685).\n\nThe Lean backend saw a lot of new features: structs, enums, basic support for\ntraits, support for functionalized loops. Along those, we improved the\ndocumentation in the [tutorial](https://hax.cryspen.com/manual/lean/tutorial/) and in the\n[manual](https://hax.cryspen.com/manual/lean/). 
The F\\*-*parity* and the\nofficial launch of the Lean backend are getting closer!\n\nStay tuned for more updates next month!\n\n### Full list of PRs\n* \\#1719: [Improve rustc caching with `cargo-hax`](https://github.com/cryspen/hax/pull/1719)\n* \\#1718: [feat(CONTRIBUTING): document issue prefixes style](https://github.com/cryspen/hax/pull/1718)\n* \\#1701: [fix(ci): cache hax entirely on Cachix](https://github.com/cryspen/hax/pull/1701)\n* \\#1695: [(Lean Backend) Improve support for functionalized loops](https://github.com/cryspen/hax/pull/1695)\n* \\#1694: [Show Lean backend when doing `cargo hax into --help`](https://github.com/cryspen/hax/pull/1694)\n* \\#1693: [Rust Engine: refactor names, change tuples representation](https://github.com/cryspen/hax/pull/1693)\n* \\#1692: [Cargo hax: improve error reports](https://github.com/cryspen/hax/pull/1692)\n* \\#1691: [Update CHANGELOG.md](https://github.com/cryspen/hax/pull/1691)\n* \\#1690: [Website: add tests for dead links and playground integration](https://github.com/cryspen/hax/pull/1690)\n* \\#1689: [Rust Engine: add interning table, intern `GlobalId`s](https://github.com/cryspen/hax/pull/1689)\n* \\#1688: [Fix names in Lean tutorial.](https://github.com/cryspen/hax/pull/1688)\n* \\#1687: [Docs: fix build](https://github.com/cryspen/hax/pull/1687)\n* \\#1686: [Docs: hide RFCs tab and add toolchain structure page.](https://github.com/cryspen/hax/pull/1686)\n* \\#1685: [Allow hax_lib::include to override -i flags.](https://github.com/cryspen/hax/pull/1685)\n* \\#1684: [Fix libcrux-ref for the merge queue.](https://github.com/cryspen/hax/pull/1684)\n* \\#1683: [Update README.md](https://github.com/cryspen/hax/pull/1683)\n* \\#1682: [Add documentation for the Lean backend (manual)](https://github.com/cryspen/hax/pull/1682)\n* \\#1681: [fix(engine): add rewrite local self as a proper phase](https://github.com/cryspen/hax/pull/1681)\n* \\#1679: [(Lean Backend) Add basic support for 
traits](https://github.com/cryspen/hax/pull/1679)\n* \\#1678: [Merge evit Aug 21](https://github.com/cryspen/hax/pull/1678)\n* \\#1676: [Temporarily remove ocaml doc build because of odoc issue.](https://github.com/cryspen/hax/pull/1676)\n* \\#1669: [feat(rengine): output diagnostics in `todo!`s in printers](https://github.com/cryspen/hax/pull/1669)\n* \\#1665: [Improve readme](https://github.com/cryspen/hax/pull/1665)\n* \\#1662: [feat(rengine): add resugaring for tuples](https://github.com/cryspen/hax/pull/1662)\n* \\#1661: [CONTRIBUTING.md: change and clarify the meaning of assignee](https://github.com/cryspen/hax/pull/1661)\n* \\#1659: [feat(rengine): resugaring: add `FunctionsToConstants`](https://github.com/cryspen/hax/pull/1659)\n* \\#1655: [fix(just): rename `show-json` into `debug-json`: that's the correct name](https://github.com/cryspen/hax/pull/1655)\n* \\#1654: [feat(engine): import thir: add missing borrows](https://github.com/cryspen/hax/pull/1654)\n* \\#1649: [feat(gh actions): job that creates a \"this month in hax\" skeleton](https://github.com/cryspen/hax/pull/1649)\n* \\#1648: [feat(rengine): use `ExplicitDefId` instead of `DefId` for names](https://github.com/cryspen/hax/pull/1648)\n* \\#1647: [Release hax 0.3.4](https://github.com/cryspen/hax/pull/1647)\n* \\#1645: [fix(rengine): missed case `Static` in name rendering](https://github.com/cryspen/hax/pull/1645)\n* \\#1644: [Hax release 0.3.3](https://github.com/cryspen/hax/pull/1644)\n* \\#1643: [feat(frontend/hir): add visibility to items](https://github.com/cryspen/hax/pull/1643)\n* \\#1642: [feat(blog): this month in hax](https://github.com/cryspen/hax/pull/1642)\n* \\#1640: [Rust engine: Optimize communication with hax driver and Ocaml engine.](https://github.com/cryspen/hax/pull/1640)\n* \\#1635: [Lean backend - Run rustc coverage tests for lean.](https://github.com/cryspen/hax/pull/1635)\n* \\#1634: [chore(deps): bump tracing-subscriber from 0.3.19 to 
0.3.20](https://github.com/cryspen/hax/pull/1634)\n* \\#1633: [Rust-engine / Lean backend: pass include flag from the ocaml engine to the rust engine.](https://github.com/cryspen/hax/pull/1633)\n* \\#1626: [Lean tutorial first version.](https://github.com/cryspen/hax/pull/1626)\n* \\#1623: [feat(Lean backend) Add support for enums and structs](https://github.com/cryspen/hax/pull/1623)\n* \\#1591: [Lean backend (M2) - 3/3 - Resugarings](https://github.com/cryspen/hax/pull/1591)\n* \\#1564: [feat(ci): add an action to ensure changelog updates](https://github.com/cryspen/hax/pull/1564)\n* \\#1559: [Merge frontend improvements](https://github.com/cryspen/hax/pull/1559)\n\n### Contributors\n* [@Nadrieril](https://github.com/Nadrieril)\n* [@W95Psp](https://github.com/W95Psp)\n* [@alexanderlhicks](https://github.com/alexanderlhicks)\n* [@app/dependabot](https://github.com/dependabot)\n* [@clementblaudeau](https://github.com/clementblaudeau)\n* [@maximebuyse](https://github.com/maximebuyse)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2025-10.md",
    "content": "---\nauthors:\n  - lucas\ntitle: \"This Month in Hax: October 2025\"\ndate: 2025-11-01\n---\n\nIn October, we successfully merged **15 pull requests**!\n\nThe Rust engine and Lean backend gained a monadic phase that wraps pure values and binds computations through `pure` and `lift` insertions, ensuring that the Lean backend faithfully uses the Lean `Result` monad.\nA rejection phase was added to enforce Lean's do-notation DSL, preventing interleaving of expressions and statements and providing clearer diagnostics.  \nWe also introduced a `FunctionsToConstants` resugaring: Lean can now extract values from Rust `const`s using helper functions, guaranteeing panic-free constant evaluation.  \nStruct updates are now supported via base-expression syntax, and we refactored the printer traits to return static document builders, improve span handling, and simplify lifetimes.\n\nThe F\\* proof libraries now include a better `VecDeque` model and several fixes; loops without mutation are now accepted.\nThe Lean backend also gained improved error messages and a refactored proof library.  
\n\nStay tuned for more updates next month!\n\n### Full list of PRs\n* \\#1746: [feat(rust-engine/lean): monadic phase](https://github.com/cryspen/hax/pull/1746)\n* \\#1739: [feat(rengine, lean): add rejection phase that ensures an expression is in the Lean do-notation DSL](https://github.com/cryspen/hax/pull/1739)\n* \\#1738: [feat(lean): Use `FunctionsToConstants`](https://github.com/cryspen/hax/pull/1738)\n* \\#1737: [This month in hax blog post 2025 09](https://github.com/cryspen/hax/pull/1737)\n* \\#1736: [feat(lean): add support for base expression of structs](https://github.com/cryspen/hax/pull/1736)\n* \\#1735: [refactor(rengine): revisit printer traits](https://github.com/cryspen/hax/pull/1735)\n* \\#1733: [Fix rustc coverage tests.](https://github.com/cryspen/hax/pull/1733)\n* \\#1732: [Accept loops without mutation.](https://github.com/cryspen/hax/pull/1732)\n* \\#1730: [Add nightly CI job for ML-DSA lax-checking.](https://github.com/cryspen/hax/pull/1730)\n* \\#1729: [Release 0.3.5](https://github.com/cryspen/hax/pull/1729)\n* \\#1728: [Better VecDeque model and other F* proof lib improvements/fixes.](https://github.com/cryspen/hax/pull/1728)\n* \\#1726: [Switch hax-lib to Rust edition 2021.](https://github.com/cryspen/hax/pull/1726)\n* \\#1724: [fix(engine): fix owner_id](https://github.com/cryspen/hax/pull/1724)\n* \\#1717: [[Lean] Proper error messages](https://github.com/cryspen/hax/pull/1717)\n* \\#1696: [proof-libs/lean Library update and refactor](https://github.com/cryspen/hax/pull/1696)\n\n### Contributors\n* [@W95Psp](https://github.com/W95Psp)\n* [@clementblaudeau](https://github.com/clementblaudeau)\n* [@maximebuyse](https://github.com/maximebuyse)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2025-11.md",
    "content": "---\nauthors:\n  - maxime\ntitle: \"This Month in Hax: November 2025\"\ndate: 2025-12-01\n---\n\nIn November, we successfully merged **16 pull requests**!\n\nThe frontend continues getting improvements thanks to [@Nadrieril](https://github.com/Nadrieril). \nSo does the lean backend, with support for default methods and lib improvements including renaming the `Result` monad to `RustM`.\nThe lib has been the focus as we now have started incorporating the new core models written in Rust. \nA first batch of changes already happened in November and are automatically extracted to the F* library (manually for lean).\nWe continue the development of these models which will hopefully cover all we already have in the F* lib as manual F* models.\n\n### Full list of PRs\n* \\#1778: [Change impl_u64__rotate_right second parameter type to u32](https://github.com/cryspen/hax/pull/1778)\n* \\#1777: [feat(lean): add support for default methods](https://github.com/cryspen/hax/pull/1777)\n* \\#1775: [docs(blog): add avatar pictures locally](https://github.com/cryspen/hax/pull/1775)\n* \\#1770: [Update author avatars in .authors.yml](https://github.com/cryspen/hax/pull/1770)\n* \\#1769: [Fix broken links in README.](https://github.com/cryspen/hax/pull/1769)\n* \\#1768: [refactor(lean): rename Result to RustM](https://github.com/cryspen/hax/pull/1768)\n* \\#1767: [fix(engine/fstar): print [Fstar.Char.char] instead of [char]](https://github.com/cryspen/hax/pull/1767)\n* \\#1765: [Incorporate charon changes to the frontend](https://github.com/cryspen/hax/pull/1765)\n* \\#1754: [chore(examples): reserve extraction folder for auto-generated files](https://github.com/cryspen/hax/pull/1754)\n* \\#1752: [fix(docs): fix a few typos in tutorial](https://github.com/cryspen/hax/pull/1752)\n* \\#1751: [fix(gha): fix this month in hax template: drop extra `**` markdown](https://github.com/cryspen/hax/pull/1751)\n* \\#1750: [This month in hax 
2025-10](https://github.com/cryspen/hax/pull/1750)\n* \\#1749: [Merge hax-evit approved changes](https://github.com/cryspen/hax/pull/1749)\n* \\#1747: [feat(proof-lib/lean) core models](https://github.com/cryspen/hax/pull/1747)\n* \\#1743: [chore(deps): bump playwright and @playwright/test in /docs/.test](https://github.com/cryspen/hax/pull/1743)\n* \\#1742: [[F* lib] Tls codec panic freedom](https://github.com/cryspen/hax/pull/1742)\n\n### Contributors\n* [@Nadrieril](https://github.com/Nadrieril)\n* [@Parrot7483](https://github.com/Parrot7483)\n* [@W95Psp](https://github.com/W95Psp)\n* [@abentkamp](https://github.com/abentkamp)\n* [@app/dependabot](https://github.com/dependabot)\n* [@clementblaudeau](https://github.com/clementblaudeau)\n* [@franziskuskiefer](https://github.com/franziskuskiefer)\n* [@maximebuyse](https://github.com/maximebuyse)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2026-01.md",
    "content": "---\nauthors:\n  - maxime\ntitle: \"This Month in Hax: January 2026\"\ndate: 2026-02-02\n---\n\nIn January, we successfully merged **29 pull requests**!\n\nThe Lean backend continues to get improvements, mostly thanks to [@abentkamp](https://github.com/abentkamp)! We now have more specs in the proof library (for while loops, negation and more), and improved tactics to reason with our annotations of pre/post-conditions ([\\#1888](https://github.com/cryspen/hax/pull/1888)). Some naming fixes, and improvements of the handling of associated types make us closer to reaching parity with the F* backend!\n\nThe other significant improvements are on the core models side. Thanks to some new models, the F* proof library is now entirely extracted from Rust core models. The Rust primitives models will remain hand-written in each backend, but they have been designed to be as small as possible, and are intended to be modelled in a very backend-specific way. The Lean library is now also partly extracted from core models! Some modules are excluded for now as they rely on Rust features that we don't support in the Lean backend for now. 
We will prioritize these missing features in the next few months to extend the Lean library using extraction from core models.\n\n### Full list of PRs\n* \\#1902: [Fix broken dependabot links](https://github.com/cryspen/hax/pull/1902)\n* \\#1900: [feat: bump lean to v4.28.0-rc1](https://github.com/cryspen/hax/pull/1900)\n* \\#1899: [Various core models fixes.](https://github.com/cryspen/hax/pull/1899)\n* \\#1898: [Upstream Evit changes up to 24 dec 2025](https://github.com/cryspen/hax/pull/1898)\n* \\#1896: [fix(lean): more generous timeout for bvdecide](https://github.com/cryspen/hax/pull/1896)\n* \\#1895: [refactor(lean): rearrange lean lib file structure](https://github.com/cryspen/hax/pull/1895)\n* \\#1891: [feat(lean): spec for negation](https://github.com/cryspen/hax/pull/1891)\n* \\#1888: [feat(lean): hax_zify and hax_construct_pure tactics](https://github.com/cryspen/hax/pull/1888)\n* \\#1887: [feat(lean): support for opaque impls](https://github.com/cryspen/hax/pull/1887)\n* \\#1885: [[Lean] add Core_models.Slice.Impl.is_empty](https://github.com/cryspen/hax/pull/1885)\n* \\#1875: [set cfg for docs.rs](https://github.com/cryspen/hax/pull/1875)\n* \\#1872: [refactor(ci/nix): clean up a bit frontends cli src](https://github.com/cryspen/hax/pull/1872)\n* \\#1870: [Release 0.3.6](https://github.com/cryspen/hax/pull/1870)\n* \\#1869: [Blog post about my departure from Cryspen (and hax)](https://github.com/cryspen/hax/pull/1869)\n* \\#1868: [OCaml engine: export namespace insensitive sort, make Lean use it](https://github.com/cryspen/hax/pull/1868)\n* \\#1867: [fix(lean): escape keywords systematically](https://github.com/cryspen/hax/pull/1867)\n* \\#1865: [feat(lean): core models](https://github.com/cryspen/hax/pull/1865)\n* \\#1864: [docs(blog): Verifying a while loop in Hax/Lean](https://github.com/cryspen/hax/pull/1864)\n* \\#1863: [feat(lean): add negated condition to while loop spec](https://github.com/cryspen/hax/pull/1863)\n* \\#1860: [Lean: correctly 
call `fn_like_linked_expressions`, fixing `self_` -> `self`](https://github.com/cryspen/hax/pull/1860)\n* \\#1857: [feat(lean): Add support for while loops](https://github.com/cryspen/hax/pull/1857)\n* \\#1851: [fix(lean): associated type projections on multiple parameters](https://github.com/cryspen/hax/pull/1851)\n* \\#1850: [fix(lean): fix rendering of impl items with constraints](https://github.com/cryspen/hax/pull/1850)\n* \\#1849: [feat(lean): preliminary core model extraction](https://github.com/cryspen/hax/pull/1849)\n* \\#1848: [fix: replace macro should not affect other backends](https://github.com/cryspen/hax/pull/1848)\n* \\#1846: [feat(lean): Add support for `#[hax_lib::opaque]`](https://github.com/cryspen/hax/pull/1846)\n* \\#1840: [feat(lean) turn rejection phase into a transformation phase](https://github.com/cryspen/hax/pull/1840)\n* \\#1837: [[lean] add casting for all integer type pairs](https://github.com/cryspen/hax/pull/1837)\n* \\#1822: [feat(lean): Define usize as a newtype of UInt64](https://github.com/cryspen/hax/pull/1822)\n\n### Contributors\n* [@W95Psp](https://github.com/W95Psp)\n* [@abentkamp](https://github.com/abentkamp)\n* [@franziskuskiefer](https://github.com/franziskuskiefer)\n* [@klausnat](https://github.com/klausnat)\n* [@maximebuyse](https://github.com/maximebuyse)\n* [@rusch95](https://github.com/rusch95)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2026-02.md",
    "content": "---\nauthors:\n  - alex\ntitle: \"This Month in Hax: February 2026\"\ndate: 2026-03-03\n---\n\nIn February, we successfully merged **32 pull requests**!\n\nThis month, the Lean backend made headway by getting new Rust proof attributes. With the new attributes, users can choose between two proof methods, one based on symbolic reasoning (`grind`) and one based on bit-blasting (`bv_decide`). This new setup is illustrated in our updated [Tutorial](https://hax.cryspen.com/manual/lean/tutorial/)\nand the [Chacha20](https://github.com/cryspen/hax/blob/main/examples/lean_chacha20/src/lib.rs)\nexample. Moreover, the Lean backend produces\nprettier output by opening namespaces, handles for-loops more reliably, and its library contains more of our Rust core models.\n\nWe also made great progress on our new THIR importer, implemented in Rust. Currently, it can be activated using `--experimental-full-def`, and we intend to make it the default soon.\n\nSpecial thanks to\n[@JuanCoRo](https://github.com/JuanCoRo),\n[@klausnat](https://github.com/klausnat), and\n[@rusch95](https://github.com/rusch95) for their contributions this month!\n\n### Full list of PRs\n* \\#1967: [Evit upstream january 2026](https://github.com/cryspen/hax/pull/1967)\n* \\#1962: [feat(lean): bump to Lean v4.29.0-rc1](https://github.com/cryspen/hax/pull/1962)\n* \\#1961: [Apply resugarings to linked items.](https://github.com/cryspen/hax/pull/1961)\n* \\#1959: [ADC example. 
documented](https://github.com/cryspen/hax/pull/1959)\n* \\#1956: [feat(lean_chacha20): use new attributes](https://github.com/cryspen/hax/pull/1956)\n* \\#1955: [Lean cleanup](https://github.com/cryspen/hax/pull/1955)\n* \\#1954: [fix(lean): print \"do\" in all ITE & match-branches](https://github.com/cryspen/hax/pull/1954)\n* \\#1951: [feat(lean): for-loops for all unsigned integers](https://github.com/cryspen/hax/pull/1951)\n* \\#1950: [Remove `BinOp` resugaring](https://github.com/cryspen/hax/pull/1950)\n* \\#1947: [[doc] Add AI contribution guidelines](https://github.com/cryspen/hax/pull/1947)\n* \\#1946: [feat(lean): detect recursive functions and mark them `partial_fixpoint`](https://github.com/cryspen/hax/pull/1946)\n* \\#1943: [feat(lean): prettier proof_mode annotations](https://github.com/cryspen/hax/pull/1943)\n* \\#1942: [feat(lean): Rust primitives for prop](https://github.com/cryspen/hax/pull/1942)\n* \\#1941: [fix(lean): default value for associated constants are pure.](https://github.com/cryspen/hax/pull/1941)\n* \\#1938: [New default proof for the Lean backend & proof method attribute](https://github.com/cryspen/hax/pull/1938)\n* \\#1937: [feat: communicate specs to mvcgen](https://github.com/cryspen/hax/pull/1937)\n* \\#1936: [doc: add `lean-toolchain` file to quick start](https://github.com/cryspen/hax/pull/1936)\n* \\#1935: [Render suffixes in the rust engine and backends.](https://github.com/cryspen/hax/pull/1935)\n* \\#1934: [fix(lean): Support functions without arguments in specs](https://github.com/cryspen/hax/pull/1934)\n* \\#1933: [feat(lean): Separate symbolic and bit-blasting specs](https://github.com/cryspen/hax/pull/1933)\n* \\#1932: [fix(lean): Extract correct `PhantomData` structure](https://github.com/cryspen/hax/pull/1932)\n* \\#1931: [feat(lean): attributes for pureEnsures/pureRequires](https://github.com/cryspen/hax/pull/1931)\n* \\#1929: [[Lean] Fix monadic phase bug with constants](https://github.com/cryspen/hax/pull/1929)\n* 
\\#1927: [lean: keep Rust crate/module names unchanged](https://github.com/cryspen/hax/pull/1927)\n* \\#1925: [feat(lean): add type annotation for cast_op](https://github.com/cryspen/hax/pull/1925)\n* \\#1919: [feat(lean): Extract more core models](https://github.com/cryspen/hax/pull/1919)\n* \\#1918: [fix(lean): support for opaque structs](https://github.com/cryspen/hax/pull/1918)\n* \\#1909: [feat(lean): assoc types with constraints and inheritance](https://github.com/cryspen/hax/pull/1909)\n* \\#1908: [This month in hax blog post 2026 01](https://github.com/cryspen/hax/pull/1908)\n* \\#1906: [Resugaring for associated constants from associated functions without parameters](https://github.com/cryspen/hax/pull/1906)\n* \\#1901: [feat(lean): Add support for namespaces (resurrected old PR)](https://github.com/cryspen/hax/pull/1901)\n* \\#1834: [fix(lean): escape special characters in string literals](https://github.com/cryspen/hax/pull/1834)\n\n### Contributors\n* [@JuanCoRo](https://github.com/JuanCoRo)\n* [@abentkamp](https://github.com/abentkamp)\n* [@klausnat](https://github.com/klausnat)\n* [@maximebuyse](https://github.com/maximebuyse)\n* [@rusch95](https://github.com/rusch95)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2026-03.md",
    "content": "---\nauthors:\n  - maxime\ntitle: \"This Month in Hax: March 2026\"\ndate: 2026-04-01\n---\n\nIn March, we successfully merged **8 pull requests**!\n\nThis was a rather quiet month with some of us away for Real World Crypto and HACS in Taipei. But we continued to improve the Lean library with bugfixes and new additions like support for `u128` and `i128`.\n\nSpecial thanks to [@JuanCoRo](https://github.com/JuanCoRo) for improving and extending the treatment of binops in the Lean backend. And thank you [@redshiftzero](https://github.com/redshiftzero) for your first PR!\n\n### Full list of PRs\n* \\#1986: [fix(lean): fix bugs found by claude](https://github.com/cryspen/hax/pull/1986)\n* \\#1982: [fix: use specs for ops only in specset int](https://github.com/cryspen/hax/pull/1982)\n* \\#1979: [add bitwise OR trait to `core_models::ops::bit` ](https://github.com/cryspen/hax/pull/1979)\n* \\#1974: [fix(lean): Remove unnecessary noncomputable tags](https://github.com/cryspen/hax/pull/1974)\n* \\#1973: [This month in hax blog post 2026 02](https://github.com/cryspen/hax/pull/1973)\n* \\#1968: [feat(lean): Int128 and UInt128](https://github.com/cryspen/hax/pull/1968)\n* \\#1966: [[Engine] proper rejection for anonymous associated types.](https://github.com/cryspen/hax/pull/1966)\n* \\#1963: [[Lean] Add more binops](https://github.com/cryspen/hax/pull/1963)\n\n### Contributors\n* [@JuanCoRo](https://github.com/JuanCoRo)\n* [@abentkamp](https://github.com/abentkamp)\n* [@maximebuyse](https://github.com/maximebuyse)\n* [@redshiftzero](https://github.com/redshiftzero)\n"
  },
  {
    "path": "docs/blog/posts/this-month-in-hax/2026-04.md",
    "content": "---\nauthors:\n  - alex\ntitle: \"This Month in Hax: April 2026\"\ndate: 2026-05-07\n---\n\nIn April, we successfully merged **11 pull requests**!\n\nBesides various bug fixes, we improved the\nrendering of ellipsis in pattern-matching and refactored the mvcgen registration of the RustM monad\nto support partial correctness lemmas. Thanks to the external contributors\n[@MavenRain](https://github.com/MavenRain), [@niooss-ledger](https://github.com/niooss-ledger),\nand [@remix7531](https://github.com/remix7531)!\n\nTowards the end of the month, we participated in the [Software Verification in Lean](https://beneficial-ai-foundation.github.io/SVIL2026/) workshop\nand hackathon organized by the Beneficial AI Foundation and the Lean FRO.\nWe came away with lots of new ideas for our Lean backend.\n\nFor a couple of weeks, we have been working on merging the Aeneas engine into Hax. We haven't made\nany public contributions to Hax in that direction yet, but we have created a new repo containing our\nRust core models with an Aeneas-compatible extraction: https://github.com/cryspen/rust-core-models\n\n### Full list of PRs\n* \\#2010: [refactor: prettier barrett example](https://github.com/cryspen/hax/pull/2010)\n* \\#2005: [fix(lean): make the proof of RustM.toBVRustM_bind compatible with Lean 4.29.0](https://github.com/cryspen/hax/pull/2005)\n* \\#2002: [feat(lean): add resugaring for ellipsis in pattern-matching](https://github.com/cryspen/hax/pull/2002)\n* \\#2001: [fix(exporter): support ellipsis patterns by adding wildcards](https://github.com/cryspen/hax/pull/2001)\n* \\#2000: [feat(lean): derive Repr, BEq, DecidableEq for tuples](https://github.com/cryspen/hax/pull/2000)\n* \\#1998: [This month in hax blog post 2026 03](https://github.com/cryspen/hax/pull/1998)\n* \\#1996: [Rename GenericConstraint::Type to TypeClass and ::Projection to Equality](https://github.com/cryspen/hax/pull/1996)\n* \\#1995: [fix(lean): fix core models 
extraction](https://github.com/cryspen/hax/pull/1995)\n* \\#1994: [refactor: redefine RustM monad using ExceptT Error Option](https://github.com/cryspen/hax/pull/1994)\n* \\#1991: [fix: remove monad_extract_simplify](https://github.com/cryspen/hax/pull/1991)\n* \\#1972: [nix: fix development shell](https://github.com/cryspen/hax/pull/1972)\n\n### Contributors\n* [@MavenRain](https://github.com/MavenRain)\n* [@abentkamp](https://github.com/abentkamp)\n* [@clementblaudeau](https://github.com/clementblaudeau)\n* [@maximebuyse](https://github.com/maximebuyse)\n* [@niooss-ledger](https://github.com/niooss-ledger)\n* [@remix7531](https://github.com/remix7531)\n"
  },
  {
    "path": "docs/default.nix",
    "content": "{ stdenv, buildPythonPackage, fetchPypi, setuptools, wheel, mkdocs\n, mkdocs-material, fetchFromGitHub, natsort, wcmatch, hax-frontend-docs\n, mkdocs-awesome-nav }:\nlet\n  mkdocs-glightbox = buildPythonPackage rec {\n    pname = \"mkdocs-glightbox\";\n    version = \"0.4.0\";\n\n    src = fetchPypi {\n      inherit pname version;\n      hash = \"sha256-OSs0IHv5WZEHGhbV+JFtHS8s1dW7Wa4pl0hczXeMcNk=\";\n    };\n\n    doCheck = false;\n\n    pyproject = true;\n    build-system = [ setuptools wheel ];\n  };\n  mkdocs-nav-weight = buildPythonPackage rec {\n    pname = \"mkdocs-nav-weight\";\n    version = \"0.0.7\";\n\n    src = fetchPypi {\n      inherit pname version;\n      hash = \"sha256-gAQGD3U3/NmWW/3uUSrCjo/T+rqdIlMkKn83TjDgbp0=\";\n    };\n\n    doCheck = false;\n\n    pyproject = true;\n    build-system = [ setuptools wheel mkdocs ];\n  };\n\nin stdenv.mkDerivation {\n  name = \"hax-docs\";\n  src = ./..;\n  buildInputs = [\n    mkdocs\n    mkdocs-material\n    mkdocs-glightbox\n    mkdocs-nav-weight\n    mkdocs-awesome-nav\n  ];\n  buildPhase = ''\n    mkdocs build\n  '';\n  installPhase = ''\n    mv site $out\n    cp -rf ${hax-frontend-docs}/share/doc/ $out/frontend/docs\n    mkdir -p $out/engine/docs/hax-engine\n    echo 'Sorry, this page is temporarily unavailable (see <a href=\"https://github.com/cryspen/hax/issues/1675\">issue</a>)' > $out/engine/docs/hax-engine/index.html\n  '';\n}\n"
  },
  {
    "path": "docs/dev/architecture.md",
    "content": "# Architecture\n\nHax is a software pipeline designed to transform Rust code into various formal verification backends such as **F\\***, **Coq**, **ProVerif**, and **EasyCrypt**. It comprises two main components:\n\n1. **The Frontend** (written in Rust)\n2. **The Engine** (written in OCaml)\n\nThe frontend hooks into the Rust compiler, producing an abstract syntax tree for a given crate. The engine then takes this AST as input and applies various transformations to eventually reach the language of the backend: F*, Coq...\n\n## The Frontend (Rust)\n\nThe frontend is responsible for extracting and exporting Rust code's abstract syntax trees (ASTs) in a format suitable for processing by the engine (or by other tools).\n\n### [`hax-frontend-exporter` Library](https://hax.cryspen.com/frontend/docs/hax_frontend_exporter/)\n\nThis library mirrors the internal types of the Rust compiler (`rustc`) that constitute the **HIR** (High-Level Intermediate Representation), **THIR** (Typed High-Level Intermediate Representation), and **MIR** (Mid-Level Intermediate Representation) ASTs. It extends them with additional information such as attributes, trait implementations, and removes ID indirections.\n\n**`SInto` Trait:** The library defines an entry point for translating a given `rustc` value to its mirrored hax version using the [`SInto`](https://hax.cryspen.com/frontend/docs/hax_frontend_exporter/trait.SInto.html) trait (stateful `into`). For a value `x` of type `T` from `rustc`, if `T` is mirrored by hax, then `x.sinto(s)` produces an augmented and simplified \"hax-ified\" AST for `x`. Here, `s` represents the state holding information about the translation process.\n\n### `hax-driver` Binary\n\n`hax-driver` is a custom Rust compiler driver that behaves like `rustc` but performs additional tasks:\n\n1. **Item Enumeration:** Lists all items in a crate.\n2. **AST Transformation:** Applies `sinto` on each item to generate the hax-ified AST.\n3. 
**Output Generation:** Outputs the mirrored items into a `haxmeta` file within the `target` directory.\n\n### `cargo-hax` Binary\n\n`cargo-hax` provides a `hax` subcommand for Cargo, accessible via `cargo hax --help`. It serves as the command-line interface for hax, orchestrating both the frontend and the engine.\n\n**Workflow:**\n\n1. **Custom Build Execution:** Runs `cargo build`, instructing Cargo to use `hax-driver` instead of `rustc`.\n2. **Multiple Compiler Invocations:** `cargo build` invokes `hax-driver` multiple times with various options.\n3. **Inter-Process Communication:** `hax-driver` communicates with `cargo-hax` via `stderr` using JSON lines.\n4. **Metadata Generation:** Produces `haxmeta` files containing the transformed ASTs.\n5. **Engine Invocation (Optional):** If requested, runs the engine, passing options and `haxmeta` information via `stdin` serialized as JSON.\n6. **Interactive Communication:** Engages in interactive communication with the engine.\n7. **User Reporting:** Outputs results and diagnostics to the user.\n\n## The Engine (OCaml - [documentation](/engine/docs/hax-engine/index.html))\n\nThe engine processes the transformed ASTs and options provided via JSON input from `stdin`. It performs several key functions to convert the hax-ified Rust code into the target backend language.\n\n### Importing and Simplifying ASTs\n\n- **AST Importation:** Imports the hax-ified Rust THIR AST. This is module `Import_thir`.\n- **Internal AST Conversion:** Converts the imported AST into a simplified and opinionated internal AST designed for ease of transformation and analysis. This is mostly the functor `Ast.Make`.\n\n### Internal AST and Features\n\nThe internal AST is defined using a **functor** that takes a list of type-level booleans, referred to as **features**, and produces the AST types accordingly.\n\nFeatures are for instances, mutation, loops, unsafe code. 
The enumeration `Features.Enumeration` lists all those features.\n\n**Feature Witnesses:** On relevant AST nodes, feature witnesses are included to enforce constraints at the type level. For example, in the `loop` expression constructor, a witness of type `F.loop` is used, where `F` represents the current feature set. If `F.loop` is an empty type, constructing a `loop` expression is prohibited, ensuring that loops are disallowed in contexts where they are not supported.\n\n### Transformation Phases\n\nThe engine executes a sequence of **phases**, which are determined based on the target backend. Each phase:\n\n1. **Input:** Takes a list of items from an AST with specific feature constraints.\n2. **Output:** Transforms these items into a new AST type, potentially enabling or disabling features through type-level changes.\n\nThe phases can be found in the `Phases` module.\n\n### Backend Code Generation\n\nAfter completing the transformation phases:\n\n1. **Backend Printer Invocation:** Calls the printer associated with the selected backend to generate the target code.\n2. **File Map Creation:** Produces a map from file names to their contents, representing the generated code.\n3. **Output Serialization:** Outputs the file map and additional information (e.g., errors) as JSON to `stderr`.\n\n### Communication Protocol\n\nThe engine communicates asynchronously with the frontend using a protocol defined in [`hax_types::engine_api::protocol`](https://hax.cryspen.com/frontend/docs/hax_types/engine_api/protocol/index.html). This communication includes:\n\n- **Diagnostic Data:** Sending error messages, warnings, and other diagnostics.\n- **Profiling Information:** Providing performance metrics and profiling data.\n- **Pretty-Printing Requests:** Requesting formatted versions of Rust source code or diagnostics for better readability.\n\n"
  },
  {
    "path": "docs/dev/ast_ebnf.md",
    "content": "We currently take inputs from the following AST. Literals are strings, numbers and\nbooleans.\n\n``` ebnf\nchar ::= [a-zA-Z]\nstring ::= char*\ndigit ::= [0-9]\nuint ::= digit+\nint ::= (\"-\")? uint\nfloat ::= int (\".\")? uint\nbool ::= \"true\" | \"false\"\n\nlocal_var ::= ident\nglobal_var ::= rust-path-identifier\n\nliteral ::=\n| '\"' string '\"'\n| \"'\" char \"'\"\n| int\n| float (* [a] *)\n| bool\n```\n\nWe support a number of simple types characters, strings, booleans and\nnumbers. Number types for integers (8,16,32,64,128 bit or machine sized)\nand floats (16,32, or 64 bit). Composite types are tuples, fixed length\nlists (arrays), variable length lists (vectors/slices), ptr types, and\nfunction types. Lastly we have named types defined by items, e.g. enums\nand structs.\n\n``` ebnf\nty ::=\n| \"bool\"\n| \"char\"\n| \"u8\" | \"u16\" | \"u32\" | \"u64\"\n| \"u128\" | \"usize\"\n| \"i8\" | \"i16\" | \"i32\" | \"i64\"\n| \"i128\" | \"isize\"\n| \"f16\" | \"f32\" | \"f64\"  (* [a] *)\n| \"str\"\n| (ty \",\")*\n| \"[\" ty \";\" int \"]\"\n| \"[\" ty \"]\"\n| \"*const\" ty | \"*mut\" ty  (* [b] *)\n| \"*\" expr | \"*mut\" expr  (* [b] *)\n| ident\n| (ty \"->\")* ty\n| \"dyn\" (goal)+ (* [c] *)\n```\n\nThe patterns allowed reflect these types. Wildcard patterns, literal\ntypes, typed patterns, list patterns, record or tuple patterns.\n\n``` ebnf\npat ::=\n| \"_\"\n| ident \"{\" (ident \":\" pat \";\")* \"}\"\n| ident \"(\" (pat \",\")* \")\"\n| (pat \"|\")* pat\n| \"[\" (pat \",\")* \"]\"  (* [d] *)\n| \"&\" pat\n| literal\n| (\"&\")? (\"mut\")? ident (\"@\" pat)?  (* [e] *)\n```\n\nThe simple expressions are literals, local or global variables, type\ncasts, assignments and lists. Control flow expressions, if statements,\nmatch statements, loops, return, break and continue. 
The rest is blocks,\nmacro calls, lambda functions and borrowing.\n\n``` ebnf\nexpr ::=\n| \"if\" expr \"{\" expr \"}\" (\"else\" \"{\" expr \"}\")?\n| \"if\" \"let\" pat (\":\" ty)? \"=\" expr \"{\" expr \"}\" (\"else\" \"{\" expr \"}\")?\n| expr \"(\" (expr \",\")* \")\"\n| literal\n| \"[\" (expr \",\")* \"]\" | \"[\" expr \";\" int \"]\"\n| ident \"{\" (ident \":\"expr \";\")* \"}\"\n| ident \"{\" (ident \":\"expr \";\")* \"..\" expr \"}\"\n| \"match\" expr guard \"{\"\n((\"|\" pat)* \"=>\" (expr \",\" | \"{\" expr \"}\"))*\n\"}\"\n| \"let\" pat (\":\" ty)? \"=\" expr \";\" expr\n| \"let\" pat (\":\" ty)? \"=\" expr \"else\" \"{\" expr \"}\" \";\" expr\n| modifiers \"{\" expr \"}\"\n| local_var\n| global_var\n| expr \"as\" ty\n| \"loop\" \"{\" expr \"}\"\n| \"while\" \"(\" expr \")\" \"{\" expr \"}\"\n| \"for\" \"(\" pat \"in\" expr \")\" \"{\" expr \"}\"\n| \"for\" \"(\" \"let\" ident \"in\" expr \"..\" expr \")\" \"{\" expr \"}\"\n| \"break\" expr\n| \"continue\"\n| pat \"=\" expr\n| \"return\" expr\n| expr \"?\"\n| \"&\" (\"mut\")? expr  (* [e] *)\n| \"&\" expr \"as\" \"&const _\"  (* [b] *)\n| \"&mut\" expr \"as\" \"&mut _\"\n| \"|\" pat \"|\" expr\n```\n\nThe items supported are functions, type aliasing, enums, structs, trait\ndefinitions and implementations, and imports.\n\n``` ebnf\nitem ::=\n| \"const\" ident \"=\" expr\n| \"static\" ident \"=\" expr  (* [b] *)\n| modifiers \"fn\" ident (\"<\" (generics \",\")* \">\")? \"(\" (pat \":\" ty \",\")* \")\" (\":\" ty)? \"{\" expr \"}\"\n| \"type\" ident \"=\" ty\n| \"enum\" ident (\"<\" (generics \",\")* \">\")? \"{\" (ident (\"(\" (ty)* \")\")? \",\")* \"}\"\n| \"struct\" ident (\"<\" (generics \",\")* \">\")? \"{\" (ident \":\" ty \",\")* \"}\"\n| \"trait\" ident (\"<\" (generics \",\")* \">\")? \"{\" (trait_item)* \"}\"\n| \"impl\" (\"<\" (generics \",\")* \">\")? 
ident \"for\" ty \"{\" (impl_item)* \"}\"\n| \"mod\" ident \"{\" (item)* \"}\"\n| \"use\" path \";\"\n```\n\n## Full eBNF\n\n``` ebnf\nchar ::= [a-zA-Z]\nstring ::= char*\ndigit ::= [0-9]\nuint ::= digit+\nint ::= (\"-\")? uint\nfloat ::= int (\".\")? uint\nbool ::= \"true\" | \"false\"\n\nlocal_var ::= ident\nglobal_var ::= rust-path-identifier\n\nliteral ::=\n| '\"' string '\"'\n| \"'\" char \"'\"\n| int\n| float  [a]\n| bool\n\ngeneric_value ::=\n| \"'\" ident\n| ty\n| expr\n\ngoal ::=\n| ident \"<\" (generic_value \",\")* \">\"\n\nty ::=\n| \"bool\"\n| \"char\"\n| \"u8\" | \"u16\" | \"u32\" | \"u64\"\n| \"u128\" | \"usize\"\n| \"i8\" | \"i16\" | \"i32\" | \"i64\"\n| \"i128\" | \"isize\"\n| \"f16\" | \"f32\" | \"f64\"  (* [a] *)\n| \"str\"\n| (ty \",\")*\n| \"[\" ty \";\" int \"]\"\n| \"[\" ty \"]\"\n| \"*const\" ty | \"*mut\" ty  (* [b] *)\n| \"*\" expr | \"*mut\" expr  (* [b] *)\n| ident\n| (ty \"->\")* ty\n| \"dyn\" (goal)+ (* [c] *)\n\npat ::=\n| \"_\"\n| ident \"{\" (ident \":\" pat \";\")* \"}\"\n| ident \"(\" (pat \",\")* \")\"\n| (pat \"|\")* pat\n| \"[\" (pat \",\")* \"]\"  (* [d] *)\n| \"&\" pat\n| literal\n| (\"&\")? (\"mut\")? ident (\"@\" pat)?  (* [e] *)\n\nmodifiers ::=\n| \"\"\n| \"unsafe\" modifiers\n| \"const\" modifiers\n| \"async\" modifiers  (* [b] *)\n\nguard ::=\n| \"if\" \"let\" pat (\":\" ty)? \"=\" expr\n\nexpr ::=\n| \"if\" expr \"{\" expr \"}\" (\"else\" \"{\" expr \"}\")?\n| \"if\" \"let\" pat (\":\" ty)? \"=\" expr \"{\" expr \"}\" (\"else\" \"{\" expr \"}\")?\n| expr \"(\" (expr \",\")* \")\"\n| literal\n| \"[\" (expr \",\")* \"]\" | \"[\" expr \";\" int \"]\"\n| ident \"{\" (ident \":\"expr \";\")* \"}\"\n| ident \"{\" (ident \":\"expr \";\")* \"..\" expr \"}\"\n| \"match\" expr guard \"{\"\n((\"|\" pat)* \"=>\" (expr \",\" | \"{\" expr \"}\"))*\n\"}\"\n| \"let\" pat (\":\" ty)? \"=\" expr \";\" expr\n| \"let\" pat (\":\" ty)? 
\"=\" expr \"else\" \"{\" expr \"}\" \";\" expr\n| modifiers \"{\" expr \"}\"\n| local_var\n| global_var\n| expr \"as\" ty\n| \"loop\" \"{\" expr \"}\"\n| \"while\" \"(\" expr \")\" \"{\" expr \"}\"\n| \"for\" \"(\" pat \"in\" expr \")\" \"{\" expr \"}\"\n| \"for\" \"(\" \"let\" ident \"in\" expr \"..\" expr \")\" \"{\" expr \"}\"\n| \"break\" expr\n| \"continue\"\n| pat \"=\" expr\n| \"return\" expr\n| expr \"?\"\n| \"&\" (\"mut\")? expr  (* [e] *)\n| \"&\" expr \"as\" \"&const _\"  (* [b] *)\n| \"&mut\" expr \"as\" \"&mut _\"\n| \"|\" pat \"|\" expr\n\nimpl_item ::=\n| \"type\" ident \"=\" ty \";\"\n| modifiers \"fn\" ident (\"<\" (generics \",\")* \">\")? \"(\" (pat \":\" ty \",\")* \")\" (\":\" ty)? \"{\" expr \"}\"\n\ntrait_item ::=\n| \"type\" ident \";\"\n| modifiers \"fn\" ident (\"<\" (generics \",\")* \">\")? \"(\" (pat \":\" ty \",\")* \")\" (\":\" ty)? (\"{\" expr \"}\" | \";\")\n\nitem ::=\n| \"const\" ident \"=\" expr\n| \"static\" ident \"=\" expr  (* [b] *)\n| modifiers \"fn\" ident (\"<\" (generics \",\")* \">\")? \"(\" (pat \":\" ty \",\")* \")\" (\":\" ty)? \"{\" expr \"}\"\n| \"type\" ident \"=\" ty\n| \"enum\" ident (\"<\" (generics \",\")* \">\")? \"{\" (ident (\"(\" (ty)* \")\")? \",\")* \"}\"\n| \"struct\" ident (\"<\" (generics \",\")* \">\")? \"{\" (ident \":\" ty \",\")* \"}\"\n| \"trait\" ident (\"<\" (generics \",\")* \">\")? \"{\" (trait_item)* \"}\"\n| \"impl\" (\"<\" (generics \",\")* \">\")? ident \"for\" ty \"{\" (impl_item)* \"}\"\n| \"mod\" ident \"{\" (item)* \"}\"\n| \"use\" path \";\"\n```\n## Footnotes\n\n* **[a]** no support yet for raw pointers, async/await, static, extern, or union types\n* **[b]** partial support for nested matching and range patterns\n* **[c]** partial support for mutable borrows\n* **[d]** most backends lack support for dynamic dispatch, floating point operations\n* **[e]** some backends only handle specific forms of iterators\n"
  },
  {
    "path": "docs/dev/docs.md",
    "content": "# Docs\n\n## mkdocs material (this page)\n\nInstall dependencies\n\n```bash\npip install mkdocs-glightbox mkdocs-nav-weight mkdocs-material\n```\n\n[Official docs](https://squidfunk.github.io/mkdocs-material).\n\n### Commands\n\n* `mkdocs new [dir-name]` - Create a new project.\n* `mkdocs serve` - Start the live-reloading docs server.\n* `mkdocs build` - Build the documentation site.\n* `mkdocs -h` - Print help message and exit.\n\n### Project layout\n\n    mkdocs.yml    # The configuration file.\n    docs/\n        index.md    # The documentation homepage.\n        ...         # Other markdown pages, images and other files.\n        blog/       # The blog\n            posts/  # Blog posts\n\n### Including external files\n\n```\n;--8<-- \"engine/DEV.md:3:7\"\n```\n\n--8<-- \"engine/DEV.md:3:7\"\n"
  },
  {
    "path": "docs/dev/index.md",
    "content": "---\nweight: 5\n---\n\n# Dev on hax\nThis chapter contains information about internals of hax.\n\nPlease read the [`CONTRIBUTING.md`](https://github.com/cryspen/hax/blob/main/CONTRIBUTING.md) before opening a pull request.\n"
  },
  {
    "path": "docs/dev/libraries_macros.md",
    "content": "# Libraries\n\n# Macros and attributes\nThe hax engine understands only one attribute: `#[_hax::json(PAYLOAD)]`,\nwhere `PAYLOAD` is a JSON serialization of the Rust enum\n`hax_lib_macros_types::AttrPayload`.\n\nNote `#[_hax::json(PAYLOAD)]` is a [tool\nattribute](https://github.com/rust-lang/rust/issues/66079): an\nattribute that is never expanded.\n\nIn the engine, the OCaml module `Attr_payloads` offers an API to query\nattributes easily. The types in crate `hax_lib_macros_types` and\ncorresponding serializers/deserializers are automatically generated in\nOCaml, thus there is no manual parsing involved.\n\n## User experience\nAsking the user to type `#[_hax::json(some_long_json)]` is not very\nfriendly. Thus, the crate `hax-lib-macros` defines a bunch of [proc\nmacros](https://doc.rust-lang.org/beta/reference/procedural-macros.html)\nthat defines nice and simple-to-use macros. Those macro take care of\ncooking some `hax_lib_macros_types::AttrPayload` payload(s), then\nserialize those payloads to JSON and produce one or more\n`#[_hax::json(serialized_payload)]` attributes.\n\n"
  },
  {
    "path": "docs/engine/index.md",
    "content": "# Engine\n\nThe hax engine. Its [odoc](https://ocaml.github.io/odoc/) documentation can be found [here](./docs/hax-engine/index.html)."
  },
  {
    "path": "docs/engine/toolchain_structure/index.md",
    "content": "# Toolchain structure\n\nHax is composed of three main parts:\n\n* The frontend, which interfaces with rustc to extract Rust intermediary representation ASTs (for MIR or THIR) out of Rust code.\n* The engine, which imports the Rust THIR AST to the internal hax AST, and defines a set of transformation phases on this internal AST.\n* The backends, which make use of a set of phases from the engine, and print it to a target verification framework or language. A backend also usually needs to provide a proof library and some more utilities.\n"
  },
  {
    "path": "docs/frontend/evaluation.md",
    "content": "# Evaluation of the hax Frontend\n\nThis section provides an evaluation of the hax frontend, consisting of two parts:\n\n- A **quantitative evaluation**, which measures how effectively the frontend processes Rust code.\n- A **qualitative analysis**, which explores the frontend’s capabilities in real-world scenarios.\n\nTogether, these evaluations document the current strengths and limitations of the hax frontend.\n\n## Quantitative Evaluation\n\nThe quantitative evaluation aims to estimate how much Rust code the frontend can handle. It does **not** assess the correctness of the frontend's output.\n\n### Overview\n\nThe hax toolchain is composed of several components (see [high-level architecture](./index.md#high-level-arch)):\n\n- **Frontend**: hooks into the Rust compiler to export rich Abstract Syntax Trees (ASTs) for specified crates.\n- **Engine** and **Backends**: consume those ASTs to produce code.\n- **Libraries**: `hax-lib` provides supporting functionality, and reference models for existing Rust libraries (e.g. the [Core library](https://doc.rust-lang.org/stable/core/) of Rust).\n\nIn this quantitative evaluation, we focus on the **frontend**: the process of generating JSON-encoded ASTs from Rust code. We aim to assess:\n\n1. **Successful Extraction**: The success rate of producing ASTs.\n2. **Performance**: Ensuring the extraction process remains efficient enough for real-world usage.\n\n### Methodology\n\nFor each Rust crate in our test set, we follow these steps:\n\n1. Clone the crate's source code.\n2. Run `cargo fetch` to download its dependencies.\n3. Execute `cargo hax json --use-ids`, recording any errors and the time taken.\n4. Clean Cargo's cache with `cargo clean`.\n5. Run `cargo check`, again recording any errors and time. 
Since `cargo hax json` is effectively `cargo check` with extra work, this serves as our performance baseline.\n\nWe implemented this protocol in an internal Cryspen tool, which also evaluates other parts of the hax toolchain.\n\n### Crate Selection\n\nTo ensure we capture a diverse set of crates:\n\n- We include the **5,000 most downloaded** crates from crates.io.\n- We also include the **top 1,500 crates** in the **cryptography** category on crates.io, reflecting hax's relevance for verifying critical software like cryptographic libraries.\n\n### Success Rate\n\nEach crate falls into one of three categories:\n\n1. **Successful**: hax produced a valid AST.\n2. **Failed**: hax could not produce an AST (despite `cargo check` succeeding).\n3. **Both Failed**: Both `cargo check` and `cargo hax` failed.\n\n```mermaid\n%%{init: {'theme': 'base', 'themeVariables': { 'pie1': '#27ae60', 'pie2': '#f1c40f', 'pie3': '#e74c3c'}}}%%\npie showData\n    \"`cargo check` failure\" : 41\n    \"`cargo hax` failure\" : 24\n    \"Success\" : 935\n```\n\nOut of 1000 crates, our tool failed to run `cargo check` on 41 of them due to\nsetup issues. These problems typically involve missing system packages that\nCargo cannot automatically install or unusual Cargo configurations that require\nmanual intervention. We therefore exclude these 41 crates from further analysis.\n\nOf the remaining 959 crates, the hax frontend successfully processed a **vast\nmajority (97.5%)**. 
The remaining failures fall into four distinct categories,\nas illustrated in the pie chart below.\n\n```mermaid\n%%{init: {'theme': 'base', 'themeVariables': { 'pie1': '#c0392b', 'pie2': '#3498db', 'pie3': '#2980b9', 'pie4': '#e74c3c'}}}%%\npie showData title Frontend failures\n    \"Unsupported Rust toolchain\" : 4\n    \"Rust setup issue\" : 6\n    \"Binder panic\" : 10\n    \"Stack overflow in Rustc\" : 4\n```\n\nThe errors marked in **blue** on the chart indicate situations where the Rust\ntoolchain used by the tested crate or its dependencies is incompatible with the\nspecific version hax is pinned to, or where the crate and hax are sensitive to\ntoolchain variations. Rust edition 2024 was updated very recently, which\nexplains roughly half of these issues.\n\nThe errors shown in **red**, however, are directly related to hax. The\nbinder-related panics are a [known\nbug](https://github.com/cryspen/hax/issues/1046). Additionally, the stack\noverflow errors occur due to specific code paths in the Rust compiler being\nincorrectly triggered by hax. Ultimately, only **1.6%** of crates encounter such\nhax-specific bugs.\n\n### Performance Analysis\n\nFor the crates that successfully generated ASTs, we compared the time taken by `cargo hax json` against `cargo check`. 
Because crate size and complexity vary greatly, we normalized[^1] the times to allow fair comparisons.\n\n<div class=\"center-table\" markdown>\n\n| Statistic       | Cargo Check | Cargo Hax |\n|-----------------|------------:|----------:|\n| **Median**      |       0.147 |     0.780 |\n| **Mean**        |       0.215 |     0.771 |\n| **10th Decile** |       0.425 |     0.953 |\n\n</div>\n\n<!-- We break down the results into **cryptography crates** and **general crates**: -->\n\n<!-- #### Cryptography Crates\n\n| Statistic       | Cargo Check | Cargo Hax |\n|-----------------|------------:|----------:|\n| **Median**      |       0.148 |     0.796 |\n| **Mean**        |       0.199 |     0.777 |\n| **10th Decile** |       0.411 |     0.948 | -->\n\n- On average, `cargo hax` is about 4–5 times slower than `cargo check`.\n- At the **10th decile**, the slowdown is only about 2×, indicating better scaling for larger crates.\n\n### Conclusion\n\nOur quantitative evaluation shows that the hax frontend successfully extracts ASTs for a large portion of the Rust ecosystem. Nevertheless, a small portion of crates reveal performance bottlenecks or outright failures that require further investigation and optimization.\n\nThese results also highlight a few **limitations** of this initial study:\n\n- We only evaluated the **frontend** process. 
Other stages of the toolchain, such as the engine, backends or libraries, require separate assessments.\n- We did not assess the **correctness or completeness** of the generated JSON, highlighting the need for a qualitative analysis to verify that the extracted ASTs meet the required specifications.\n\nOverall, the hax frontend demonstrates capabilities for large-scale Rust code verification, but continued refinement is needed to handle edge cases and improve performance.\n\n[^1]: For a given crate, we normalize the times by dividing them by the total time.\n\n## Qualitative evaluation\n\nThe qualitative evaluation aims at identifying what Rust patterns the frontend can handle. It also tests whether the information extracted from the frontend describes correctly the input Rust code.\n\n### Rustc coverage test suite\n\nThe Rust compiler (rustc) has extensive test suites that describe various expectations of how it should handle Rust input. One of them is the [coverage test suite](https://rustc-dev-guide.rust-lang.org/tests/compiletest.html#coverage-tests) which contains a set of Rust inputs that is supposed to cover a wide range of Rust constructs. This test suite has been adapted to test hax.\n\nWe use the following methodology:\n- The Rust inputs from the test suite have been copied to `rustc-coverage-tests/src/`, and can be updated using a script.\n- A Rust crate structure is built around these source files, to allow hax to handle them. The files that fail `cargo check` are excluded. There are currently 26 excluded (out of 81) tests, mostly because they contain asynchronous code, which requires a runtime file that is missing in our infrastructure.\n- To test hax frontend, we run `cargo hax json`. If the command succeeds, the test is considered successful.\n\nThese tests aim at increasing the confidence in the ability of hax frontend to handle Rust inputs covering all of the language constructs. As of today, all tests are handled successfully by hax frontend. 
However we don't test any requirement on the output (see the following section for tests of hax frontend output quality).\n\n### Rust printer testing\n\nThis method aims at testing the quality of hax frontend's output. It uses the Rust hax engine.\n\nThis tool (written in Rust) takes the output of hax frontend (a json file describing the content of a Rust crate), it imports it as an AST (similar to the hax engine AST), and then prints this AST in Rust syntax. \n\nIf the Rust code we get out of this tool is equivalent to the Rust code it was given as input, then this means hax frontend correctly extracted the input code without losing or altering any information.\n\nThere is no easy way of testing the full input/output equivalence so the methodology here is to test that the resulting code behaves the same as the input code with respect to relevant test cases.\n\nThis work is available in the `hax-rust-engine` folder. In the `tests` subfolder, an input file is available with tests for all Rust constructs supported by the printer (currently functions and expressions). For now these tests pass after extracting and printing the file with hax frontend and the Rust printer. This means that for the Rust constructs covered by the printer and the test file, hax frontend's extraction is correct. However this still needs to be extended to test more Rust constructs.\n"
  },
  {
    "path": "docs/frontend/index.md",
    "content": "# Frontend\n\nhax is a tool designed to facilitate the formal verification of Rust programs. It enables the translation of Rust crates into formal languages like F* or Coq. Once translated, these formal representations allow to write formal proofs about the behavior and correctness of their Rust code.\n\n## User flow\n\nThis document focuses on a specific user flow: extracting F\\* code. The process\ndescribed here applies similarly to all other backends, including F*, Rocq,\nSSProve, ProVerif, and EasyCrypt.\n\nThe goal is for the user to prove a property on a Rust function, `f`, using the F\\* formal language. The function `f` is defined in the module `mymod`, within the crate `mycrate`.\n\nTo achieve this, the user follows these steps:\n\n 1. Annotate the Rust crate mycrate with the properties to be proven and, if needed, provide proof hints.\n 2. Run the command `cargo hax into -i '-** +mycrate::mymod::f' fstar`.\n 3. Execute F\\*. If F\\* fails to prove the specified properties, the user revisits step (1) to refine the annotations and proof hints.\n\nFor a practical guide on using Hax, please refer to the [manual](../manual/index.md).\n\n![](./user-flow.excalidraw.png)\n\n## High-Level Architecture of Hax {#high-level-arch}\n\nHax consists of five main components, as illustrated in the diagram below, with\neach numbered step directly corresponding to its labeled section in the diagram:\n\n1. The **frontend** handles the extraction and export of given [Rust\n   crates](https://doc.rust-lang.org/book/ch07-01-packages-and-crates.html) into\n   abstract syntax trees (ASTs).\n2. The **engine** performs a series of phases and rewrites, simplifying and\n   transforming the Rust program as needed.\n3. The **backends** --one per target language-- request the engine to simplify\n   the Rust program for their specific target and then pretty-print the program\n   as F*, Roq, PV, or other formats.\n4. 
The Rust helper crate, **hax-lib**, provides Hax-specific helpers and macros\n   to annotate a Rust program with properties, invariants, or proof hints.\n5. The **annotated standard library** is a work in progress partial model for the\n   Rust base libraries (`core`, `std`, `alloc`), enriched with logical\n   properties to assist in formal verification.\n\n![](./high-level-arch.excalidraw.png){:.center}\n\nIn the diagram, the dotted lines indicate dependencies between Rust or F\\*\nartifacts, flowing from left to right.\n\nThe user begins by exporting a crate (referred to as the \"Input crate\" in the\ntop left of the diagram). This crate depends on both the Rust `std` library and\n`hax-lib`. The `std` dependency is added automatically by Rust, while the\n`hax-lib` dependency is introduced by hax.\n\nThe input crate passes through the hax toolchain as follows:\n- First, the crate is parsed and exported as JSON by the **frontend** (➊), with\n  assistance from the Rust compiler.\n- Next, the **engine** processes the exported data (➋), transforming and\n  simplifying it.  \n- Finally, the **F\\*** backend generates the corresponding F\\* files (➌).\n\nSince the input crate depends on both `std` and `hax-lib`, the generated F\\*\nmodules maintain these dependencies. To handle this, Hax translates `hax-lib` to\nF\\* (➍) and also translates our model of the `std` library (➎) into F\\*.\n\n\n## Ast Extraction Workflow\n\nThis section delves into the technical details of hax's extraction process. The\nprocess involves the following components (as illustrated in the diagram below,\nwith each numbered step directly corresponding to its labeled section in the\ndiagram):\n\n1. **`cargo-hax`**: a binary that provides a [custom\n   command](https://doc.rust-lang.org/book/ch14-05-extending-cargo.html) `hax`\n   to `cargo`.\n2. **`cargo check`**: the `check` subcommand of `cargo`. 
It is sensitive to the\n   environment variable `RUSTC_WORKSPACE_WRAPPER`: when set, `cargo` will call\n   the program specified by `$RUSTC_WORKSPACE_WRAPPER` instead of `rustc`.\n3. **`driver-hax-frontend-exporter`**: a [custom `rustc`\n   driver](https://jyn.dev/rustc-driver/#paths). Instead of compiling Rust into\n   a binary, `driver-hax-frontend-exporter` exports ASTs as JSON.\n4. **`hax-frontend-exporter`**: a library that mirrors Rust's internal ASTs as\n   its own enriched AST representation. This library also provides bridges from\n   Rust's ASTs to its enriched ASTs.\n\n![](./workflow-diagram.excalidraw.png)\n\nWhen `cargo check` compiles a crate, it invokes `rustc` multiple times, but only\nsome of these calls are relevant to us. Additionally, `cargo check` may also\nbuild dependencies. As a result, we inject our custom export logic selectively,\nensuring that `driver-hax-frontend-exporter` behaves exactly like `rustc` in\nall other cases.\n\nDuring compilation, `rustc` produces several artifacts:\n\n- **`*.rmeta` files**: metadata files containing type information, function\n  signatures, constants, and more.\n- **`*.rlib` files**: static library artifacts with compiled Rust code and\n  metadata.\n- **Diagnostic messages**: messages on standard output for communication with\n  `cargo check`, including errors, warnings, and status updates.\n\nOur custom export logic extends this by generating additional artifacts:\n\n- **`*.haxmeta` files**: similar to Rust's `*.rmeta` files, but contains full\n  enriched abstract syntax trees. The `haxmeta` files contains a binary\n  serialization of the\n  [`HaxMeta`](https://hax.cryspen.com/frontend/docs/hax_types/driver_api/struct.HaxMeta.html)\n  type.\n- **Diagnostic messages**: sent to standard output and used to communicate\n  specifically with `cargo hax`. 
Those messages are JSON serializations of the\n  [`HaxDriverMessage`](https://hax.cryspen.com/frontend/docs/hax_types/driver_api/enum.HaxDriverMessage.html)\n  type.\n\nAfter calling `cargo check`, `cargo hax` parses the `*.haxmeta` files and\ncontinues further along the hax toolchain, either by outputting JSON directly or\nby calling the engine to generate files for targets such as F\\*, ProVerif, or\nRocq.\n\n`cargo-hax`, `driver-hax-frontend-exporter`, and `hax-frontend-exporter`\ntogether form what we refer to as \"the frontend\". The engine is represented by\nthe binary `hax-engine`, which includes the backends. The frontend is\nimplemented in Rust, while the engine is implemented in OCaml. Communication\nbetween all components occurs through **stdout**, **stderr**, or **stdin**,\nusing JSON messages defined in the Rust crate\n[`hax-types`](https://hax.cryspen.com/frontend/docs/hax_types/index.html).\n\nThis section provides an overview of the workflow of the frontend of hax. \n\n<!-- \n## A Brief Tour of The Rust Compiler\n\nThe Rust compiler transforms raw source code from the user into various representations, all the way to machine code when that's what the user requests.\n\nThe Rust compiler has several intermediate representations (IR), exposing various views on Rust programs, each suited for different jobs: parsing, typing, borrow checking, etc. 
As illustrated below by the diagram, the following main IR are:\n\n  - **Parse AST**: an untyped abstract syntax tree (AST) just after parsing;\n  - **HIR**: Higher-level Intermediate Representation, an AST close to Rust surface language after name resolution and macro expansion;\n  - **THIR**: Typed Higher-level Intermediate Representation, a fully typed version of HIR;\n  - **MIR**: Mid-level Intermediate Representation, a simplified Rust AST, in which borrow checking takes place;\n  - **LLVM**: interfaces with LLVM.\n\n![](./rustc-diagram.excalidraw.png){: .center style=\"width:min(100%, 500px)\"}\n\n## Querying The Rust Compiler\n\nThe Rust compiler has mechanisms enabling tools to hook into it at the various compile stages. From here, it is possible to interactively ask Rust about items such as types, traits, names, etc. of a certain Rust construct inside the code.\n\nThe Rust compiler is optimized for performance; its work is divided in many\nsmaller parts and is orchestrated by a system of lazy queries.\n\nRust is optimized for performance, and its query system is a complex beast.\nFinding your way to the information you are looking for is not simple and\nrequires a certain familiarity with the compiler.\n\nFrom this observation, we decided to split hax in two: a first part that interacts with rustc, and a second that transforms Rust ASTs to our various backends.\n\nThe goal of the frontend is to take care of all the boring and complex job of interacting with rustc.\nThe frontend takes Rust code and extracts complete ASTs, designed for easy consumption for other tools.\n\nThe ASTs we define are mirrored versions of rustc's THIR and MIR, enriched with a lot of extra pieces of data.\n\n## Workflow of the JSON extraction\n\nThe frontend defines a binary `cargo-hax`, providing a [custom command](https://doc.rust-lang.org/book/ch14-05-extending-cargo.html) `hax` to `cargo` that allows you to get a JSON-encoded AST for a given Rust program.\n\nRunning `cargo hax 
json` invokes hax' frontend and queries for JSON.\n\nThe motivation behind hax' frontend is that interacting with the Rust compiler (rustc) can be difficult. Rustc works with its internal optimized representations and with a system of interactive queries.\n\n![](./workflow-diagram.excalidraw.png)\n\nThe hax frontend. Its [rustdoc](https://doc.rust-lang.org/rustdoc/what-is-rustdoc.html) documentation can be found [here](./docs/hax_frontend_exporter/index.html). -->\n"
  },
  {
    "path": "docs/index.md",
    "content": "# hax\n\nhax is a tool for high assurance translations of a large subset of\nRust into formal languages such as [F\\*](https://www.fstar-lang.org/) or [Rocq](https://rocq-prover.org/).\n\nHead over to the [Manual](./manual/index.md) or the [playground](https://hax-playground.cryspen.com)\nto get started!\n\n![hax overview](static/img/overview.png)\n\n## Playground\nTry out hax in the browser on the [playground](https://hax-playground.cryspen.com).\n\n[![Playground screenshot](static/img/playground.png)](https://hax-playground.cryspen.com)\n\n## Community\n\nJoin the hax community on our [Zulip chat](https://hacspec.zulipchat.com).\n"
  },
  {
    "path": "docs/javascripts/ansi_up.js",
    "content": "/**\n * Minified by jsDelivr using Terser v5.19.2.\n * Original file: /npm/ansi_up@6.0.2/ansi_up.js\n *\n * Do NOT use SRI with dynamically generated files! More information: https://www.jsdelivr.com/using-sri-with-dynamic-files\n */\n\"use strict\";var PacketKind,templateObject_1,templateObject_2,templateObject_3,__makeTemplateObject=this&&this.__makeTemplateObject||function(e,t){return Object.defineProperty?Object.defineProperty(e,\"raw\",{value:t}):e.raw=t,e};!function(e){e[e.EOS=0]=\"EOS\",e[e.Text=1]=\"Text\",e[e.Incomplete=2]=\"Incomplete\",e[e.ESC=3]=\"ESC\",e[e.Unknown=4]=\"Unknown\",e[e.SGR=5]=\"SGR\",e[e.OSCURL=6]=\"OSCURL\"}(PacketKind||(PacketKind={}));class AnsiUp{constructor(){this.VERSION=\"6.0.2\",this.setup_palettes(),this._use_classes=!1,this.bold=!1,this.faint=!1,this.italic=!1,this.underline=!1,this.fg=this.bg=null,this._buffer=\"\",this._url_allowlist={http:1,https:1},this._escape_html=!0,this.boldStyle=\"font-weight:bold\",this.faintStyle=\"opacity:0.7\",this.italicStyle=\"font-style:italic\",this.underlineStyle=\"text-decoration:underline\"}set use_classes(e){this._use_classes=e}get use_classes(){return this._use_classes}set url_allowlist(e){this._url_allowlist=e}get url_allowlist(){return this._url_allowlist}set escape_html(e){this._escape_html=e}get escape_html(){return this._escape_html}set boldStyle(e){this._boldStyle=e}get boldStyle(){return this._boldStyle}set faintStyle(e){this._faintStyle=e}get faintStyle(){return this._faintStyle}set italicStyle(e){this._italicStyle=e}get italicStyle(){return this._italicStyle}set underlineStyle(e){this._underlineStyle=e}get underlineStyle(){return 
this._underlineStyle}setup_palettes(){this.ansi_colors=[[{rgb:[0,0,0],class_name:\"ansi-black\"},{rgb:[187,0,0],class_name:\"ansi-red\"},{rgb:[0,187,0],class_name:\"ansi-green\"},{rgb:[187,187,0],class_name:\"ansi-yellow\"},{rgb:[0,0,187],class_name:\"ansi-blue\"},{rgb:[187,0,187],class_name:\"ansi-magenta\"},{rgb:[0,187,187],class_name:\"ansi-cyan\"},{rgb:[255,255,255],class_name:\"ansi-white\"}],[{rgb:[85,85,85],class_name:\"ansi-bright-black\"},{rgb:[255,85,85],class_name:\"ansi-bright-red\"},{rgb:[0,255,0],class_name:\"ansi-bright-green\"},{rgb:[255,255,85],class_name:\"ansi-bright-yellow\"},{rgb:[85,85,255],class_name:\"ansi-bright-blue\"},{rgb:[255,85,255],class_name:\"ansi-bright-magenta\"},{rgb:[85,255,255],class_name:\"ansi-bright-cyan\"},{rgb:[255,255,255],class_name:\"ansi-bright-white\"}]],this.palette_256=[],this.ansi_colors.forEach((e=>{e.forEach((e=>{this.palette_256.push(e)}))}));let e=[0,95,135,175,215,255];for(let t=0;t<6;++t)for(let n=0;n<6;++n)for(let i=0;i<6;++i){let s={rgb:[e[t],e[n],e[i]],class_name:\"truecolor\"};this.palette_256.push(s)}let t=8;for(let e=0;e<24;++e,t+=10){let e={rgb:[t,t,t],class_name:\"truecolor\"};this.palette_256.push(e)}}escape_txt_for_html(e){return this._escape_html?e.replace(/[&<>\"']/gm,(e=>\"&\"===e?\"&amp;\":\"<\"===e?\"&lt;\":\">\"===e?\"&gt;\":'\"'===e?\"&quot;\":\"'\"===e?\"&#x27;\":void 0)):e}append_buffer(e){var t=this._buffer+e;this._buffer=t}get_next_packet(){var e={kind:PacketKind.EOS,text:\"\",url:\"\"},t=this._buffer.length;if(0==t)return e;var n=this._buffer.indexOf(\"\u001b\");if(-1==n)return e.kind=PacketKind.Text,e.text=this._buffer,this._buffer=\"\",e;if(n>0)return e.kind=PacketKind.Text,e.text=this._buffer.slice(0,n),this._buffer=this._buffer.slice(n),e;if(0==n){if(t<3)return e.kind=PacketKind.Incomplete,e;var i=this._buffer.charAt(1);if(\"[\"!=i&&\"]\"!=i&&\"(\"!=i)return 
e.kind=PacketKind.ESC,e.text=this._buffer.slice(0,1),this._buffer=this._buffer.slice(1),e;if(\"[\"==i){this._csi_regex||(this._csi_regex=rgx(templateObject_1||(templateObject_1=__makeTemplateObject([\"\\n                        ^                           # beginning of line\\n                                                    #\\n                                                    # First attempt\\n                        (?:                         # legal sequence\\n                          \u001b[                      # CSI\\n                          ([<-?]?)              # private-mode char\\n                          ([d;]*)                    # any digits or semicolons\\n                          ([ -/]?               # an intermediate modifier\\n                          [@-~])                # the command\\n                        )\\n                        |                           # alternate (second attempt)\\n                        (?:                         # illegal sequence\\n                          \u001b[                      # CSI\\n                          [ -~]*                # anything legal\\n                          ([\\0-\u001f:])              # anything illegal\\n                        )\\n                    \"],[\"\\n                        ^                           # beginning of line\\n                                                    #\\n                                                    # First attempt\\n                        (?:                         # legal sequence\\n                          \\\\x1b\\\\[                      # CSI\\n                          ([\\\\x3c-\\\\x3f]?)              # private-mode char\\n                          ([\\\\d;]*)                    # any digits or semicolons\\n                          ([\\\\x20-\\\\x2f]?               
# an intermediate modifier\\n                          [\\\\x40-\\\\x7e])                # the command\\n                        )\\n                        |                           # alternate (second attempt)\\n                        (?:                         # illegal sequence\\n                          \\\\x1b\\\\[                      # CSI\\n                          [\\\\x20-\\\\x7e]*                # anything legal\\n                          ([\\\\x00-\\\\x1f:])              # anything illegal\\n                        )\\n                    \"]))));let t=this._buffer.match(this._csi_regex);if(null===t)return e.kind=PacketKind.Incomplete,e;if(t[4])return e.kind=PacketKind.ESC,e.text=this._buffer.slice(0,1),this._buffer=this._buffer.slice(1),e;\"\"!=t[1]||\"m\"!=t[3]?e.kind=PacketKind.Unknown:e.kind=PacketKind.SGR,e.text=t[2];var s=t[0].length;return this._buffer=this._buffer.slice(s),e}if(\"]\"==i){if(t<4)return e.kind=PacketKind.Incomplete,e;if(\"8\"!=this._buffer.charAt(2)||\";\"!=this._buffer.charAt(3))return e.kind=PacketKind.ESC,e.text=this._buffer.slice(0,1),this._buffer=this._buffer.slice(1),e;this._osc_st||(this._osc_st=rgxG(templateObject_2||(templateObject_2=__makeTemplateObject([\"\\n                        (?:                         # legal sequence\\n                          (\u001b\\\\)                    # ESC                           |                           # alternate\\n                          (\u0007)                      # BEL (what xterm did)\\n                        )\\n                        |                           # alternate (second attempt)\\n                        (                           # illegal sequence\\n                          [\\0-\u0006]                 # anything illegal\\n                          |                           # alternate\\n                          [\\b-\u001a]                 # anything illegal\\n                          |                           # alternate\\n             
             [\u001c-\u001f]                 # anything illegal\\n                        )\\n                    \"],[\"\\n                        (?:                         # legal sequence\\n                          (\\\\x1b\\\\\\\\)                    # ESC \\\\\\n                          |                           # alternate\\n                          (\\\\x07)                      # BEL (what xterm did)\\n                        )\\n                        |                           # alternate (second attempt)\\n                        (                           # illegal sequence\\n                          [\\\\x00-\\\\x06]                 # anything illegal\\n                          |                           # alternate\\n                          [\\\\x08-\\\\x1a]                 # anything illegal\\n                          |                           # alternate\\n                          [\\\\x1c-\\\\x1f]                 # anything illegal\\n                        )\\n                    \"])))),this._osc_st.lastIndex=0;{let t=this._osc_st.exec(this._buffer);if(null===t)return e.kind=PacketKind.Incomplete,e;if(t[3])return e.kind=PacketKind.ESC,e.text=this._buffer.slice(0,1),this._buffer=this._buffer.slice(1),e}{let t=this._osc_st.exec(this._buffer);if(null===t)return e.kind=PacketKind.Incomplete,e;if(t[3])return e.kind=PacketKind.ESC,e.text=this._buffer.slice(0,1),this._buffer=this._buffer.slice(1),e}this._osc_regex||(this._osc_regex=rgx(templateObject_3||(templateObject_3=__makeTemplateObject([\"\\n                        ^                           # beginning of line\\n                                                    #\\n                        \u001b]8;                    # OSC Hyperlink\\n                        [ -:<-~]*       # params (excluding ;)\\n                        ;                           # end of params\\n                        ([!-~]{0,512})        # URL capture\\n                        (?:                     
    # ST\\n                          (?:\u001b\\\\)                  # ESC                           |                           # alternate\\n                          (?:\u0007)                    # BEL (what xterm did)\\n                        )\\n                        ([ -~]+)              # TEXT capture\\n                        \u001b]8;;                   # OSC Hyperlink End\\n                        (?:                         # ST\\n                          (?:\u001b\\\\)                  # ESC                           |                           # alternate\\n                          (?:\u0007)                    # BEL (what xterm did)\\n                        )\\n                    \"],[\"\\n                        ^                           # beginning of line\\n                                                    #\\n                        \\\\x1b\\\\]8;                    # OSC Hyperlink\\n                        [\\\\x20-\\\\x3a\\\\x3c-\\\\x7e]*       # params (excluding ;)\\n                        ;                           # end of params\\n                        ([\\\\x21-\\\\x7e]{0,512})        # URL capture\\n                        (?:                         # ST\\n                          (?:\\\\x1b\\\\\\\\)                  # ESC \\\\\\n                          |                           # alternate\\n                          (?:\\\\x07)                    # BEL (what xterm did)\\n                        )\\n                        ([\\\\x20-\\\\x7e]+)              # TEXT capture\\n                        \\\\x1b\\\\]8;;                   # OSC Hyperlink End\\n                        (?:                         # ST\\n                          (?:\\\\x1b\\\\\\\\)                  # ESC \\\\\\n                          |                           # alternate\\n                          (?:\\\\x07)                    # BEL (what xterm did)\\n                        )\\n                    \"]))));let 
n=this._buffer.match(this._osc_regex);if(null===n)return e.kind=PacketKind.ESC,e.text=this._buffer.slice(0,1),this._buffer=this._buffer.slice(1),e;e.kind=PacketKind.OSCURL,e.url=n[1],e.text=n[2];s=n[0].length;return this._buffer=this._buffer.slice(s),e}if(\"(\"==i)return e.kind=PacketKind.Unknown,this._buffer=this._buffer.slice(3),e}}ansi_to_html(e){this.append_buffer(e);for(var t=[];;){var n=this.get_next_packet();if(n.kind==PacketKind.EOS||n.kind==PacketKind.Incomplete)break;n.kind!=PacketKind.ESC&&n.kind!=PacketKind.Unknown&&(n.kind==PacketKind.Text?t.push(this.transform_to_html(this.with_state(n))):n.kind==PacketKind.SGR?this.process_ansi(n):n.kind==PacketKind.OSCURL&&t.push(this.process_hyperlink(n)))}return t.join(\"\")}with_state(e){return{bold:this.bold,faint:this.faint,italic:this.italic,underline:this.underline,fg:this.fg,bg:this.bg,text:e.text}}process_ansi(e){let t=e.text.split(\";\");for(;t.length>0;){let e=t.shift(),n=parseInt(e,10);if(isNaN(n)||0===n)this.fg=null,this.bg=null,this.bold=!1,this.faint=!1,this.italic=!1,this.underline=!1;else if(1===n)this.bold=!0;else if(2===n)this.faint=!0;else if(3===n)this.italic=!0;else if(4===n)this.underline=!0;else if(21===n)this.bold=!1;else if(22===n)this.faint=!1,this.bold=!1;else if(23===n)this.italic=!1;else if(24===n)this.underline=!1;else if(39===n)this.fg=null;else if(49===n)this.bg=null;else if(n>=30&&n<38)this.fg=this.ansi_colors[0][n-30];else if(n>=40&&n<48)this.bg=this.ansi_colors[0][n-40];else if(n>=90&&n<98)this.fg=this.ansi_colors[1][n-90];else if(n>=100&&n<108)this.bg=this.ansi_colors[1][n-100];else if((38===n||48===n)&&t.length>0){let e=38===n,i=t.shift();if(\"5\"===i&&t.length>0){let n=parseInt(t.shift(),10);n>=0&&n<=255&&(e?this.fg=this.palette_256[n]:this.bg=this.palette_256[n])}if(\"2\"===i&&t.length>2){let n=parseInt(t.shift(),10),i=parseInt(t.shift(),10),s=parseInt(t.shift(),10);if(n>=0&&n<=255&&i>=0&&i<=255&&s>=0&&s<=255){let 
t={rgb:[n,i,s],class_name:\"truecolor\"};e?this.fg=t:this.bg=t}}}}}transform_to_html(e){let t=e.text;if(0===t.length)return t;if(t=this.escape_txt_for_html(t),!e.bold&&!e.italic&&!e.underline&&null===e.fg&&null===e.bg)return t;let n=[],i=[],s=e.fg,l=e.bg;e.bold&&n.push(this._boldStyle),e.faint&&n.push(this._faintStyle),e.italic&&n.push(this._italicStyle),e.underline&&n.push(this._underlineStyle),this._use_classes?(s&&(\"truecolor\"!==s.class_name?i.push(`${s.class_name}-fg`):n.push(`color:rgb(${s.rgb.join(\",\")})`)),l&&(\"truecolor\"!==l.class_name?i.push(`${l.class_name}-bg`):n.push(`background-color:rgb(${l.rgb.join(\",\")})`))):(s&&n.push(`color:rgb(${s.rgb.join(\",\")})`),l&&n.push(`background-color:rgb(${l.rgb})`));let a=\"\",r=\"\";return i.length&&(a=` class=\"${i.join(\" \")}\"`),n.length&&(r=` style=\"${n.join(\";\")}\"`),`<span${r}${a}>${t}</span>`}process_hyperlink(e){let t=e.url.split(\":\");return t.length<1?\"\":this._url_allowlist[t[0]]?`<a href=\"${this.escape_txt_for_html(e.url)}\">${this.escape_txt_for_html(e.text)}</a>`:\"\"}}function rgx(e,...t){let n=e.raw[0].replace(/^\\s+|\\s+\\n|\\s*#[\\s\\S]*?\\n|\\n/gm,\"\");return new RegExp(n)}window.AnsiUp=AnsiUp;function rgxG(e,...t){let n=e.raw[0].replace(/^\\s+|\\s+\\n|\\s*#[\\s\\S]*?\\n|\\n/gm,\"\");return new RegExp(n,\"g\");}\n"
  },
  {
    "path": "docs/javascripts/fstar.js",
    "content": "/*! `ocaml` grammar compiled for Highlight.js 11.10.0 */\n  (function(){\n    var hljsGrammar = (function () {\n  'use strict';\n\n  function fstar(hljs) {\n    /* missing support for heredoc-like string (OCaml 4.0.2+) */\n    return {\n        name: 'FStar',\n        aliases: [ 'fstar', 'fst', 'fsti' ],\n      keywords: {\n        $pattern: '[a-z_]\\\\w*!?',\n        keyword: 'attributes noeq unopteq and assert assume begin by calc class default decreases effect eliminate else end ensures exception exists false friend forall fun λ function if in include inline inline_for_extraction instance introduce irreducible let logic match returns as module new new_effect layered_effect polymonadic_bind polymonadic_subcomp noextract of open opaque private quote range_of rec reifiable reify reflectable requires set_range_of sub_effect synth then total true try type unfold unfoldable val when with string',\n        built_in: 'unit',\n        literal: 'true false'\n      },\n      // illegal: /\\/\\/|>>/,\n      contains: [\n        {\n          className: 'literal',\n          begin: '\\\\[(\\\\|\\\\|)?\\\\]|\\\\(\\\\)',\n          relevance: 0\n        },\n        hljs.COMMENT(\n          '\\\\(\\\\*',\n          '\\\\*\\\\)',\n          { contains: [ 'self' ] }\n        ),\n        //   hljs.inherit(\n        //       hljs.COMMENT(),\n        //       {\n        //           match: [\n        //               /(^|\\s)/,\n        //               /\\/\\/.*$/\n        //           ],\n        //           scope: {\n        //               2: 'comment'\n        //           }\n        //       }\n        //   ),\n        { /* type variable */\n          className: 'symbol',\n          begin: '\\'[A-Za-z_](?!\\')[\\\\w\\']*'\n          /* the grammar is ambiguous on how 'a'b should be interpreted but not the compiler */\n        },\n        { /* module or constructor */\n          className: 'type',\n          begin: '\\\\b[A-Z][\\\\w\\']*',\n          relevance: 
0\n        },\n        { /* don't color identifiers, but safely catch all identifiers with ' */\n          begin: '[a-z_]\\\\w*\\'[\\\\w\\']*',\n          relevance: 0\n        },\n        hljs.inherit(hljs.APOS_STRING_MODE, {\n          className: 'string',\n          relevance: 0\n        }),\n        hljs.inherit(hljs.QUOTE_STRING_MODE, { illegal: null }),\n        {\n          className: 'number',\n          begin:\n            '\\\\b(0[xX][a-fA-F0-9_]+[Lln]?|'\n            + '0[oO][0-7_]+[Lln]?|'\n            + '0[bB][01_]+[Lln]?|'\n            + '[0-9][0-9_]*([Lln]|(\\\\.[0-9_]*)?([eE][-+]?[0-9_]+)?)?)',\n          relevance: 0\n        },\n        { begin: /->/ // relevance booster\n        }\n      ]\n    };\n  }\n\n  return fstar;\n\n})();\n    hljs.registerLanguage('fstar', hljsGrammar);\n  })();\n\n// hljs.initHighlightingOnLoad();\n\n"
  },
  {
    "path": "docs/javascripts/hax_playground.js",
    "content": "const PLAYGROUND_URL = 'https://hax-playground.cryspen.com';\n\n// Fetches the commit hash for latest `main` of hax\nasync function get_latest_hax_main() {\n    let commits = await (await fetch(PLAYGROUND_URL + '/git-refs')).text();\n    return commits.match(/(.*);refs\\/remotes\\/origin\\/main;/).pop();\n}\n\n// This line should not be edited: it is used in the action `playwright-docs.yml`.\nconst HAX_PLAYGROUND_FORCED_VERSION = false;\n\n// Call into the API of the hax playground\nasync function call_playground(result_block, query, text, parent_node) {\n    let raw_query = async (API_URL, hax_version, query, files, on_line_received) => {\n        let response = await fetch(`${API_URL}/query/${hax_version}/${query}`, {\n            method: \"POST\",\n            headers: {\n                'Accept': 'application/json',\n                'Content-Type': 'application/json'\n            },\n            body: JSON.stringify(files),\n        });\n\n        let decoder = new TextDecoder();\n        let leftover = \"\";\n        let reader = response.body.getReader();\n        while (true) {\n            const { done, value } = await reader.read();\n            if (done) break;\n            leftover += decoder.decode(value);\n            let lines = leftover.split('\\n');\n            let entire_lines = lines.slice(0, -1);\n            leftover = lines.slice(-1)[0];\n            for (const line of entire_lines)\n                on_line_received(line);\n        }\n    };\n    let ansi_up = new AnsiUp();\n    let first = true;\n    let logs = document.createElement('div');\n    logs.style = 'font-size: 80%; background: #00000010; padding: 3px; white-space:pre-wrap;';\n    let hax_version = HAX_PLAYGROUND_FORCED_VERSION || await get_latest_hax_main();\n    let lean_backend = query.startsWith('lean');\n    raw_query(\n        PLAYGROUND_URL,\n        hax_version,\n        query,\n        [['src/lib.rs', text]],\n        x => {\n            if (first) {\n       
         result_block.style.padding = '0.7em 1.2em';\n                result_block.innerText = \"\";\n                result_block.appendChild(logs);\n            }\n            first = false;\n            let json = {};\n            try {\n                json = JSON.parse(x);\n            } catch (_) { }\n            if (json.Stderr || json.Stdout) {\n                logs.innerHTML += '<div>' + ansi_up.ansi_to_html(json.Stderr || json.Stdout) + \"</div>\";\n            }\n            if (json.Done) {\n                let out = [];\n                for (let file in json.Done.files) {\n                    if (file.endsWith('.rs'))\n                        continue;\n                    let contents = json.Done.files[file];\n                    contents = (contents.split(lean_backend ? 'set_option linter.unusedVariables false' : 'open FStar.Mul')[1] || contents).trim();\n                    contents = contents.replace(/$/gm, ' ').trim();\n                    out.push([file, contents]);\n                }\n                if (json.Done.success)\n                    result_block.innerText = \"\";\n                else\n                    result_block.innerHTML += \"<br/>\";\n                let result = document.createElement('pre');\n                result.style.whiteSpace = 'pre-wrap';\n                if (out.length == 1) {\n                    result.textContent = out[0][1];\n                } else {\n                    result.textContent = out.map(([file, s]) => '(* File: ' + file + ' *) \\n' + s).join('\\n\\n').trim();\n                }\n                result_block.appendChild(result);\n                hljs.highlightBlock(result);\n                result_block.innerHTML += `<br/><a style=\"float:right; font-family: 'Open Sans', sans-serif; font-size: 70%; cursor: pointer; color: gray; text-transform: uppercase; position: relative; top: -10px;\" href='${PLAYGROUND_URL}/#${lean_backend ? 
\"lean\" : \"fstar\"}/${hax_version}/${LZString.compressToEncodedURIComponent(text)}'>Open in hax playground ↗</a>`;\n                parent_node.classList.remove(\"state-success\", \"state-failure\");\n                parent_node.classList.add(\"state-\" + (json.Done.success ? \"success\" : \"failure\"));\n                if (json.Done.success && query.includes('+tc')) {\n                    result_block.innerHTML += `<div style=\"float: left; padding: 3px; padding-top: 8px; position: relative; top: 6px;\"><span style=\"color: gray;\">Status: </span><span style=\"color: green\">✓ ${lean_backend ? \"Lean\" : \"F*\"} successfully typechecked!</span></div>`;\n                }\n            }\n        },\n    );\n}\n\nfunction setup_hax_playground() {\n    if (document.querySelector('.md-hax-playground'))\n        return;\n    console.log('setup');\n    for (let e of document.querySelectorAll('pre')) {\n        let code = e.querySelector(\"code\");\n        if (!code)\n            continue;\n        let lines = [\n            ...code.children\n        ].map(line => line.innerText.replace(/^\\n+/, '').replace(/\\n+$/, ''))\n            .join(\"\\n\").trim().split('\\n');\n        console.log({ lines });\n        let contents = lines.filter(line => !line.startsWith('# ')).join('\\n');\n        let w = e.parentElement;\n        if (!w.classList.contains(\"playable\"))\n            continue;\n        let backend = w.classList.contains(\"lean-backend\") ? 
'lean' : 'fstar';\n\n        code.innerHTML = \"<pre></pre>\";\n        let inner = code.children[0];\n        inner.style.backgroundColor = \"transparent\";\n        inner.classList.add(\"md-hax-playground-pre\");\n\n        let editor = new codemirror.EditorView({\n            doc: contents,\n            extensions: [codemirror.basicSetup, codemirror.rust()],\n            parent: inner,\n            lineNumbers: false,\n        });\n\n        let result_block = document.createElement(\"pre\");\n        result_block.classList.add(\"hax-playground-pre\");\n        result_block.style.fontFamily = '\"Monaco\", \"Menlo\", \"Ubuntu Mono\", \"Consolas\", \"Source Code Pro\", \"source-code-pro\", monospace';\n        result_block.style.fontSize = '0.85em';\n        result_block.style.background = '#f3f3f3';\n        w.append(result_block);\n\n        let header = lines.filter(line => line.startsWith('# ')).map(line => line.slice(2)).join('\\n');\n        let getCode = () => header + '\\n' + editor.state.doc.toString();\n\n\n        let button_translate = document.createElement(\"button\");\n        button_translate.innerHTML = `<i class=\"fa-solid fa-play\"></i>`;\n        button_translate.classList.add('md-icon');\n        button_translate.classList.add('md-clipboard');\n        button_translate.classList.add('md-hax-playground');\n        button_translate.style.right = \"2.4em\";\n        button_translate.onclick = () => {\n            call_playground(result_block, backend, getCode(), w);\n        };\n        e.prepend(button_translate);\n\n        let button_tc = document.createElement(\"button\");\n        button_tc.innerHTML = `<i class=\"fa-solid fa-check\"></i>`;\n        button_tc.classList.add('md-icon');\n        button_tc.classList.add('md-clipboard');\n        button_tc.classList.add('md-hax-playground');\n        button_tc.style.right = \"4.5em\";\n        button_tc.onclick = () => {\n            call_playground(result_block, backend + '+tc', getCode(), 
w);\n        };\n\n        e.prepend(button_tc);\n    }\n}\n\nwindow.addEventListener('load', () => {\n    setup_hax_playground();\n    const observer = new MutationObserver(() => {\n        if (document.querySelector('.md-hax-playground'))\n            return;\n        setTimeout(setup_hax_playground, 200);\n    });\n    observer.observe(document.querySelector('body'), { childList: true, subtree: true });\n});\n\n\n"
  },
  {
    "path": "docs/javascripts/lz-string.js",
    "content": "var LZString=function(){var r=String.fromCharCode,o=\"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\",n=\"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+-$\",e={};function t(r,o){if(!e[r]){e[r]={};for(var n=0;n<r.length;n++)e[r][r.charAt(n)]=n}return e[r][o]}var i={compressToBase64:function(r){if(null==r)return\"\";var n=i._compress(r,6,function(r){return o.charAt(r)});switch(n.length%4){default:case 0:return n;case 1:return n+\"===\";case 2:return n+\"==\";case 3:return n+\"=\"}},decompressFromBase64:function(r){return null==r?\"\":\"\"==r?null:i._decompress(r.length,32,function(n){return t(o,r.charAt(n))})},compressToUTF16:function(o){return null==o?\"\":i._compress(o,15,function(o){return r(o+32)})+\" \"},decompressFromUTF16:function(r){return null==r?\"\":\"\"==r?null:i._decompress(r.length,16384,function(o){return r.charCodeAt(o)-32})},compressToUint8Array:function(r){for(var o=i.compress(r),n=new Uint8Array(2*o.length),e=0,t=o.length;e<t;e++){var s=o.charCodeAt(e);n[2*e]=s>>>8,n[2*e+1]=s%256}return n},decompressFromUint8Array:function(o){if(null==o)return i.decompress(o);for(var n=new Array(o.length/2),e=0,t=n.length;e<t;e++)n[e]=256*o[2*e]+o[2*e+1];var s=[];return n.forEach(function(o){s.push(r(o))}),i.decompress(s.join(\"\"))},compressToEncodedURIComponent:function(r){return null==r?\"\":i._compress(r,6,function(r){return n.charAt(r)})},decompressFromEncodedURIComponent:function(r){return null==r?\"\":\"\"==r?null:(r=r.replace(/ /g,\"+\"),i._decompress(r.length,32,function(o){return t(n,r.charAt(o))}))},compress:function(o){return i._compress(o,16,function(o){return r(o)})},_compress:function(r,o,n){if(null==r)return\"\";var 
e,t,i,s={},u={},a=\"\",p=\"\",c=\"\",l=2,f=3,h=2,d=[],m=0,v=0;for(i=0;i<r.length;i+=1)if(a=r.charAt(i),Object.prototype.hasOwnProperty.call(s,a)||(s[a]=f++,u[a]=!0),p=c+a,Object.prototype.hasOwnProperty.call(s,p))c=p;else{if(Object.prototype.hasOwnProperty.call(u,c)){if(c.charCodeAt(0)<256){for(e=0;e<h;e++)m<<=1,v==o-1?(v=0,d.push(n(m)),m=0):v++;for(t=c.charCodeAt(0),e=0;e<8;e++)m=m<<1|1&t,v==o-1?(v=0,d.push(n(m)),m=0):v++,t>>=1}else{for(t=1,e=0;e<h;e++)m=m<<1|t,v==o-1?(v=0,d.push(n(m)),m=0):v++,t=0;for(t=c.charCodeAt(0),e=0;e<16;e++)m=m<<1|1&t,v==o-1?(v=0,d.push(n(m)),m=0):v++,t>>=1}0==--l&&(l=Math.pow(2,h),h++),delete u[c]}else for(t=s[c],e=0;e<h;e++)m=m<<1|1&t,v==o-1?(v=0,d.push(n(m)),m=0):v++,t>>=1;0==--l&&(l=Math.pow(2,h),h++),s[p]=f++,c=String(a)}if(\"\"!==c){if(Object.prototype.hasOwnProperty.call(u,c)){if(c.charCodeAt(0)<256){for(e=0;e<h;e++)m<<=1,v==o-1?(v=0,d.push(n(m)),m=0):v++;for(t=c.charCodeAt(0),e=0;e<8;e++)m=m<<1|1&t,v==o-1?(v=0,d.push(n(m)),m=0):v++,t>>=1}else{for(t=1,e=0;e<h;e++)m=m<<1|t,v==o-1?(v=0,d.push(n(m)),m=0):v++,t=0;for(t=c.charCodeAt(0),e=0;e<16;e++)m=m<<1|1&t,v==o-1?(v=0,d.push(n(m)),m=0):v++,t>>=1}0==--l&&(l=Math.pow(2,h),h++),delete u[c]}else for(t=s[c],e=0;e<h;e++)m=m<<1|1&t,v==o-1?(v=0,d.push(n(m)),m=0):v++,t>>=1;0==--l&&(l=Math.pow(2,h),h++)}for(t=2,e=0;e<h;e++)m=m<<1|1&t,v==o-1?(v=0,d.push(n(m)),m=0):v++,t>>=1;for(;;){if(m<<=1,v==o-1){d.push(n(m));break}v++}return d.join(\"\")},decompress:function(r){return null==r?\"\":\"\"==r?null:i._decompress(r.length,32768,function(o){return r.charCodeAt(o)})},_decompress:function(o,n,e){var t,i,s,u,a,p,c,l=[],f=4,h=4,d=3,m=\"\",v=[],g={val:e(0),position:n,index:1};for(t=0;t<3;t+=1)l[t]=t;for(s=0,a=Math.pow(2,2),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;switch(s){case 
0:for(s=0,a=Math.pow(2,8),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;c=r(s);break;case 1:for(s=0,a=Math.pow(2,16),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;c=r(s);break;case 2:return\"\"}for(l[3]=c,i=c,v.push(c);;){if(g.index>o)return\"\";for(s=0,a=Math.pow(2,d),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;switch(c=s){case 0:for(s=0,a=Math.pow(2,8),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;l[h++]=r(s),c=h-1,f--;break;case 1:for(s=0,a=Math.pow(2,16),p=1;p!=a;)u=g.val&g.position,g.position>>=1,0==g.position&&(g.position=n,g.val=e(g.index++)),s|=(u>0?1:0)*p,p<<=1;l[h++]=r(s),c=h-1,f--;break;case 2:return v.join(\"\")}if(0==f&&(f=Math.pow(2,d),d++),l[c])m=l[c];else{if(c!==h)return null;m=i+i.charAt(0)}v.push(m),l[h++]=i+m.charAt(0),i=m,0==--f&&(f=Math.pow(2,d),d++)}}};return i}();\"function\"==typeof define&&define.amd?define(function(){return LZString}):\"undefined\"!=typeof module&&null!=module?module.exports=LZString:\"undefined\"!=typeof angular&&null!=angular&&angular.module(\"LZString\",[]).factory(\"LZString\",function(){return LZString});\n"
  },
  {
    "path": "docs/manual/faq/include-flags.md",
    "content": "# **Rust Item Extraction Using `cargo hax`**\n\n## **Overview**\nWhen extracting Rust items with hax, it is often necessary to include only a specific subset of items from a crate. The `cargo hax into` subcommand provides the `-i` flag to control which items are included or excluded, and how their dependencies are handled. This allows precise tailoring of the extraction process.\n\n## **The `-i` Flag**\nThe `-i` flag accepts a list of patterns with modifiers to define inclusion or exclusion rules for Rust items. Patterns are processed sequentially from left to right, determining which items are extracted.\n\n### **Basic Concepts**\n- **Patterns**: Rust paths with support for `*` and `**` globs.\n  - `*` matches any single segment (e.g., `mycrate::*::myfn`).\n  - `**` matches any subpath, including empty segments (e.g., `**::myfn`).\n- **Modifiers**:\n  - `+`: Includes items and their dependencies (transitively).\n  - `+~`: Includes items and their **direct dependencies only**.\n  - `+!`: Includes only the item itself (no dependencies).\n  - `+:`: Includes only the item's type signature (no body or dependencies).\n  - `-`: Excludes items.\n\nBy default, **all items are included**, unless explicitly modified.\n\n### **Practical Examples of the `-i` Flag Usage**\n\nConsider the following crate (`mycrate`) with the `lib.rs` module:\n\n```rust\nfn interesting_function() { aux() }\nfn aux() { foo::f() }\nfn something_else() { /* ... */ }\n\nmod foo {\n    fn f() { /* ... */ }\n    fn g() { /* ... */ }\n    fn h() { /* ... */ }\n    fn interesting_function() { something() }\n    fn something() { /* ... */ }\n\n    mod bar {\n        fn interesting_function() { /* ... */ }\n    }\n}\n\nfn not_that_one() { not_that_one_dependency() }\nfn not_that_one_dependency() { /* ... */ }\n\nfn not_extracting_function(_: u8) -> u8 {\n    unsafe { /* ... */ }\n    0\n}\n```\n\n#### **1. 
Selectively Including Items with Dependencies**\n```bash\ncargo hax into -i '-** +mycrate::**::interesting_function' <BACKEND>\n```\n\n- **Explanation**:\n  - `-**`: Excludes all items by default.\n  - `+mycrate::**::interesting_function`: Includes all items matching `mycrate::**::interesting_function` and their dependencies.\n- **Extracted Items**:\n  1. `mycrate::interesting_function` (direct match).\n  2. `mycrate::foo::interesting_function` (direct match).\n  3. `mycrate::foo::bar::interesting_function` (direct match).\n  4. `mycrate::aux` (dependency of `mycrate::interesting_function`).\n  5. `mycrate::foo::f` (dependency of `mycrate::aux`).\n  6. `mycrate::foo::something` (dependency of `mycrate::foo::interesting_function`).\n\n#### **2. Excluding Specific Items**\n```bash\ncargo hax into -i '+** -*::not_that_one' <BACKEND>\n```\n\n- **Explanation**:\n  - `+**`: Includes all items by default.\n  - `-*::not_that_one`: Excludes any item named `not_that_one`, but keeps all other items, including `not_that_one_dependency`.\n- **Extracted Items**: All except `mycrate::not_that_one`.\n\n#### **3. Including Items Without Dependencies**\n```bash\ncargo hax into -i '-** +!mycrate::interesting_function' <BACKEND>\n```\n\n- **Explanation**:\n  - `-**`: Excludes all items by default.\n  - `+!mycrate::interesting_function`: Includes only `mycrate::interesting_function`, without dependencies.\n- **Extracted Items**: Only `mycrate::interesting_function`.\n\n#### **4. Including Items with Direct Dependencies Only**\n```bash\ncargo hax into -i '-** +~mycrate::interesting_function' <BACKEND>\n```\n\n- **Explanation**:\n  - `-**`: Excludes all items by default.\n  - `+~mycrate::interesting_function`: Includes `mycrate::interesting_function` and its direct dependencies (but not their transitive dependencies).\n- **Extracted Items**:\n  1. `mycrate::interesting_function`.\n  2. 
`mycrate::aux` (direct dependency).\n- **Excluded Items**:\n  - `mycrate::foo::f` (transitive dependency of `mycrate::aux`).\n\n#### **5. Including Items in Signature-Only Mode**\n```bash\ncargo hax into -i '+:mycrate::not_extracting_function' <BACKEND>\n```\n\n- **Explanation**:\n  - `+:mycrate::not_extracting_function`: Includes only the type signature of `mycrate::not_extracting_function` (e.g., as an assumed or axiomatized symbol).\n- **Extracted Items**:\n  - The type signature of `mycrate::not_extracting_function`, without its body or dependencies.\n\n\n\n#### **6. Including anonymous items using `hax_lib::include`**\nSome items like [trait impls](https://doc.rust-lang.org/reference/items/implementations.html#r-items.impl.trait), or [inherent impls](https://doc.rust-lang.org/reference/items/implementations.html#r-items.impl.inherent) have no name so it is impossible to target them specifically using the `-i` flag.\nIn this case, one can use [`hax_lib::include`](https://docs.rs/hax-lib/latest/hax_lib/attr.include.html) to extract these items, and override the default behavior for the rest of the module.\n```rust\nstruct S;\n\n#[hax_lib::include]\nimpl S {\n    fn f() {}\n}\n\nimpl S {\n    #[hax_lib::include]\n    fn g() {}\n    fn h () {}\n}\n```\nTo include only `S::f` and `S::g` in the example above, the `hax_lib::include` annotations does the trick, together with the following extraction command:\n```bash\ncargo hax into -i '-**' <BACKEND>\n```\n\n### **Summary**\nThe `-i` flag offers powerful control over extraction, allowing fine-grained inclusion and exclusion of items with various dependency handling strategies. Use it to:\n- Extract specific items and their dependencies (`+` or `+~`).\n- Exclude certain items (`-`).\n- Include items without dependencies (`+!`).\n- Extract type signatures only (`+:`).\n\nFor complex crates, this flexibility ensures only the necessary parts are extracted, optimizing analysis or transformation workflows.\n\n"
  },
  {
    "path": "docs/manual/faq/index.md",
    "content": "---\nweight: 200\n---\n\n# Troubleshooting/FAQ\n\nThis chapter captures a list of common questions or issues and how to resolve them. If you happen to run into an issue that is not documented here, please consider submitting a pull request!\n"
  },
  {
    "path": "docs/manual/faq/into.md",
    "content": "# Troubleshooting/FAQ\n"
  },
  {
    "path": "docs/manual/fstar/.nav.yml",
    "content": "title: F*"
  },
  {
    "path": "docs/manual/fstar/index.md",
    "content": "This section introduces the F\\* backend of hax. It covers how to set up a project, and the basics of how to use hax and F\\* to verify Rust code."
  },
  {
    "path": "docs/manual/fstar/quick_start.md",
    "content": "---\nweight: 0\n---\n\n# Quick start\n\nDo you want to try hax out on a Rust crate of yours? This chapter is\nwhat you are looking for!\n\n## Setup the tools\n\n - <input type=\"checkbox\" class=\"user-checkable\"/> [Install the hax toolchain](https://github.com/hacspec/hax?tab=readme-ov-file#installation).  \n   <span style=\"margin-right:30px;\"></span>🪄 Running `cargo hax --version` should print some version info.\n - <input type=\"checkbox\" class=\"user-checkable\"/> [Install F\\*](https://github.com/FStarLang/FStar/blob/master/INSTALL.md)\n\n## Setup the crate you want to verify\n\n*Note: the instructions below assume you are in the folder of the specific crate (**not workspace!**) you want to extract.*\n\n\n - <input type=\"checkbox\" class=\"user-checkable\"/> Create the folder `proofs/fstar/extraction`, right next to the `Cargo.toml` of the crate you want to verify.  \n   <span style=\"margin-right:30px;\"></span>🪄 `mkdir -p proofs/fstar/extraction`\n - <input type=\"checkbox\" class=\"user-checkable\"/> Copy [this makefile](https://gist.github.com/W95Psp/4c304132a1f85c5af4e4959dd6b356c3) to `proofs/fstar/extraction/Makefile`  \n   <span style=\"margin-right:30px;\"></span>🪄 `curl -o proofs/fstar/extraction/Makefile https://gist.githubusercontent.com/W95Psp/4c304132a1f85c5af4e4959dd6b356c3/raw/Makefile`\n - <input type=\"checkbox\" class=\"user-checkable\"/> Add `hax-lib` as a dependency to your crate, enabled only when using hax.  
\n   <span style=\"margin-right:30px;\"></span>🪄 `cargo add --target 'cfg(hax)' --git https://github.com/hacspec/hax hax-lib`  \n   <span style=\"margin-right:30px;\"></span><span style=\"opacity: 0;\">🪄</span> *(`hax-lib` is not mandatory, but this guide assumes it is present)*\n\n## Partial extraction\n\n*Note: the instructions below assume you are in the folder of the\nspecific crate you want to extract.*\n\nRun the command `cargo hax into fstar` to extract every item of your\ncrate as F\\* modules in the subfolder `proofs/fstar/extraction`.\n\n**What is critical? What is worth verifying?**  \nProbably, your Rust crate contains mixed kinds of code: some parts are\ncritical (e.g. the library functions at the core of your crate) while\nsome others are not (e.g. the binary driver that wraps the\nlibrary). In this case, you likely want to extract only partially your\ncrate, so that you can focus on the important part.\n\n**Partial extraction.**  \nIf you want to extract a function\n`your_crate::some_module::my_function`, you need to tell `hax` to\nextract nothing but `my_function`:\n\n```bash\ncargo hax into -i '-** +your_crate::some_module::my_function' fstar\n```\n\nNote this command will extract `my_function` but also any item\n(function, type, etc.) from your crate which is used directly or\nindirectly by `my_function`. If you don't want the dependency, use\n`+!` instead of `+` in the `-i` flag.\n\n**Unsupported Rust code.**  \nhax [doesn't support every Rust\nconstructs](https://github.com/hacspec/hax?tab=readme-ov-file#supported-subset-of-the-rust-language),\n`unsafe` code, or complicated mutation scheme. That is another reason\nfor extracting only a part of your crate. When running hax, if an item\nof your crate, say a function `my_crate::f`, is not handled by hax,\nyou can append `-my_crate::f` to the `-i` flag. 
You can learn more\nabout the `-i` flag [in the FAQ](../faq/include-flags.md).\n\n## Start F\\* verification\nAfter running the hax toolchain on your Rust code, you will end up\nwith various F\\* modules in the `proofs/fstar/extraction` folder. The\n`Makefile` in `proofs/fstar/extraction` will run F\\*.\n\n1. **Lax check:** the first step is to run `OTHERFLAGS=\"--lax\" make`,\n   which will run F\\* in \"lax\" mode. The lax mode just makes sure basic\n   typechecking works: it is not proving anything. This first step is\n   important because there might be missing libraries. If F\\* is not\n   able to find a definition, it is probably a `libcore` issue: you\n   probably need to edit the F\\* library, which lives in the\n   `proofs-libs` directory in the root of the hax repo.\n2. **Typecheck:** the second step is to run `make`. This will ask F\\*\n   to typecheck fully your crate. This is very likely that you need to\n   add preconditions and postconditions at this stage. Indeed, this\n   second step is about panic freedom: if F\\* can typecheck your crate,\n   it means your code *never* panics, which already is an important\n   property.\n\nTo go further, please read the next chapter.\n"
  },
  {
    "path": "docs/manual/fstar/tutorial/data-invariants.md",
    "content": "---\nweight: 2\n---\n\n# Data invariants\n\nIn the two previous chapters we saw how to write specifications on\nfunctions, be it with pre and post-condition or with lemmas. In this\nchapter, we will see how to maintain invariants with precise types.\n\n## Making illegal states unpresentable\nWith the Barrett example, we were working on a certain field, whose\nelements were represented as `i32` integers. To simplify, let's\nconsider `F₃`, the finite field with 3 elements (say `0`, `1` and\n`2`). Every element of `F3` can be represented as a `i32` integers,\nbut the converse doesn't hold: the vast majority of `i32` integers are\nnot in of `F₃`.\n\nRepresenting `F₃` as `i32`s, every time we define a function consuming\n`F₃` elements, we face the risk to consume *illegal* elements. We are\nthus back to [chapter 4.1](panic-freedom.md): we should panic on\nillegal elements, and add hax pre-conditions on every single\nfunction. That's not ideal: the property of being either `0`, `1` or\n`2` should be encoded directly on the type representing `F₃` elements.\n\n### `enum`s to then rescue\nRust alone already can solve our representation issues with\n[enums](https://doc.rust-lang.org/book/ch06-00-enums.html)! Below, we\ndefine the `enum` type `F3` which has only three constructor: `F3`\nrepresent exactly the elements of `F₃`, not more, not less.\n\n```{.rust .playable}\nenum F3 {\n    E1,\n    E2,\n    E3,\n}\n```\n\nWith `F3`, there doesn't exist illegal values at all: we can now\ndefine [*total*\nfunctions](https://en.wikipedia.org/wiki/Partial_function) on `F₃`\nelements. We dropped altogether a source of panic!\n\nSoon you want to work with a bigger finite field: say\n`F₂₃₄₇`. Representing this many `q` different elements with an Rust\nenum would be very painful... The `enum` approach falls apart.\n\n### Newtype and refinements\nSince we don't want an `enum` with 2347 elements, we have to revert to\na type that can hold this many elements. 
The smallest integer type\nlarge enough provided by Rust is `u16`.\n\nLet's define `F` a\n[\"newtype\"](https://matklad.github.io/2018/06/04/newtype-index-pattern.html):\na [struct](https://doc.rust-lang.org/book/ch05-00-structs.html) with\none `u16` field `v`. Notice the refinement annotation on `v`: the\nextraction of this type `F` via hax will result in a type enforcing\n`v` small enough.\n\n``` {.rust .playable}\npub const Q: u16 = 2347;\n\n#[hax_lib::attributes]\npub struct F {\n    #[hax_lib::refine(v < Q)]\n    pub v: u16,\n}\n```\n\nIn Rust, we can now define functions that operates on type `F`,\nassuming they are in bounds with respect to `F₂₃₄₇`: every such\nassumption will be checked and enforced by the proof assistant. As an\nexample, below is the implementation of the addition for type `F`.\n\n``` {.rust .playable}\n# pub const Q: u16 = 2347;\n# \n# #[hax_lib::attributes]\n# pub struct F {\n#     #[hax_lib::refine(v < Q)]\n#     pub v: u16,\n# }\n\nuse core::ops::Add;\n\nimpl Add for F {\n    type Output = Self;\n    fn add(self, rhs: Self) -> Self {\n        Self {\n            v: (self.v + rhs.v) % Q,\n        }\n    }\n}\n```\n\nHere, F\\* is able to prove automatically that (1) the addition doesn't\noverflow and (2) that the invariant of `F` is preserved. The\ndefinition of type `F` in F\\* (named `t_F`) very explicitly requires\nthe invariant as a refinement on `v`.\n"
  },
  {
    "path": "docs/manual/fstar/tutorial/index.md",
    "content": "---\nweight: 1\n---\n\n# Tutorial\n\nThis tutorial is a guide for formally verifying properties about Rust\nprograms using the hax toolchain. hax is a tool that translates Rust\nprograms to various formal programming languages.\n\nThe formal programming languages we target are called *backends*. Some\nof them, e.g. [F\\*](https://fstar-lang.org/), [Lean](https://lean-lang.org/) or\n[Coq](https://coq.inria.fr/), are general purpose formal programming\nlanguages. Others are specialized tools:\n[ProVerif](https://bblanche.gitlabpages.inria.fr/proverif/) is\ndedicated to proving properties about protocols.\n\nThis tutorial focuses on proving properties with\n[F\\*](https://fstar-lang.org/).\n"
  },
  {
    "path": "docs/manual/fstar/tutorial/panic-freedom.md",
    "content": "---\nweight: 0\n---\n\n# Panic freedom\n\nLet's start with a simple example: a function that squares a `u8`\ninteger. To extract this function to F\\* using hax, we simply need to\nrun the command `cargo hax into fstar` in the directory of the crate\nin which the function `square` is defined.\n\n*Note: throughout this tutorial, you can edit the snippets of code and\nextract to F\\* by clicking the play button (:material-play:), or even typecheck it with the button (:material-check:).*\n\n```{.rust .playable .expect-failure }\nfn square(x: u8) -> u8 {\n    x * x\n}\n```\n\nThough, if we try to verify this function, F\\* is complaining about a\nsubtyping issue: F\\* tells us that it is not able to prove that the\nresult of the multiplication `x * x` fits the range of `u8`. The\nmultiplication `x * x` might indeed be overflowing!\n\nFor instance, running `square(16)` panics: `16 * 16` is `256`, which\nis just over `255`, the largest integer that fits `u8`. Rust does not\nensure that functions are *total*: a function might panic at any\npoint, or might never terminate.\n\n\n## Rust and panicking code\nQuoting the chapter [To `panic!` or Not to\n`panic!`](https://doc.rust-lang.org/book/ch09-03-to-panic-or-not-to-panic.html)\nfrom the Rust book:\n\n> The `panic!` macro signals that your program is in a state it can't\n> handle and lets you tell the process to stop instead of trying to\n> proceed with invalid or incorrect values.\n\nA Rust program should panic only in a situation where an assumption\nor an invariant is broken: a panic models an *invalid* state. Formal\nverification is about proving such invalid state cannot occur, at all.\n\nFrom this observation emerges the urge of proving Rust programs to be\npanic-free!\n\n## Fixing our squaring function\nLet's come back to our example. 
There is an informal assumption to the\nmultiplication operator in Rust: the inputs should be small enough so\nthat the multiplication doesn't overflow.\n\nNote that Rust also provides `wrapping_mul`, a non-panicking variant\nof the multiplication on `u8` that wraps when the result is bigger\nthan `255`. Replacing the common multiplication with `wrapping_mul` in\n`square` would fix the panic, but then, `square(16)` returns zero.\nSemantically, this is not what one would expect from `square`.\n\nOur problem is that our function `square` is well-defined only when\nits input is between `0` and `15`.\n\n### Solution A: reflect the partialness of the function in Rust\nA first solution is to make `square` return an `Option<u8>` instead of a `u8`:\n``` {.rust .playable}\nfn square_option(x: u8) -> Option<u8> {\n    if x >= 16 {\n        None\n    } else {\n        Some(x * x)\n    }\n}\n```\n\nHere, F\\* is able to prove panic-freedom: calling `square` with any\ninput is safe. Though, one may argue that `square`'s input being small\nenough should really be an assumption. Having to deal with the\npossible integer overflowing whenever squaring is a huge burden. Can\nwe do better?\n\n### Solution B: add a precondition\nThe type system of Rust doesn't allow the programmer to formalize the\nassumption that `square` expects a small `u8`. This becomes\npossible using hax: one can annotate a function with a pre-condition\non its inputs.\n\nThe pre-conditions and post-conditions on a function form a\n*contract*: \"if you give me some inputs that satisfy a given formula\n(*the precondition*), I will produce a return value that satisfies\nanother formula (*the postcondition*)\". 
Outside this contract,\nanything might happen: the function might panic, might run forever,\nerase your disk, or anything.\n\nThe helper crate\n[hax-lib](https://github.com/cryspen/hax/tree/main/hax-lib)\nprovides the `requires`\n[proc-macro](https://doc.rust-lang.org/reference/procedural-macros.html)\nwhich lets users write pre-conditions directly in Rust.\n\n```{.rust .playable}\n#[hax_lib::requires(x < 16)]\nfn square_requires(x: u8) -> u8 {\n    x * x\n}\n```\n\nWith this precondition, F\\* is able to prove panic freedom. From now\non, it is the responsibility of the clients of `square` to respect the\ncontract. The next step is thus to verify, through hax extraction,\nthat `square` is used correctly at every call site.\n\n## Common panicking situations\nMultiplication is not the only panicking function provided by the Rust\nlibrary: most of the other integer arithmetic operations have such\ninformal assumptions.\n\nAnother source of panics is indexing. Indexing in an array, a slice or\na vector is a partial operation: the index might be out of range.\n\nIn the example folder of hax, you can find the [`chacha20`\nexample](https://github.com/cryspen/hax/blob/main/examples/chacha20/src/lib.rs)\nthat makes use of pre-conditions to prove panic freedom.\n\nAnother solution for safe indexing is to use the [newtype index\npattern](https://matklad.github.io/2018/06/04/newtype-index-pattern.html),\nwhich is [also supported by\nhax](https://github.com/cryspen/hax/blob/d668de4d17e5ddee3a613068dc30b71353a9db4f/tests/attributes/src/lib.rs#L98-L126). The [data invariants](data-invariants.md#newtype-and-refinements) chapter gives more details about this.\n\n"
  },
  {
    "path": "docs/manual/fstar/tutorial/proofs/fstar/extraction/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect:\n#  1. `fstar.exe` to be in PATH (alternatively, you can also set\n#     $FSTAR_HOME to be set to your F* repo/install directory)\n#\n#  2. `cargo`, `hax` and `rustup` to be installed and in PATH.\n#\n#  3. the extracted Cargo crate to have \"hax-lib\" as a dependency:\n#     `hax-lib = { version = \"0.1.0-pre.1\", git = \"https://github.com/hacspec/hax\"}`\n#\n# Optionally, you can set `HACL_HOME`.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHACL_HOME     ?= $(HOME)/.hax/hacl_home\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= .cache\nHINT_DIR      ?= .hints\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\n# If $HACL_HOME doesn't exist, clone it\n${HACL_HOME}:\n\tmkdir -p 
\"${HACL_HOME}\"\n\tgit clone --depth 1 https://github.com/hacl-star/hacl-star.git \"${HACL_HOME}\"\n\n# By default, we process all the files in the current directory\nROOTS = $(wildcard *.fst *fsti)\n\n# The following is a bash script that discovers F* libraries\ndefine FINDLIBS\n    # Prints a path if and only if it exists. Takes one argument: the\n    # path.\n    function print_if_exists() {\n        if [ -d \"$$1\" ]; then\n            echo \"$$1\"\n        fi\n    }\n    # Asks Cargo all the dependencies for the current crate or workspace,\n    # and extract all \"root\" directories for each. Takes zero argument.\n    function dependencies() {\n        cargo metadata --format-version 1 |\n            jq -r '.packages | .[] | .manifest_path | split(\"/\") | .[:-1] | join(\"/\")'\n    }\n    # Find hax libraries *around* a given path. Takes one argument: the\n    # path.\n    function find_hax_libraries_at_path() {\n        path=\"$$1\"\n        # if there is a `proofs/fstar/extraction` subfolder, then that's a\n        # F* library\n        print_if_exists \"$$path/proofs/fstar/extraction\"\n        # Maybe the `proof-libs` folder of hax is around?\n        MAYBE_PROOF_LIBS=$$(realpath -q \"$$path/../proof-libs/fstar\")\n        if [ $$? 
-eq 0 ]; then\n            print_if_exists \"$$MAYBE_PROOF_LIBS/core\"\n            print_if_exists \"$$MAYBE_PROOF_LIBS/rust_primitives\"\n        fi\n    }\n    { while IFS= read path; do\n          find_hax_libraries_at_path \"$$path\"\n      done < <(dependencies)\n    } | sort -u\nendef\nexport FINDLIBS\n\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c \"$$FINDLIBS\")\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS)\n\n.depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nSHELL=bash\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n\trm *.fst\n\n"
  },
  {
    "path": "docs/manual/fstar/tutorial/proofs/fstar/extraction/Tutorial_src.Math.Lemmas.fst",
    "content": "module Tutorial_src.Math.Lemmas\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 150\"\nopen Core\nopen FStar.Mul\n\n\nval cancel_mul_mod (a:i32) (n:i32 {v n >= 0}) : Lemma ((v a * v n) % v n == 0)\nlet cancel_mul_mod a n =\n  FStar.Math.Lemmas.cancel_mul_mod (v a) (v n)\n"
  },
  {
    "path": "docs/manual/fstar/tutorial/proofs/fstar/extraction/Tutorial_src.fst",
    "content": "module Tutorial_src\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen Core\nopen FStar.Mul\n\ntype t_F3 =\n  | F3_E1 : t_F3\n  | F3_E2 : t_F3\n  | F3_E3 : t_F3\n\nlet t_F3_cast_to_repr (x: t_F3) : isize =\n  match x with\n  | F3_E1  -> isz 0\n  | F3_E2  -> isz 1\n  | F3_E3  -> isz 3\n\nunfold\nlet t_FieldElement = i32\n\nlet v_BARRETT_MULTIPLIER: i64 = 20159L\n\nlet v_BARRETT_R: i64 = 67108864L\n\nlet v_BARRETT_SHIFT: i64 = 26L\n\nlet v_FIELD_MODULUS: i32 = 3329l\n\nlet v_Q: u16 = 2347us\n\nlet barrett_reduce (value: i32)\n    : Prims.Pure i32\n      (requires\n        (Core.Convert.f_from value <: i64) >=. (Core.Ops.Arith.Neg.neg v_BARRETT_R <: i64) &&\n        (Core.Convert.f_from value <: i64) <=. v_BARRETT_R)\n      (ensures\n        fun result ->\n          let result:i32 = result in\n          result >. (Core.Ops.Arith.Neg.neg v_FIELD_MODULUS <: i32) && result <. v_FIELD_MODULUS &&\n          (result %! v_FIELD_MODULUS <: i32) =. (value %! v_FIELD_MODULUS <: i32)) =\n  let t:i64 = (Core.Convert.f_from value <: i64) *! v_BARRETT_MULTIPLIER in\n  let t:i64 = t +! (v_BARRETT_R >>! 1l <: i64) in\n  let quotient:i64 = t >>! v_BARRETT_SHIFT in\n  let quotient:i32 = cast (quotient <: i64) <: i32 in\n  let sub:i32 = quotient *! v_FIELD_MODULUS in\n  let _:Prims.unit = Tutorial_src.Math.Lemmas.cancel_mul_mod quotient 3329l in\n  value -! sub\n\nlet decrypt (ciphertext key: u32) : u32 = ciphertext ^. key\n\nlet encrypt (plaintext key: u32) : u32 = plaintext ^. key\n\nlet encrypt_decrypt_identity (key plaintext: u32)\n    : Lemma (requires true)\n      (ensures (decrypt (encrypt plaintext key <: u32) key <: u32) =. plaintext) = ()\n\nlet square (x: u8) : u8 = x *! x\n\nlet square_ensures (x: u8)\n    : Prims.Pure u8\n      (requires x <. 16uy)\n      (ensures\n        fun result ->\n          let result:u8 = result in\n          result >=. x) = x *! x\n\nlet square_option (x: u8) : Core.Option.t_Option u8 =\n  if x >=. 
16uy\n  then Core.Option.Option_None <: Core.Option.t_Option u8\n  else Core.Option.Option_Some (x *! x) <: Core.Option.t_Option u8\n\nlet square_requires (x: u8) : Prims.Pure u8 (requires x <. 16uy) (fun _ -> Prims.l_True) = x *! x\n\ntype t_F = { f_v:f_v: u16{f_v <. v_Q} }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: Core.Ops.Arith.t_Add t_F t_F =\n  {\n    f_Output = t_F;\n    f_add_pre = (fun (self: t_F) (rhs: t_F) -> true);\n    f_add_post = (fun (self: t_F) (rhs: t_F) (out: t_F) -> true);\n    f_add = fun (self: t_F) (rhs: t_F) -> { f_v = (self.f_v +! rhs.f_v <: u16) %! v_Q } <: t_F\n  }\n"
  },
  {
    "path": "docs/manual/fstar/tutorial/properties.md",
    "content": "---\nweight: 1\n---\n\n# Proving properties\n\nIn the last chapter, we proved one property on the `square` function:\npanic freedom. After adding a precondition, the signature of the\n`square` function was `x:u8 -> Pure u8 (requires x <. 16uy) (ensures fun _ -> True)`.\n\nThis contract stipulates that, given a small input, the function will\n_return a value_: it will not panic or diverge. We could enrich the\ncontract of `square` with a post-condition about the fact it is a\nincreasing function:\n\n``` {.rust .playable}\n#[hax_lib::requires(x < 16)]\n#[hax_lib::ensures(|result| result >= x)]\nfn square_ensures(x: u8) -> u8 {\n    x * x\n}\n```\n\nSuch a simple post-condition is automatically proven by F\\*. The\nproperties of our `square` function are not fascinating. Let's study a\nmore interesting example: [Barrett reduction](https://en.wikipedia.org/wiki/Barrett_reduction).\n\n## A concrete example of contract: Barrett reduction\n\nWhile the correctness of `square` is obvious, the Barrett reduction is\nnot.\n\nGiven `value` a field element (a `i32` whose absolute value is at most\n`BARRET_R`), the function `barrett_reduce` defined below computes\n`result` such that:\n\n- `result ≡ value (mod FIELD_MODULUS)`;\n- the absolute value of `result` is bound as follows:\n  `|result| < FIELD_MODULUS`.\n\nIt is easy to write this contract directly as `hax::requires` and\n`hax::ensures` annotations, as shown in the snippet below.\n\n```{.rust .playable}\ntype FieldElement = i32;\nconst FIELD_MODULUS: i32 = 3329;\nconst BARRETT_SHIFT: i64 = 26;\nconst BARRETT_R: i64 = 0x4000000; // 2^26\nconst BARRETT_MULTIPLIER: i64 = 20159; // ⌊(BARRETT_R / FIELD_MODULUS) + 1/2⌋\n\n#[hax_lib::fstar::options(\"--z3rlimit 500\")]\n#[hax_lib::requires((i64::from(value) >= -BARRETT_R && i64::from(value) <= BARRETT_R))]\n#[hax_lib::ensures(|result| result > -FIELD_MODULUS && result < FIELD_MODULUS\n                     && result %  FIELD_MODULUS ==  value % FIELD_MODULUS)]\nfn 
barrett_reduce(value: i32) -> i32 {\n    let t = i64::from(value) * BARRETT_MULTIPLIER;\n    let t = t + (BARRETT_R >> 1);\n\n    let quotient = t >> BARRETT_SHIFT;\n    let quotient = quotient as i32;\n\n    let sub = quotient * FIELD_MODULUS;\n\n    value - sub\n}\n```\n\n<!-- Note that we call `cancel_mul_mod`, a lemma: in Rust, this has no\neffect, but in F\\*, that establishes that `(quotient * 3329) % 3329` is\nzero. -->\n\nThe proof for the code above uses the Z3 SMT solver to prove the\npost-condition.  Since the SMT solver needs to reason about non-linear\narithmetic (multiplication, modulus, division) it needs more\nresources, hence we bump up the `rlimit` to 500 in an annotation above\nthe function. With this annotation F\\* and Z3 are able to automatically\nverify this function. However, it is worth noting that the heuristic\nstrategies used by Z3 for non-linear arithmetic may sometimes fail to\ncomplete in the given `rlimit` depending on the solver version or random\nnumber generator, so we often give Z3 a generous resource limit.\n\nConversely, instead of relying on the SMT solver, we can also\nelaborate the proof of this function by hand to make it more\npredictable.  For example, before the final line of the function, \nwe could call a mathematical lemma to help F\\* prove\nthe correctness of the reduction.  The lemma call would be:\n```\n    fstar!(\"Math.Lemmas.cancel_mul_mod (v quotient) 3329\");\n```\nThis lemma establishes that `(quotient * 3329) % 3329` is zero. We often use lemmas like\nthese to limit our dependence on Z3. \n\nThis Barrett reduction example is taken from\n[libcrux](https://github.com/cryspen/libcrux/tree/main)'s proof of\nKyber which is using hax and F\\*.\n\nThis example showcases an **intrinsic proof**: the function\n`barrett_reduce` not only computes a value, but it also ships a proof\nthat the post-condition holds. 
The pre-condition and post-condition\ngive the function a formal specification, which is useful both for\nfurther formal verification and for documentation purposes.\n\n## Extrinsic properties with lemmas\n\nConsider the `encrypt` and `decrypt` functions below. Those functions\nhave no precondition, don't have particularly interesting properties\nindividually. However, the composition of the two yields a useful\nproperty: encrypting a plaintext and decrypting the result with the\nsame key produces the plaintext again. `|c| decrypt(c, key)` is the\ninverse of `|p| encrypt(p, key)`.\n\n```{.rust .playable}\nfn encrypt(plaintext: u32, key: u32) -> u32 {\n    plaintext ^ key\n}\n\nfn decrypt(ciphertext: u32, key: u32) -> u32 {\n    ciphertext ^ key\n}\n```\n\nIn this situation, adding a pre- or a post-condition to either\n`encrypt` or `decrypt` is not useful: we want to state our inverse\nproperty about both of them. Better, we want this property to be\nstated directly in Rust: just as with pre and post-conditions, the\nRust sources should clearly state what is to be proven.\n\nTo this end, Hax provides a macro `lemma`. Below, the Rust function\n`encrypt_decrypt_identity` takes a key and a plaintext, and then\nstates the inverse property. The body is empty: the details of the\nproof itself are not relevant, at this stage, we only care about the\nstatement. The proof will be completed manually in the proof\nassistant.\n\n```{.rust .playable .expect-failure}\n# fn encrypt(plaintext: u32, key: u32) -> u32 {\n#     plaintext ^ key\n# }\n# \n# fn decrypt(ciphertext: u32, key: u32) -> u32 {\n#     ciphertext ^ key\n# }\n# \n#[hax_lib::lemma]\n#[hax_lib::requires(true)]\n\nfn encrypt_decrypt_identity(\n    key: u32,\n    plaintext: u32,\n) -> Proof<{ decrypt(encrypt(plaintext, key), key) == plaintext }> {\n}\n```\n"
  },
  {
    "path": "docs/manual/index.md",
    "content": "---\nweight: -5\n---\n\n# Introduction\n\nhax is a tool for high assurance translations of a large subset of\nRust into formal languages such as [F\\*](https://www.fstar-lang.org/), [Lean](https://lean-lang.org/) or [Rocq](https://rocq-prover.org/).\n\n## Usage\n\nHax is a cargo subcommand. \nThe command `cargo hax` accepts the following subcommands:\n\n* **`into`** (`cargo hax into BACKEND`): translate a Rust crate to the backend `BACKEND` (e.g. `fstar`, `coq`, `lean`).\n* **`json`** (`cargo hax json`): extract the typed AST of your crate as a JSON file.\n \nNote:\n\n* `BACKEND` can be `fstar`, `coq`, `lean`, `easycrypt` or `pro-verif`. `cargo hax into --help`\n   gives the full list of supported backends.\n* The subcommands `cargo hax`, `cargo hax into` and `cargo hax into\n   <BACKEND>` takes options. For instance, you can `cargo hax into\n   fstar --z3rlimit 100`. Use `--help` on those subcommands to list\n   all options.\n\n## Installation\n\n### Manual installation\n\n1. Make sure to have the following installed on your system:\n\n      - [`opam`](https://opam.ocaml.org/) (`opam switch create 5.1.1`)\n      - [`rustup`](https://rustup.rs/)\n      - [`nodejs`](https://nodejs.org/)\n      - [`jq`](https://jqlang.github.io/jq/)\n\n2. Clone this repo: `git clone git@github.com:hacspec/hax.git && cd hax`\n3. Run the `setup.sh` script: `./setup.sh`.\n4. 
Run `cargo-hax --help`\n\n### Nix\n\nThis should work on [Linux](https://nixos.org/download.html#nix-install-linux), [MacOS](https://nixos.org/download.html#nix-install-macos) and [Windows](https://nixos.org/download.html#nix-install-windows).\n\n<b>Prerequisites:</b> <a href=\"https://nixos.org/\">Nix package\nmanager</a> <i>(with <a href=\"https://nixos.wiki/wiki/Flakes\">flakes</a> enabled)</i>\n\n  - Either using the [Determinate Nix Installer](https://github.com/DeterminateSystems/nix-installer), with the following bash one-liner:\n    ```bash\n    curl --proto '=https' --tlsv1.2 -sSf -L https://install.determinate.systems/nix | sh -s -- install\n    ```\n  - or following [those steps](https://github.com/mschwaig/howto-install-nix-with-flake-support).\n\n+ **Run hax on a crate directly** to get F\\*/Coq/... (assuming you are in the crate's folder):\n   - `nix run github:hacspec/hax -- into fstar` extracts F\\*.\n\n+ **Install hax**:  `nix profile install github:hacspec/hax`, then run `cargo hax --help` anywhere\n+ **Note**: in any of the Nix commands above, replace `github:hacspec/hax` by `./some-dir` to compile a local checkout of hax that lives in `./some-dir`\n+ **Setup binary cache**: [using Cachix](https://app.cachix.org/cache/hax), just `cachix use hax`\n\n### Docker\n\n1. Clone this repo: `git clone git@github.com:hacspec/hax.git && cd hax`\n2. Build the docker image: `docker build -f .docker/Dockerfile . -t hax`\n3. Get a shell: `docker run -it --rm -v /some/dir/with/a/crate:/work hax bash`\n4. You can now run `cargo-hax --help` (notice here we use `cargo-hax` instead of `cargo hax`)\n\nNote: Please make sure that `$HOME/.cargo/bin` is in your `$PATH`, as\nthat is where `setup.sh` will install hax.\n\n"
  },
  {
    "path": "docs/manual/lean/index.md",
    "content": "This section introduces the Lean backend of hax. It shows the basic setup to use hax and Lean on a Rust crate, and gives an introduction to the basic features that can be used to make Lean proofs about Rust code."
  },
  {
    "path": "docs/manual/lean/internals.md",
    "content": "---\nweight: 102\n---\n\n# Internals\n\nThe encoding of Rust in Lean has three main components:\n\n* the *syntax* (items, functions, `if`-`else`, `match`, etc), defined by the *backend*\n  ([`/rust-engine/src/backends/lean.rs`](https://github.com/cryspen/hax/blob/main/rust-engine/src/backends/lean.rs))\n* the *primitives/intrinsics* (`u32`, `isize`, slices, etc) defined by in the *Prelude*\n  ([`hax-lib/proof-libs/lean`](https://github.com/cryspen/hax/tree/main/hax-lib/proof-libs/lean))\n* the models of *core* and *std* libraries.\n\nWhile mostly separated, the Backend make some assumption on the Prelude,\ntypically when it inserts notations for some symbol (i.e. `+?` for addition).\n\n[!Disclaimer] : The lean backend is still experimental. See the list of [open\nissues](https://github.com/cryspen/hax/issues?q=is%3Aissue%20state%3Aopen%20label%3Alean)\nfor known problems and workaround. See also the [Hax\nZulip](https://hacspec.zulipchat.com/) for technical support.\n\n## Backend\n\n### Monadic encoding\n\nAll rust computations can panic or diverge, while Lean ones cannot (by\ndefault). To account for this, Rust types are wrapped inside a\n[monad](https://en.wikipedia.org/wiki/Monad_(functional_programming)) that\nrepresents the possible results:\n\n```lean\ninductive Error where\n   | assertionFailure: Error\n   | integerOverflow: Error\n   | divisionByZero: Error\n   | arrayOutOfBounds: Error\n   | maximumSizeExceeded: Error\n   | panic: Error\n   | undef: Error\n\ninductive RustM.{u} (α : Type u) where\n  | ok (v: α): RustM α\n  | fail (e: Error): RustM α\n  | div\n```\n\nThis monadic encoding shows for simple expressions: the result of the lean\nextracted function is not `u32` but `RustM u32`.\n\n/// html | div[style='float: left; width: 48%;']\n```rust\nfn f (x: u32) -> u32 {\n    x + 1\n}\n```\n///\n\n/// html | div[style='float: right;width: 48%;']\n```lean\ndef f (x : u32) : RustM u32\n  := do (← x +? 
(1 : u32))\n```\n///\n\n/// html | div[style='clear: both;']\n///\n\nThe backend relies on the\n[do-notation](https://lean-lang.org/doc/reference/latest//Functors___-Monads-and--do--Notation/Syntax/#do-notation):\nall functions start with the `do` keyword, indicating that the sequence of bindings should\nactually be understood as bindings in the monad, propagating potential\nerrors to the top.\n\nThe `do` keywords enables the lifting `←` and the `pure` operators. Intuitively,\nlifting turns a value of type `RustM T` into a value of type `T` by turning the\nrest of the program into a use of `bind`. Conversely, `pure` turns a value of\ntype `T` into a value of type `RustM T`. This shows also for let-bindings :\n\n\n/// html | div[style='float: left; width: 48%;']\n```rust\nfn f (x: u32) -> u32 {\n    let y = x + 1;\n    let z = y + 1;\n    y + z\n}\n```\n///\n\n/// html | div[style='float: right;width: 48%;']\n```lean\ndef f (x : u32) : RustM u32 := do\n  let y : u32 ← (pure\n    (← x +? (1 : u32)));\n  let z : u32 ← (pure\n    (← y +? (1 : u32)));\n  (← y +? z)\n```\n///\n\n/// html | div[style='clear: both;']\n///\n\n\nCurrently, the backend does not try to be parsimonious with the introduction of `pure` and `←`.\n\n### Structs\n\n#### Type definitions\n\nRust structs are encoded as [Lean\nstructures](https://lean-lang.org/doc/reference/latest//The-Type-System/Inductive-Types/#structures). The\nspecial case of [tuple\nstructs](https://doc.rust-lang.org/book/ch05-01-defining-structs.html#using-tuple-structs-without-named-fields-to-create-different-types)\nare also encoded as Lean structures, where the fields are numbered : `_0`, `_1`,\netc. 
See for instance :\n\n/// html | div[style='float: left; width: 48%;']\n\n```rust\nstruct S1 {\n    f1: usize,\n    f2: usize,\n}\n\nstruct S2 {\n    f1: S1,\n    f2: usize,\n}\n\n// Tuple structs\nstruct T0();\nstruct T1<A>(A);\nstruct T2<A, B>(A, B);\nstruct T3<A, B, C>(A, B, C);\nstruct T3p<A, B, C>(A, T2<B, C>);\n```\n///\n\n/// html | div[style='float: right;width: 48%;']\n```lean\nstructure S1 where\n  f1 : usize\n  f2 : usize\n\nstructure S2 where\n  f1 : S1\n  f2 : usize\n\nstructure T0 where\nstructure T1 A where\n  _0 : A\nstructure T2 A B where\n  _0 : A\n  _1 : B\nstructure T3 A B C where\n  _0 : A\n  _1 : B\n  _2 : C\nstructure T3p A B C where\n  _0 : A\n  _1 : (T2 B C)\n```\n///\n\n/// html | div[style='clear: both;']\n///\n\n#### Expressions, accessors and pattern-matching\n\nBuilding, accessing and destructing structs :\n\n/// html | div[style='float: left; width: 48%;']\n```rust\n// Building\nlet s1 = S1 { f1: 0, f2: 1 };\n\nlet t3 = T3(T0(), T1(1), T2(1, 2));\n\n// Matching\nlet S1 { f1, f2 } = s1;\n\nlet T3(T0(), T1(_), T2(_, _)) = t3;\n\n// Accessing\nlet _ = (s1.f1, s1.f2);\n\nlet _ = t3.0;\nlet _ = t3.1;\nlet _ = t3.2;\nlet _ = t3.2.1;\n\n```\n///\n\n/// html | div[style='float: right;width: 48%;']\n\n```lean\n-- Building\nlet s1 : S1 ← (pure\n  (S1.mk\n    (f1 := (0 : usize))\n    (f2 := (1 : usize))));\n\nlet t3 :\n  (T3 T0 (T1 i32) (T2 i32 i32))\n  ← (pure\n      (T3.mk\n        T0.mk\n        (T1.mk (1 : i32))\n        (T2.mk\n          (1 : i32)\n          (2 : i32))));\n\n-- Matching\nlet ({f1 := (f1 : usize),\n      f2 := (f2 : usize)} : S1) ←\n  (pure s1);\n\nlet (⟨(⟨⟩ : T0),\n      (⟨(_ : i32)⟩ : (T1 i32)),\n      (⟨(_ : i32), (_ : i32)⟩\n        : (T2 i32 i32))⟩ :\n    (T3 T0 (T1 i32) (T2 i32 i32))) ←\n  (pure t3);\n\n-- Accessing\nlet (_ : (Tuple2 usize usize)) ←\n  (pure\n    (Tuple2.mk\n      (S1.f1 s1)\n      (S1.f2 s1)));\n\nlet (_ : i32) ← (pure\n  (T2._1 t2));\nlet (_ : T0) ← (pure\n  (T3._0 t3));\nlet (_ : (T1 i32)) ← (pure\n 
 (T3._1 t3));\nlet (_ : (T2 i32 i32)) ← (pure\n  (T3._2 t3));\nlet (_ : i32) ← (pure\n  (T2._1 (T3._2 t3)));\n```\n///\n\n/// html | div[style='clear: both;']\n///\n\n### Enums\n\n#### Type definitions\n\nRust enums are encoded as [Lean inductive\ntypes](https://lean-lang.org/doc/reference/latest/The-Type-System/Inductive-Types/#inductive-types). Variants\nwith record fields use *named* arguments, whereas variants with tuple fields use\nnormal positional arguments.\n\n\n/// html | div[style='float: left; width: 48%;']\n```rust\n// 1. Type definition\nenum E {\n    // unit-like\n    V1,\n    V2,\n    // with positional arguments\n    V3(usize),\n    V4(usize, usize, usize),\n    // with named arguments\n    V5 { f1: usize, f2: usize },\n    V6 { f1: usize, f2: usize },\n}\n```\n///\n\n/// html | div[style='float: right;width: 48%;']\n\n```lean\ninductive E : Type\n| V1  : E\n| V2  : E\n| V3  : usize -> E\n| V4  : usize -> usize -> usize -> E\n| V5 (f1 : usize) (f2 : usize) : E\n| V6 (f1 : usize) (f2 : usize) : E\n```\n///\n\n/// html | div[style='clear: both;']\n///\n\n#### Expressions and pattern-matching\n\n/// html | div[style='float: left;width: 48%;']\n\n```rust\n// Building\nlet e_v1 = E::V1;\nlet e_v2 = E::V2;\nlet e_v3 = E::V3(23);\nlet e_v4 = E::V4(23, 12, 1);\nlet e_v5 = E::V5 { f1: 23, f2: 43 };\nlet e_v6 = E::V6 { f1: 12, f2: 13 };\n\n// Matching\nmatch e_v1 {\n    E::V1 => (),\n    E::V2 => (),\n    E::V3(_) => (),\n    E::V4(x1, x2, x3) => {\n        let y1 = x1 + x2;\n        let y2 = y1 - x2;\n        let y3 = y2 + x3;\n        ()\n    }\n    E::V5 { f1, f2 } => (),\n    E::V6 {\n        f1,\n        f2: other_name_for_f2,\n    } => (),\n}\n```\n///\n\n/// html | div[style='float: right;width: 48%;']\n```lean\ndef enums (_ : Tuple0)\n  : RustM Tuple0\n  := do\n  let e_v1 : E ← (pure E.V1);\n  let e_v2 : E ← (pure E.V2);\n  let e_v3 : E ← (pure\n    (E.V3 (23 : usize)));\n  let e_v4 : E ← (pure\n    (E.V4\n      (23 : usize)\n      (12 : usize)\n      
(1 : usize)));\n  let e_v5 : E ← (pure\n    (E.V5\n      (f1 := (23 : usize))\n      (f2 := (43 : usize))));\n  let e_v6 : E ← (pure\n    (E.V6\n      (f1 := (12 : usize))\n      (f2 := (13 : usize))));\n  (match e_v1 with\n    | (E.V1 ) => do Tuple0.mk\n    | (E.V2 ) => do Tuple0.mk\n    | (E.V3 (_ : usize))\n      => do Tuple0.mk\n    | (E.V4\n        (x1 : usize)\n        (x2 : usize)\n        (x3 : usize))\n      => do\n        let y1 : usize ← (pure\n          (← x1 +? x2));\n        let y2 : usize ← (pure\n          (← y1 -? x2));\n        let y3 : usize ← (pure\n          (← y2 +? x3));\n        Tuple0.mk\n    | (E.V5\n        (f1 := (f1 : usize))\n        (f2 := (f2 : usize)))\n      => do Tuple0.mk\n    | (E.V6\n        (f1 := (f1 : usize))\n        (f2 :=\n          (other_name_for_f2 : usize)))\n      => do Tuple0.mk)\n```\n///\n\n/// html | div[style='clear: both;']\n///\n### Traits\n\nRust traits are represented as Lean classes, while Rust impl are Lean\ninstances. The Lean code relies on the typeclass inference of Lean. Hax exposes\nidentifiers for rust impls (that are otherwise implicit), like\n`8040238289193487104`. Lean uses them for naming fields or parameters.\n\n\n/// html | div[style='float: left; width: 48%;']\n\n```rust\ntrait T1 {\n    fn f1(&self) -> usize;\n    fn f2(&self, y: &Self) -> usize;\n}\n\nstruct S;\n\nimpl T1 for S {\n    fn f1(&self) -> usize {\n        42\n    }\n fn f2(&self, y: &Self) -> usize {\n        43\n    }\n}\n\nfn f<T: T1>(x: T) -> usize {\n    x.f1() + x.f2(&x)\n}\n```\n///\n\n/// html | div[style='float: right;width: 48%;']\n```lean\nclass T1 (Self : Type) where\n  f1 : Self -> RustM usize\n  f2 : Self -> Self -> RustM usize\n\nstructure S where\n\ninstance Impl : T1 S where\n  f1 (self : S)\n    := do (42 : usize)\n  f2 (self : S) (y : S)\n    := do (43 : usize)\n\ndef f (T : Type) [(T1 T)] (x : T)\n  : RustM usize\n  := do\n  (← (← T1.f1 x) +? 
(← T1.f2 x x))\n```\n///\n\n/// html | div[style='clear: both;']\n///\n\n\n#### Supertraits\n\nSuper trait bounds are represented as extra fields\n\n\n/// html | div[style='float: left; width: 48%;']\n```rust\ntrait Test<T: T1>: T2 {\n   fn f_test(&self, x: &T) -> usize;\n}\n```\n///\n\n/// html | div[style='float: right;width: 48%;']\n```lean\nclass Test\n  (Self : Type)\n  (T : Type)\n  where\n  [_constr_8040238289193487104 :\n    (T2 Self)]\n  [_constr_7570495343596639253 :\n    (T1 T)]\n  f_test :\n    Self -> T -> RustM usize\n```\n///\n\n/// html | div[style='clear: both;']\n///\n\n#### Associated types\n\nThe support for associated types is currently restricted to types defined within\nthe current trait\n\n\n/// html | div[style='float: left; width: 48%;']\n```rust\nmod associated_types {\n    trait T1 {\n        type T;\n        fn f(&self, x: Self::T) -> Self::T;\n    }\n\n    trait T2 {\n        type T: T1;\n        fn f(&self, x: Self::T) -> usize;\n    }\n\n    trait Foo<T> {}\n    trait Bar {}\n\n    trait T3 {\n        type T: Foo<()>;\n        type Tp<T: Bar>: Foo<T>;\n        fn f<A: Bar>(&self, x: Self::T, y: Self::Tp<A>) -> usize;\n    }\n}\n```\n///\n\n/// html | div[style='float: right;width: 48%;']\n```lean\nclass Foo (Self : Type) (T : Type) where\n\n\nclass Bar (Self : Type) where\n\n\nclass T1 (Self : Type) where\n  T : Type\n  f : Self -> T -> RustM T\n\nclass T3 (Self : Type) where\n  T : Type\n  [_constr_13086648656846024831 :\n    (Foo T Tuple0)]\n  Tp : Type\n  [_constr_15450263461214744089 : (Foo Tp T)]\n  f (A : Type) [(Bar A)] :\n    Self -> T -> Tp -> RustM usize\n\nclass T2 (Self : Type) where\n  T : Type\n  [_constr_18277713886489441014 : (T1 T)]\n  f : Self -> T -> RustM usize\n\n```\n///\n\n/// html | div[style='clear: both;']\n///\n\n\n## Prelude\n\nSee the [Hax Lean library](https://github.com/cryspen/hax/tree/main/hax-lib/proof-libs/lean)\n"
  },
  {
    "path": "docs/manual/lean/quick_start.md",
    "content": "---\nweight: 100\n---\n\n# Quick start\n\n## Setup the tools\n\n - <input type=\"checkbox\" class=\"user-checkable\"/> [Install the hax toolchain](https://github.com/hacspec/hax?tab=readme-ov-file#installation).  \n   <span style=\"margin-right:30px;\"></span>🪄 Running `cargo hax --version` should print some version info.\n - <input type=\"checkbox\" class=\"user-checkable\"/> [Install Lean](https://lean-lang.org/install/)\n  - <input type=\"checkbox\" class=\"user-checkable\"/> Add `hax-lib` as a dependency to your crate, enabled only when using hax.  \n   <span style=\"margin-right:30px;\"></span>🪄 `cargo add --target 'cfg(hax)' --git https://github.com/hacspec/hax hax-lib`  \n   <span style=\"margin-right:30px;\"></span><span style=\"opacity: 0;\">🪄</span> *(`hax-lib` is not mandatory, but this guide assumes it is present)*\n\n## Setup the crate you want to verify\n\n*Note: the instructions below assume you are in the folder of the specific crate (**not workspace!**) you want to extract.*\n\n\n - <input type=\"checkbox\" class=\"user-checkable\"/> Create the folder `proofs/lean/extraction`folder, right next to the `Cargo.toml` of the crate you want to verify.  
\n   <span style=\"margin-right:30px;\"></span>🪄 `mkdir -p proofs/lean/extraction`\n - <input type=\"checkbox\" class=\"user-checkable\"/> Create `proofs/lean/extraction/lakefile.toml`, and add the following content:  \n```toml\nname = \"your_crate_name\"\nversion = \"0.1.0\"\ndefaultTargets = [\"your_crate_name\"]\n\n[[lean_lib]]\nname = \"your_crate_name\"\n\n[[require]]\nname = \"Hax\"\ngit.url = \"https://github.com/cryspen/hax\"\ngit.subDir = \"hax-lib/proof-libs/lean\"\nrev = \"main\"\n``` \n - <input type=\"checkbox\" class=\"user-checkable\"/> Create `proofs/lean/extraction/lean-toolchain`,\n with the following content:\n```\nleanprover/lean4:v4.29.0-rc1 \n```\nThis version should be the same version as in the file `hax-lib/proof-libs/lean/lean-toolchain` of\nyour hax installation.\n\n## Partial extraction\n\n*Note: the instructions below assume you are in the folder of the\nspecific crate you want to extract.*\n\nRun the command `cargo hax into lean` to extract every item of your\ncrate as F\\* modules in the subfolder `proofs/lean/extraction`.\n\n**What is critical? What is worth verifying?**  \nProbably, your Rust crate contains mixed kinds of code: some parts are\ncritical (e.g. the library functions at the core of your crate) while\nsome others are not (e.g. the binary driver that wraps the\nlibrary). In this case, you likely want to extract only partially your\ncrate, so that you can focus on the important part.\n\n**Using the `-i` flag.**  \nIf you want to extract a function\n`your_crate::some_module::my_function`, you need to tell `hax` to\nextract nothing but `my_function`:\n\n```bash\ncargo hax into -i '-** +your_crate::some_module::my_function' lean\n```\n\nThis command will remove all items from extraction (`-**`) and add back `my_function`, along with all its dependencies (other functions, type definitions, etc.) from your crate. If you don't want the dependencies, you can use `+!` instead of `+`. 
See [the FAQ](../faq/include-flags.md) or `cargo hax into --help` for more options for partial extraction.\n\n**Unsupported Rust code.**  \nhax [doesn't support every Rust\nconstruct](https://github.com/hacspec/hax?tab=readme-ov-file#supported-subset-of-the-rust-language),\n`unsafe` code, or complicated mutation schemes. That is another reason\nfor extracting only a part of your crate. When running hax, if an item\nof your crate, say a function `my_crate::f`, is not handled by hax,\nyou can remove it from the extraction target by adding `-my_crate::f` as an option to the `-i` flag. \n\n## Start Lean verification\nAfter extracting your Rust code to Lean, the result is in the `proofs/lean/extraction` folder. The\n`lakefile.toml` allows you to run Lean on this folder by running `lake build` (or directly in the IDE \nusing the LSP). Contrary to F\\*, successfully building the code doesn't prove panic freedom; this\nhappens only if the specification states that the code is panic-free. \n\n### Current limitations\nThe Lean backend of Hax is under active development, and extraction can *fail* even on supported Rust. This can come from a missing Rust feature (i.e. supported by the Hax engine but not yet by the Lean backend). Testing the same extraction target on the *F\\** backend can be an easy way to check. If all the Rust features are supported, then the extracted code can fail to build if it uses definitions from Rust `core` and `std` libraries that are missing in our Lean model (in `hax-lib/proof-libs/lean`). We're actively extending it to support idiomatic code, but feel free to report it on [zulip](https://hacspec.zulipchat.com/) or [github](https://github.com/cryspen/hax/issues).\n"
  },
  {
    "path": "docs/manual/lean/tutorial/index.md",
    "content": "---\nweight: 101\n---\n\n# Tutorial\n\nThis tutorial focuses on proving properties with the hax toolchain and its\n[Lean](https://lean-lang.org/) backend.\n"
  },
  {
    "path": "docs/manual/lean/tutorial/panic-freedom.md",
    "content": "---\nweight: 0\n---\n\n# Panic freedom\n\nLet's start with a simple example: a function that squares a `u8`\ninteger. To extract this function to Lean using hax, we simply need to\nrun the command `cargo hax into lean` in the directory of the crate\nin which the function `square` is defined.\n\n*Note: throughout this tutorial, you can edit the snippets of code and\nextract to Lean by clicking the play button (:material-play:), or even typecheck it with the button (:material-check:).*\n\n```{.rust .playable .lean-backend}\nfn square(x: u8) -> u8 {\n    x * x\n}\n```\n\nIf we run `lake build` on the result (or type-check using the playground), we get a success. If you followed the F\\* tutorial, this might be a surprise because the function is not \npanic-free. Indeed, our encoding of Rust code in Lean wraps everything in a result monad. And \nfunctions that panic return an error in this monad. To try to prove panic-freedom, we have to \nspecify that the result of `square` is expected not to be an error in this result type. A way\nto do that is the following:\n```{.rust .playable .lean-backend .expect-failure}\n#[hax_lib::requires(true)]\n#[hax_lib::ensures(|res| true)]\nfn square(x: u8) -> u8 {\n    x * x\n}\n```\nAdding a `hax_lib::requires` and a `hax_lib::ensures` annotation will make Hax generate a specification of the function, asserting panic freedom as well as the postcondition. Here, we used the trivial postcondition `true`, so we only assert panic freedom.\n\nIf we try running `lake build`\nafter extracting this code, we get an error: \n`The prover found a counterexample, consider the following assignment: value = 255`. 
Indeed `square(255)` \npanics because the multiplication overflows.\n\n## Rust and panicking code\nQuoting the chapter [To `panic!` or Not to\n`panic!`](https://doc.rust-lang.org/book/ch09-03-to-panic-or-not-to-panic.html)\nfrom the Rust book:\n\n> The `panic!` macro signals that your program is in a state it can't\n> handle and lets you tell the process to stop instead of trying to\n> proceed with invalid or incorrect values.\n\nA Rust program should panic only in a situation where an assumption\nor an invariant is broken: a panic models an *invalid* state. Formal\nverification is about proving such invalid states cannot occur, at all.\n\nFrom this observation emerges the urge of proving Rust programs to be\npanic-free!\n\n## Fixing our squaring function\nLet's come back to our example. There is an informal assumption about the\nmultiplication operator in Rust: the inputs should be small enough so\nthat the multiplication doesn't overflow.\n\nNote that Rust also provides `wrapping_mul`, a non-panicking variant\nof the multiplication on `u8` that wraps when the result is bigger\nthan `255`. Replacing the common multiplication with `wrapping_mul` in\n`square` would fix the panic, but then, `square(16)` returns zero.\nSemantically, this is not what one would expect from `square`.\n\nOur problem is that our function `square` is well-defined only when\nits input is between `0` and `15`.\n\n### Solution: add a precondition\n\nWe already added a pre-condition to specify panic-freedom but we can turn it into a more interesting pre-condition to restrict the inputs and stay in the domain where the multiplication fits in a `u8`. We only need to modify the Rust condition that is passed to the `hax_lib::requires` macro: \n\n```{.rust .playable .lean-backend}\n#[hax_lib::requires(x < 16)]\n#[hax_lib::ensures(|res| true)]\nfn square(x: u8) -> u8 {\n    x * x\n}\n```\n\nWith this precondition, Lean is able to prove panic freedom. 
From now\non, it is the responsibility of the clients of `square` to respect the\ncontract.\n\n## Common panicking situations\nMultiplication is not the only panicking operation provided by the Rust\nlibrary: most of the other integer arithmetic operations have such\ninformal assumptions.\n\nAnother source of panics is indexing. Indexing in an array, a slice or\na vector is a partial operation: the index might be out of range.\n\nIn the example folder of hax, you can find the [`chacha20`\nexample](https://github.com/cryspen/hax/blob/main/examples/chacha20/src/lib.rs)\nthat makes use of pre-conditions to prove panic freedom.\n"
  },
  {
    "path": "docs/manual/lean/tutorial/properties.md",
    "content": "---\nweight: 1\n---\n\n# Proving properties\n\nIn the previous chapter, we proved one property of the `square` function:\npanic freedom.\n\nThis contract stipulates that, given a small input, the function will\n_return a value_: it will not panic or diverge. We could enrich the\ncontract of `square` with a post-condition about the fact it is an\nincreasing function:\n```{.rust .playable .lean-backend}\n#[hax_lib::requires(x < 16)]\n#[hax_lib::ensures(|res| res >= x)]\nfn square(x: u8) -> u8 {\n    x * x\n}\n```\nThis works as well.\n\nThe property that we prove above demonstrates a very simple case of a proof using hax and Lean. For a more complex example, a version of the Barrett example is available in the \n[`examples`](https://github.com/cryspen/hax/tree/main/examples/lean_barrett) \nsection of hax. \n\n\n"
  },
  {
    "path": "docs/overrides/main.html",
    "content": "{% extends \"base.html\" %}\n\n{% block site_meta %}\n<script type=\"module\" src=\"https://esm.sh/run\"></script>\n<link rel=\"stylesheet\" href=\"https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.7.2/css/all.min.css\"\n    integrity=\"sha512-Evv84Mr4kqVGRNSgIGL/F/aIDqQb7xQ2vcrdIwxfjThSH8CSR7PBEakCr51Ck+w+/U6swU2Im1vVX0SVk9ABhg==\"\n    crossorigin=\"anonymous\" referrerpolicy=\"no-referrer\" />\n<link rel=\"stylesheet\" href=\"https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/default.min.css\">\n<script src=\"https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js\"></script>\n<script type=\"module\">\n    import * as codemirror from \"https://esm.sh/codemirror\";\n    import { rust } from \"https://esm.sh/@codemirror/lang-rust\";\n    globalThis.codemirror = { rust, ...codemirror }\n</script>\n{{ super() }}\n{% endblock %}\n"
  },
  {
    "path": "docs/publications.md",
    "content": "---\nweight: 5\n---\n\nTo cite hax, please use\n**[hax: Verifying Security-Critical Rust Software using Multiple Provers](https://eprint.iacr.org/2025/142)**.\n\n# Publications\n\n* [Formal Security and Functional Verification of Cryptographic Protocol Implementations in Rust](https://eprint.iacr.org/2025/980)\n* [hax: Verifying Security-Critical Rust Software using Multiple Provers](https://eprint.iacr.org/2025/142)\n* [hacspec Tech report](https://hal.inria.fr/hal-03176482)\n* [HACSpec: A gateway to high-assurance cryptography](https://github.com/hacspec/hacspec/blob/master/rwc2023-abstract.pdf)\n* [Original hacspec paper](https://www.franziskuskiefer.de/publications/hacspec-ssr18-paper.pdf)\n\n### Secondary literature, using hacspec & hax:\n* [Last yard](https://eprint.iacr.org/2023/185)\n* [A Verified Pipeline from a Specification Language to Optimized, Safe Rust](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl22-final61.pdf) at [CoqPL'22](https://popl22.sigplan.org/details/CoqPL-2022-papers/5/A-Verified-Pipeline-from-a-Specification-Language-to-Optimized-Safe-Rust)\n* [Hax - Enabling High Assurance Cryptographic Software](https://github.com/hacspec/hacspec.github.io/blob/master/RustVerify24.pdf) at [RustVerify24](https://sites.google.com/view/rustverify2024)\n* [A formal security analysis of Blockchain voting](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl24-paper8-2.pdf) at [CoqPL'24](https://popl24.sigplan.org/details/CoqPL-2024-papers/8/A-formal-security-analysis-of-Blockchain-voting)\n* [Specifying Smart Contract with Hax and ConCert](https://github.com/hacspec/hacspec.github.io/blob/master/coqpl24-paper9-13.pdf) at [CoqPL'24](https://popl24.sigplan.org/details/CoqPL-2024-papers/9/Specifying-Smart-Contract-with-Hax-and-ConCert)\n"
  },
  {
    "path": "docs/stylesheets/hax_playground.css",
    "content": ".md-hax-playground::after {\n    display: none;\n}\n\ntextarea.code.inline+div.CodeMirror div.CodeMirror-lines {\n    padding: 0px !important;\n    /* added !important as padding is an inline stlye */\n}\n\n.cm-editor {\n    outline: none !important;\n}\n\n.md-hax-playground~.md-code__content {\n    padding: 14px 7px 14px 7px !important;\n}\n\npre.md-hax-playground-pre {\n    margin: 0 !important;\n}\n\n.center {\n    display: block;\n    margin: 0 auto;\n}\n\n/* Center Markdown Tables (requires md_in_html extension) */\n.center-table {\n    text-align: center;\n}\n\n.md-typeset .center-table :is(td, th):not([align]) {\n    /* Reset alignment for table cells */\n    text-align: initial;\n}\n\n.mermaid {\n    text-align: center;\n}\n"
  },
  {
    "path": "docs/stylesheets/logo.css",
    "content": ".md-header__button[data-md-component=\"logo\"] {\n    display: block !important;\n}\n\nlabel.md-header__button.md-icon[for=\"__drawer\"] {\n    order: -1;\n}\n\nlabel.md-nav__title[for=\"__drawer\"] img {\n    margin: auto;\n}\n\nlabel.md-nav__title[for=\"__drawer\"] {\n    font-size: 0.0001px !important;\n    color: red !important;\n    text-align: center;\n    height: 4rem;\n    overflow: none;\n    /* color: transparent !important; */\n}\n\n.md-header__topic {\n    display: none;\n}"
  },
  {
    "path": "docs/stylesheets/tags-colors.css",
    "content": "/* This sets the color of the ADR status tags see https://github.com/squidfunk/mkdocs-material/discussions/5101 */\n\n.md-typeset .md-tag--draft,\n.md-typeset .md-tag--draft[href] {\n    background-color: #5694ca;\n    color: white;\n}\n\n.md-typeset .md-tag--accepted,\n.md-typeset .md-tag--accepted[href] {\n    background-color: #00703c;\n    color: white;\n}\n\n.md-typeset .md-tag--deprecated,\n.md-typeset .md-tag--deprecated[href] {\n    background-color: #b1b4b6;\n    color: white;\n}\n\n.md-typeset .md-tag--proposed,\n.md-typeset .md-tag--proposed[href] {\n    background-color: #003078;\n    color: white;\n}\n\n.md-typeset .md-tag--rejected,\n.md-typeset .md-tag--rejected[href] {\n    background-color: #f47738;\n    color: white;\n}\n\n.md-typeset .md-tag--superseded,\n.md-typeset .md-tag--superseded[href] {\n    background-color: #505a5f;\n    color: white;\n}\n"
  },
  {
    "path": "engine/.ocamlformat",
    "content": "profile = default\nversion = 0.27.0\n"
  },
  {
    "path": "engine/DEV.md",
    "content": "# Notes about developping on the engine\n\nNote that Hax uses [JSON schemas](https://json-schema.org/): an OCaml\nmodule `types.ml` definining the types we declared on the Rust side\nwith JSON parser and serializer is automatically generated. Thus, when\nwe refer here to a Rust type in the context of the engine, we mean its\nOCaml automatically derived counterpart in `Types.ml`.\n\nThe engine is designed to behave like a \"pure\" function.\n 1. It receives a JSON string in its stdin.\n 2. Parses the JSON as a\n [`EngineOptions`](../cli/options/engine/src/lib.rs): note this\n structure has everything the engine needs to know. The behavior of\n the engine should be completely deterministic given this structure.\n 3. From the engine options we received on stdin, we extract the\n    `input` field, that contains all the items of the Rust crate we\n    want to translate.\n 4. Those items are translated in [our internal AST](lib/ast.ml) by\n    the module [`Import_thir`](lib/import_thir.ml).\n 5. According to the engine options we just got as JSON, we choose a backend.\n 6. We run the `translate` function of that backend, that applies a\n    certain number of rewrite phases, transporting the items in a\n    type-safe manner from an AST to another.\n 7. The backend produces a list of\n    [`File`](../cli/options/engine/src/lib.rs)s. Each phase might also\n    (as a side effect) have produced diagnostics messages. Those are\n    collected in [`Diagnostics.Core.state`](lib/diagnostics.ml).\n 8. Gathering files and diagnostics, we make a\n    [`Output`](../cli/options/engine/src/lib.rs) value, serialize it\n    to JSON, and output that on stdout.  
\n    *Note that the engine doesn't write or read anything on the\n    hard-drive; it is supposed to be entirely side-effect free (when\n    not in debug mode): files are created by the [Rust\n    driver](../cli/driver/src/exporter.rs)*\n\n## Miscellaneous\n### How to show types of `Types.ml`?\n`dune build` produces that file using `utils/ocaml_of_json_schema`,\nand stores it in `build/default/lib/types.ml`.\n\nTo show the file nicely formatted, use: `dune describe pp lib/types.ml` (or `dune describe pp lib/types.ml | bat -l ml`, if you have [`bat`](https://github.com/sharkdp/bat))\n\nYou can also use `dune utop` and then `#show_type Hax_engine.Types.SOME_TYPE` and `#show_constructor Hax_engine.Types.SOME_CONSTRUCTOR`.\n\n### Visitors\nThe module `Ast_visitors` provides a `Make` functor that takes a feature set and outputs a module that defines the `map`, `mapreduce` and `reduce` classes.\n\nThose visitors are generated by `./utils/generate_visitors`. \nTo see the implementation of the `Ast_visitors` module, run `dune describe pp lib/ast_visitors.ml`.\n\n### Core_models extraction\n\nCore_models extraction requires some special treatment, in particular changing the imports in the generated code. The information that the crate being extracted is core_models can be passed to the engine by setting HAX_CORE_MODELS_EXTRACTION_MODE to 'on'.\n\n## Debugging the phases\nYou can enable a debug mode that prints a Rustish AST at each phase,\nthat you can browse interactively along with the actual AST.\n\nJust add the flag `--debug-engine i` (or `-d i`, `i` stands for\n**i**nteractive) to the `into` subcommand.  At the end of the\ntranslation, `cargo hax` will spawn a webserver that lets you browse\nthe debug information. Note, you can change the port by setting the\nenvironment variable `HAX_DEBUGGER_PORT`.\n"
  },
  {
    "path": "engine/backends/coq/coq/coq_backend.ml",
    "content": "open Hax_engine\nopen Utils\nopen Base\nopen Coq_ast\n\ninclude\n  Backend.Make\n    (struct\n      open Features\n      include Off\n      include On.Slice\n      include On.Monadic_binding\n      include On.Macro\n      include On.Construct_base\n    end)\n    (struct\n      let backend = Diagnostics.Backend.Coq\n    end)\n\nmodule SubtypeToInputLanguage\n    (FA :\n      Features.T\n        with type mutable_reference = Features.Off.mutable_reference\n         and type continue = Features.Off.continue\n         and type break = Features.Off.break\n         and type mutable_reference = Features.Off.mutable_reference\n         and type mutable_pointer = Features.Off.mutable_pointer\n         and type mutable_variable = Features.Off.mutable_variable\n         and type reference = Features.Off.reference\n         and type raw_pointer = Features.Off.raw_pointer\n         and type early_exit = Features.Off.early_exit\n         and type question_mark = Features.Off.question_mark\n         and type as_pattern = Features.Off.as_pattern\n         and type lifetime = Features.Off.lifetime\n         and type monadic_action = Features.Off.monadic_action\n         and type arbitrary_lhs = Features.Off.arbitrary_lhs\n         and type nontrivial_lhs = Features.Off.nontrivial_lhs\n         and type block = Features.Off.block\n         and type quote = Features.Off.quote\n         and type dyn = Features.Off.dyn\n         and type match_guard = Features.Off.match_guard\n         and type trait_item_default = Features.Off.trait_item_default\n         and type unsafe = Features.Off.unsafe\n         and type loop = Features.Off.loop\n         and type for_loop = Features.Off.for_loop\n         and type while_loop = Features.Off.while_loop\n         and type for_index_loop = Features.Off.for_index_loop\n         and type state_passing_loop = Features.Off.state_passing_loop\n         and type fold_like_loop = Features.Off.fold_like_loop) =\nstruct\n  module FB = 
InputLanguage\n\n  include\n    Subtype.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Features.SUBTYPE.Id\n        include Features.SUBTYPE.On.Monadic_binding\n        include Features.SUBTYPE.On.Construct_base\n        include Features.SUBTYPE.On.Slice\n        include Features.SUBTYPE.On.Macro\n      end)\n\n  let metadata = Phase_utils.Metadata.make (Reject (NotInBackendLang backend))\nend\n\nmodule CoqNamePolicy = struct\n  include Concrete_ident.DefaultNamePolicy\n\n  (** List of all words that have a special meaning in the target language, and\n      that should thus be escaped. *)\n  let reserved_words : string Hash_set.t =\n    Hash_set.of_list\n      (module String)\n      [\n        \"Definition\";\n        \"Inductive\";\n        \"match\";\n        \"if\";\n        \"then\";\n        \"else\";\n        \"as\";\n        \"into\";\n        \"end\";\n        \"Record\";\n        \"Arguments\";\n        \"Type\";\n      ]\n  (* TODO: Make complete *)\n\n  (** Transformation applied to indexes fields name (i.e. 
[x.1]) *)\n  let anonymous_field_transform x = x\n\n  let named_field_prefix = Some `TypeName\n  let struct_constructor_prefix = Some \"Build_t_\"\n  let enum_constructor_prefix = Some \"AABBCC\"\n  let union_constructor_prefix = Some \"DDEEFF\"\n  let prefix__constructors_with_type = true\n  let prefix_struct_constructors_with_type = true\n  let prefix_enum_constructors_with_type = true\n  let prefix_union_constructors_with_type = true\n  let prefix_associated_item_with_trait_name = true\nend\n\nmodule AST = Ast.Make (InputLanguage)\nmodule BackendOptions = Backend.UnitBackendOptions\nopen Ast\nmodule U = Ast_utils.Make (InputLanguage)\nmodule RenderId = Concrete_ident.MakeRenderAPI (CoqNamePolicy)\nopen AST\n\nlet hardcoded_coq_headers =\n  \"(* File automatically generated by Hacspec *)\\n\\\n   From Coq Require Import ZArith.\\n\\\n   Require Import List.\\n\\\n   Import List.ListNotations.\\n\\\n   Open Scope Z_scope.\\n\\\n   Open Scope bool_scope.\\n\\\n   Require Import Ascii.\\n\\\n   Require Import String.\\n\\\n   Require Import Coq.Floats.Floats.\\n\\\n   From RecordUpdate Require Import RecordSet.\\n\\\n   Import RecordSetNotations.\\n\\\n   From Core Require Import Core.\\n\\n\"\n\nmodule BasePrinter = Generic_printer.Make (InputLanguage)\n\nmodule Make\n    (Default : sig\n      val default : string -> string\n    end)\n    (Attrs : Attrs.WITH_ITEMS) =\nstruct\n  open PPrint\n\n  let default_string_for s = \"TODO: please implement the method `\" ^ s ^ \"`\"\n  let default_document_for = default_string_for >> string\n\n  let concat_with ?(pre = empty) ?(post = empty) l =\n    concat_map (fun x -> pre ^^ x ^^ post) l\n\n  let concat_map_with ?(pre = empty) ?(post = empty) f l =\n    concat_map (fun x -> pre ^^ f x ^^ post) l\n\n  let concat_spaced_doc l = concat_map_with ~pre:space (fun x -> x#p) l\n\n  module CoqNotation = struct\n    let definition_struct keyword n name generics params typ body =\n      keyword ^^ space ^^ name ^^ generics\n      ^^ 
concat_with ~pre:space params\n      ^^ space ^^ colon ^^ space ^^ typ ^^ space ^^ string \":=\"\n      ^^ nest n (break 1 ^^ body)\n      ^^ dot\n\n    let proof_struct keyword name generics params statement =\n      keyword ^^ space ^^ name ^^ generics\n      ^^ concat_with ~pre:space params\n      ^^ space ^^ colon\n      ^^ nest 2 (break 1 ^^ statement ^^ dot)\n      ^^ break 1 ^^ string \"Proof\" ^^ dot ^^ space ^^ string \"Admitted\" ^^ dot\n\n    let definition = definition_struct (string \"Definition\") 2\n    let fixpoint = definition_struct (string \"Fixpoint\") 2\n    let inductive = definition_struct (string \"Inductive\") 0\n    let record = definition_struct (string \"Record\") 2\n    let instance = definition_struct (string \"Instance\") 2\n    let class_ = definition_struct (string \"Class\") 2\n    let lemma = proof_struct (string \"Lemma\")\n    let comment v = !^\"(*\" ^^ space ^^ v ^^ space ^^ !^\"*)\"\n\n    let arguments name (explicivity : bool list) =\n      if List.is_empty explicivity then empty\n      else\n        !^\"Arguments\" ^^ space ^^ name\n        ^^ concat_map_with ~pre:space\n             (function true -> string \"(_)\" | false -> string \"{_}\")\n             explicivity\n        ^^ dot\n\n    let notation pattern value =\n      !^\"Notation\" ^^ space ^^ string \"\\\"\" ^^ pattern ^^ string \"\\\"\" ^^ space\n      ^^ !^\":=\" ^^ space ^^ value ^^ dot\n\n    let notation_name name value =\n      notation (string \"'\" ^^ name ^^ string \"'\") value\n  end\n\n  type ('get_span_data, 'a) object_type =\n    ('get_span_data, 'a) BasePrinter.Gen.object_type\n\n  class printer =\n    object (self)\n      inherit BasePrinter.base\n\n      method private primitive_to_string (id : primitive_ident) : document =\n        match id with\n        | Deref -> default_document_for \"(TODO: Deref)\"\n        | Cast -> string \"cast\"\n        | LogicalOp op -> (\n            match op with And -> string \"andb\" | Or -> string \"orb\")\n\n     
 method arm ~arm ~span:_ = arm#p\n\n      method arm' ~super:_ ~arm_pat ~body ~guard:_ =\n        arm_pat#p ^^ space ^^ string \"=>\" ^^ nest 2 (break 1 ^^ body#p)\n\n      method attrs x1 = default_document_for \"attrs\"\n\n      method binding_mode_ByRef _x1 _x2 =\n        default_document_for \"binding_mode_ByRef\"\n\n      method binding_mode_ByValue = default_document_for \"binding_mode_ByValue\"\n      method borrow_kind_Mut _x1 = default_document_for \"borrow_kind_Mut\"\n      method borrow_kind_Shared = default_document_for \"borrow_kind_Shared\"\n      method borrow_kind_Unique = default_document_for \"borrow_kind_Unique\"\n      method common_array x1 = brackets (separate (semi ^^ space) x1)\n\n      method dyn_trait_goal ~trait:_ ~non_self_args:_ =\n        default_document_for \"dyn_trait_goal\"\n\n      method error_expr x1 = parens (string x1 ^^ string \"(* ERROR_EXPR *)\")\n      method error_item x1 = parens (string x1 ^^ string \"(* ERROR_ITEM *)\")\n      method error_pat x1 = parens (string x1 ^^ string \"(* ERROR_PAT *)\")\n      method expr ~e ~span:_ ~typ = e#p\n\n      method expr'_AddressOf ~super:_ ~mut:_ ~e:_ ~witness =\n        match witness with _ -> .\n\n      method expr'_App_application ~super:_ ~f ~args ~generics:_ =\n        f#p ^^ concat_map_with ~pre:space (fun x -> parens x#p) args\n\n      method expr'_App_constant ~super:_ ~constant ~generics:_ = constant#p\n\n      method expr'_App_field_projection ~super:_ ~field ~e =\n        field#p ^^ space ^^ e#p\n\n      method expr'_App_tuple_projection ~super:_ ~size ~nth ~e =\n        (* !^\"constructor_App_tuple_projection_\" *)\n        (* ^^ *)\n        (* (match e#v.e with *)\n        (*  | Construct { constructor; is_record; is_struct; fields; base } -> *)\n        (*    (match constructor with *)\n        (*    | `Concrete cid -> !^((RenderId.render cid).name) *)\n        (*    | _ -> !^\"TODO\") *)\n        (*  | _ -> empty) ^^ *)\n        let size =\n          match e#v.e 
with\n          | Construct { constructor; is_record; is_struct; fields; base } ->\n              List.length fields\n          | _ -> size (* TODO: Size argument incorrect? *)\n        in\n        List.fold_right ~init:e#p\n          ~f:(fun x y -> parens (x ^^ y))\n          ((if Stdlib.(nth != 0) then [ string \"snd\" ] else [])\n          @\n          if size - 1 - nth > 0 then\n            List.init (size - 1 - nth) ~f:(fun _ -> string \"fst\")\n          else [])\n\n      method expr'_Ascription ~super:_ ~e ~typ =\n        e#p ^^ space ^^ colon ^^ space ^^ typ#p\n\n      method expr'_Assign ~super:_ ~lhs:_ ~e:_ ~witness =\n        match witness with _ -> .\n\n      method expr'_Block ~super:_ ~e:_ ~safety_mode:_ ~witness =\n        match witness with _ -> .\n\n      method expr'_Borrow ~super:_ ~kind:_ ~e:_ ~witness =\n        match witness with _ -> .\n\n      method expr'_Break ~super:_ ~e:_ ~acc:_ ~label:_ ~witness =\n        match witness with _ -> .\n\n      method expr'_Closure ~super:_ ~params ~body ~captures:_ =\n        !^\"fun\" ^^ concat_spaced_doc params ^^ space ^^ !^\"=>\" ^^ space\n        ^^ nest 2 (break 1 ^^ body#p)\n\n      method expr'_Construct_inductive ~super:_ ~constructor ~is_record\n          ~is_struct ~fields ~base =\n        match (is_record, is_struct, base, fields) with\n        | true, true, Some x, _ ->\n            x#p\n            ^^ concat_map_with ~pre:space\n                 (fun x ->\n                   string \"<|\" ^^ constructor#p ^^ (fst x)#p ^^ space ^^ !^\":=\"\n                   ^^ space ^^ (snd x)#p ^^ space ^^ string \"|>\")\n                 fields\n        | true, true, None, [] | false, true, _, [] | false, false, _, [] ->\n            constructor#p\n        | true, true, None, _ | false, true, _, _ | false, false, _, _ ->\n            constructor#p ^^ space\n            ^^ separate_map space (fun x -> parens (snd x)#p) fields\n        | _ ->\n            constructor#p ^^ space ^^ string \"{|\" ^^ space\n    
        ^^ separate_map (semi ^^ space)\n                 (fun (ident, exp) ->\n                   constructor#p ^^ !^\"_\" ^^ ident#p ^^ space ^^ string \":=\"\n                   ^^ space ^^ parens exp#p)\n                 fields\n            ^^ space ^^ string \"|}\"\n\n      method expr'_Construct_tuple ~super:_ ~components =\n        if List.length components == 0 then !^\"tt\"\n        else parens (separate_map comma (fun x -> x#p) components)\n\n      method expr'_Continue ~super:_ ~acc:_ ~label:_ ~witness =\n        match witness with _ -> .\n\n      method expr'_EffectAction ~super:_ ~action:_ ~argument:_ =\n        default_document_for \"expr'_EffectAction\"\n\n      method expr'_GlobalVar_concrete ~super:_ x2 =\n        (* TODO: prefix here? *)\n        (* !^\"Build_\" ^^ *)\n        x2#p (* ^^ !^\"_record\" *)\n\n      method expr'_GlobalVar_primitive ~super:_ x2 = self#primitive_to_string x2\n\n      method expr'_If ~super:_ ~cond ~then_ ~else_ =\n        string \"if\"\n        ^^ nest 2 (break 1 ^^ cond#p)\n        ^^ break 1 ^^ string \"then\"\n        ^^ nest 2 (break 1 ^^ then_#p)\n        ^^ break 1 ^^ string \"else\"\n        ^^ nest 2\n             (break 1 ^^ match else_ with Some x -> x#p | None -> string \"tt\")\n\n      method expr'_Let ~super:_ ~monadic:_ ~lhs ~rhs ~body =\n        string \"let\" ^^ space ^^ lhs#p ^^ space ^^ string \":=\" ^^ space ^^ rhs#p\n        ^^ space ^^ string \"in\" ^^ break 1 ^^ body#p\n\n      method expr'_Literal ~super x2 =\n        parens\n          (x2#p ^^ space ^^ colon ^^ space\n         ^^ (self#_do_not_override_lazy_of_ty AstPos_expr'_Literal_x0 super.typ)\n              #p)\n\n      method expr'_LocalVar ~super:_ x2 = x2#p\n\n      method expr'_Loop ~super:_ ~body ~kind ~state ~control_flow ~label:_\n          ~witness:_ =\n        kind#p ^^ space\n        ^^ brackets\n             (Option.value ~default:(string \"is_none\")\n                (Option.map ~f:(fun x -> x#p) control_flow))\n        ^^ 
Option.value ~default:(string \"default\")\n             (Option.map ~f:(fun x -> x#p) state)\n        ^^ space ^^ string \"of\" ^^ space\n        ^^ parens (nest 2 (break 1 ^^ body#p))\n\n      method expr'_MacroInvokation ~super:_ ~macro:_ ~args:_ ~witness:_ =\n        default_document_for \"expr'_MacroInvokation\"\n\n      method expr'_Match ~super:_ ~scrutinee ~arms =\n        string \"match\" ^^ space ^^ scrutinee#p ^^ space ^^ string \"with\"\n        ^^ break 1\n        ^^ concat_map_with\n             ~pre:(string \"|\" ^^ space)\n             ~post:(break 1)\n             (fun x -> x#p)\n             arms\n        ^^ string \"end\"\n\n      method expr'_QuestionMark ~super:_ ~e:_ ~return_typ:_ ~witness =\n        match witness with _ -> .\n\n      method expr'_Quote ~super:_ _x2 = default_document_for \"expr'_Quote\"\n      method expr'_Return ~super:_ ~e:_ ~witness = match witness with _ -> .\n\n      method cf_kind_BreakOrReturn =\n        default_document_for \"cf_kind_BreakOrReturn\"\n\n      method cf_kind_BreakOnly = default_document_for \"cf_kind_BreakOnly\"\n      method field_pat ~field ~pat = pat#p\n\n      method generic_constraint_GCLifetime _x1 _x2 =\n        default_document_for \"generic_constraint_GCLifetime\"\n\n      method generic_constraint_GCProjection x1 = string \"`\" ^^ braces x1#p\n      method generic_constraint_GCType x1 = string \"`\" ^^ braces x1#p\n\n      method generic_param ~ident ~span:_ ~attrs:_ ~kind =\n        ident#p ^^ space ^^ colon ^^ space ^^ kind#p\n\n      method generic_param_kind_GPConst ~typ = typ#p\n\n      method generic_param_kind_GPLifetime ~witness =\n        match witness with _ -> .\n\n      method generic_param_kind_GPType = string \"Type\"\n      method generic_value_GConst x1 = x1#p\n\n      method generic_value_GLifetime ~lt:_ ~witness =\n        match witness with _ -> .\n\n      method generic_value_GType x1 = parens x1#p\n\n      method generics ~params ~constraints =\n        let params_document 
=\n          concat_map_with ~pre:space (fun x -> string \"`\" ^^ braces x#p) params\n        in\n        let constraints_document = concat_spaced_doc constraints in\n        params_document ^^ constraints_document\n\n      method guard ~guard:_ ~span:_ = default_document_for \"guard\"\n\n      method guard'_IfLet ~super:_ ~lhs:_ ~rhs:_ ~witness =\n        match witness with _ -> .\n\n      method impl_expr ~kind:_ ~goal = goal#p\n\n      method impl_expr_kind_Builtin _x1 =\n        default_document_for \"impl_expr_kind_Builtin\"\n\n      method impl_expr_kind_Concrete _x1 =\n        default_document_for \"impl_expr_kind_Concrete\"\n\n      method impl_expr_kind_Dyn = default_document_for \"impl_expr_kind_Dyn\"\n\n      method impl_expr_kind_ImplApp ~impl:_ ~args:_ =\n        default_document_for \"impl_expr_kind_ImplApp\"\n\n      method impl_expr_kind_LocalBound ~id:_ =\n        default_document_for \"impl_expr_kind_LocalBound\"\n\n      method impl_expr_kind_Parent ~impl:_ ~ident:_ =\n        default_document_for \"impl_expr_kind_Parent\"\n\n      method impl_expr_kind_Projection ~impl:_ ~item:_ ~ident:_ =\n        default_document_for \"impl_expr_kind_Projection\"\n\n      method impl_expr_kind_Self = default_document_for \"impl_expr_kind_Self\"\n      method impl_ident ~goal ~name:_ = goal#p\n\n      method impl_item ~ii_span:_ ~ii_generics:_ ~ii_v ~ii_ident ~ii_attrs:_ =\n        ii_ident#p ^^ space ^^ string \":=\" ^^ space ^^ ii_v#p ^^ semi\n\n      method impl_item'_IIFn ~body ~params =\n        if List.length params == 0 then body#p\n        else\n          string \"fun\" ^^ space ^^ concat_spaced_doc params ^^ string \"=>\"\n          ^^ nest 2 (break 1 ^^ body#p)\n\n      method impl_item'_IIType ~typ ~parent_bounds:_ = typ#p\n      method item ~v ~span:_ ~ident:_ ~attrs:_ = v#p ^^ break 1\n\n      method item'_Alias ~super:_ ~name ~item =\n        CoqNotation.notation_name name#p (parens item#p)\n\n      method item'_Fn ~super ~name ~generics ~body 
~params ~safety:_ =\n        (* TODO: Why is type not available here ? *)\n        let is_rec =\n          Set.mem\n            (U.Reducers.collect_concrete_idents#visit_expr () body#v)\n            name#v\n        in\n        let typ =\n          (self#_do_not_override_lazy_of_ty AstPos_item'_Fn_body body#v.typ)#p\n        in\n\n        let params =\n          List.map\n            ~f:(fun x ->\n              match x#v with\n              | {\n               pat =\n                 {\n                   p = PBinding { mut; mode; var; typ = _; subpat };\n                   span : span;\n                   typ = _;\n                 };\n               typ;\n               typ_span;\n               attrs;\n              } ->\n                  x#p\n              | _ -> string \"'\" ^^ x#p)\n            params\n        in\n\n        let get_expr_of kind f : document option =\n          Attrs.associated_expr kind super.attrs\n          |> Option.map ~f:(self#entrypoint_expr >> f)\n        in\n        let get_fn_of kind f : document option =\n          Attrs.associated_fn kind super.attrs\n          |> Option.map ~f:(fun (g, p, x) ->\n                 f (g, List.hd_exn (List.rev p), self#entrypoint_expr x))\n        in\n        let requires =\n          get_expr_of Requires (fun x ->\n              x ^^ space ^^ string \"=\" ^^ space ^^ string \"true\")\n        in\n        let ensures =\n          get_expr_of Ensures (fun x ->\n              x ^^ space ^^ string \"=\" ^^ space ^^ string \"true\")\n        in\n        let ensures_fn =\n          get_fn_of Ensures (fun (g, p, x) ->\n              string \"let\" ^^ space ^^ self#entrypoint_pat p.pat ^^ space\n              ^^ string \":=\" ^^ space ^^ string \"@\" ^^ name#p ^^ space\n              ^^ concat_map_with (fun x -> x) params\n              ^^ Option.value ~default:empty\n                   (Option.map\n                      ~f:(fun r -> space ^^ string \"H_requires\")\n                      requires)\n          
    ^^ space ^^ string \"in\" ^^ break 1 ^^ x ^^ space ^^ string \"=\"\n              ^^ space ^^ string \"true\")\n        in\n        let is_lemma = Attrs.lemma super.attrs in\n        if is_lemma then\n          CoqNotation.lemma name#p generics#p params\n            (Option.value ~default:empty requires\n            ^^ space ^^ !^\"->\" ^^ break 1\n            ^^ Option.value ~default:empty ensures)\n        else if is_rec then\n          CoqNotation.fixpoint name#p generics#p\n            (params\n            @ Option.value ~default:[]\n                (Option.map ~f:(fun x -> [ string \"`\" ^^ braces x ]) requires))\n            typ body#p\n          ^^ Option.value ~default:empty\n               (Option.map\n                  ~f:(fun ensure ->\n                    break 1\n                    ^^ CoqNotation.lemma\n                         (name#p ^^ string \"_\" ^^ string \"ensures\")\n                         generics#p params\n                         (Option.value ~default:empty\n                            (Option.map\n                               ~f:(fun r ->\n                                 string \"forall (H_requires : \"\n                                 ^^ r ^^ string \"),\" ^^ break 1)\n                               requires)\n                         ^^ ensure))\n                  ensures_fn)\n        else\n          CoqNotation.definition name#p generics#p\n            (params\n            @ Option.value ~default:[]\n                (Option.map ~f:(fun x -> [ string \"`\" ^^ braces x ]) requires))\n            typ body#p\n          ^^ Option.value ~default:empty\n               (Option.map\n                  ~f:(fun ensure ->\n                    break 1\n                    ^^ CoqNotation.lemma\n                         (name#p ^^ string \"_\" ^^ string \"ensures\")\n                         generics#p params\n                         (Option.value ~default:empty\n                            (Option.map\n                               
~f:(fun r ->\n                                 string \"forall (H_requires : \"\n                                 ^^ r ^^ string \"),\" ^^ break 1)\n                               requires)\n                         ^^ ensure))\n                  ensures_fn)\n\n      method item'_HaxError ~super:_ _x2 = default_document_for \"item'_HaxError\"\n\n      method item'_IMacroInvokation ~super:_ ~macro:_ ~argument:_ ~span:_\n          ~witness:_ =\n        default_document_for \"item'_IMacroInvokation\"\n\n      method item'_Impl ~super ~generics ~self_ty ~of_trait ~items\n          ~parent_bounds:_ ~safety:_ =\n        let name, args = of_trait#v in\n        if Attrs.is_erased super.attrs then empty\n        else\n          CoqNotation.instance\n            (name#p ^^ string \"_\"\n            ^^ string (Int.to_string ([%hash: item] super)))\n            generics#p []\n            (name#p ^^ concat_map_with ~pre:space (fun x -> parens x#p) args)\n            (braces\n               (nest 2\n                  (concat_map_with\n                     ~pre:\n                       (break 1\n                       ^^ string (\"implaabbcc_\" ^ (RenderId.render name#v).name)\n                       ^^ !^\"_\")\n                     (fun x -> x#p)\n                     items)\n               ^^ break 1))\n\n      method item'_NotImplementedYet = string \"(* NotImplementedYet *)\"\n\n      method item'_Quote ~super:_ ~quote:_ ~origin:_ =\n        default_document_for \"item'_Quote\"\n\n      method item'_Trait ~super:_ ~name ~generics ~items ~safety:_ =\n        let _, params, constraints = generics#v in\n        CoqNotation.class_ name#p\n          (concat_map_with ~pre:space (fun x -> parens x#p) params\n          ^^ concat_map_with ~pre:space (fun x -> x#p) constraints)\n          [] !^\"Type\"\n          (braces\n             (nest 2 (concat_map_with ~pre:(break 1) (fun x -> x#p) items)\n             ^^ break 1))\n        ^^ break 1\n        ^^ CoqNotation.arguments name#p\n  
           (List.map ~f:(fun _ -> true) params\n             @ List.map ~f:(fun _ -> false) constraints)\n\n      method item'_TyAlias ~super:_ ~name ~generics:_ ~ty =\n        CoqNotation.notation_name name#p ty#p\n\n      method item'_Type_struct ~super:_ ~type_name:name ~constructor_name\n          ~generics ~tuple_struct ~arguments =\n        let arguments_explicity_with_ty =\n          List.map ~f:(fun _ -> true) generics#v.params\n          @ List.map ~f:(fun _ -> false) generics#v.constraints\n        in\n        let arguments_explicity_without_ty =\n          List.map ~f:(fun _ -> false) generics#v.params\n          @ List.map ~f:(fun _ -> false) generics#v.constraints\n        in\n        let base_name = (RenderId.render constructor_name#v).name ^ \"_record\" in\n        let name_doc = constructor_name#p ^^ string \"_record\" in\n        CoqNotation.record name_doc\n          (concat_map_with ~pre:space\n             (fun x -> parens (self#entrypoint_generic_param x))\n             generics#v.params\n          ^^ concat_map_with ~pre:space\n               (fun x -> self#entrypoint_generic_constraint x)\n               generics#v.constraints)\n          [] (string \"Type\")\n          (braces\n             (nest 2\n                (concat_map\n                   (fun (ident, typ, attr) ->\n                     break 1 ^^ constructor_name#p ^^ string \"_\" ^^ ident#p\n                     ^^ space ^^ colon ^^ space ^^ typ#p ^^ semi)\n                   arguments)\n             ^^ break 1))\n        ^^ break 1\n        ^^ CoqNotation.arguments (!^\"Build_\" ^^ name_doc)\n             arguments_explicity_without_ty (* arguments_explicity_with_ty *)\n        ^^ concat_map_with ~pre:(break 1)\n             (fun (ident, typ, attr) ->\n               CoqNotation.arguments\n                 (constructor_name#p ^^ !^\"_\" ^^ ident#p)\n                 arguments_explicity_without_ty)\n             arguments\n        ^^ break 1 ^^ !^\"#[export]\" ^^ space\n        ^^ 
(if List.is_empty arguments then empty\n            else\n              CoqNotation.instance\n                (string \"settable\" ^^ string \"_\" ^^ name_doc)\n                generics#p []\n                (!^\"Settable\" ^^ space ^^ !^\"_\")\n                (string \"settable!\" ^^ space\n                ^^ parens\n                     (!^\"Build_\" ^^ name_doc\n                     ^^ concat_map_with ~pre:space\n                          (fun (x : generic_param) ->\n                            match x with\n                            | { ident; _ } ->\n                                let idx =\n                                  (self#_do_not_override_lazy_of_local_ident\n                                     AstPos_item'_Type_generics ident)\n                                    #p\n                                in\n                                parens (idx ^^ space ^^ !^\":=\" ^^ space ^^ idx))\n                          generics#v.params)\n                ^^ space ^^ string \"<\"\n                ^^ separate_map (semi ^^ space)\n                     (fun (ident, typ, attr) ->\n                       constructor_name#p ^^ !^\"_\" ^^ ident#p)\n                     arguments\n                ^^ string \">\"))\n        ^^\n        if tuple_struct then\n          break 1\n          ^^ CoqNotation.notation_name (string base_name)\n               (string \"Build_\" ^^ name_doc)\n        else empty\n\n      (* map_def_path_item_string (fun x -> x) x#v.name *)\n\n      method item'_Type_enum ~super ~name ~generics ~variants =\n        let arguments_explicity_without_ty =\n          List.map ~f:(fun _ -> false) generics#v.params\n          @ List.map ~f:(fun _ -> false) generics#v.constraints\n        in\n\n        concat_map_with ~post:(break 1)\n          (fun x ->\n            self#item'_Type_struct ~super\n              ~constructor_name:\n                (self#_do_not_override_lazy_of_concrete_ident\n                   AstPos_variant__name x#v.name)\n          
    ~type_name:name ~generics ~tuple_struct:false\n              ~arguments:\n                (List.map\n                   ~f:(fun (ident, typ, attrs) ->\n                     ( self#_do_not_override_lazy_of_concrete_ident\n                         AstPos_variant__arguments ident,\n                       self#_do_not_override_lazy_of_ty\n                         AstPos_variant__arguments typ,\n                       self#_do_not_override_lazy_of_attrs AstPos_variant__attrs\n                         attrs ))\n                   x#v.arguments))\n          (List.filter ~f:(fun x -> x#v.is_record) variants)\n        ^^ CoqNotation.inductive name#p\n             (concat_map_with ~pre:space\n                (fun x -> parens (self#entrypoint_generic_param x))\n                generics#v.params\n             ^^ concat_map_with ~pre:space\n                  (fun x -> self#entrypoint_generic_constraint x)\n                  generics#v.constraints)\n             [] (string \"Type\")\n             (separate_map (break 1)\n                (fun x ->\n                  string \"|\" ^^ space ^^ x#p\n                  ^^\n                  if x#v.is_record then\n                    concat_map_with ~pre:space\n                      (fun (x : generic_param) ->\n                        (self#_do_not_override_lazy_of_local_ident\n                           AstPos_item'_Type_generics x.ident)\n                          #p)\n                      generics#v.params\n                    ^^ space ^^ !^\"->\" ^^ space ^^ !^\"_\"\n                  else empty)\n                variants)\n        ^^ concat_map_with ~pre:(break 1)\n             (fun v ->\n               CoqNotation.arguments\n                 (self#_do_not_override_lazy_of_concrete_ident\n                    AstPos_variant__arguments v#v.name)\n                   #p\n                 arguments_explicity_without_ty)\n             variants\n\n      (* ^^ break 1 ^^ !^\"Arguments\" ^^ space ^^ name#p ^^ colon *)\n      (* ^^ 
!^\"clear implicits\" ^^ dot ^^ break 1 ^^ !^\"Arguments\" ^^ space *)\n      (* ^^ name#p *)\n      (* ^^ concat_map (fun _ -> space ^^ !^\"(_)\") generics#v.params *)\n      (* ^^ concat_map (fun _ -> space ^^ !^\"{_}\") generics#v.constraints *)\n      (* ^^ dot *)\n\n      method item'_Use ~super:_ ~path ~is_external ~rename:_ =\n        if List.length path == 0 || is_external then empty\n        else\n          let crate =\n            String.capitalize\n              (Option.value ~default:\"(TODO CRATE)\"\n                 (Option.bind ~f:List.hd current_namespace))\n          in\n          let concat_capitalize l =\n            String.concat ~sep:\"_\" (List.map ~f:String.capitalize l)\n          in\n          let concat_capitalize_include l =\n            concat_capitalize (List.drop_last_exn l)\n            ^ \" (t_\" ^ List.last_exn l ^ \")\"\n          in\n          let path_string =\n            match path with\n            | \"crate\" :: xs -> concat_capitalize_include (crate :: xs)\n            | \"super\" :: xs ->\n                concat_capitalize\n                  (crate\n                   :: List.drop_last_exn\n                        (Option.value ~default:[]\n                           (Option.bind ~f:List.tl current_namespace))\n                  @ xs)\n            | [ a ] -> a\n            | xs -> concat_capitalize_include xs\n          in\n          if String.is_empty path_string then empty\n          else\n            string \"From\" ^^ space ^^ string crate ^^ space\n            ^^ string \"Require Import\" ^^ space ^^ string path_string ^^ dot\n            ^^ break 1 ^^ string \"Export\" ^^ space ^^ string path_string ^^ dot\n\n      method item_quote_origin ~item_kind:_ ~item_ident:_ ~position:_ =\n        default_document_for \"item_quote_origin\"\n\n      method lhs_LhsArbitraryExpr ~e:_ ~witness = match witness with _ -> .\n\n      method lhs_LhsArrayAccessor ~e:_ ~typ:_ ~index:_ ~witness =\n        match witness with _ -> .\n\n     
 method lhs_LhsFieldAccessor_field ~e:_ ~typ:_ ~field:_ ~witness =\n        match witness with _ -> .\n\n      method lhs_LhsFieldAccessor_tuple ~e:_ ~typ:_ ~nth:_ ~size:_ ~witness =\n        match witness with _ -> .\n\n      method lhs_LhsLocalVar ~var:_ ~typ:_ =\n        default_document_for \"lhs_LhsLocalVar\"\n\n      method lhs_LhsVecRef ~e:_ ~typ:_ ~witness:_ =\n        default_document_for \"lhs_LhsVecRef\"\n\n      method literal_Bool x1 = string (if x1 then \"true\" else \"false\")\n\n      method literal_Char x1 =\n        string \"\\\"\" ^^ string (Char.escaped x1) ^^ string \"\\\"\" ^^ string \"%char\"\n\n      method literal_Float ~value ~negative ~kind:_ =\n        (if negative then parens (!^\"-\" ^^ string value) else string value)\n        ^^ string \"%float\"\n\n      method literal_Int ~value ~negative ~kind =\n        (if negative then !^\"-\" else empty) ^^ string value\n\n      method literal_String x1 = string \"\\\"\" ^^ string x1 ^^ string \"\\\"%string\"\n\n      method loop_kind_ForIndexLoop ~start:_ ~end_:_ ~var:_ ~var_typ:_ ~witness\n          =\n        default_document_for \"loop_kind_ForIndexLoop\"\n\n      method loop_kind_ForLoop ~pat ~it ~witness =\n        braces it#p ^^ space ^^ string \"inP?\" ^^ space ^^ brackets pat#p\n\n      method loop_kind_UnconditionalLoop =\n        default_document_for \"loop_kind_UnconditionalLoop\"\n\n      method loop_kind_WhileLoop ~condition:_ ~witness:_ =\n        default_document_for \"loop_kind_WhileLoop\"\n\n      method loop_state ~init ~bpat ~witness:_ =\n        parens (init#p ^^ space ^^ !^\"state\" ^^ space ^^ bpat#p)\n\n      method modul x1 = separate_map (break 1) (fun x -> x#p) x1\n\n      method param ~pat ~typ ~typ_span:_ ~attrs:_ =\n        parens (pat#p ^^ space ^^ colon ^^ space ^^ typ#p)\n\n      method pat ~p ~span:_ ~typ:_ = p#p\n\n      method pat'_PAscription ~super:_ ~typ ~typ_span:_ ~pat =\n        pat#p ^^ space ^^ colon ^^ space ^^ typ#p\n\n      method pat'_PBinding 
~super:_ ~mut:_ ~mode:_ ~var ~typ:_ ~subpat:_ = var#p\n      method pat'_PConstant ~super:_ ~lit = lit#p\n\n      method pat'_PConstruct_inductive ~super:_ ~constructor ~is_record\n          ~is_struct ~fields =\n        match (is_record, is_struct) with\n        | true, true ->\n            constructor#p ^^ space\n            ^^ parens\n                 (separate_map (comma ^^ space)\n                    (fun field_pat -> (snd field_pat)#p)\n                    fields)\n        | _, true ->\n            (* constructor#p ^^ *)\n            string \"{|\"\n            ^^ separate_map (semi ^^ space)\n                 (fun (ident, exp) ->\n                   constructor#p ^^ ident#p ^^ space ^^ string \":=\" ^^ space\n                   ^^ parens exp#p)\n                 fields\n            ^^ string \"|}\"\n        | _, false ->\n            constructor#p\n            ^^ concat_map_with ~pre:space\n                 (fun (ident, exp) -> parens exp#p)\n                 fields\n\n      method pat'_PConstruct_tuple ~super:_ ~components =\n        (* TODO: Only add `'` if you are a top-level pattern *)\n        (* string \"'\" ^^ *)\n        parens (separate_map comma (fun x -> x#p) components)\n\n      method pat'_PDeref ~super:_ ~subpat:_ ~witness:_ =\n        default_document_for \"pat'_PDeref\"\n\n      method pat'_PWild = string \"_\"\n      method printer_name = \"Coq printer\"\n\n      method projection_predicate ~impl:_ ~assoc_item ~typ =\n        string \"_\" (* TODO: name of impl#p *) ^^ dot\n        ^^ parens assoc_item#p ^^ space ^^ string \"=\" ^^ space ^^ typ#p\n\n      method safety_kind_Safe = default_document_for \"safety_kind_Safe\"\n      method safety_kind_Unsafe _x1 = default_document_for \"safety_kind_Unsafe\"\n\n      method supported_monads_MException _x1 =\n        default_document_for \"supported_monads_MException\"\n\n      method supported_monads_MOption =\n        default_document_for \"supported_monads_MOption\"\n\n      method 
supported_monads_MResult _x1 =\n        default_document_for \"supported_monads_MResult\"\n\n      method trait_goal ~trait ~args =\n        trait#p ^^ concat_map (fun x -> space ^^ x#p) args\n\n      method trait_item ~ti_span:_ ~ti_generics ~ti_v ~ti_ident ~ti_attrs:_ =\n        let _, params, constraints = ti_generics#v in\n        let generic_params = concat_map (fun x -> space ^^ x#p) params in\n        let filter_constraints = function\n          | GCProjection { impl = { goal = { trait; _ }; _ }; _ } -> true\n          | GCType\n              {\n                goal = { trait; args = [ GType (TAssociatedType { item; _ }) ] };\n                _;\n              } ->\n              Concrete_ident.(item == ti_ident#v)\n          | _ -> true\n        in\n        let generic_constraints_other =\n          concat_map\n            (fun x -> space ^^ self#entrypoint_generic_constraint x)\n            (List.filter ~f:filter_constraints\n               (List.map ~f:(fun x -> x#v) constraints))\n        in\n        let generic_constraints_self =\n          concat_map\n            (fun x ->\n              break 1 ^^ string \"_\" ^^ space ^^ string \"::\" ^^ space\n              ^^ self#entrypoint_generic_constraint x\n              ^^ semi)\n            (List.filter\n               ~f:(fun x -> not (filter_constraints x))\n               (List.map ~f:(fun x -> x#v) constraints))\n        in\n        ti_ident#p ^^ generic_params ^^ generic_constraints_other ^^ space\n        ^^ (match ti_v#v with TIDefault _ -> string \":=\" | _ -> colon)\n        ^^ space ^^ ti_v#p ^^ semi ^^ generic_constraints_self\n\n      method trait_item'_TIDefault ~params ~body ~witness:_ =\n        (if List.is_empty params then empty\n         else\n           string \"fun\" ^^ space\n           ^^ separate_map space (fun x -> x#p) params\n           ^^ space ^^ string \"=>\")\n        ^^ nest 2 (break 1 ^^ body#p)\n\n      method trait_item'_TIFn x1 = x1#p\n      method trait_item'_TIType x1 = 
string \"Type\"\n\n      method ty_TApp_application ~typ ~generics =\n        typ#p ^^ concat_map (fun x -> space ^^ parens x#p) generics\n\n      method ty_TApp_tuple ~types =\n        if List.length types == 0 then string \"unit\"\n        else parens (separate_map star (fun x -> self#entrypoint_ty x) types)\n\n      method ty_TArray ~typ ~length =\n        string \"t_Array\" ^^ space ^^ parens typ#p ^^ space ^^ parens length#p\n\n      method ty_TArrow x1 x2 =\n        concat_map (fun x -> x#p ^^ space ^^ string \"->\" ^^ space) x1 ^^ x2#p\n\n      method ty_TAssociatedType ~impl:_ ~item = item#p\n      method ty_TBool = string \"bool\"\n      method ty_TChar = string \"ascii\"\n      method ty_TDyn ~witness:_ ~goals:_ = default_document_for \"ty_TDyn\"\n      method ty_TFloat _x1 = string \"float\"\n\n      method ty_TInt x1 =\n        string \"t_\"\n        ^^\n        match x1 with\n        | { size; signedness } -> (\n            (match signedness with\n            | Unsigned -> string \"u\"\n            | Signed -> string \"i\")\n            ^^\n            match size with\n            | S8 -> string \"8\"\n            | S16 -> string \"16\"\n            | S32 -> string \"32\"\n            | S64 -> string \"64\"\n            | S128 -> string \"128\"\n            | SSize -> string \"size\")\n\n      method ty_TOpaque x1 = x1#p\n      method ty_TParam x1 = x1#p\n      method ty_TRawPointer ~witness:_ = default_document_for \"ty_TRawPointer\"\n\n      method ty_TRef ~witness:_ ~region:_ ~typ:_ ~mut:_ =\n        default_document_for \"ty_TRef\"\n\n      method ty_TSlice ~witness:_ ~ty = !^\"t_Slice\" ^^ space ^^ ty#p\n      method ty_TStr = string \"string\"\n\n      method item'_Enum_Variant ~name ~arguments ~is_record ~attrs:_ =\n        if is_record then\n          name#p ^^ space ^^ colon ^^ space ^^ name#p ^^ !^\"_record\" ^^ space\n          (* concat_map *)\n          (*   (fun (ident, typ, attr) -> *)\n          (*     ident#p ^^ space ^^ colon ^^ space 
^^ typ#p) *)\n          (*   arguments *)\n        else if List.length arguments == 0 then name#p\n        else\n          name#p ^^ space ^^ colon ^^ space\n          ^^ separate_map\n               (space ^^ string \"->\" ^^ space)\n               (fun (ident, typ, attr) -> typ#p)\n               arguments\n          ^^ space ^^ string \"->\" ^^ space ^^ string \"_\"\n\n      (* method quote (quote : quote) : document = empty *)\n      method module_path_separator = \".\"\n\n      method concrete_ident ~local:_ id : document =\n        match id.name with\n        | \"not\" -> !^\"negb\"\n        | \"eq\" -> !^\"PartialEq_f_eq\"\n        | \"lt\" -> !^\"PartialOrd_f_lt\"\n        | \"gt\" -> !^\"PartialOrd_f_gt\"\n        | \"le\" -> !^\"PartialOrd_f_le\"\n        | \"ge\" -> !^\"PartialOrd_f_ge\"\n        | \"rem\" -> !^\"Rem_f_rem\"\n        | \"add\" -> !^\"Add_f_add\"\n        | \"sub\" -> !^\"Sub_f_sub\"\n        | \"mul\" -> !^\"Mul_f_mul\"\n        | \"div\" -> !^\"Div_f_div\"\n        | \"index\" -> !^\"Index_f_index\"\n        | \"f_to_string\" -> CoqNotation.comment !^\"f_to_string\"\n        | x -> !^x\n    end\n\n  let new_printer : BasePrinter.finalized_printer =\n    BasePrinter.finalize (fun () -> (new printer :> BasePrinter.printer))\nend\n\nmodule type S = sig\n  val new_printer : BasePrinter.finalized_printer\nend\n\nlet make (module M : Attrs.WITH_ITEMS) =\n  let open (\n    Make\n      (struct\n        let default x = x\n      end)\n      (M) :\n      S) in\n  new_printer\n\nlet translate m _ ~bundles:_ (items : AST.item list) : Types.file list =\n  let my_printer = make m in\n  let grouped_items =\n    U.group_items_by_namespace items\n    |> Map.to_alist\n    |> List.filter_map ~f:(fun (_, items) ->\n           let* first_item = List.hd items in\n           Some ((RenderId.render first_item.ident).path, items))\n  in\n  (grouped_items\n  |> List.map ~f:(fun (ns, items) ->\n         let mod_name =\n           String.concat ~sep:\"_\"\n         
    (List.map ~f:(map_first_letter String.uppercase) ns)\n         in\n         let sourcemap, contents =\n           let annotated = my_printer#entrypoint_modul items in\n           let open Generic_printer.AnnotatedString in\n           let header = pure hardcoded_coq_headers in\n           let annotated = concat header annotated in\n           (to_sourcemap annotated, to_string annotated)\n         in\n         let sourcemap = Some sourcemap in\n         let path = mod_name ^ \".v\" in\n         Types.{ path; contents; sourcemap }))\n  @ [\n      Types.\n        {\n          path = \"_CoqProject\";\n          contents =\n            \"-R ./ \" ^ \"TODO\" ^ \"\\n-arg -w\\n-arg all\\n\\n\"\n            ^ String.concat ~sep:\"\\n\"\n                (List.rev\n                   (grouped_items\n                   |> List.map ~f:(fun (ns, items) ->\n                          let mod_name =\n                            String.concat ~sep:\"_\"\n                              (List.map\n                                 ~f:(map_first_letter String.uppercase)\n                                 ns)\n                          in\n                          mod_name ^ \".v\")));\n          sourcemap = None;\n        };\n    ]\n\nopen Phase_utils\n\nmodule TransformToInputLanguage =\n  [%functor_application\n  Phases.Reject.Unsafe(Features.Rust)\n  |> Phases.Rewrite_local_self\n  |> Phases.Reject.RawOrMutPointer\n  |> Phases.And_mut_defsite\n  |> Phases.Reconstruct_asserts\n  |> Phases.Reconstruct_for_loops\n  |> Phases.Direct_and_mut\n  |> Phases.Reject.Arbitrary_lhs\n  |> Phases.Drop_blocks\n  |> Phases.Drop_match_guards\n  |> Phases.Reject.Continue\n  |> Phases.Drop_references\n  |> Phases.Trivialize_assign_lhs\n  |> Phases.Reconstruct_question_marks\n  |> Side_effect_utils.Hoist\n  |> Phases.Local_mutation\n  |> Phases.Reject.Continue\n  |> Phases.Cf_into_monads\n  |> Phases.Reject.EarlyExit\n  |> Phases.Drop_return_break_continue\n  |> Phases.Functionalize_loops\n  |> 
Phases.Reject.As_pattern\n  |> Phases.Reject.Dyn\n  |> Phases.Reject.Trait_item_default\n  |> Phases.Reorder_fields\n  |> Phases.Bundle_cycles\n  |> Phases.Sort_items_namespace_wise\n  |> SubtypeToInputLanguage\n  |> Identity\n  ]\n  [@ocamlformat \"disable\"]\n\nlet apply_phases (_bo : BackendOptions.t) (items : Ast.Rust.item list) :\n    AST.item list =\n  TransformToInputLanguage.ditems items\n"
  },
  {
    "path": "engine/backends/coq/coq/coq_backend.mli",
    "content": "open Hax_engine.Backend\ninclude T with module BackendOptions = UnitBackendOptions\n"
  },
  {
    "path": "engine/backends/coq/coq/dune",
    "content": "(library\n (name coq_backend)\n (package hax-engine)\n (libraries hax_engine base hacspeclib_macro_parser coq_ast)\n (preprocess\n  (pps\n   ppx_yojson_conv\n   ppx_sexp_conv\n   ppx_compare\n   ppx_hash\n   ppx_deriving.show\n   ppx_deriving.eq\n   ppx_inline\n   ppx_functor_application\n   ppx_matches)))\n\n(env\n (_\n  (flags\n   (:standard -w -A))))\n"
  },
  {
    "path": "engine/backends/coq/coq_ast.ml",
    "content": "open Hax_engine\nopen Utils\nopen Base\n\nmodule type Library = sig\n  module Notation : sig\n    val int_repr : string -> string -> string\n    val type_str : string\n    val bool_str : string\n    val unit_str : string\n  end\nend\n\nmodule Coq =\nfunctor\n  (Lib : Library)\n  ->\n  struct\n    module AST = struct\n      type int_size = U8 | U16 | U32 | U64 | U128 | USize\n      type int_type = { size : int_size; signed : bool }\n\n      type ty =\n        | WildTy\n        | Bool\n        | Unit\n        | TypeTy\n        | Int of int_type\n        | NameTy of string\n        | RecordTy of string * (string * ty) list\n        | Product of ty list\n        | Coproduct of ty list\n        | Arrow of ty * ty\n        | ArrayTy of ty * string (* int *)\n        | SliceTy of ty\n        | AppTy of ty * ty list\n        | NatMod of string * int * string\n        | Forall of string list * string list * ty\n        | Exists of string list * string list * ty\n\n      type literal =\n        | Const_string of string\n        | Const_char of int\n        | Const_int of string * int_type\n        | Const_bool of bool\n\n      type pat =\n        | WildPat\n        | UnitPat\n        | Ident of string\n        | Lit of literal\n        | RecordPat of string * (string * pat) list\n        | ConstructorPat of string * pat list\n        | TuplePat of pat list\n        | AscriptionPat of pat * ty\n        | DisjunctivePat of pat list\n\n      type monad_type = Option | Result of ty | Exception of ty\n\n      type term =\n        | UnitTerm\n        | Let of let_args\n        | If of term * term * term\n        | Match of term * (pat * term) list\n        | Const of literal\n        | Literal of string\n        | AppFormat of string list * notation_elements list\n        | App of term * term list\n        | Var of string\n        | NameTerm of string\n        | RecordConstructor of string * (string * term) list\n        | RecordUpdate of string * term * (string * 
term) list\n        | Type of ty\n        | Lambda of pat list * term\n        | Tuple of term list\n        | Array of term list\n        | TypedTerm of term * ty\n\n      and notation_elements =\n        | Newline of int\n        | Typing of ty * bool * int\n        | Variable of pat * int\n        | Value of term * bool * int\n\n      and let_args = {\n        pattern : pat;\n        mut : bool;\n        value : term;\n        body : term;\n        value_typ : ty;\n        monad_typ : monad_type option;\n      }\n\n      (* TODO: I don't get why you've got InductiveCase VS BaseCase. Why not an inductive case (i.e. a variant, right?) is a name + a list of types? *)\n      type inductive_case = InductiveCase of string * ty | BaseCase of string\n\n      type argument =\n        | Implicit of pat * ty\n        | Explicit of pat * ty\n        | Typeclass of string option * ty\n\n      (* name, arguments, body, type *)\n      type definition_type = string * argument list * term * ty\n      type record_field = Named of string * ty | Coercion of string * ty\n\n      type instance_decl =\n        | InlineDef of definition_type\n        | LetDef of definition_type\n\n      type instance_decls =\n        | InstanceDecls of instance_decl list\n        | TermDef of term\n\n      type decl =\n        | MultipleDecls of decl list\n        | Unimplemented of string\n        | Comment of string\n        | Definition of definition_type\n        | ProgramDefinition of definition_type\n        | Lemma of string * argument list * term\n        | Equations of definition_type\n        | EquationsQuestionmark of definition_type\n        | Notation of string * term * string option\n        | Record of string * argument list * record_field list\n        | Inductive of string * argument list * inductive_case list\n        | Class of string * argument list * record_field list\n        | Instance of\n            string * argument list * ty * ty list * definition_type list\n        | 
ProgramInstance of\n            string * argument list * ty * ty list * instance_decls\n        | Require of string option * string list * string option\n        | ModuleType of string * argument list * record_field list\n        | Module of string * string * argument list * record_field list\n        | Parameter of string * ty (* definition_type minus 'term' *)\n        | HintUnfold of string * ty option\n    end\n\n    let __TODO_pat__ s = AST.Ident (s ^ \" todo(pat)\")\n    let __TODO_ty__ s : AST.ty = AST.NameTy (s ^ \" todo(ty)\")\n    let __TODO_term__ s = AST.Const (AST.Const_string (s ^ \" todo(term)\"))\n    let __TODO_item__ s = AST.Unimplemented (s ^ \" todo(item)\")\n\n    let int_size_to_string (x : AST.int_size) : string =\n      match x with\n      | AST.U8 -> \"8\"\n      | AST.U16 -> \"16\"\n      | AST.U32 -> \"32\"\n      | AST.U64 -> \"64\"\n      | AST.U128 -> \"128\"\n      | AST.USize -> \"32\"\n\n    let rec ty_to_string (x : AST.ty) : string * bool =\n      match x with\n      | AST.WildTy -> (\"_\", false)\n      | AST.Bool -> (Lib.Notation.bool_str, false)\n      | AST.Coproduct [] -> (\"⊥\", false)\n      | AST.Coproduct l ->\n          let ty_str =\n            String.concat\n              ~sep:(\" \" ^ \"∐\" ^ \" \")\n              (List.map ~f:ty_to_string_without_paren l)\n          in\n          (ty_str, true)\n      | AST.Product [] | AST.Unit ->\n          (Lib.Notation.unit_str, false (* TODO: might need paren *))\n      | AST.TypeTy -> (Lib.Notation.type_str, false (* TODO: might need paren *))\n      | AST.Int { size = AST.USize; _ } -> (\"uint_size\", false)\n      | AST.Int { size; _ } -> (\"int\" ^ int_size_to_string size, false)\n      | AST.NameTy s -> (s, false)\n      | AST.RecordTy (name, _fields) ->\n          (* [ AST.Record (name, fields) ] *) (name, false)\n      | AST.Product l ->\n          let ty_str =\n            String.concat\n              ~sep:(\" \" ^ \"×\" ^ \" \")\n              (List.map 
~f:ty_to_string_without_paren l)\n          in\n          (ty_str, true)\n      | AST.Arrow (a, b) ->\n          let a_ty_str = ty_to_string_without_paren a in\n          let b_ty_str = ty_to_string_without_paren b in\n          (a_ty_str ^ \" \" ^ \"->\" ^ \" \" ^ b_ty_str, true)\n      | AST.ArrayTy (t, l) ->\n          let ty_str = ty_to_string_with_paren t in\n          ( \"nseq\" ^ \" \" ^ ty_str ^ \" \"\n            ^\n            (* Int.to_string *)\n            l,\n            true )\n      | AST.SliceTy t ->\n          let ty_str = ty_to_string_with_paren t in\n          (\"seq\" ^ \" \" ^ ty_str, true)\n      | AST.AppTy (i, []) -> ty_to_string i\n      (* | AST.AppTy (i, [ y ]) -> *)\n      (*     let ty_defs, ty_str = ty_to_string y in *)\n      (*     (ty_defs, i ^ \" \" ^ ty_str) *)\n      | AST.AppTy (i, p) ->\n          let ty_str =\n            String.concat ~sep:\" \" (List.map ~f:ty_to_string_with_paren p)\n          in\n          (ty_to_string_without_paren i ^ \" \" ^ ty_str, true)\n      | AST.NatMod (_t, _i, s) ->\n          (* [ *)\n          (*   AST.Notation *)\n          (*     ( t, *)\n          (*       AST.ArrayTy *)\n          (*         (AST.Int { size = U8; signed = false }, Int.to_string i) ); *)\n          (* ] *)\n          (\"nat_mod 0x\" ^ s, true)\n      | AST.Forall ([], [], ty) -> ty_to_string ty\n      | AST.Forall (implicit_vars, [], ty) ->\n          ( \"forall\" ^ \" \" ^ \"{\"\n            ^ String.concat ~sep:\" \" implicit_vars\n            ^ \"}\" ^ \",\" ^ \" \"\n            ^ ty_to_string_without_paren ty,\n            true )\n      | AST.Forall ([], vars, ty) ->\n          ( \"forall\" ^ \" \"\n            ^ String.concat ~sep:\" \" vars\n            ^ \",\" ^ \" \"\n            ^ ty_to_string_without_paren ty,\n            true )\n      | AST.Forall (implicit_vars, vars, ty) ->\n          ( \"forall\" ^ \" \" ^ \"{\"\n            ^ String.concat ~sep:\" \" implicit_vars\n            ^ \"}\" ^ \",\" ^ \" \"\n      
      ^ String.concat ~sep:\" \" vars\n            ^ \",\" ^ \" \"\n            ^ ty_to_string_without_paren ty,\n            true )\n      | AST.Exists ([], [], ty) -> ty_to_string ty\n      | AST.Exists (implicit_vars, [], ty) ->\n          ( \"exists\" ^ \" \" ^ \"{\"\n            ^ String.concat ~sep:\" \" implicit_vars\n            ^ \"}\" ^ \",\" ^ \" \"\n            ^ ty_to_string_without_paren ty,\n            true )\n      | AST.Exists ([], vars, ty) ->\n          ( \"exists\" ^ \" \"\n            ^ String.concat ~sep:\" \" vars\n            ^ \",\" ^ \" \"\n            ^ ty_to_string_without_paren ty,\n            true )\n      | AST.Exists (implicit_vars, vars, ty) ->\n          ( \"exists\" ^ \" \" ^ \"{\"\n            ^ String.concat ~sep:\" \" implicit_vars\n            ^ \"}\" ^ \",\" ^ \" \"\n            ^ String.concat ~sep:\" \" vars\n            ^ \",\" ^ \" \"\n            ^ ty_to_string_without_paren ty,\n            true )\n      | _ -> .\n\n    and ty_to_string_with_paren (x : AST.ty) : string =\n      let s, b = ty_to_string x in\n      if b then \"(\" ^ s ^ \")\" else s\n\n    and ty_to_string_without_paren (x : AST.ty) : string =\n      let s, _ = ty_to_string x in\n      s\n\n    let literal_to_string (x : AST.literal) : string =\n      match x with\n      | Const_string s -> s\n      | Const_char c -> Int.to_string c (* TODO *)\n      | Const_int (i, { size; _ }) ->\n          Lib.Notation.int_repr (int_size_to_string size) i\n      (* *)\n      | Const_bool b -> Bool.to_string b\n\n    let rec pat_to_string (x : AST.pat) (is_top_expr : bool) depth : string =\n      match x with\n      | AST.WildPat -> \"_\"\n      | AST.UnitPat -> tick_if is_top_expr ^ \"tt\"\n      | AST.Ident s -> s\n      | AST.Lit l -> literal_to_string l\n      | AST.ConstructorPat (name, args) ->\n          name ^ \" \" ^ String.concat ~sep:\" \"\n          @@ List.map ~f:(fun pat -> pat_to_string pat false depth) args\n      | AST.RecordPat (_name, []) -> \"(* 
Empty Record *)\" (* TODO *)\n      | AST.RecordPat (_name, args) ->\n          (* name ^ \" \" ^ *)\n          \"{|\"\n          ^ String.concat ~sep:\";\"\n              (List.map\n                 ~f:(fun (name, pat) ->\n                   newline_indent (depth + 1)\n                   ^ name ^ \" \" ^ \":=\" ^ \" \"\n                   ^ pat_to_string pat true (depth + 1))\n                 args)\n          ^ newline_indent depth ^ \"|}\"\n      | AST.TuplePat [] -> \"_\" (* TODO: empty tuple pattern? *)\n      | AST.TuplePat [ v ] -> pat_to_string v is_top_expr (depth + 1)\n      | AST.TuplePat vals ->\n          tick_if is_top_expr ^ \"(\"\n          ^ String.concat ~sep:\",\"\n              (List.map ~f:(fun t -> pat_to_string t false (depth + 1)) vals)\n          ^ \")\"\n      | AST.AscriptionPat (p, ty) ->\n          \"(\" ^ pat_to_string p true depth ^ \" \" ^ \":\" ^ \" \"\n          ^ ty_to_string_without_paren ty\n          ^ \")\" (* TODO: Should this be true of false? *)\n      | AST.DisjunctivePat pats ->\n          let f pat = pat_to_string pat true depth in\n          String.concat ~sep:\" | \" @@ List.map ~f pats\n\n    and tick_if is_top_expr = if is_top_expr then \"'\" else \"\"\n\n    let rec term_to_string (x : AST.term) depth : string * bool =\n      match x with\n      | AST.UnitTerm -> (\"tt\", false)\n      | AST.Let { pattern = pat; value = bind; value_typ = typ; body = term; _ }\n        ->\n          (* TODO: propegate type definition *)\n          let var_str = pat_to_string pat true depth in\n          let expr_str = term_to_string_without_paren bind (depth + 1) in\n          let typ_str = ty_to_string_without_paren typ in\n          let body_str = term_to_string_without_paren term depth in\n          ( \"let\" ^ \" \" ^ var_str ^ \" \" ^ \":=\" ^ \" \" ^ expr_str ^ \" \" ^ \":\"\n            ^ \" \" ^ typ_str ^ \" \" ^ \"in\" ^ newline_indent depth ^ body_str,\n            true )\n      | AST.If (cond, then_, else_) ->\n          ( 
\"if\" ^ \" \"\n            ^ term_to_string_without_paren cond (depth + 1)\n            ^ newline_indent depth ^ \"then\" ^ \" \"\n            ^ term_to_string_without_paren then_ (depth + 1)\n            ^ newline_indent depth ^ \"else\" ^ \" \"\n            ^ term_to_string_without_paren else_ (depth + 1),\n            true )\n      | AST.Match (match_val, arms) ->\n          ( \"match\" ^ \" \"\n            ^ term_to_string_without_paren match_val (depth + 1)\n            ^ \" \" ^ \"with\" ^ newline_indent depth\n            ^ String.concat ~sep:(newline_indent depth)\n                (List.map\n                   ~f:(fun (pat, body) ->\n                     \"|\" ^ \" \"\n                     ^ pat_to_string pat true depth\n                     ^ \" \" ^ \"=>\"\n                     ^ newline_indent (depth + 1)\n                     ^ term_to_string_without_paren body (depth + 1))\n                   arms)\n            ^ newline_indent depth ^ \"end\",\n            false )\n      | AST.Const c -> (literal_to_string c, false)\n      | AST.Literal s -> (s, false)\n      | AST.AppFormat (format, args) ->\n          ( format_to_string format\n              (List.map\n                 ~f:(function\n                   | AST.Newline n -> newline_indent (depth + n)\n                   | AST.Typing (typ, true, _n) -> ty_to_string_with_paren typ\n                   | AST.Typing (typ, false, _n) ->\n                       ty_to_string_without_paren typ\n                   | AST.Value (x, true, n) ->\n                       term_to_string_with_paren x (depth + n)\n                   | AST.Value (x, false, n) ->\n                       term_to_string_without_paren x (depth + n)\n                   | AST.Variable (p, n) -> pat_to_string p true (depth + n))\n                 args),\n            true (* TODO? 
Notation does not always need paren *) )\n      | AST.App (f, args) ->\n          let f_s, f_b = term_to_string f depth in\n          (f_s ^ term_list_to_string args depth, f_b || List.length args > 0)\n      | AST.Var s -> (s, false)\n      | AST.NameTerm s -> (s, false)\n      | AST.RecordConstructor (f, args) ->\n          ( \"Build_\" ^ f\n            ^ (if List.length args > 0 then \" \" else \"\")\n            ^ String.concat ~sep:\" \"\n                (List.map\n                   ~f:(fun (n, t) ->\n                     \"(\" ^ n ^ \" \" ^ \":=\" ^ \" \"\n                     ^ term_to_string_without_paren t depth\n                     ^ \")\")\n                   args),\n            true )\n      | AST.RecordUpdate (f, base, args) ->\n          ( \"Build_\" ^ f ^ \"[\"\n            ^ term_to_string_without_paren base depth\n            ^ \"]\"\n            ^ (if List.length args > 0 then \" \" else \"\")\n            ^ String.concat ~sep:\" \"\n                (List.map\n                   ~f:(fun (n, t) ->\n                     \"(\" ^ n ^ \" \" ^ \":=\" ^ \" \"\n                     ^ term_to_string_without_paren t depth\n                     ^ \")\")\n                   args),\n            true )\n      | AST.Type t ->\n          let ty_str = ty_to_string_with_paren t in\n          (* TODO: Make definitions? *)\n          (ty_str, false (* TODO? does this always need paren? *))\n      | AST.Lambda (params, body) ->\n          ( String.concat ~sep:\" \"\n              (List.map\n                 ~f:(fun x ->\n                   \"fun\" ^ \" \" ^ pat_to_string x true depth ^ \" \" ^ \"=>\")\n                 params)\n            ^ newline_indent (depth + 1)\n            ^ term_to_string_without_paren body (depth + 1),\n            true )\n      | AST.Tuple [] -> (\"tt (* Empty tuple *)\", false) (* TODO: Empty tuple? 
*)\n      | AST.Tuple vals ->\n          ( \"(\"\n            ^ String.concat ~sep:\",\"\n                (List.map\n                   ~f:(fun t -> term_to_string_without_paren t (depth + 1))\n                   vals)\n            ^ \")\",\n            false )\n      | AST.Array (t :: ts) ->\n          ( \"array_from_list\" ^ \" \" ^ \"[\"\n            ^ List.fold_left\n                ~init:(term_to_string_without_paren t (depth + 1))\n                ~f:(fun x y ->\n                  x ^ \";\"\n                  ^ newline_indent (depth + 1)\n                  ^ term_to_string_without_paren y (depth + 1))\n                ts\n            ^ \"]\",\n            true )\n      | AST.Array [] -> (\"!TODO empty array!\", false)\n      | AST.TypedTerm (e, t) ->\n          ( term_to_string_without_paren e depth\n            ^ \" \" ^ \":\" ^ \" \" ^ ty_to_string_with_paren t,\n            true )\n      | _ -> .\n\n    and term_to_string_with_paren (x : AST.term) depth : string =\n      let s, b = term_to_string x depth in\n      if b then \"(\" ^ s ^ \")\" else s\n\n    and term_to_string_without_paren (x : AST.term) depth : string =\n      let s, _ = term_to_string x depth in\n      s\n\n    and format_to_string (format : string list) (args : string list) : string =\n      match format with\n      | f :: fs -> (\n          match args with x :: xs -> f ^ x ^ format_to_string fs xs | [] -> f)\n      | [] -> failwith \"incorrect formatting\"\n\n    and term_list_to_string (terms : AST.term list) depth : string =\n      (if List.is_empty terms then \"\" else \" \")\n      ^ String.concat ~sep:\" \"\n          (List.map ~f:(fun t -> term_to_string_with_paren t depth) terms)\n\n    let rec decl_to_string (x : AST.decl) : string =\n      match x with\n      | AST.MultipleDecls decls ->\n          String.concat ~sep:\"\\n\" (List.map ~f:decl_to_string decls)\n      | AST.Unimplemented s -> \"(*\" ^ s ^ \"*)\"\n      | AST.Comment s -> \"(**\" ^ \" \" ^ s ^ \" \" ^ \"**)\"\n     
 | AST.Definition (name, arguments, term, ty) ->\n          \"Definition\" ^ \" \"\n          ^ definition_value_to_string (name, arguments, term, ty)\n      | AST.ProgramDefinition (name, arguments, term, ty) ->\n          \"Program\" ^ \" \" ^ \"Definition\" ^ \" \"\n          ^ definition_value_to_string (name, arguments, term, ty)\n          ^ fail_next_obligation\n      | AST.Lemma (name, arguments, term) ->\n          \"Lemma\" ^ \" \" ^ name ^ \" \"\n          ^ params_to_string_typed arguments\n          ^ \" \" ^ \":\" ^ \" \"\n          ^ term_to_string_without_paren term 1\n          ^ \".\"\n      | AST.Equations (name, arguments, term, ty) ->\n          \"Equations\" ^ \" \"\n          ^ definition_value_to_equation_definition (name, arguments, term, ty)\n      | AST.EquationsQuestionmark (name, arguments, term, ty) ->\n          \"Equations?\" ^ \" \"\n          ^ definition_value_to_equation_definition (name, arguments, term, ty)\n      | AST.Notation (notation, value, extra) ->\n          \"Notation\" ^ \" \" ^ \"\\\"\" ^ notation ^ \"\\\"\" ^ \" \" ^ \":=\" ^ \" \"\n          ^ term_to_string_with_paren value 0\n          ^ (match extra with None -> \"\" | Some x -> \" \" ^ \"(\" ^ x ^ \")\")\n          ^ \".\"\n      | AST.Record (name, arguments, variants) ->\n          let variants_str =\n            variants_to_string variants (newline_indent 1) \";\"\n          in\n          \"Record\" ^ \" \" ^ name\n          ^ params_to_string_typed arguments\n          ^ \" \" ^ \":\" ^ \" \" ^ \"Type\" ^ \" \" ^ \":=\" ^ \" \" ^ \"{\" ^ variants_str\n          ^ newline_indent 0 ^ \"}.\"\n      | AST.Inductive (name, arguments, variants) ->\n          let name_arguments = name ^ params_to_string_typed arguments in\n          let variants_str =\n            String.concat ~sep:(newline_indent 0)\n              (List.map\n                 ~f:(fun x ->\n                   let mid_str =\n                     match x with\n                     | AST.BaseCase 
ty_name -> ty_name ^ \" : \"\n                     | AST.InductiveCase (ty_name, ty) ->\n                         let ty_str = ty_to_string_with_paren ty in\n                         ty_name ^ \" \" ^ \":\" ^ \" \" ^ ty_str ^ \" \" ^ \"->\" ^ \" \"\n                   in\n                   (\"|\" ^ \" \") ^ mid_str ^ name_arguments)\n                 variants)\n          in\n          let args_str =\n            if List.is_empty arguments then \"\"\n            else\n              inductive_case_args_to_string variants\n                (newline_indent 0 ^ \"Arguments\" ^ \" \")\n                (List.fold_left ~init:\"\" ~f:(fun a _ -> a ^ \" {_}\") arguments)\n                \".\"\n          in\n          \"Inductive\" ^ \" \" ^ name_arguments ^ \" \" ^ \":\" ^ \" \" ^ \"Type\" ^ \" \"\n          ^ \":=\" ^ newline_indent 0 ^ variants_str ^ \".\" ^ args_str\n      | AST.Class (name, arguments, trait_items) ->\n          let field_str =\n            List.fold_left ~init:\"\"\n              ~f:(fun x y ->\n                let field_name, sep, field_ty =\n                  match y with\n                  | Named (field_name, field_ty) -> (field_name, \":\", field_ty)\n                  | Coercion (field_name, field_ty) ->\n                      (field_name, \":>\", field_ty)\n                  (* Should be \"::\" in newer versions of coq *)\n                in\n                let ty_str = ty_to_string_with_paren field_ty in\n                x ^ newline_indent 1 ^ field_name ^ \" \" ^ sep ^ \" \" ^ ty_str\n                ^ \" \" ^ \";\")\n              trait_items\n          in\n          \"Class\" ^ \" \" ^ name ^ \" \" ^ \"(Self : \"\n          ^ ty_to_string_with_paren AST.TypeTy\n          ^ \")\"\n          ^ params_to_string_typed arguments\n          ^ \" \" ^ \":=\" ^ \" \" ^ \"{\" ^ field_str ^ newline_indent 0 ^ \"}\" ^ \".\"\n      | AST.ModuleType (name, arguments, trait_items) ->\n          let field_str =\n            List.fold_left ~init:\"\"\n      
        ~f:(fun x y ->\n                x ^ newline_indent 1\n                ^\n                match y with\n                | Named (field_name, field_ty) ->\n                    decl_to_string (AST.Parameter (field_name, field_ty))\n                | Coercion (field_name, field_ty) ->\n                    decl_to_string\n                      (AST.Module\n                         (field_name, ty_to_string_with_paren field_ty, [], []))\n                (* Should be \"::\" in newer versions of coq *))\n              trait_items\n          in\n          let arguments_str = params_to_string_typed arguments in\n          \"Module Type\" ^ \" \" ^ name ^ arguments_str ^ \".\" ^ newline_indent 1\n          ^ field_str ^ newline_indent 0 ^ \"End\" ^ \" \" ^ name ^ \".\"\n      | AST.Parameter (name, typ) ->\n          String.concat ~sep:\" \" [ name; \":\"; ty_to_string_without_paren typ ]\n      | AST.Module (name, typ, arguments, _trait_items) ->\n          let arguments_str = params_to_string_typed arguments in\n          \"Module\" ^ \" \" ^ name ^ arguments_str ^ \" \" ^ \":\" ^ \" \" ^ typ ^ \".\"\n          ^ \" \" ^ \"End\" ^ \" \" ^ name ^ \".\"\n      | AST.Instance (name, arguments, self_ty, ty_list, impl_list) ->\n          let ty_list_str =\n            String.concat ~sep:\" \" (List.map ~f:ty_to_string_with_paren ty_list)\n          in\n          let impl_str =\n            List.fold_left ~init:\"\"\n              ~f:(fun x (name, arguments, term, _ty) ->\n                x ^ newline_indent 1 ^ name\n                ^ params_to_string_typed arguments\n                ^ \" \" ^ \":=\" ^ \" \"\n                ^ term_to_string_without_paren term 1\n                ^ \";\")\n              impl_list\n          in\n          let ty_str = ty_to_string_without_paren self_ty in\n          \"#[global] Instance\" ^ \" \" ^ ty_str ^ \"_\" ^ name\n          ^ params_to_string_typed arguments\n          ^ \" \" ^ \":\" ^ \" \" ^ name ^ \" \" ^ ty_list_str ^ \" \" ^ 
\":=\" ^ \" \" ^ \"{\"\n          ^ impl_str ^ newline_indent 0 ^ \"}\" ^ \".\"\n      | AST.ProgramInstance\n          (name, arguments, self_ty, ty_list, InstanceDecls impl_list) ->\n          let ty_list_str =\n            String.concat ~sep:\" \" (List.map ~f:ty_to_string_with_paren ty_list)\n          in\n          let impl_str, impl_str_empty =\n            let fl =\n              List.filter_map\n                ~f:(function\n                  | LetDef (name, arguments, term, ty) ->\n                      Some\n                        (\"let\" ^ \" \" ^ name ^ \" \" ^ \":=\" ^ \" \"\n                        ^ (if List.is_empty arguments then \"\"\n                           else\n                             \"fun\" ^ \" \"\n                             ^ params_to_string_typed arguments\n                             ^ \" \" ^ \"=>\" ^ \" \")\n                        ^ term_to_string_without_paren term 1\n                        ^ \" \" ^ \":\" ^ \" \"\n                        ^ ty_to_string_without_paren ty\n                        ^ \" \" ^ \"in\")\n                  | _ -> None)\n                impl_list\n            in\n            (String.concat ~sep:(newline_indent 1) fl, List.is_empty fl)\n          in\n          let arg_str =\n            String.concat\n              ~sep:(\";\" ^ newline_indent 1)\n              (List.map\n                 ~f:(function\n                   | LetDef (name, _arguments, _term, _ty) ->\n                       name ^ \" \" ^ \":=\" ^ \" \" ^ \"(\" ^ \"@\" ^ name ^ \")\"\n                   | InlineDef (name, arguments, term, ty) ->\n                       name ^ \" \" ^ \":=\" ^ \" \" ^ \"(\"\n                       ^ (if List.is_empty arguments then \"\"\n                          else\n                            \"fun\" ^ \" \"\n                            ^ params_to_string_typed arguments\n                            ^ \" \" ^ \"=>\" ^ \" \")\n                       ^ term_to_string_without_paren term 1\n            
           ^ \" \" ^ \":\" ^ \" \"\n                       ^ ty_to_string_without_paren ty\n                       ^ \")\")\n                 impl_list)\n          in\n          let ty_str = ty_to_string_without_paren self_ty in\n          \"#[global] Program Instance\" ^ \" \" ^ ty_str ^ \"_\" ^ name\n          ^ params_to_string_typed arguments\n          ^ \" \" ^ \":\" ^ \" \" ^ name ^ \" \" ^ ty_list_str ^ \" \" ^ \":=\"\n          ^ newline_indent 1 ^ impl_str\n          ^ (if impl_str_empty then \"\" else newline_indent 1)\n          ^ (match impl_list with\n            | [] -> \"_\"\n            | _ -> \"{|\" (* ^ name ^ \" \" ^ ty_list_str *) ^ \" \" ^ arg_str ^ \"|}\")\n          ^ \".\" ^ fail_next_obligation\n      | AST.ProgramInstance (name, arguments, self_ty, ty_list, TermDef term) ->\n          let ty_list_str =\n            String.concat ~sep:\" \" (List.map ~f:ty_to_string_with_paren ty_list)\n          in\n          let ty_str = ty_to_string_without_paren self_ty in\n          \"#[global] Program Instance\" ^ \" \" ^ ty_str ^ \"_\" ^ name\n          ^ params_to_string_typed arguments\n          ^ \" \" ^ \":\" ^ \" \" ^ name ^ \" \" ^ ty_list_str ^ \" \" ^ \":=\"\n          ^ newline_indent 1\n          ^ term_to_string_without_paren term 1\n          ^ \".\" ^ fail_next_obligation\n      | AST.Require (_, [], _rename) -> \"\"\n      | AST.Require (None, import :: imports, rename) ->\n          (* map_first_letter String.uppercase import *)\n          let import_name =\n            match rename with\n            | Some s -> s\n            | _ ->\n                List.fold_left\n                  ~init:(map_first_letter String.uppercase import)\n                  ~f:(fun x y -> x ^ \"_\" ^ map_first_letter String.uppercase y)\n                  imports\n          in\n          \"Require Import\" ^ \" \" ^ import_name ^ \".\" ^ newline_indent 0\n          ^ \"Export\" ^ \" \" ^ import_name ^ \".\"\n      | AST.Require (Some x, imports, rename) 
->\n          \"From\" ^ \" \" ^ x ^ \" \"\n          ^ decl_to_string (AST.Require (None, imports, rename))\n      | AST.HintUnfold (n, Some typ) ->\n          let ty_str = ty_to_string_without_paren typ in\n          \"Hint Unfold\" ^ \" \" ^ ty_str ^ \"_\" ^ n ^ \".\"\n      | AST.HintUnfold (n, None) -> \"Hint Unfold\" ^ \" \" ^ n ^ \".\"\n\n    and definition_value_to_equation_definition\n        ((name, arguments, term, ty) : AST.definition_type) =\n      let ty_str = ty_to_string_without_paren ty in\n      definition_value_to_shell_string\n        (name, arguments, term, ty)\n        (name ^ \" \"\n        ^ params_to_string\n            (List.filter_map\n               ~f:(fun x ->\n                 match x with Explicit (y, z) -> Some (y, z) | _ -> None)\n               arguments)\n        ^ \" \" ^ \":=\" ^ newline_indent 2\n        ^ term_to_string_without_paren term 2\n        ^ \" \" ^ \":\" ^ \" \" ^ ty_str)\n      ^ fail_next_obligation\n\n    and definition_value_to_shell_string\n        ((name, arguments, _, ty) : AST.definition_type) (body : string) :\n        string =\n      let ty_str = ty_to_string_without_paren ty in\n      name\n      ^ params_to_string_typed arguments\n      ^ \" \" ^ \":\" ^ \" \" ^ ty_str ^ \" \" ^ \":=\" ^ newline_indent 1 ^ body ^ \".\"\n\n    and definition_value_to_string\n        ((name, arguments, term, ty) : AST.definition_type) : string =\n      definition_value_to_shell_string\n        (name, arguments, term, ty)\n        (term_to_string_without_paren term 1)\n\n    and fail_next_obligation : string =\n      newline_indent 0 ^ \"Fail\" ^ \" \" ^ \"Next\" ^ \" \" ^ \"Obligation.\"\n\n    and params_to_string_typed params : string =\n      if List.is_empty params then \"\"\n      else\n        \" \"\n        ^ String.concat ~sep:\" \"\n            (List.map\n               ~f:(fun param ->\n                 match param with\n                 | Implicit (pat, ty) ->\n                     \"{\" ^ pat_to_string pat 
true 0 ^ \" \" ^ \":\" ^ \" \"\n                     ^ ty_to_string_without_paren ty\n                     ^ \"}\"\n                 | Explicit (pat, ty) ->\n                     \"(\" ^ pat_to_string pat true 0 ^ \" \" ^ \":\" ^ \" \"\n                     ^ ty_to_string_without_paren ty\n                     ^ \")\"\n                 | Typeclass (None, ty) ->\n                     \"`{\" ^ \" \" ^ ty_to_string_without_paren ty ^ \"}\"\n                 | Typeclass (Some name, ty) ->\n                     \"`{\" ^ name ^ \" \" ^ \":\" ^ \" \"\n                     ^ ty_to_string_without_paren ty\n                     ^ \"}\")\n               params)\n\n    and params_to_string params : string =\n      String.concat ~sep:\"\"\n        (List.map\n           ~f:(fun (pat, _ty) ->\n             (* let ty_str = ty_to_string_without_paren ty in *)\n             pat_to_string pat true 0 ^ \" \")\n           params)\n\n    (* and inductive_case_to_string variants pre post : string = *)\n    (*   match variants with *)\n    (*   | x :: xs -> *)\n    (*       let mid_str = *)\n    (*         match x with *)\n    (*         | AST.BaseCase ty_name -> ty_name *)\n    (*         | AST.InductiveCase (ty_name, ty) -> *)\n    (*             let ty_str = ty_to_string ty in *)\n    (*             ty_name ^ \" \" ^ \":\" ^ \" \" ^ ty_str ^ \" \" ^ \"->\" ^ \" \" *)\n    (*       in *)\n    (*       let variants_str = inductive_case_to_string xs pre post in *)\n    (*       pre ^ mid_str ^ post ^ variants_str *)\n    (*   | [] -> \"\" *)\n\n    and inductive_case_args_to_string variants pre mid post : string =\n      String.concat ~sep:\"\"\n        (List.map\n           ~f:(fun x ->\n             let mid_str, ty_str =\n               match x with\n               | AST.BaseCase ty_name -> (ty_name, \"\")\n               | AST.InductiveCase (ty_name, ty) ->\n                   (ty_name, \" \" ^ ty_to_string_with_paren ty)\n             in\n             pre ^ mid_str ^ mid ^ ty_str ^ 
post)\n           variants)\n\n    and variants_to_string variants pre post : string =\n      String.concat ~sep:\"\"\n        (List.map\n           ~f:(fun y ->\n             let ty_name, _sep, ty =\n               match y with\n               | Named (ty_name, ty) -> (ty_name, \":\", ty)\n               | Coercion (ty_name, ty) -> (ty_name, \":>\", ty)\n               (* Should be \"::\" in newer versions of coq *)\n             in\n             pre ^ ty_name ^ \" \" ^ \":\" ^ \" \"\n             ^ ty_to_string_without_paren ty\n             ^ post)\n           variants)\n  end\n"
  },
  {
    "path": "engine/backends/coq/dune",
    "content": "(library\n (name coq_ast)\n (package hax-engine)\n (libraries hax_engine base hacspeclib_macro_parser)\n (preprocess\n  (pps\n   ppx_yojson_conv\n   ppx_sexp_conv\n   ppx_compare\n   ppx_hash\n   ppx_deriving.show\n   ppx_deriving.eq\n   ppx_inline\n   ppx_functor_application\n   ppx_matches)))\n\n; (env\n;  (_\n;   (flags\n;    (:standard -warn-error -A -warn-error +8))))\n\n(env\n (_\n  (flags\n   (:standard -w +A-4-40-42-44))))\n"
  },
  {
    "path": "engine/backends/coq/ssprove/dune",
    "content": "(library\n (name ssprove_backend)\n (package hax-engine)\n (libraries hax_engine base hacspeclib_macro_parser coq_ast)\n (preprocess\n  (pps\n   ppx_yojson_conv\n   ppx_sexp_conv\n   ppx_compare\n   ppx_hash\n   ppx_deriving.show\n   ppx_deriving.eq\n   ppx_inline\n   ppx_functor_application\n   ppx_matches)))\n\n(env\n (_\n  (flags\n   (:standard -w -A))))\n"
  },
  {
    "path": "engine/backends/coq/ssprove/ssprove_backend.ml",
    "content": "open Hax_engine\nopen Utils\nopen Base\nopen Coq_ast\n\ninclude\n  Backend.Make\n    (struct\n      open Features\n      include Off\n      include On.Slice\n      include On.Monadic_binding\n      include On.Macro\n      include On.Construct_base\n      include On.Loop\n      include On.For_loop\n      include On.While_loop\n      include On.For_index_loop\n      include On.State_passing_loop\n      include On.Fold_like_loop\n    end)\n    (struct\n      let backend = Diagnostics.Backend.SSProve\n    end)\n\nmodule SubtypeToInputLanguage\n    (FA :\n      Features.T\n        with type mutable_reference = Features.Off.mutable_reference\n         and type continue = Features.Off.continue\n         and type break = Features.Off.break\n         and type mutable_pointer = Features.Off.mutable_pointer\n         and type mutable_variable = Features.Off.mutable_variable\n         and type reference = Features.Off.reference\n         and type raw_pointer = Features.Off.raw_pointer\n         and type early_exit = Features.Off.early_exit\n         and type question_mark = Features.Off.question_mark\n         and type as_pattern = Features.Off.as_pattern\n         and type lifetime = Features.Off.lifetime\n         and type monadic_action = Features.Off.monadic_action\n         and type arbitrary_lhs = Features.Off.arbitrary_lhs\n         and type nontrivial_lhs = Features.Off.nontrivial_lhs\n         and type quote = Features.Off.quote\n         and type block = Features.Off.block\n         and type dyn = Features.Off.dyn\n         and type match_guard = Features.Off.match_guard\n         and type trait_item_default = Features.Off.trait_item_default\n         and type unsafe = Features.Off.unsafe) =\nstruct\n  module FB = InputLanguage\n\n  include\n    Subtype.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Features.SUBTYPE.Id\n        include Features.SUBTYPE.On.Monadic_binding\n        include 
Features.SUBTYPE.On.Construct_base\n        include Features.SUBTYPE.On.Slice\n        include Features.SUBTYPE.On.Macro\n        include Features.SUBTYPE.On.Loop\n        include Features.SUBTYPE.On.For_loop\n        include Features.SUBTYPE.On.While_loop\n        include Features.SUBTYPE.On.For_index_loop\n        include Features.SUBTYPE.On.State_passing_loop\n        include Features.SUBTYPE.On.Fold_like_loop\n      end)\n\n  let metadata = Phase_utils.Metadata.make (Reject (NotInBackendLang backend))\nend\n\nmodule AST = Ast.Make (InputLanguage)\nmodule BackendOptions = Backend.UnitBackendOptions\nopen Ast\n\nmodule CoqNamePolicy = struct\n  include Concrete_ident.DefaultNamePolicy\n\n  let reserved_words = Hash_set.of_list (module String) [ \"left\"; \"right\" ]\n  (* let temp = Hash_set.create (module String) in *)\n  (* temp *)\n  (* Hash_set.add temp \"left\" *)\nend\n\nmodule U = Ast_utils.Make (InputLanguage)\nmodule RenderId = Concrete_ident.MakeRenderAPI (CoqNamePolicy)\nopen AST\n\nmodule SSProveLibrary : Library = struct\n  module Notation = struct\n    let int_repr (_x : string) (i : string) : string = i\n    let type_str : string = \"choice_type\"\n    let bool_str : string = \"'bool\"\n    let unit_str : string = \"'unit\"\n  end\nend\n\nmodule SSP = Coq (SSProveLibrary)\n\nmodule SSPExtraDefinitions (* : ANALYSIS *) = struct\n  let wrap_type_in_both (a : SSP.AST.ty) =\n    SSP.AST.AppTy (SSP.AST.NameTy \"both\", [ a ])\n\n  let unit_term : SSP.AST.term =\n    SSP.AST.TypedTerm (SSP.AST.UnitTerm, SSP.AST.Unit)\n\n  let rec variables_of_ssp_pat (p : SSP.AST.pat) : string list =\n    match p with\n    | RecordPat (_, npl) -> List.concat_map ~f:(snd >> variables_of_ssp_pat) npl\n    | ConstructorPat (_, pl) -> List.concat_map ~f:variables_of_ssp_pat pl\n    | TuplePat pl -> List.concat_map ~f:variables_of_ssp_pat pl\n    | AscriptionPat (p, _) -> variables_of_ssp_pat p\n    | Ident x -> [ x ]\n    | DisjunctivePat pl -> List.concat_map 
~f:variables_of_ssp_pat pl\n    | WildPat | UnitPat | Lit _ -> []\n\n  let letb\n      ({ pattern; mut; value; body; value_typ; monad_typ } : SSP.AST.let_args) :\n      SSP.AST.term =\n    match monad_typ with\n    | Some (Exception _typ) ->\n        SSP.AST.AppFormat\n          ( [\n              \"letm[choice_typeMonad.result_bind_code \";\n              (*typ*)\n              \"] \";\n              (*p*)\n              \" := \";\n              (*expr*)\n              \" in\";\n              \"\";\n              (*body*)\n              \"\";\n            ],\n            [\n              SSP.AST.Typing (value_typ, true, 0);\n              SSP.AST.Variable (pattern, 0);\n              SSP.AST.Value (value, false, 0);\n              SSP.AST.Newline 0;\n              SSP.AST.Value (body, false, 0);\n            ] )\n    | Some (Result _typ) ->\n        SSP.AST.AppFormat\n          ( [\n              \"letm[choice_typeMonad.result_bind_code \";\n              (*typ*)\n              \"] \";\n              (*p*)\n              \" := \";\n              (*expr*)\n              \" in\";\n              \"\";\n              (*body*)\n              \"\";\n            ],\n            [\n              SSP.AST.Typing (value_typ, true, 0);\n              SSP.AST.Variable (pattern, 0);\n              SSP.AST.Value (value, false, 0);\n              SSP.AST.Newline 0;\n              SSP.AST.Value (body, false, 0);\n            ] )\n    | Some Option ->\n        SSP.AST.AppFormat\n          ( [\n              \"letm[choice_typeMonad.option_bind_code] \";\n              (*p*)\n              \" := \";\n              (*expr*)\n              \" in\";\n              \"\";\n              (*body*)\n              \"\";\n            ],\n            [\n              SSP.AST.Variable (pattern, 0);\n              SSP.AST.Value (value, false, 0);\n              SSP.AST.Newline 0;\n              SSP.AST.Value (body, false, 0);\n            ] )\n    | None ->\n        if mut then\n          
SSP.AST.AppFormat\n            ( [\n                \"letb \";\n                (*p*)\n                \" loc(\" (*p_loc*);\n                \") := \";\n                (*expr*)\n                \" in\";\n                \"\";\n                (*body*)\n                \"\";\n              ],\n              [\n                SSP.AST.Variable (pattern, 0);\n                SSP.AST.Variable\n                  ( (match\n                       List.map\n                         ~f:(fun x -> SSP.AST.Ident (x ^ \"_loc\"))\n                         (variables_of_ssp_pat pattern)\n                     with\n                    | [] -> SSP.AST.WildPat\n                    | [ x ] -> x\n                    | xs -> SSP.AST.TuplePat xs),\n                    0 );\n                SSP.AST.Value (value, false, 0);\n                SSP.AST.Newline 0;\n                SSP.AST.Value (body, false, 0);\n              ] )\n        else\n          SSP.AST.AppFormat\n            ( [ \"letb \"; (*p*) \" := \"; (*expr*) \" in\"; \"\"; (*body*) \"\" ],\n              [\n                SSP.AST.Variable (pattern, 0);\n                SSP.AST.Value (value, false, 0);\n                SSP.AST.Newline 0;\n                SSP.AST.Value (body, false, 0);\n              ] )\n\n  let rec pat_as_expr (p : SSP.AST.pat) : (SSP.AST.pat * SSP.AST.term) option =\n    match p with\n    | WildPat | UnitPat -> None\n    | Ident s -> Some (SSP.AST.Ident s, SSP.AST.Var s)\n    | Lit l -> Some (SSP.AST.Lit l, Const l)\n    | RecordPat (s, sps) ->\n        let v =\n          List.filter_map\n            ~f:(fun (s, ps) ->\n              Option.map ~f:(fun (p, t) -> ((s, p), (s, t))) (pat_as_expr ps))\n            sps\n        in\n        Some\n          ( SSP.AST.RecordPat (s, List.map ~f:fst v),\n            SSP.AST.RecordConstructor (s, List.map ~f:snd v) )\n    | ConstructorPat (_, ps) | TuplePat ps ->\n        let pt_list = List.filter_map ~f:pat_as_expr ps in\n        Some\n          ( TuplePat (List.map 
~f:fst pt_list),\n            SSP.AST.Tuple (List.map ~f:snd pt_list) )\n    | AscriptionPat (p, _) -> pat_as_expr p (* TypedTerm (, t) *)\n    | DisjunctivePat ps ->\n        let pt_list = List.filter_map ~f:pat_as_expr ps in\n        Some\n          ( TuplePat (List.map ~f:fst pt_list),\n            SSP.AST.Tuple (List.map ~f:snd pt_list) )\n\n  let ifb ((cond, then_, else_) : SSP.AST.term * SSP.AST.term * SSP.AST.term) :\n      SSP.AST.term =\n    SSP.AST.AppFormat\n      ( [ \"ifb \"; (*expr*) \"\"; \"then \"; \"\"; \"else \"; \"\" ],\n        [\n          SSP.AST.Value (cond, false, 0);\n          SSP.AST.Newline 0;\n          SSP.AST.Value (then_, false, 0);\n          SSP.AST.Newline 0;\n          SSP.AST.Value (else_, false, 0);\n        ] )\n\n  let matchb ((expr, arms) : SSP.AST.term * (SSP.AST.pat * SSP.AST.term) list) :\n      SSP.AST.term =\n    SSP.AST.AppFormat\n      ( [ \"matchb \"; (*expr*) \" with\" ]\n        @ List.concat_map ~f:(fun _ -> [ \"| \"; \" =>\"; \"\"; \"\" ]) arms\n        @ [ \"end\" ],\n        [ SSP.AST.Value (expr, false, 0); SSP.AST.Newline 0 ]\n        @ List.concat_map\n            ~f:(fun (arm_pat, body) ->\n              [\n                SSP.AST.Variable (arm_pat, 0);\n                SSP.AST.Newline 1;\n                SSP.AST.Value (body, false, 1);\n                SSP.AST.Newline 0;\n              ])\n            arms )\n\n  let updatable_record\n      ((name, arguments, variants) :\n        string * SSP.AST.argument list * SSP.AST.record_field list) :\n      SSP.AST.decl =\n    let fields =\n      List.concat_map\n        ~f:(function\n          | SSP.AST.Named (x, y) -> [ (x, y) ] | SSP.AST.Coercion _ -> [])\n        variants\n    in\n    let ty_name =\n      \"(\"\n      ^ String.concat ~sep:\" \"\n          (name\n          :: List.filter_map\n               ~f:(fun x ->\n                 match x with\n                 | SSP.AST.Explicit (p, _t) ->\n                     Some (SSP.pat_to_string p false 0)\n         
        | _ -> None)\n               arguments)\n      ^ \")\"\n    in\n    SSP.AST.MultipleDecls\n      ([\n         SSP.AST.Definition\n           ( name,\n             arguments,\n             SSP.AST.Type (SSP.AST.Product (List.map ~f:snd fields)),\n             SSP.AST.TypeTy );\n       ]\n      @ List.mapi\n          ~f:(fun i (x, y) ->\n            SSP.AST.Equations\n              ( x,\n                List.map\n                  ~f:(function\n                    | SSP.AST.Explicit (a, b) -> SSP.AST.Implicit (a, b)\n                    | v -> v)\n                  arguments\n                @ [\n                    SSP.AST.Explicit\n                      ( SSP.AST.Ident \"s\",\n                        wrap_type_in_both (SSP.AST.NameTy name) );\n                  ],\n                SSP.AST.App\n                  ( SSP.AST.Var \"bind_both\",\n                    [\n                      SSP.AST.Var \"s\";\n                      SSP.AST.Lambda\n                        ( [ SSP.AST.Ident \"x\" ],\n                          (* SSP.AST.App *)\n                          (*   ( SSP.AST.Var \"solve_lift\", *)\n                          (* [ *)\n                          SSP.AST.App\n                            ( SSP.AST.Var \"ret_both\",\n                              [\n                                SSP.AST.TypedTerm\n                                  ( List.fold_right ~init:(SSP.AST.Var \"x\")\n                                      ~f:(fun x y ->\n                                        SSP.AST.App (SSP.AST.Var x, [ y ]))\n                                      ((if Stdlib.(i != 0) then [ \"snd\" ]\n                                        else [])\n                                      @ List.init\n                                          (List.length fields - 1 - i)\n                                          ~f:(fun _ -> \"fst\")),\n                                    y );\n                              ] )\n                          (* ] ) *) );\n               
     ] ),\n                wrap_type_in_both y ))\n          fields\n      @ [\n          SSP.AST.Equations\n            ( \"Build_\" ^ name,\n              List.map\n                ~f:(function\n                  | SSP.AST.Explicit (a, b) -> SSP.AST.Implicit (a, b) | v -> v)\n                arguments\n              @ List.mapi\n                  ~f:(fun i (x, y) ->\n                    SSP.AST.Implicit (SSP.AST.Ident x, wrap_type_in_both y))\n                  fields,\n              List.fold_left\n                ~init:\n                  ((* SSP.AST.App *)\n                   (*   ( SSP.AST.Var \"solve_lift\", *)\n                   (* [ *)\n                     SSP.AST.App\n                     ( SSP.AST.Var \"ret_both\",\n                       [\n                         SSP.AST.TypedTerm\n                           ( SSP.AST.Tuple\n                               (List.map\n                                  ~f:(fst >> fun x -> SSP.AST.Var x)\n                                  fields),\n                             SSP.AST.NameTy ty_name );\n                       ] )\n                     (* ] ) *))\n                ~f:(fun z (x, _y) ->\n                  SSP.AST.App\n                    ( SSP.AST.Var \"bind_both\",\n                      [ SSP.AST.Var x; SSP.AST.Lambda ([ SSP.AST.Ident x ], z) ]\n                    ))\n                fields,\n              SSP.AST.NameTy (\"both\" ^ \" \" ^ ty_name) )\n          (* :: SSP.AST.Arguments (\"Build_\" ^ pconcrete_ident name,) *);\n        ]\n      (* @ [SSP.AST.ProgramInstance *)\n      (*      (\\* (name, arguments, self_ty, ty_list, impl_list) *\\) *)\n      (*      ( \"Settable\", *)\n      (*        [], *)\n      (*        SSP.AST.NameTy name, *)\n      (*        [wrap_type_in_both (SSP.AST.NameTy name)], *)\n      (*        SSP.AST.InstanceDecls [SSP.AST.LetDef (\"mkT\", [], *)\n      (*          SSP.AST.App (SSP.AST.Var \"fun x => \", [ *)\n      (*          List.fold_left *)\n      (*           ~init: 
*)\n      (*             ((\\* SSP.AST.App *\\) *)\n      (*              (\\*   ( SSP.AST.Var \"solve_lift\", *\\) *)\n      (*                  (\\* [ *\\) *)\n      (*                    SSP.AST.App *)\n      (*                      ( SSP.AST.Var \"ret_both\", *)\n      (*                        [ *)\n      (*                          SSP.AST.TypedTerm *)\n      (*                            ( SSP.AST.Tuple *)\n      (*                                (List.map *)\n      (*                                   ~f:(fst >> fun x -> SSP.AST.Var x) *)\n      (*                                   fields), *)\n      (*                              SSP.AST.NameTy ty_name ); *)\n      (*                        ] ); *)\n      (*                    (\\* ] ) *\\)) *)\n      (*           ~f:(fun z (x, _y) -> *)\n      (*             SSP.AST.App *)\n      (*               ( SSP.AST.Var \"bind_both\", *)\n      (*                 [ SSP.AST.App (SSP.AST.Var x , [ (SSP.AST.Var \"x\") ] ); SSP.AST.Lambda ([ SSP.AST.Ident x ], z) ] *)\n      (*               )) *)\n      (*           fields]), *)\n      (*          SSP.AST.WildTy)] *)\n      (*      ) *)\n      (*   ] *)\n      @ List.mapi\n          ~f:(fun i (x, _y) ->\n            SSP.AST.Notation\n              ( \"'Build_\" ^ name ^ \"'\" ^ \" \" ^ \"'['\" ^ \" \" ^ \"x\" ^ \" \" ^ \"']'\"\n                ^ \" \" ^ \"'('\" ^ \" \" ^ \"'\" ^ x ^ \"'\" ^ \" \" ^ \"':='\" ^ \" \" ^ \"y\"\n                ^ \" \" ^ \"')'\",\n                SSP.AST.App\n                  ( SSP.AST.Var (\"Build_\" ^ name),\n                    List.mapi\n                      ~f:(fun j (x, _y) ->\n                        SSP.AST.AppFormat\n                          ( [ x ^ \" \" ^ \":=\" ^ \" \"; (*v*) \"\" ],\n                            [\n                              SSP.AST.Value\n                                ( (if Stdlib.(j == i) then SSP.AST.Var \"y\"\n                                   else\n                                     
SSP.AST.App\n                                       (SSP.AST.Var x, [ SSP.AST.Var \"x\" ])),\n                                  false,\n                                  0 );\n                            ] ))\n                      fields ),\n                None ))\n          fields)\n\n  let both_enum\n      ((name, arguments, cases) :\n        string * SSP.AST.argument list * SSP.AST.inductive_case list) :\n      SSP.AST.decl =\n    SSP.AST.MultipleDecls\n      ((* Type definition *)\n       SSP.AST.Definition\n         ( (* \"t_\" ^ *) name,\n           arguments,\n           SSP.AST.Type\n             (SSP.AST.Coproduct\n                (List.map\n                   ~f:(function\n                     | BaseCase _ -> SSP.AST.Unit\n                     | InductiveCase (_, typ) -> typ)\n                   cases))\n           (* (SSP.AST.NameTy (\"chFin (mkpos \" ^ number_of_cases ^ \")\")) *),\n           SSP.AST.TypeTy )\n      :: (* Index names and constructors *)\n         List.concat_mapi cases ~f:(fun i c ->\n             let v_name, curr_typ =\n               match c with\n               | BaseCase v_name -> (v_name, [])\n               | InductiveCase (v_name, typ) -> (v_name, [ typ ])\n             in\n             let injections inner_val =\n               List.fold_left ~init:inner_val\n                 ~f:(fun y x -> SSP.AST.App (SSP.AST.Var x, [ y ]))\n                 ((if Stdlib.(i != 0) then [ \"inr\" ] else [])\n                 @ List.init (List.length cases - 1 - i) ~f:(fun _ -> \"inl\"))\n             in\n             let definition_body =\n               let inject_argument inner_val =\n                 (* SSP.AST.App *)\n                 (*   ( SSP.AST.Var \"solve_lift\", *)\n                 (* [ *)\n                 SSP.AST.App\n                   ( SSP.AST.Var \"ret_both\",\n                     [\n                       SSP.AST.TypedTerm\n                         (injections inner_val, SSP.AST.NameTy name);\n                     ] )\n     
            (* ] ) *)\n               in\n               match curr_typ with\n               | [] -> inject_argument unit_term\n               | _ ->\n                   SSP.AST.App\n                     ( SSP.AST.Var \"bind_both\",\n                       [\n                         SSP.AST.Var \"x\";\n                         SSP.AST.Lambda\n                           ( [ SSP.AST.Ident \"x\" ],\n                             inject_argument (SSP.AST.Var \"x\") );\n                       ] )\n             in\n             [\n               (let arg, body =\n                  match curr_typ with\n                  | [] ->\n                      (\"\", injections SSP.AST.UnitTerm)\n                      (* TODO: Fix unit translation *)\n                  | _ -> (\" \" ^ \"x\", injections (SSP.AST.Var \"x\"))\n                in\n                SSP.AST.Notation\n                  (\"'\" ^ v_name ^ \"_case\" ^ \"'\" ^ arg, body, Some \"at level 100\"));\n               SSP.AST.Equations\n                 ( v_name,\n                   List.map\n                     ~f:(fun x ->\n                       SSP.AST.Explicit (SSP.AST.Ident \"x\", wrap_type_in_both x))\n                     curr_typ,\n                   definition_body,\n                   wrap_type_in_both (SSP.AST.NameTy name) );\n             ]))\nend\n\nmodule StaticAnalysis (* : ANALYSIS *) = struct\n  module FunctionDependency (* : ANALYSIS *) =\n    [%functor_application\n    Analyses.Function_dependency InputLanguage]\n\n  module MutableVariables (* : ANALYSIS *) =\n    [%functor_application\n    Analyses.Mutable_variables InputLanguage]\n\n  type analysis_data = { mut_var : MutableVariables.analysis_data }\n\n  let analyse items =\n    let func_dep = FunctionDependency.analyse items in\n    let mut_var =\n      MutableVariables.analyse (func_dep : MutableVariables.pre_data) items\n    in\n    { mut_var }\nend\n\nmodule Context = struct\n  type t = {\n    current_namespace : string list;\n    
analysis_data : StaticAnalysis.analysis_data;\n  }\nend\n\nlet primitive_to_string (id : Ast.primitive_ident) : string =\n  match id with\n  | Deref -> \"(TODO: Deref)\" (* failwith \"Deref\" *)\n  | Cast -> \"cast_int (WS2 := _)\" (* failwith \"Cast\" *)\n  | LogicalOp op -> ( match op with And -> \"andb\" | Or -> \"orb\")\n\nopen Phase_utils\n\nmodule TransformToInputLanguage =\n  [%functor_application\n    Phases.Reject.Unsafe(Features.Rust)\n    |> Phases.Rewrite_local_self\n    |> Phases.Reject.RawOrMutPointer\n    |> Phases.And_mut_defsite\n    |> Phases.Reconstruct_asserts\n    |> Phases.Reconstruct_for_loops\n    |> Phases.Direct_and_mut\n    |> Phases.Reject.Arbitrary_lhs\n    |> Phases.Drop_blocks\n    |> Phases.Drop_match_guards\n    |> Phases.Reject.Continue\n    |> Phases.Drop_references\n    |> Phases.Trivialize_assign_lhs\n    |> Phases.Reconstruct_question_marks\n    |> Side_effect_utils.Hoist\n    |> Phases.Local_mutation\n    (* |> Phases.State_passing_loop *)\n    |> Phases.Reject.Continue\n    |> Phases.Cf_into_monads\n    |> Phases.Reject.EarlyExit\n    (* |> Phases.Functionalize_loops *)\n    |> Phases.Reject.As_pattern\n    |> Phases.Reject.Dyn\n    |> Phases.Reject.Trait_item_default\n    |> Phases.Bundle_cycles\n    |> Phases.Sort_items_namespace_wise\n    |> SubtypeToInputLanguage\n    |> Identity\n  ]\n  [@ocamlformat \"disable\"]\n\n(* let token_list (tokens : string) : string list list = *)\n(*   List.map ~f:(split_str ~on:\"=\") (split_str ~on:\",\" tokens) *)\n\n(* let get_argument (s : string) (token_list : string list list) = *)\n(*   List.find_map *)\n(*     ~f:(function *)\n(*       | [ v; a ] when String.equal (String.strip v) s -> Some a | _ -> None) *)\n(*     token_list *)\n\n(* let strip (x : string) = *)\n(*   String.strip *)\n(*     ?drop:(Some (function '\\\"' -> true | _ -> false)) *)\n(*     (String.strip x) *)\n\n(* let strip_or_error (err : string) (s : string option) span = *)\n(*   match s with *)\n(*   | Some x -> 
strip x *)\n(*   | None -> Error.unimplemented ~details:err span *)\n\nlet pconcrete_ident (id : Ast.concrete_ident) : string =\n  (RenderId.render id).name\n\nlet plocal_ident (e : Local_ident.t) : string =\n  RenderId.local_ident\n    (match String.chop_prefix ~prefix:\"impl \" e.name with\n    | Some name ->\n        let name = \"impl_\" ^ Int.to_string ([%hash: string] name) in\n        { e with name }\n    | _ -> e)\n\nmodule Make\n    (Attrs : Attrs.WITH_ITEMS)\n    (Ctx : sig\n      val ctx : Context.t\n    end) =\nstruct\n  open Ctx\n\n  let pglobal_ident (id : Ast.global_ident) : string =\n    match id with\n    | `Projector (`Concrete cid) | `Concrete cid -> pconcrete_ident cid\n    | `Primitive p_id -> primitive_to_string p_id\n    | `TupleType _i -> \"TODO (global ident) tuple type\"\n    | `TupleCons _i -> \"TODO (global ident) tuple cons\"\n    | `Projector (`TupleField (_i, _j)) | `TupleField (_i, _j) ->\n        \"TODO (global ident) tuple field\"\n    | _ -> .\n\n  (* module TODOs_debug = struct *)\n  (*   let __TODO_pat__ _ s = SSP.AST.Ident (s ^ \" todo(pat)\") *)\n  (*   let __TODO_ty__ _ s : SSP.AST.ty = SSP.AST.NameTy (s ^ \" todo(ty)\") *)\n  (*   let __TODO_item__ _ s = SSP.AST.Unimplemented (s ^ \" todo(item)\") *)\n\n  (*   let __TODO_term__ _ s = *)\n  (*     SSP.AST.Const (SSP.AST.Const_string (s ^ \" todo(term)\")) *)\n  (* end *)\n\n  module TODOs = struct\n    let __TODO_ty__ span s : SSP.AST.ty =\n      Error.unimplemented ~details:(\"[ty] node \" ^ s) span\n\n    let __TODO_pat__ span s =\n      Error.unimplemented ~details:(\"[pat] node \" ^ s) span\n\n    let __TODO_term__ span s =\n      Error.unimplemented ~details:(\"[expr] node \" ^ s) span\n\n    let __TODO_item__ _span s = SSP.AST.Unimplemented (s ^ \" todo(item)\")\n  end\n\n  open TODOs\n\n  let pint_kind (k : Ast.int_kind) : SSP.AST.int_type =\n    {\n      size =\n        (match k.size with\n        | S8 -> U8\n        | S16 -> U16\n        | S32 -> U32\n        | S64 -> 
U64\n        | S128 -> U128\n        | SSize -> USize);\n      signed = Stdlib.(k.signedness == Signed);\n    }\n\n  let pliteral (e : Ast.literal) =\n    match e with\n    | String s -> SSP.AST.Const_string s\n    | Char c -> SSP.AST.Const_char (Char.to_int c)\n    | Int { value; kind; _ } -> SSP.AST.Const_int (value, pint_kind kind)\n    | Float _ -> failwith \"Float: todo\"\n    | Bool b -> SSP.AST.Const_bool b\n\n  let operators =\n    let c = Ast.Global_ident.of_name ~value:true in\n    [\n      (c Rust_primitives__hax__array_of_list, (3, [ \"\"; \".a[\"; \"]<-\"; \"\" ]));\n      (c Core__ops__index__Index__index, (2, [ \"\"; \".a[\"; \"]\" ]));\n      (c Core__ops__bit__BitXor__bitxor, (2, [ \"\"; \" .^ \"; \"\" ]));\n      (c Core__ops__bit__BitAnd__bitand, (2, [ \"\"; \" .& \"; \"\" ]));\n      (c Core__ops__bit__BitOr__bitor, (2, [ \"\"; \" .| \"; \"\" ]));\n      (c Core__ops__arith__Add__add, (2, [ \"\"; \" .+ \"; \"\" ]));\n      (c Core__ops__arith__Sub__sub, (2, [ \"\"; \" .- \"; \"\" ]));\n      (c Core__ops__arith__Mul__mul, (2, [ \"\"; \" .* \"; \"\" ]));\n      (c Core__ops__arith__Div__div, (2, [ \"\"; \" ./ \"; \"\" ]));\n      (c Core__cmp__PartialEq__eq, (2, [ \"\"; \" =.? \"; \"\" ]));\n      (c Core__cmp__PartialOrd__lt, (2, [ \"\"; \" <.? \"; \"\" ]));\n      (c Core__cmp__PartialOrd__le, (2, [ \"\"; \" <=.? \"; \"\" ]));\n      (c Core__cmp__PartialOrd__ge, (2, [ \"\"; \" >=.? \"; \"\" ]));\n      (c Core__cmp__PartialOrd__gt, (2, [ \"\"; \" >.? 
\"; \"\" ]));\n      (c Core__cmp__PartialEq__ne, (2, [ \"\"; \" <> \"; \"\" ]));\n      (c Core__ops__arith__Rem__rem, (2, [ \"\"; \" .% \"; \"\" ]));\n      (c Core__ops__bit__Shl__shl, (2, [ \"\"; \" shift_left \"; \"\" ]));\n      (c Core__ops__bit__Shr__shr, (2, [ \"\"; \" shift_right \"; \"\" ]));\n    ]\n    |> Map.of_alist_exn (module Ast.Global_ident)\n\n  module LocalIdentOrLisIis =\n  StaticAnalysis.MutableVariables.LocalIdentOrData (struct\n    type ty = string list * string list [@@deriving compare, sexp]\n  end)\n\n  let rec pty span (t : ty) : SSP.AST.ty =\n    match t with\n    | TBool -> SSP.AST.Bool\n    | TChar -> __TODO_ty__ span \"char\"\n    | TInt k -> SSP.AST.Int (pint_kind k)\n    | TStr -> SSP.AST.NameTy \"chString\"\n    | TApp { ident = `TupleType 0; args = []; _ } -> SSP.AST.Unit\n    | TApp { ident = `TupleType 1; args = [ GType ty ]; _ } -> pty span ty\n    | TApp { ident = `TupleType n; args; _ } when n >= 2 ->\n        SSP.AST.Product (args_ty span args)\n    | TApp { ident; args; _ } ->\n        SSP.AST.AppTy (SSP.AST.NameTy (pglobal_ident ident), args_ty span args)\n    | TArrow ([ TApp { ident = `TupleType 0; args = []; _ } ], output) ->\n        pty span output\n    | TArrow (inputs, output) ->\n        List.fold_right ~init:(pty span output)\n          ~f:(fun x y -> SSP.AST.Arrow (x, y))\n          (List.map ~f:(pty span) inputs)\n    | TFloat _ -> __TODO_ty__ span \"pty: Float\"\n    | TArray { typ; length = { e = Literal (Int { value; _ }); _ }; _ } ->\n        SSP.AST.ArrayTy (pty span typ, value)\n    | TArray { typ; length } ->\n        SSP.AST.ArrayTy\n          ( pty span typ,\n            \"(\" ^ \"is_pure\" ^ \" \" ^ \"(\"\n            ^ SSP.term_to_string_with_paren\n                (pexpr (Map.empty (module Local_ident)) false length)\n                0\n            ^ \")\" ^ \")\" )\n        (* TODO: check int.to_string is correct! 
*)\n    | TSlice { ty; _ } -> SSP.AST.SliceTy (pty span ty)\n    | TParam i -> SSP.AST.NameTy (plocal_ident i)\n    | TAssociatedType { item; _ } -> SSP.AST.NameTy (pconcrete_ident item)\n    | TOpaque _ -> __TODO_ty__ span \"pty: TAssociatedType/TOpaque\"\n    | _ -> .\n\n  and args_ty span (args : generic_value list) : SSP.AST.ty list =\n    List.map\n      ~f:(function\n        | GLifetime _ -> __TODO_ty__ span \"lifetime\"\n        | GType typ -> pty span typ\n        | GConst { typ; _ } ->\n            SSPExtraDefinitions.wrap_type_in_both (pty span typ))\n      args\n  (* match args with *)\n  (* | arg :: xs -> *)\n  (*     (match arg with *)\n  (*     | GLifetime { lt; witness } -> __TODO_ty__ span \"lifetime\" *)\n  (*     | GType typ -> pty span typ *)\n  (*     | GConst { typ; _ } -> *)\n  (*         wrap_type_in_both \"(fset [])\" \"(fset [])\" (pty span typ)) *)\n  (*     :: args_ty span xs *)\n  (* | [] -> [] *)\n\n  and ppat (p : pat) : SSP.AST.pat =\n    match p.p with\n    | PWild -> SSP.AST.WildPat\n    | PAscription { typ; pat; _ } ->\n        SSP.AST.AscriptionPat (ppat pat, pty p.span typ)\n    | PBinding\n        {\n          mut = Immutable;\n          mode = _;\n          var;\n          typ = _ (* we skip type annot here *);\n          _;\n        } ->\n        SSP.AST.Ident (plocal_ident var)\n    | PBinding\n        {\n          mut = Mutable _;\n          mode = _;\n          var;\n          typ = _ (* we skip type annot here *);\n          _;\n        } ->\n        SSP.AST.Ident (plocal_ident var) (* TODO Mutable binding ! 
*)\n    | POr { subpats } -> SSP.AST.DisjunctivePat (List.map ~f:ppat subpats)\n    | PArray _ -> __TODO_pat__ p.span \"Parray?\"\n    | PConstruct { constructor = `TupleCons 0; fields = []; _ } ->\n        SSP.AST.WildPat (* UnitPat *)\n    | PConstruct { constructor = `TupleCons 1; fields = [ _ ]; _ } ->\n        __TODO_pat__ p.span \"tuple 1\"\n    | PConstruct { constructor = `TupleCons _n; fields = args; _ } ->\n        SSP.AST.TuplePat (List.map ~f:(fun { pat; _ } -> ppat pat) args)\n    (* Record *)\n    | PConstruct { is_record = true; _ } -> __TODO_pat__ p.span \"record pattern\"\n    (* (\\* SSP.AST.Ident (pglobal_ident name) *\\) *)\n    (* SSP.AST.RecordPat (pglobal_ident name, List.map ~f:(fun {field; pat} -> (pglobal_ident field, ppat pat)) args) *)\n    (*       (\\* SSP.AST.ConstructorPat (pglobal_ident name ^ \"_case\", [SSP.AST.Ident \"temp\"]) *\\) *)\n    (*       (\\* List.map ~f:(fun {field; pat} -> (pat, SSP.AST.App (SSP.AST.Var (pglobal_ident field), [SSP.AST.Var \"temp\"]))) args *\\) *)\n    (* Enum *)\n    | PConstruct { constructor = name; fields = args; is_record = false; _ } ->\n        SSP.AST.ConstructorPat\n          ( pglobal_ident name,\n            match args with\n            | [] -> []\n            | _ -> [ SSP.AST.TuplePat (List.map ~f:(fun p -> ppat p.pat) args) ]\n          )\n    | PConstant { lit } -> SSP.AST.Lit (pliteral lit)\n    | _ -> .\n\n  (* and analyse_fset (data : StaticAnalysis.MutableVariables.analysis_data) items = *)\n  (*   (object *)\n  (*      inherit [_] expr_reduce as super *)\n  (*      inherit [_] U.Reducers.expr_list_monoid as m (\\* TODO: Raname into list monoid *\\) *)\n  (*      method visit_t _ _ = m#zero *)\n  (*      (\\* method visit_mutability (_f : string -> _ -> _) (ctx : string) _ = m#zero *\\) *)\n  (*      method visit_mutability (f : string -> _ -> _) (ctx : string) mu = *)\n  (*        match mu with Mutable wit -> f ctx wit | _ -> m#zero *)\n\n  (*      method! 
visit_PBinding env mut _ var _typ subpat = *)\n  (*        m#plus *)\n  (*          (match mut with *)\n  (*           | Mutable _ -> *)\n  (*             var.name *)\n  (*           | Immutable -> *)\n  (*             (\\* Set.singleton (module U.TypedLocalIdent) (var, typ) *\\) *)\n  (*             \"\") *)\n  (*          (Option.value_map subpat ~default:m#zero *)\n  (*             ~f:(fst >> super#visit_pat env)) *)\n\n  (*      method! visit_global_ident (env : string) (x : Global_ident.t) = *)\n  (*        match x with *)\n  (*        | `Projector (`Concrete cid) | `Concrete cid -> *)\n  (*          (match Map.find data (Uprint.Concrete_ident_view.to_definition_name cid) with *)\n  (*           | Some (x,_) -> Set.of_list (module LocalIdent) x *)\n  (*           | _ -> m#zero) *)\n  (*        | _ -> m#zero *)\n\n  (*      method visit_expr (env : string) e = [(e, env)] (\\* :: super#visit_expr f e *\\) *)\n  (*   end) *)\n  (*   #visit_expr *)\n  (*     \"\" *)\n\n  and pexpr (env : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t)\n      (add_solve : bool) (e : expr) : SSP.AST.term =\n    let span = e.span in\n    (* (match (add_solve, e.e) with *)\n    (* | ( true, *)\n    (*     ( Construct { is_record = true; _ } *)\n    (*     | If _ (\\* | Match _ *\\) | Literal _ *)\n    (*     | Construct { constructor = `TupleCons _; _ } *)\n    (*     | App _ | GlobalVar _ | LocalVar _ ) ) -> *)\n    (*     fun x -> x (\\* SSP.AST.App (SSP.AST.Var \"solve_lift\", [ x ]) *\\) *)\n    (* | _ -> fun x -> x) *)\n    match e.e with\n    | Literal lit ->\n        SSP.AST.App\n          ( SSP.AST.Var \"ret_both\",\n            [ SSP.AST.TypedTerm (SSP.AST.Const (pliteral lit), pty span e.typ) ]\n          )\n    | LocalVar local_ident -> SSP.AST.NameTerm (plocal_ident local_ident)\n    | GlobalVar (`TupleCons 0)\n    | Construct { constructor = `TupleCons 0; fields = []; _ } ->\n        SSP.AST.App (SSP.AST.Var \"ret_both\", [ SSPExtraDefinitions.unit_term ])\n    | 
GlobalVar global_ident -> SSP.AST.Var (pglobal_ident global_ident)\n    | App\n        {\n          f = { e = GlobalVar (`Projector (`TupleField (i, j))); _ };\n          args = [ _ ];\n          _;\n        } ->\n        (* SSP.AST.App (SSP.AST.Var (Int.to_string i), [ SSP.AST.Var (Int.to_string j) ]) *)\n        __TODO_term__ span \"app global vcar projector tuple\"\n    | App\n        {\n          f;\n          args =\n            [\n              {\n                e =\n                  ( GlobalVar (`TupleCons 0)\n                  | Construct { constructor = `TupleCons 0; fields = []; _ } );\n              };\n            ];\n          _;\n        } ->\n        (pexpr env false) f\n    | App { f = { e = GlobalVar x; _ }; args; _ } when Map.mem operators x ->\n        let arity, op = Map.find_exn operators x in\n        if List.length args <> arity then failwith \"Bad arity\";\n        let args =\n          List.map\n            ~f:(fun x -> SSP.AST.Value ((pexpr env false) x, true, 0))\n            args\n        in\n        SSP.AST.AppFormat (op, args)\n    (* | App { f = { e = GlobalVar x }; args } -> *)\n    (*    __TODO_term__ span \"GLOBAL APP?\" *)\n    | App { f; args; _ } ->\n        let base = (pexpr env false) f in\n        let args = List.map ~f:(pexpr env false) args in\n        SSP.AST.App (base, args)\n    | If { cond; then_; else_ } ->\n        SSPExtraDefinitions.ifb\n          ( (pexpr env false) cond,\n            (pexpr env false) then_,\n            Option.value_map else_ ~default:(SSP.AST.Literal \"()\")\n              ~f:(pexpr env false) )\n    | Array l -> SSP.AST.Array (List.map ~f:(pexpr env add_solve) l)\n    | Let { lhs; rhs; body; monadic } ->\n        let extra_set, _extra_env =\n          LocalIdentOrLisIis.analyse_expr ctx.analysis_data.mut_var env rhs\n        in\n        let new_env =\n          extend_env env\n            (Map.of_alist_exn\n               (module Local_ident)\n               (List.map\n                  
~f:(fun v -> (v, extra_set))\n                  (Set.to_list (U.Reducers.variables_of_pat lhs))))\n        in\n        let new_env =\n          match (monadic, is_mutable_pat lhs) with\n          | None, true ->\n              extend_env new_env\n                (Map.of_alist_exn\n                   (module Local_ident)\n                   (List.map\n                      ~f:(fun v -> (v, [ LocalIdentOrLisIis.W.Identifier v ]))\n                      (Set.to_list (U.Reducers.variables_of_pat lhs))))\n          | _, _ -> new_env\n        in\n        SSPExtraDefinitions.letb\n          {\n            pattern = ppat lhs;\n            mut = is_mutable_pat lhs;\n            value = (pexpr env false) rhs;\n            body = (pexpr new_env add_solve) body;\n            value_typ =\n              (match monadic with\n              | Some (MException typ, _) -> pty span typ\n              | Some (MResult typ, _) -> pty span typ\n              | _ ->\n                  SSP.AST.WildTy\n                  (* TODO : What should the correct type be here? `lhs.span lhs.typ` *));\n            monad_typ =\n              Option.map\n                ~f:(fun (m, _) ->\n                  match m with\n                  | MException typ -> SSP.AST.Exception (pty span typ)\n                  | MResult typ -> SSP.AST.Result (pty span typ)\n                  | MOption -> SSP.AST.Option)\n                monadic;\n          }\n    | EffectAction _ -> . 
(* __TODO_term__ span \"monadic action\" *)\n    | Match\n        {\n          scrutinee;\n          arms =\n            [\n              {\n                arm =\n                  {\n                    arm_pat =\n                      {\n                        p =\n                          PConstruct\n                            {\n                              fields = [ { pat; _ } ];\n                              is_record = false;\n                              is_struct = true;\n                              _;\n                            };\n                        _;\n                      };\n                    body;\n                  };\n                _;\n              };\n            ];\n        } ->\n        (* Record match expressions *)\n        (* (pexpr env true) body *)\n        SSPExtraDefinitions.letb\n          {\n            pattern = ppat pat;\n            mut = false;\n            value = (pexpr env false) scrutinee;\n            body = (pexpr env true) body;\n            value_typ = pty pat.span pat.typ;\n            monad_typ = None;\n          }\n    | Match { scrutinee; arms } ->\n        SSPExtraDefinitions.matchb\n          ( (pexpr env false) scrutinee,\n            List.map\n              ~f:(fun { arm = { arm_pat; body }; _ } ->\n                match arm_pat.p with\n                | PConstruct\n                    {\n                      constructor = name;\n                      fields = args;\n                      is_record = false;\n                      is_struct = false;\n                    } -> (\n                    let arg_tuple =\n                      SSP.AST.TuplePat (List.map ~f:(fun p -> ppat p.pat) args)\n                    in\n                    ( SSP.AST.ConstructorPat\n                        ( pglobal_ident name ^ \"_case\",\n                          match args with [] -> [] | _ -> [ arg_tuple ] ),\n                      match\n                        (args, SSPExtraDefinitions.pat_as_expr 
arg_tuple)\n                      with\n                      | _ :: _, Some (redefine_pat, redefine_expr) ->\n                          SSPExtraDefinitions.letb\n                            {\n                              pattern = redefine_pat (* TODO *);\n                              mut = false;\n                              value =\n                                SSP.AST.App\n                                  ( SSP.AST.Var \"ret_both\",\n                                    [\n                                      SSP.AST.TypedTerm\n                                        ( redefine_expr,\n                                          SSP.AST.Product\n                                            (List.map\n                                               ~f:(fun x ->\n                                                 pty arm_pat.span x.pat.typ)\n                                               args) );\n                                    ] );\n                              body = (pexpr env true) body;\n                              value_typ =\n                                SSP.AST.Product\n                                  (List.map\n                                     ~f:(fun x -> pty arm_pat.span x.pat.typ)\n                                     args);\n                              monad_typ = None;\n                            }\n                      | _, _ -> (pexpr env true) body ))\n                | _ -> (ppat arm_pat, (pexpr env true) body))\n              arms )\n    | Ascription _ -> __TODO_term__ span \"asciption\"\n    | Construct { constructor = `TupleCons 1; fields = [ (_, e) ]; _ } ->\n        (pexpr env false) e\n    | Construct { constructor = `TupleCons _n; fields; _ } ->\n        SSP.AST.App\n          ( SSP.AST.Var \"prod_b\",\n            [ SSP.AST.Tuple (List.map ~f:(snd >> pexpr env false) fields) ] )\n    | Construct { is_record = true; constructor; fields; base = None; _ } ->\n        SSP.AST.RecordConstructor\n          ( \"t_\" ^ 
pglobal_ident constructor,\n            List.map\n              ~f:(fun (f, e) -> (pglobal_ident f, (pexpr env false) e))\n              fields )\n    | Construct { is_record = true; constructor; fields; base = Some (x, _); _ }\n      ->\n        SSP.AST.RecordUpdate\n          ( pglobal_ident constructor,\n            (pexpr env false) x,\n            List.map\n              ~f:(fun (f, e) -> (pglobal_ident f, (pexpr env false) e))\n              fields )\n    (* TODO: Is there only 1 field? *)\n    | Construct { constructor; fields = [ (_f, e) ]; _ } ->\n        SSP.AST.App\n          (SSP.AST.Var (pglobal_ident constructor), [ (pexpr env add_solve) e ])\n    | Construct { constructor; fields; _ } ->\n        (* __TODO_term__ span \"constructor\" *)\n        SSP.AST.App\n          ( SSP.AST.Var (pglobal_ident constructor),\n            List.map ~f:(snd >> pexpr env add_solve) fields )\n    | Closure { params; body; _ } ->\n        SSP.AST.Lambda\n          ( List.map ~f:ppat params,\n            (pexpr (extend_env_with_params env params) add_solve) body )\n    | MacroInvokation { macro; _ } ->\n        Error.raise\n        @@ {\n             kind = UnsupportedMacro { id = [%show: Ast.global_ident] macro };\n             span = e.span;\n           }\n    | Assign _ ->\n        SSP.AST.Const (SSP.AST.Const_string (\"assign\" ^ \" todo(term)\"))\n    (* __TODO_term__ span \"assign\" *)\n    | Loop { body; kind; state = None; label; witness } ->\n        (pexpr env false)\n          {\n            e =\n              Loop\n                {\n                  body;\n                  kind;\n                  state =\n                    Some\n                      {\n                        init =\n                          {\n                            e =\n                              Construct\n                                {\n                                  is_record = false;\n                                  is_struct = false;\n                             
     base = None;\n                                  constructor = `TupleCons 0;\n                                  fields = [];\n                                };\n                            span = Span.dummy ();\n                            typ = TApp { ident = `TupleType 0; args = [] };\n                          };\n                        bpat =\n                          {\n                            p =\n                              PConstruct\n                                {\n                                  constructor = `TupleCons 0;\n                                  fields = [];\n                                  is_record = false;\n                                  is_struct = false;\n                                };\n                            span = Span.dummy ();\n                            typ = TApp { ident = `TupleType 0; args = [] };\n                          };\n                        witness =\n                          Features.On.state_passing_loop\n                          (* state_passing_loop *);\n                      };\n                  label;\n                  witness;\n                  control_flow = None;\n                  (* TODO? 
*)\n                };\n            typ = e.typ;\n            span = e.span;\n          }\n    | Loop\n        {\n          body;\n          kind = ForIndexLoop { start; end_; var; _ };\n          state = Some { init; bpat; _ };\n          _;\n        } ->\n        SSP.AST.App\n          ( SSP.AST.Var \"foldi_both\",\n            [\n              (pexpr env false) start;\n              (pexpr env false) end_;\n              SSP.AST.Lambda\n                ( [\n                    (* SSP.AST.Ident \"{L I _ _}\";  *)\n                    SSP.AST.Ident (plocal_ident var);\n                  ],\n                  SSP.AST.App\n                    ( SSP.AST.Var \"ssp\",\n                      [\n                        SSP.AST.Lambda\n                          ( [ ppat bpat ],\n                            both_type_expr\n                              (extend_env env\n                                 (Map.of_alist_exn\n                                    (module Local_ident)\n                                    ([\n                                       ( var,\n                                         [\n                                           LocalIdentOrLisIis.W.Data\n                                             ( [ plocal_ident var ^ \"?\" ],\n                                               [ plocal_ident var ^ \"?\" ] );\n                                         ] );\n                                     ]\n                                    @ List.map\n                                        ~f:(fun v ->\n                                          ( v,\n                                            [\n                                              LocalIdentOrLisIis.W.Data\n                                                ( [ plocal_ident v ^ \"!\" ],\n                                                  [ plocal_ident v ^ \"!\" ] );\n                                            ] ))\n                                        (vars_from_pat bpat))))\n                         
     true [] body );\n                      ] ) );\n              (pexpr env false) init;\n            ] )\n    | Loop\n        {\n          body;\n          kind = ForLoop { pat; it; _ };\n          state = Some { init; bpat; _ };\n          _;\n        } ->\n        let extra_set_init, _extra_env =\n          LocalIdentOrLisIis.analyse_expr ctx.analysis_data.mut_var env init\n        in\n        let new_env =\n          extend_env env\n            (Map.of_alist_exn\n               (module Local_ident)\n               (List.map\n                  ~f:(fun v -> (v, extra_set_init))\n                  (Set.to_list (U.Reducers.variables_of_pat bpat))))\n        in\n        let extra_set_iter, _extra_env =\n          LocalIdentOrLisIis.analyse_expr ctx.analysis_data.mut_var env it\n        in\n        let new_env =\n          extend_env new_env\n            (Map.of_alist_exn\n               (module Local_ident)\n               (List.map\n                  ~f:(fun v -> (v, extra_set_iter))\n                  (Set.to_list (U.Reducers.variables_of_pat bpat))))\n        in\n        SSP.AST.App\n          ( SSP.AST.Var \"foldi_both_list\",\n            [\n              (pexpr env false) it;\n              SSP.AST.Lambda\n                ( [ (* SSP.AST.Ident \"{L I _ _}\";  *) ppat pat ],\n                  SSP.AST.App\n                    ( SSP.AST.Var \"ssp\",\n                      [\n                        SSP.AST.Lambda\n                          ( [ ppat bpat ],\n                            both_type_expr new_env true\n                              (extra_set_iter @ extra_set_init)\n                              body );\n                      ] ) );\n              (pexpr env false) init;\n            ] )\n    | Loop _ ->\n        SSP.AST.Const (SSP.AST.Const_string (\"other loop\" ^ \" todo(term)\"))\n    (* __TODO_term__ span \"other loop\" *)\n    (* | Break { e; _ } -> *)\n    (*     SSP.AST.Const (SSP.AST.Const_string (\"break\" ^ \" todo(term)\")) *)\n    (*     
(* __TODO_term__ span \"break\" *) *)\n    | _ -> .\n\n  and vars_from_pat : pat -> Local_ident.t list =\n    U.Reducers.variables_of_pat >> Set.to_list\n\n  and env_from_param (params : pat list) :\n      LocalIdentOrLisIis.W.t list Map.M(Local_ident).t =\n    Map.of_alist_exn\n      (module Local_ident)\n      (List.concat_mapi\n         ~f:(fun i pat ->\n           List.map\n             ~f:(fun var ->\n               ( var,\n                 [\n                   LocalIdentOrLisIis.W.Data\n                     ( [ \"L\" ^ Int.to_string (i + 1) ],\n                       [ \"I\" ^ Int.to_string (i + 1) ] );\n                 ] ))\n             (vars_from_pat pat))\n         params)\n\n  and extend_env (env : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t)\n      (env_ext : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t) :\n      LocalIdentOrLisIis.W.t list Map.M(Local_ident).t =\n    Map.merge_skewed env env_ext ~combine:(fun ~key:_ a b -> a @ b)\n  (* TODO: Just combine values? Should do this as sets! 
*)\n\n  and extend_env_with_params\n      (env : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t)\n      (params : pat list) : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t =\n    extend_env env (env_from_param params)\n\n  and analyse_env_of_expr\n      (env : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t) (e : expr)\n      extra_set =\n    let expr_env, new_env =\n      LocalIdentOrLisIis.analyse_expr ctx.analysis_data.mut_var env e\n    in\n    let expr_env = expr_env @ extra_set in\n    let identifiers =\n      List.filter_map\n        ~f:(function Identifier x -> Some x | Data _ -> None)\n        expr_env\n    in\n    let data =\n      List.filter_map\n        ~f:(function Identifier _ -> None | Data x -> Some x)\n        expr_env\n    in\n    let lis, iis = (List.concat *** List.concat) (List.unzip data) in\n    (identifiers, lis, iis, new_env)\n\n  and both_type_expr (env : LocalIdentOrLisIis.W.t list Map.M(Local_ident).t)\n      (add_solve : bool) (extra_set : LocalIdentOrLisIis.W.t list) (e : expr) =\n    let identifiers, lis, iis, _new_env = analyse_env_of_expr env e extra_set in\n    SSP.AST.TypedTerm\n      ( (pexpr env add_solve) e,\n        SSPExtraDefinitions.wrap_type_in_both (pty e.span e.typ) )\n\n  and is_mutable_pat (pat : pat) =\n    match pat.p with\n    | PWild -> false\n    | PAscription { pat; _ } -> is_mutable_pat pat\n    | PConstruct { constructor = `TupleCons _; fields = args; _ } ->\n        List.fold ~init:false ~f:( || )\n          (List.map ~f:(fun p -> is_mutable_pat p.pat) args)\n    | PConstruct _ -> false\n    | PArray _ ->\n        (* List.fold ~init:false ~f:(||) (List.map ~f:(fun p -> is_mutable_pat p) args) *)\n        false\n    | PConstant _ -> false\n    | PBinding { mut = Mutable _; _ } -> true\n    | PBinding _ -> false\n    | POr _ ->\n        (* List.fold ~init:false ~f:( || ) *)\n        (*   (List.map ~f:(fun p -> is_mutable_pat p) subpats) *)\n        false\n        (* TODO? 
*)\n    | _ -> .\n\n  let pgeneric_param_as_argument span : AST.generic_param -> SSP.AST.argument =\n    function\n    | { ident; kind; _ } ->\n        SSP.AST.Implicit\n          ( SSP.AST.Ident (plocal_ident ident),\n            match kind with\n            | GPType (* { default = Some t } *) ->\n                SSP.AST.NameTy (plocal_ident ident) (* pty span t *)\n            | GPConst { typ = t } ->\n                SSPExtraDefinitions.wrap_type_in_both (pty span t)\n            (* | GPType { default = None } -> SSP.AST.WildTy *)\n            | _ -> . )\n\n  let pgeneric_constraints_as_argument span :\n      generic_constraint -> SSP.AST.argument list = function\n    | GCType { goal = { trait; args }; _ } ->\n        [\n          SSP.AST.Typeclass\n            ( None,\n              SSP.AST.AppTy\n                ( SSP.AST.NameTy (pconcrete_ident trait),\n                  List.map\n                    ~f:(function\n                      | GType typ -> pty span typ\n                      | GConst { typ; _ } ->\n                          SSPExtraDefinitions.wrap_type_in_both (pty span typ)\n                      | _ -> .)\n                    args ) );\n        ]\n    | GCProjection { impl; assoc_item; typ } ->\n        []\n        (* Error.unimplemented ~issue_id:549 *)\n        (*   ~details:\"Projections of an associated type is not yet supported.\" *)\n        (*   span *)\n    | _ -> .\n\n  let pgeneric (span : Ast.span) (generics : AST.generics) :\n      SSP.AST.argument list =\n    List.map ~f:(pgeneric_param_as_argument span) generics.params\n    @ List.concat_map\n        ~f:(pgeneric_constraints_as_argument span)\n        generics.constraints\n\n  let rec split_arrow_in_args (a : SSP.AST.ty) : SSP.AST.ty list * SSP.AST.ty =\n    match a with\n    | SSP.AST.Arrow (x, y) ->\n        let l, r = split_arrow_in_args y in\n        (x :: l, r)\n    | _ -> ([], a)\n\n  let rec wrap_type_in_enumerator_helper (i : int) (a : SSP.AST.ty) =\n    let l, r = 
split_arrow_in_args a in\n    let size, t =\n      List.fold_left\n        ~f:(fun (yi, ys) x ->\n          let size, x_val = wrap_type_in_enumerator_helper yi x in\n          ( size,\n            match ys with\n            | Some v -> Some (SSP.AST.Arrow (v, x_val))\n            | None -> Some x_val ))\n        ~init:(i, None) l\n    in\n    match t with\n    | Some v ->\n        (size, SSP.AST.Arrow (v, SSPExtraDefinitions.wrap_type_in_both r))\n    | None -> (size + 1, SSPExtraDefinitions.wrap_type_in_both r)\n\n  let wrap_type_in_enumerator (a : SSP.AST.ty) =\n    let size, v = wrap_type_in_enumerator_helper 0 a in\n    (* Throw away anotation of last type, and replace with accumulation of all locations and imports *)\n    let xs, a =\n      match v with\n      | SSP.AST.Arrow (x, SSP.AST.AppTy (SSP.AST.NameTy _, [ a ])) -> ([ x ], a)\n      | SSP.AST.AppTy (SSP.AST.NameTy _, [ a ]) -> ([], a)\n      | _ ->\n          Error.unimplemented\n            ~details:\n              \"SSProve: TODO: wrap_type_in_enumerator encountered an \\\n               unexpected type\"\n            (Span.dummy ())\n    in\n    let ret_ty =\n      List.fold\n        ~init:(SSPExtraDefinitions.wrap_type_in_both a)\n        ~f:(fun y x -> SSP.AST.Arrow (x, y))\n        xs\n    in\n    (size, ret_ty)\n\n  let rec pitem (e : AST.item) : SSP.AST.decl list =\n    try pitem_unwrapped e\n    with Diagnostics.SpanFreeError.Exn _kind ->\n      [ SSP.AST.Unimplemented \"item error backend\" ]\n\n  and pitem_unwrapped (e : AST.item) : SSP.AST.decl list =\n    let span = e.span in\n    let decls_from_item =\n      match e.v with\n      | Fn { name = f_name; generics; body; params } ->\n          [\n            (let args, ret_typ =\n               lift_definition_type_to_both f_name\n                 (pgeneric span generics\n                 @ List.map\n                     ~f:(fun { pat; typ; _ } ->\n                       SSP.AST.Explicit (ppat pat, pty span typ))\n                     
params)\n                 (pty span body.typ)\n             in\n             if Attrs.lemma e.attrs then\n               SSP.AST.Lemma\n                 ( pconcrete_ident f_name,\n                   args,\n                   (pexpr\n                      (extend_env_with_params\n                         (Map.empty (module Local_ident))\n                         (List.map ~f:(fun { pat; _ } -> pat) params))\n                      true)\n                     (Option.value ~default:body\n                        (Attrs.associated_expr Ensures e.attrs)) )\n             else\n               SSP.AST.Equations\n                 ( pconcrete_ident f_name,\n                   args,\n                   (pexpr\n                      (extend_env_with_params\n                         (Map.empty (module Local_ident))\n                         (List.map ~f:(fun { pat; _ } -> pat) params))\n                      true)\n                     body,\n                   ret_typ ));\n          ]\n      | TyAlias { name; generics; ty } ->\n          let g = pgeneric span generics in\n          [\n            (if List.is_empty g then\n               SSP.AST.Notation\n                 ( \"'\" ^ pconcrete_ident name ^ \"'\",\n                   SSP.AST.Type (pty span ty),\n                   None )\n             else\n               SSP.AST.Definition\n                 ( pconcrete_ident name,\n                   g,\n                   SSP.AST.Type (pty span ty),\n                   SSP.AST.TypeTy ));\n          ]\n      (* record *)\n      | Type\n          {\n            name;\n            generics;\n            variants = [ { name = _record_name; arguments; _ } ];\n            is_struct = true;\n          } ->\n          [\n            SSPExtraDefinitions.updatable_record\n              ( pconcrete_ident name,\n                pgeneric span generics,\n                List.map\n                  ~f:(fun (x, y) -> SSP.AST.Named (x, y))\n                  (p_record_record span arguments) );\n  
        ]\n      (* enum *)\n      | Type { name; generics; variants; _ } ->\n          (* Define all record types in enums (no anonymous records) *)\n          List.filter_map variants\n            ~f:(fun { name = v_name; arguments; is_record; _ } ->\n              if is_record then\n                Some\n                  (SSPExtraDefinitions.updatable_record\n                     ( (match\n                          String.chop_prefix ~prefix:\"C_\"\n                            (pconcrete_ident v_name)\n                        with\n                       | Some name -> \"t_\" ^ name\n                       | _ -> failwith \"Incorrect prefix of record name in enum\"),\n                       pgeneric span generics,\n                       List.map\n                         ~f:(fun (x, y) -> SSP.AST.Named (x, y))\n                         (p_record_record span arguments) ))\n              else None)\n          @ [\n              SSPExtraDefinitions.both_enum\n                ( pconcrete_ident name,\n                  pgeneric span generics,\n                  List.map variants\n                    ~f:(fun { name = v_name; arguments; is_record; _ } ->\n                      if is_record then\n                        SSP.AST.InductiveCase\n                          ( pconcrete_ident v_name,\n                            SSP.AST.RecordTy\n                              ( (match\n                                   String.chop_prefix ~prefix:\"C_\"\n                                     (pconcrete_ident v_name)\n                                 with\n                                | Some name -> \"t_\" ^ name\n                                | _ ->\n                                    failwith\n                                      \"Incorrect prefix of record name in enum\"),\n                                p_record_record span arguments ) )\n                      else\n                        match arguments with\n                        | [] -> SSP.AST.BaseCase 
(pconcrete_ident v_name)\n                        | [ (_arg_name, arg_ty, _attr) ] ->\n                            SSP.AST.InductiveCase\n                              (* arg_name = ?? *)\n                              (pconcrete_ident v_name, pty span arg_ty)\n                        | _ ->\n                            SSP.AST.InductiveCase\n                              ( pconcrete_ident v_name,\n                                SSP.AST.Product\n                                  (List.map\n                                     ~f:((fun (_x, y, _z) -> y) >> pty span)\n                                     arguments) )) );\n            ]\n      | IMacroInvokation { macro; argument; _ } -> (\n          let unsupported () =\n            let id = [%show: concrete_ident] macro in\n            Error.raise { kind = UnsupportedMacro { id }; span = e.span }\n          in\n          match RenderId.render macro with\n          | { path = \"hacspec_lib\" :: _; name } -> (\n              match name with\n              | \"public_nat_mod\" ->\n                  let open Hacspeclib_macro_parser in\n                  let o : PublicNatMod.t =\n                    PublicNatMod.parse argument |> Result.ok_or_failwith\n                  in\n                  [\n                    SSP.AST.Notation\n                      ( \"'\" ^ \"t_\" ^ o.type_name ^ \"'\",\n                        SSP.AST.Type\n                          (SSP.AST.NatMod\n                             ( o.type_of_canvas,\n                               o.bit_size_of_field,\n                               o.modulo_value )),\n                        None );\n                    SSP.AST.Definition\n                      ( o.type_name,\n                        [],\n                        SSP.AST.Var \"id\",\n                        SSP.AST.Arrow\n                          ( SSPExtraDefinitions.wrap_type_in_both\n                              (SSP.AST.NameTy (\"t_\" ^ o.type_name)),\n                            
SSPExtraDefinitions.wrap_type_in_both\n                              (SSP.AST.NameTy (\"t_\" ^ o.type_name)) ) );\n                  ]\n              | \"bytes\" ->\n                  let open Hacspeclib_macro_parser in\n                  let o : Bytes.t =\n                    Bytes.parse argument |> Result.ok_or_failwith\n                  in\n                  [\n                    SSP.AST.Notation\n                      ( \"'\" ^ \"t_\" ^ o.bytes_name ^ \"'\",\n                        SSP.AST.Type\n                          (SSP.AST.ArrayTy\n                             ( SSP.AST.Int { size = SSP.AST.U8; signed = false },\n                               (* int_of_string *) o.size )),\n                        None );\n                    SSP.AST.Definition\n                      ( o.bytes_name,\n                        [],\n                        SSP.AST.Var \"id\",\n                        SSP.AST.Arrow\n                          ( SSPExtraDefinitions.wrap_type_in_both\n                              (SSP.AST.NameTy (\"t_\" ^ o.bytes_name)),\n                            SSPExtraDefinitions.wrap_type_in_both\n                              (SSP.AST.NameTy (\"t_\" ^ o.bytes_name)) ) );\n                  ]\n              | \"unsigned_public_integer\" ->\n                  let open Hacspeclib_macro_parser in\n                  let o =\n                    UnsignedPublicInteger.parse argument\n                    |> Result.ok_or_failwith\n                  in\n                  [\n                    SSP.AST.Notation\n                      ( \"'\" ^ \"t_\" ^ o.integer_name ^ \"'\",\n                        SSP.AST.Type\n                          (SSP.AST.ArrayTy\n                             ( SSP.AST.Int { size = SSP.AST.U8; signed = false },\n                               Int.to_string ((o.bits + 7) / 8) )),\n                        None );\n                    SSP.AST.Definition\n                      ( o.integer_name,\n                        [],\n              
          SSP.AST.Var \"id\",\n                        SSP.AST.Arrow\n                          ( SSPExtraDefinitions.wrap_type_in_both\n                              (SSP.AST.NameTy (\"t_\" ^ o.integer_name)),\n                            SSPExtraDefinitions.wrap_type_in_both\n                              (SSP.AST.NameTy (\"t_\" ^ o.integer_name)) ) );\n                  ]\n              | \"public_bytes\" ->\n                  let open Hacspeclib_macro_parser in\n                  let o : Bytes.t =\n                    Bytes.parse argument |> Result.ok_or_failwith\n                  in\n                  let typ =\n                    SSP.AST.ArrayTy\n                      ( SSP.AST.Int { size = SSP.AST.U8; signed = false },\n                        (* int_of_string *) o.size )\n                  in\n                  [\n                    SSP.AST.Notation\n                      (\"'\" ^ \"t_\" ^ o.bytes_name ^ \"'\", SSP.AST.Type typ, None);\n                    SSP.AST.Definition\n                      ( o.bytes_name,\n                        [],\n                        SSP.AST.Var \"id\",\n                        SSP.AST.Arrow\n                          ( SSPExtraDefinitions.wrap_type_in_both\n                              (SSP.AST.NameTy (\"t_\" ^ o.bytes_name)),\n                            SSPExtraDefinitions.wrap_type_in_both\n                              (SSP.AST.NameTy (\"t_\" ^ o.bytes_name)) ) );\n                  ]\n              | \"array\" ->\n                  let open Hacspeclib_macro_parser in\n                  let o : Array.t =\n                    Array.parse argument |> Result.ok_or_failwith\n                  in\n                  let typ =\n                    match o.typ with\n                    | \"U128\" -> SSP.AST.U128\n                    | \"U64\" -> SSP.AST.U64\n                    | \"U32\" -> SSP.AST.U32\n                    | \"U16\" -> SSP.AST.U16\n                    | \"U8\" -> SSP.AST.U8\n                    | _usize -> 
SSP.AST.U32 (* TODO: usize? *)\n                  in\n                  [\n                    SSP.AST.Notation\n                      ( \"'\" ^ \"t_\" ^ o.array_name ^ \"'\",\n                        SSP.AST.Type\n                          (SSP.AST.ArrayTy\n                             ( SSP.AST.Int { size = typ; signed = false },\n                               (* int_of_string *) o.size )),\n                        None );\n                    SSP.AST.Definition\n                      ( o.array_name,\n                        [],\n                        SSP.AST.Var \"id\",\n                        SSP.AST.Arrow\n                          ( SSPExtraDefinitions.wrap_type_in_both\n                              (SSP.AST.NameTy (\"t_\" ^ o.array_name)),\n                            SSPExtraDefinitions.wrap_type_in_both\n                              (SSP.AST.NameTy (\"t_\" ^ o.array_name)) ) );\n                  ]\n              | _ -> unsupported ())\n          | _ -> unsupported ())\n      | Use { path; is_external; rename } ->\n          let _ns_path = ctx.current_namespace in\n          if is_external then []\n          else\n            [ SSP.AST.Require (None, (* ns_crate:: ns_path @ *) path, rename) ]\n      | HaxError s -> [ __TODO_item__ span s ]\n      | NotImplementedYet -> [ __TODO_item__ span \"Not implemented yet?\" ]\n      | Alias _ -> [ __TODO_item__ span \"Not implemented yet? 
alias\" ]\n      | Trait { name; items; generics } ->\n          [\n            SSP.AST.Class\n              ( pconcrete_ident name,\n                (match pgeneric span generics with\n                | SSP.AST.Implicit (x, y) :: xs -> SSP.AST.Explicit (x, y) :: xs\n                | x -> x),\n                List.concat_map\n                  ~f:(fun x ->\n                    match x.ti_v with\n                    | TIFn fn_ty ->\n                        let size, value =\n                          wrap_type_in_enumerator (pty x.ti_span fn_ty)\n                        in\n                        [\n                          SSP.AST.Named\n                            ( pconcrete_ident x.ti_ident,\n                              SSP.AST.Forall ([], [], value) );\n                        ]\n                    | TIType impl_idents ->\n                        SSP.AST.Named\n                          (pconcrete_ident x.ti_ident, SSP.AST.TypeTy)\n                        :: List.map\n                             ~f:(fun { goal = tr; _ } ->\n                               SSP.AST.Coercion\n                                 ( pconcrete_ident x.ti_ident ^ \"_\"\n                                   ^ pconcrete_ident tr.trait,\n                                   SSP.AST.AppTy\n                                     ( SSP.AST.NameTy (pconcrete_ident tr.trait),\n                                       [\n                                         SSP.AST.NameTy\n                                           (pconcrete_ident x.ti_ident);\n                                       ] ) ))\n                             impl_idents\n                    | _ -> .)\n                  items );\n          ]\n      | Impl { generics; self_ty; of_trait = name, gen_vals; items } ->\n          [\n            SSP.AST.ProgramInstance\n              ( pconcrete_ident name,\n                pgeneric span generics,\n                pty span self_ty,\n                args_ty span gen_vals,\n                
SSP.AST.InstanceDecls\n                  (List.concat_map\n                     ~f:(fun x ->\n                       match x.ii_v with\n                       | IIFn { body; params } ->\n                           [\n                             (let args, ret_typ =\n                                lift_definition_type_to_both x.ii_ident\n                                  (List.map\n                                     ~f:(fun { pat; typ; _ } ->\n                                       SSP.AST.Explicit (ppat pat, pty span typ))\n                                     params)\n                                  (pty span body.typ)\n                              in\n                              SSP.AST.LetDef\n                                ( pconcrete_ident x.ii_ident,\n                                  args,\n                                  (pexpr\n                                     (extend_env_with_params\n                                        (Map.empty (module Local_ident))\n                                        (List.map\n                                           ~f:(fun { pat; _ } -> pat)\n                                           params))\n                                     true)\n                                    body,\n                                  ret_typ ));\n                           ]\n                       | IIType { typ; _ } ->\n                           [\n                             SSP.AST.LetDef\n                               ( pconcrete_ident x.ii_ident,\n                                 [],\n                                 SSP.AST.Type (pty span typ),\n                                 SSP.AST.TypeTy );\n                           ])\n                     items) );\n          ]\n          @ [\n              SSP.AST.HintUnfold (pconcrete_ident name, Some (pty span self_ty));\n            ]\n    in\n    decls_from_item\n\n  and new_arguments (arguments : SSP.AST.argument list) =\n    snd\n      (List.fold_left ~init:(0, [])\n  
       ~f:(fun (i, y) arg ->\n           let f = SSPExtraDefinitions.wrap_type_in_both in\n           match arg with\n           | Implicit (p, t) -> (i, y @ [ SSP.AST.Implicit (p, t) ])\n           | Explicit (p, t) -> (i + 1, y @ [ SSP.AST.Explicit (p, f t) ])\n           | Typeclass (so, t) -> (i, y @ [ SSP.AST.Typeclass (so, t) ]))\n         arguments)\n\n  and lift_definition_type_to_both (name : concrete_ident)\n      (arguments : SSP.AST.argument list) (typ : SSP.AST.ty) :\n      SSP.AST.argument list * SSP.AST.ty =\n    let new_args = new_arguments arguments in\n    let return_typ = both_return_type_from_name name typ in\n    (new_args, return_typ)\n\n  and both_return_type_from_name name typ =\n    SSPExtraDefinitions.wrap_type_in_both typ\n\n  and p_record_record span arguments : (string * SSP.AST.ty) list =\n    List.map\n      ~f:(function\n        | arg_name, arg_ty, _arg_attrs ->\n            (pconcrete_ident arg_name, pty span arg_ty))\n      arguments\nend\n\nmodule type S = sig\n  val pitem : AST.item -> SSP.AST.decl list\n  (* val pgeneric : Ast.span -> AST.generics -> SSP.AST.argument list *)\nend\n\nlet make (module M : Attrs.WITH_ITEMS) ctx =\n  (module Make\n            (M)\n            (struct\n              let ctx = ctx\n            end) : S)\n\nlet decls_to_string (decls : SSP.AST.decl list) : string =\n  String.concat ~sep:\"\\n\" (List.map ~f:SSP.decl_to_string decls)\n\nlet print_item m (analysis_data : StaticAnalysis.analysis_data)\n    (item : AST.item) : SSP.AST.decl list =\n  let (module Print) =\n    make m\n      { current_namespace = (RenderId.render item.ident).path; analysis_data }\n  in\n  Print.pitem item\n\nlet cleanup_item_strings =\n  List.map ~f:String.strip\n  >> List.filter ~f:(String.is_empty >> not)\n  >> String.concat ~sep:\"\\n\\n\"\n\n(* module ConCert = struct *)\n(*   let translate_concert_annotations *)\n(*       (analysis_data : StaticAnalysis.analysis_data) (e : item) : *)\n(*       SSP.AST.decl list = *)\n(*  
   let (module Print) = *)\n(*       make *)\n(*         { *)\n(*           current_namespace = U.Concrete_ident_view.to_namespace e.ident; *)\n(*           analysis_data; *)\n(*         } *)\n(*     in *)\n(*     match e.v with *)\n(*     | Fn { name = f_name; generics; _ } -> *)\n(*         List.concat_map *)\n(*           ~f:(fun { kind; span } -> *)\n(*             match kind with *)\n(*             | Tool { path; tokens } -> ( *)\n(*                 let token_list = token_list tokens in *)\n(*                 match path with *)\n(*                 | \"hax::init\" -> *)\n(*                     let contract = *)\n(*                       strip_or_error \"contract argument missing\" *)\n(*                         (get_argument \"contract\" token_list) *)\n(*                         e.span *)\n(*                     in *)\n(*                     [ *)\n(*                       SSP.AST.Definition *)\n(*                         ( \"init_\" ^ contract, *)\n(*                           [ *)\n(*                             SSP.AST.Explicit *)\n(*                               (SSP.AST.Ident \"chain\", SSP.AST.NameTy \"Chain\"); *)\n(*                             SSP.AST.Explicit *)\n(*                               ( SSP.AST.Ident \"ctx\", *)\n(*                                 SSP.AST.NameTy \"ContractCallContext\" ); *)\n(*                             SSP.AST.Explicit *)\n(*                               ( SSP.AST.Ident \"st\", *)\n(*                                 SSP.AST.NameTy (\"state_\" ^ contract) ); *)\n(*                           ], *)\n(*                           SSP.AST.App *)\n(*                             (SSP.AST.Var \"ResultMonad.Ok\", [ SSP.AST.Var \"st\" ]), *)\n(*                           SSP.AST.AppTy *)\n(*                             ( SSP.AST.NameTy \"ResultMonad.result\", *)\n(*                               [ *)\n(*                                 SSP.AST.NameTy (\"state_\" ^ contract); *)\n(*                                 SSP.AST.NameTy 
\"t_ParseError\"; *)\n(*                               ] ) ); *)\n(*                     ] *)\n(*                 | \"hax::receive\" -> *)\n(*                     let contract = *)\n(*                       strip_or_error \"contract argument missing\" *)\n(*                         (get_argument \"contract\" token_list) *)\n(*                         e.span *)\n(*                     in *)\n(*                     let name = *)\n(*                       strip_or_error \"name argument missing\" *)\n(*                         (get_argument \"name\" token_list) *)\n(*                         e.span *)\n(*                     in *)\n(*                     let parameter = get_argument \"parameter\" token_list in *)\n(*                     (\\* let logger = get_argument \"logger\" token_list in *\\) *)\n(*                     (\\* let payable = get_argument \"payable\" token_list in *\\) *)\n(*                     let param_instances, param_list, count, param_vars = *)\n(*                       match parameter with *)\n(*                       | Some x -> *)\n(*                           ( [ *)\n(*                               SSP.AST.ProgramInstance *)\n(*                                 ( \"t_HasReceiveContext\", *)\n(*                                   [], *)\n(*                                   SSP.AST.NameTy (\"t_\" ^ strip x), *)\n(*                                   [ *)\n(*                                     SSP.AST.NameTy (\"t_\" ^ strip x); *)\n(*                                     SSP.AST.Unit; *)\n(*                                   ], *)\n(*                                   SSP.AST.InstanceDecls *)\n(*                                     [ *)\n(*                                       SSP.AST.InlineDef *)\n(*                                         ( \"f_get\", *)\n(*                                           [ *)\n(*                                             SSP.AST.Implicit *)\n(*                                               ( SSP.AST.Ident \"Ctx\", 
*)\n(*                                                 SSP.AST.WildTy ); *)\n(*                                             SSP.AST.Implicit *)\n(*                                               ( SSP.AST.Ident \"L\", *)\n(*                                                 (SSP.AST.NameTy *)\n(*                                                    \"{fset Location}\" *)\n(*                                                   : SSP.AST.ty) ); *)\n(*                                             SSP.AST.Implicit *)\n(*                                               ( SSP.AST.Ident \"I\", *)\n(*                                                 (SSP.AST.NameTy \"Interface\" *)\n(*                                                   : SSP.AST.ty) ); *)\n(*                                           ], *)\n(*                                           SSP.AST.Var *)\n(* \"(solve_lift (@ret_both \\ *)\n   (*                                              (t_ParamType × t_Result Ctx \\ *)\n   (*                                              t_ParseError)) (tt, inr tt))\", *)\n(*                                           SSP.AST.WildTy ); *)\n(*                                     ] ); *)\n(*                               SSP.AST.ProgramInstance *)\n(*                                 ( \"t_Sized\", *)\n(*                                   [], *)\n(*                                   SSP.AST.NameTy (\"t_\" ^ strip x), *)\n(*                                   [ SSP.AST.NameTy (\"t_\" ^ strip x) ], *)\n(*                                   SSP.AST.TermDef *)\n(*                                     (SSP.AST.Lambda *)\n(*                                        ([ SSP.AST.Ident \"x\" ], SSP.AST.Var \"x\")) *)\n(*                                 ); *)\n(*                             ], *)\n(*                             [ *)\n(*                               SSP.AST.Explicit *)\n(*                                 ( SSP.AST.Ident \"ctx\", *)\n(*                                   
SSPExtraDefinitions.wrap_type_in_both \"L0\" *)\n(*                                     \"I0\" *)\n(*                                     (SSP.AST.NameTy (\"t_\" ^ strip x)) ); *)\n(*                             ], *)\n(*                             1, *)\n(*                             [ SSP.AST.Var \"ctx\" ] ) *)\n(*                       | _ -> ([], [], 0, []) *)\n(*                     in *)\n(*                     param_instances *)\n(*                     @ [ *)\n(*                         SSP.AST.Definition *)\n(*                           ( \"receive_\" ^ contract ^ \"_\" ^ name, *)\n(*                             Print.pgeneric span generics *)\n(*                             @ List.map *)\n(*                                 ~f:(fun x -> *)\n(*                                   SSP.AST.Implicit *)\n(*                                     ( SSP.AST.Ident x, *)\n(*                                       (SSP.AST.NameTy \"{fset Location}\" *)\n(*                                         : SSP.AST.ty) )) *)\n(*                                 (List.map *)\n(*                                    ~f:(fun i -> \"L\" ^ Int.to_string i) *)\n(*                                    (List.range 0 (count + 1))) *)\n(*                             @ List.map *)\n(*                                 ~f:(fun x -> *)\n(*                                   SSP.AST.Implicit *)\n(*                                     ( SSP.AST.Ident x, *)\n(*                                       (SSP.AST.NameTy \"Interface\" : SSP.AST.ty) *)\n(*                                     )) *)\n(*                                 (List.map *)\n(*                                    ~f:(fun i -> \"I\" ^ Int.to_string i) *)\n(*                                    (List.range 0 (count + 1))) *)\n(*                             @ param_list *)\n(*                             @ [ *)\n(*                                 SSP.AST.Explicit *)\n(*                                   ( SSP.AST.Ident \"st\", *)\n(*             
                        SSPExtraDefinitions.wrap_type_in_both *)\n(*                                       (\"L\" ^ Int.to_string count) *)\n(*                                       (\"I\" ^ Int.to_string count) *)\n(*                                       (SSP.AST.NameTy (\"state_\" ^ contract)) ); *)\n(*                                 (\\* TODO: L, I *\\) *)\n(*                               ], *)\n(*                             (\\* Arguments *\\) *)\n(*                             SSP.AST.App *)\n(*                               ( SSP.AST.Var (pconcrete_ident f_name) *)\n(*                                 (\\* contract *\\), *)\n(*                                 param_vars @ [ SSP.AST.Var \"st\" ] ), *)\n(*                             SSPExtraDefinitions.wrap_type_in_both \"_\" \"_\" *)\n(*                               (SSP.AST.NameTy *)\n(*                                  (\"t_Result ((v_A × state_\" ^ contract *)\n(*                                 ^ \")) (t_ParseError)\")) ); *)\n(*                         (\\* TODO: L , I *\\) *)\n(*                       ] *)\n(*                 | _ -> []) *)\n(*             | _ -> []) *)\n(*           e.attrs *)\n(*     | Type { name; variants = [ _ ]; is_struct = true; _ } -> *)\n(*         List.concat_map *)\n(*           ~f:(fun { kind; _ } -> *)\n(*             match kind with *)\n(*             | Tool { path; tokens } when String.equal path \"hax::contract_state\" *)\n(*               -> *)\n(*                 let token_list = token_list tokens in *)\n(*                 let contract = *)\n(*                   strip_or_error \"contract argument missing\" *)\n(*                     (get_argument \"contract\" token_list) *)\n(*                     e.span *)\n(*                 in *)\n(*                 [ *)\n(*                   SSP.AST.Definition *)\n(*                     ( \"state_\" ^ contract, *)\n(*                       [], *)\n(*                       SSP.AST.Var (pconcrete_ident name), *)\n(*                
       SSP.AST.TypeTy ); *)\n(*                 ] *)\n(*             | _ -> []) *)\n(*           e.attrs *)\n(*     | _ -> [] *)\n\n(*   let concert_contract_type_decls (items : item list) : SSP.AST.decl list list = *)\n(*     let contract_items = *)\n(*       List.filter_map *)\n(*         ~f:(function *)\n(*           | { kind = Tool { path; tokens }; _ } *)\n(*             when String.equal path \"hax::receive\" -> *)\n(*               let token_list = token_list tokens in *)\n(*               let contract = *)\n(*                 strip_or_error \"contract argument missing\" *)\n(*                   (get_argument \"contract\" token_list) *)\n(*                   (Span.dummy ()) *)\n(*                 (\\* TODO: carry span information *\\) *)\n(*               in *)\n(*               let name = *)\n(*                 strip_or_error \"name argument missing\" *)\n(*                   (get_argument \"name\" token_list) *)\n(*                   (Span.dummy ()) *)\n(*                 (\\* TODO: carry span information *\\) *)\n(*               in *)\n(*               let parameter = get_argument \"parameter\" token_list in *)\n(*               Some (contract, parameter, name) *)\n(*           | _ -> None) *)\n(*         (List.concat_map ~f:(fun x -> x.attrs) items) *)\n(*     in *)\n(*     if List.is_empty contract_items then [] *)\n(*     else *)\n(*       let contract_map = *)\n(*         List.fold_left *)\n(*           ~init:(Map.empty (module String)) *)\n(*           ~f:(fun y (x_name, x_parameter, x_item) -> *)\n(*             Map.set y ~key:x_name *)\n(*               ~data: *)\n(*                 (Option.value ~default:[] (Map.find y x_name) *)\n(*                 @ [ (x_parameter, x_item) ])) *)\n(*           contract_items *)\n(*       in *)\n(*       List.map *)\n(*         ~f:(fun contract -> *)\n(*           let receive_functions : (_ * string) list = *)\n(*             Option.value ~default:[] (Map.find contract_map contract) *)\n(*           in *)\n(*    
       [ *)\n(*             SSP.AST.Inductive *)\n(*               ( \"Msg_\" ^ contract, *)\n(*                 [], *)\n(*                 List.map *)\n(*                   ~f:(function *)\n(*                     | Some param, x_item -> *)\n(*                         SSP.AST.InductiveCase *)\n(*                           ( \"msg_\" ^ contract ^ \"_\" ^ x_item, *)\n(*                             SSP.AST.NameTy (\"t_\" ^ strip param) ) *)\n(*                     | None, x_item -> *)\n(*                         SSP.AST.BaseCase (\"msg_\" ^ contract ^ \"_\" ^ x_item)) *)\n(*                   receive_functions ); *)\n(*             SSP.AST.ProgramInstance *)\n(*               ( \"t_HasReceiveContext\", *)\n(*                 [], *)\n(*                 SSP.AST.NameTy (\"state_\" ^ contract), *)\n(*                 [ SSP.AST.NameTy (\"state_\" ^ contract); SSP.AST.Unit ], *)\n(*                 SSP.AST.InstanceDecls *)\n(*                   [ *)\n(*                     SSP.AST.InlineDef *)\n(*                       ( \"f_get\", *)\n(*                         [ *)\n(*                           SSP.AST.Explicit (SSP.AST.Ident \"Ctx\", SSP.AST.WildTy); *)\n(*                           SSP.AST.Implicit *)\n(*                             ( SSP.AST.Ident \"L\", *)\n(*                               (SSP.AST.NameTy \"{fset Location}\" : SSP.AST.ty) ); *)\n(*                           SSP.AST.Implicit *)\n(*                             ( SSP.AST.Ident \"I\", *)\n(*                               (SSP.AST.NameTy \"Interface\" : SSP.AST.ty) ); *)\n(*                         ], *)\n(*                         SSP.AST.Var *)\n(* \"(solve_lift (@ret_both (t_ParamType × t_Result Ctx \\ *)\n   (*                            t_ParseError)) (tt, inr tt))\", *)\n(*                         SSP.AST.WildTy ); *)\n(*                   ] ); *)\n(*             SSP.AST.ProgramInstance *)\n(*               ( \"t_Sized\", *)\n(*                 [], *)\n(*                 SSP.AST.NameTy (\"state_\" ^ 
contract), *)\n(*                 [ SSP.AST.NameTy (\"state_\" ^ contract) ], *)\n(*                 SSP.AST.TermDef *)\n(*                   (SSP.AST.Lambda ([ SSP.AST.Ident \"x\" ], SSP.AST.Var \"x\")) ); *)\n(*             SSP.AST.ProgramInstance *)\n(*               ( \"t_HasActions\", *)\n(*                 [], *)\n(*                 SSP.AST.NameTy (\"state_\" ^ contract), *)\n(*                 [ SSP.AST.NameTy (\"state_\" ^ contract) ], *)\n(*                 SSP.AST.TermDef (SSP.AST.Var \"Admitted\") ); *)\n(*             SSP.AST.Equations *)\n(*               ( \"receive_\" ^ contract, *)\n(*                 [ *)\n(*                   SSP.AST.Explicit *)\n(*                     (SSP.AST.Ident \"chain\", SSP.AST.NameTy \"Chain\"); *)\n(*                   SSP.AST.Explicit *)\n(*                     (SSP.AST.Ident \"ctx\", SSP.AST.NameTy \"ContractCallContext\"); *)\n(*                   SSP.AST.Explicit *)\n(*                     (SSP.AST.Ident \"st\", SSP.AST.NameTy (\"state_\" ^ contract)); *)\n(*                   SSP.AST.Explicit *)\n(*                     ( SSP.AST.Ident \"msg\", *)\n(*                       SSP.AST.NameTy (\"Datatypes.option Msg_\" ^ contract) ); *)\n(*                 ], *)\n(*                 SSP.AST.Match *)\n(*                   ( SSP.AST.Var \"msg\", *)\n(*                     List.map *)\n(*                       ~f:(function *)\n(*                         | Some _param, x_item -> *)\n(*                             ( SSP.AST.Ident *)\n(*                                 (\"Some\" ^ \" \" ^ \"(\" ^ \"msg_\" ^ contract ^ \"_\" *)\n(*                                ^ x_item ^ \" \" ^ \"val\" ^ \")\"), *)\n(*                               SSP.AST.Var *)\n(*                                 (\"match (is_pure (both_prog (receive_\" *)\n(*                                ^ contract ^ \"_\" ^ x_item *)\n(* ^ \" (ret_both val) (ret_both st)))) with\\n\\ *)\n   (*                                  \\         | inl x => ResultMonad.Ok ((fst 
x), \\ *)\n   (*                                   [])\\n\\ *)\n   (*                                  \\         | inr x => ResultMonad.Err x\\n\\ *)\n   (*                                  \\         end\") ) *)\n(*                         | None, x_item -> *)\n(*                             ( SSP.AST.Ident *)\n(*                                 (\"Some\" ^ \" \" ^ \"msg_\" ^ contract ^ \"_\" ^ x_item), *)\n(*                               SSP.AST.Var *)\n(*                                 (\"match (is_pure (both_prog (receive_\" *)\n(*                                ^ contract ^ \"_\" ^ x_item *)\n(* ^ \" (ret_both st)))) with\\n\\ *)\n   (*                                  \\         | inl x => ResultMonad.Ok ((fst x), \\ *)\n   (*                                   [])\\n\\ *)\n   (*                                  \\         | inr x => ResultMonad.Err x\\n\\ *)\n   (*                                  \\         end\") )) *)\n(*                       receive_functions *)\n(*                     @ [ (SSP.AST.WildPat, SSP.AST.Var \"ResultMonad.Err tt\") ] ), *)\n(*                 SSP.AST.NameTy *)\n(*                   (\"ResultMonad.result (state_\" ^ contract *)\n(*                  ^ \" * list ActionBody) t_ParseError\") ); *)\n(*             SSP.AST.ProgramInstance *)\n(*               ( \"Serializable\", *)\n(*                 [], *)\n(*                 SSP.AST.NameTy (\"state_\" ^ contract), *)\n(*                 [ SSP.AST.NameTy (\"state_\" ^ contract) ], *)\n(*                 SSP.AST.InstanceDecls [] ); *)\n(*             SSP.AST.ProgramInstance *)\n(*               ( \"Serializable\", *)\n(*                 [], *)\n(*                 SSP.AST.NameTy (\"Msg_\" ^ contract), *)\n(*                 [ SSP.AST.NameTy (\"Msg_\" ^ contract) ], *)\n(*                 SSP.AST.TermDef *)\n(*                   (SSP.AST.Var *)\n(*                      (\"Derive Serializable Msg_OVN_rect<\" *)\n(*                      ^ String.concat ~sep:\",\" *)\n(*               
           (List.map *)\n(*                             ~f:(fun x -> \"msg_\" ^ contract ^ \"_\" ^ snd x) *)\n(*                             receive_functions) *)\n(*                      ^ \">\")) ); *)\n(*             SSP.AST.Definition *)\n(*               ( \"contract_\" ^ contract, *)\n(*                 [], *)\n(*                 SSP.AST.App *)\n(*                   ( SSP.AST.Var \"build_contract\", *)\n(*                     [ *)\n(*                       SSP.AST.Var (\"init_\" ^ contract); *)\n(*                       SSP.AST.Var (\"receive_\" ^ contract); *)\n(*                     ] ), *)\n(*                 SSP.AST.AppTy *)\n(*                   ( SSP.AST.NameTy \"Contract\", *)\n(*                     [ *)\n(*                       SSP.AST.NameTy (\"state_\" ^ contract); *)\n(*                       SSP.AST.NameTy (\"Msg_\" ^ contract); *)\n(*                       SSP.AST.NameTy (\"state_\" ^ contract); *)\n(*                       SSP.AST.NameTy \"t_ParseError\"; *)\n(*                     ] ) ); *)\n(*           ]) *)\n(*         (Map.keys contract_map) *)\n\n(*   let concert_header = *)\n(*     [ *)\n(*       SSP.AST.Comment \"Concert lib part\"; *)\n(*       SSP.AST.Require (Some \"ConCert.Utils\", [ \"Extras\" ], None); *)\n(*       SSP.AST.Require (Some \"ConCert.Utils\", [ \"Automation\" ], None); *)\n(*       SSP.AST.Require (Some \"ConCert.Execution\", [ \"Serializable\" ], None); *)\n(*       SSP.AST.Require (Some \"ConCert.Execution\", [ \"Blockchain\" ], None); *)\n(*       SSP.AST.Require (Some \"ConCert.Execution\", [ \"ContractCommon\" ], None); *)\n(*       SSP.AST.Require (Some \"ConCert.Execution\", [ \"Serializable\" ], None); *)\n(*       SSP.AST.Require (None, [ \"ConCertLib\" ], None); *)\n(*     ] *)\n(* end *)\n\nlet process_annotation (x : 'a list) (f2 : ('b * ('a -> 'b)) list) : 'b list =\n  List.concat_map\n    ~f:(fun (d, f) ->\n      let temp = List.map ~f x in\n      if List.is_empty (List.concat temp) then [] else d :: 
temp)\n    f2\n\nlet string_of_items m (x, y) =\n  cleanup_item_strings\n    (List.map ~f:decls_to_string\n       (process_annotation x\n          [\n            ([], print_item m y);\n            (* ConCert.(concert_header, translate_concert_annotations y); *)\n          ]\n          (* @ ConCert.concert_contract_type_decls x *)))\n\n(* TODO move into string_of_items, as SSP.AST decl *)\nlet hardcoded_coq_headers =\n  \"(* File automatically generated by Hacspec *)\\n\\\n   Set Warnings \\\"-notation-overridden,-ambiguous-paths\\\".\\n\\\n   From Crypt Require Import choice_type Package Prelude.\\n\\\n   Import PackageNotation.\\n\\\n   From extructures Require Import ord fset.\\n\\\n   From mathcomp Require Import word_ssrZ word.\\n\\\n   (* From Jasmin Require Import word. *)\\n\\n\\\n   From Coq Require Import ZArith.\\n\\\n   From Coq Require Import Strings.String.\\n\\\n   Import List.ListNotations.\\n\\\n   Open Scope list_scope.\\n\\\n   Open Scope Z_scope.\\n\\\n   Open Scope bool_scope.\\n\\n\\\n   From Hacspec Require Import ChoiceEquality.\\n\\\n   From Hacspec Require Import LocationUtility.\\n\\\n   From Hacspec Require Import Hacspec_Lib_Comparable.\\n\\\n   From Hacspec Require Import Hacspec_Lib_Pre.\\n\\\n   From Hacspec Require Import Hacspec_Lib.\\n\\n\\\n   Open Scope hacspec_scope.\\n\\\n   Import choice.Choice.Exports.\\n\\n\\\n   From RecordUpdate Require Import RecordUpdate.\\n\\n\\\n   Import RecordSetNotations.\\n\\n\\\n   Obligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\\n\"\n\nlet translate m (_bo : BackendOptions.t) ~(bundles : AST.item list list)\n    (items : AST.item list) : Types.file list =\n  let analysis_data = StaticAnalysis.analyse items in\n  U.group_items_by_namespace items\n  |> Map.to_alist\n  |> List.filter_map ~f:(fun (_, items) ->\n         let* first_item = List.hd items in\n         Some ((RenderId.render first_item.ident).path, items))\n  |> List.map ~f:(fun (ns, items) ->\n         let mod_name 
=\n           String.concat ~sep:\"_\"\n             (List.map ~f:(map_first_letter String.uppercase) ns)\n         in\n         let file_content =\n           hardcoded_coq_headers ^ \"\\n\"\n           ^ string_of_items m (items, analysis_data)\n           ^ \"\\n\"\n         in\n         Types.\n           { path = mod_name ^ \".v\"; contents = file_content; sourcemap = None })\n\nlet apply_phases (_bo : BackendOptions.t) (i : Ast.Rust.item list) :\n    AST.item list =\n  TransformToInputLanguage.ditems i\n"
  },
  {
    "path": "engine/backends/coq/ssprove/ssprove_backend.mli",
    "content": "open Hax_engine.Backend\ninclude T with module BackendOptions = UnitBackendOptions\n"
  },
  {
    "path": "engine/backends/easycrypt/dune",
    "content": "(library\n (name easycrypt_backend)\n (package hax-engine)\n (libraries hax_engine)\n (preprocess\n  (pps\n   ppx_yojson_conv\n   ppx_sexp_conv\n   ppx_compare\n   ppx_hash\n   ppx_deriving.show\n   ppx_deriving.eq\n   ppx_inline\n   ppx_functor_application\n   ppx_matches)))\n\n(env\n (_\n  (flags\n   (:standard -w -A))))\n"
  },
  {
    "path": "engine/backends/easycrypt/easycrypt_backend.ml",
    "content": "(* -------------------------------------------------------------------- *)\nopen Hax_engine\nopen Base\n\n(* -------------------------------------------------------------------- *)\n\ninclude\n  Backend.Make\n    (struct\n      open Features\n      include Off\n      include On.Loop\n      include On.For_index_loop\n      include On.Mutable_variable\n      include On.Macro\n      include On.Construct_base\n    end)\n    (struct\n      let backend = Diagnostics.Backend.EasyCrypt\n    end)\n\nmodule BackendOptions = Backend.UnitBackendOptions\nmodule AST = Ast.Make (InputLanguage)\nmodule ECNamePolicy = Concrete_ident.DefaultNamePolicy\nmodule U = Ast_utils.Make (InputLanguage)\nmodule RenderId = Concrete_ident.MakeRenderAPI (ECNamePolicy)\nopen AST\n\nmodule RejectNotEC (FA : Features.T) = struct\n  module FB = InputLanguage\n\n  include\n    Feature_gate.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Feature_gate.DefaultSubtype\n\n        let mutable_variable _ _ = Features.On.mutable_variable\n        let loop _ _ = Features.On.loop\n        let continue = reject\n        let mutable_reference = reject\n        let mutable_pointer = reject\n        let reference = reject\n        let slice = reject\n        let raw_pointer = reject\n        let early_exit = reject\n        let question_mark = reject\n        let break = reject\n        let macro _ _ = Features.On.macro\n        let as_pattern = reject\n        let lifetime = reject\n        let monadic_action = reject\n        let monadic_binding = reject\n        let arbitrary_lhs = reject\n        let state_passing_loop = reject\n        let fold_like_loop = reject\n        let nontrivial_lhs = reject\n        let block = reject\n        let for_loop = reject\n        let while_loop = reject\n        let quote = reject\n        let dyn = reject\n        let match_guard = reject\n        let trait_item_default = reject\n        let unsafe = reject\n    
    let construct_base _ _ = Features.On.construct_base\n        let for_index_loop _ _ = Features.On.for_index_loop\n\n        let metadata =\n          Phase_utils.Metadata.make (Reject (NotInBackendLang EasyCrypt))\n      end)\nend\n\ntype nmtree = { subnms : (string, nmtree) Map.Poly.t; items : AST.item list }\n\nmodule NM = struct\n  let empty : nmtree = { subnms = Map.Poly.empty; items = [] }\n\n  let rec push_using_longname (the : nmtree) (nm : string list)\n      (item : AST.item) =\n    match nm with\n    | [] -> { the with items = the.items @ [ item ] }\n    | name :: nm ->\n        let update (subnm : nmtree option) =\n          let subnm = Option.value ~default:empty subnm in\n          push_using_longname subnm nm item\n        in\n\n        { the with subnms = Map.Poly.update ~f:update the.subnms name }\n\n  let push_using_namespace (the : nmtree) (nm : string list) (item : AST.item) =\n    push_using_longname the (List.rev nm) item\n\n  let push (the : nmtree) (item : AST.item) =\n    push_using_namespace the (RenderId.render item.ident).path item\nend\n\nlet suffix_of_size (size : Ast.size) =\n  match size with\n  | Ast.S8 -> \"8\"\n  | Ast.S16 -> \"16\"\n  | Ast.S32 -> \"32\"\n  | Ast.S64 -> \"64\"\n  | Ast.S128 -> \"128\"\n  | Ast.SSize -> \"P\"\n\nlet suffix_of_signedness (s : Ast.signedness) =\n  match s with Signed -> \"S\" | Unsigned -> \"U\"\n\nlet intmodule_of_kind (Ast.{ size; signedness } : Ast.int_kind) =\n  Stdlib.Format.sprintf \"W%s%s\"\n    (suffix_of_signedness signedness)\n    (suffix_of_size size)\n\nlet translate' (_bo : BackendOptions.t) (items : AST.item list) :\n    Types.file list =\n  let items = List.fold_left ~init:NM.empty ~f:NM.push items in\n\n  let rec doit (fmt : Formatter.t) (the : nmtree) =\n    the.subnms\n    |> Map.Poly.iteri ~f:(fun ~key ~data ->\n           Stdlib.Format.fprintf fmt \"theory %s.@.\" key;\n           doit fmt data;\n           Stdlib.Format.fprintf fmt \"end.@.\");\n\n    let doitems (fmt : 
Formatter.t) =\n      the.items\n      |> List.iter ~f:(fun item ->\n             match item.v with\n             | Fn { name; generics; body; params }\n               when List.is_empty generics.params ->\n                 let name = (RenderId.render name).name in\n\n                 doit_fn fmt (name, params, body)\n             | Fn _ -> assert false\n             | TyAlias _ -> assert false\n             | Type _ -> assert false\n             | Trait _ -> assert false\n             | Impl _ -> assert false\n             | HaxError _ -> ()\n             | IMacroInvokation _ -> ()\n             | Use _ -> ()\n             | Alias _ -> ()\n             | NotImplementedYet -> ()\n             | _ -> .)\n    in\n\n    if not (List.is_empty the.items) then\n      Stdlib.Format.fprintf fmt \"@[<v>module Procs = {@,  @[<v>%t@]@,}@]@,\"\n        doitems\n  and doit_fn (fmt : Formatter.t) (name, params, body) =\n    let pp_param (fmt : Formatter.t) (p : param) =\n      match p.pat.p with\n      | PBinding { var; typ; mode = ByValue; mut = Immutable; subpat = None } ->\n          Stdlib.Format.fprintf fmt \"%s : %a\" var.name doit_type typ\n      | _ -> assert false\n    in\n\n    Stdlib.Format.fprintf fmt \"@[<v>proc %s(%a) = {@,  @[<v>%a@]@,}@]@\\n@\\n\"\n      name\n      (Stdlib.Format.pp_print_list\n         ~pp_sep:(fun fmt () -> Stdlib.Format.fprintf fmt \", \")\n         pp_param)\n      params doit_stmt body\n  and doit_concrete_ident (fmt : Formatter.t) (p : Concrete_ident.t) =\n    Stdlib.Format.fprintf fmt \"%s\" (RenderId.render p).name\n  and doit_type (fmt : Formatter.t) (typ : ty) =\n    match typ with\n    | TBool -> assert false\n    | TChar -> assert false\n    | TInt kind -> Stdlib.Format.fprintf fmt \"%s.t\" (intmodule_of_kind kind)\n    | TFloat _ -> assert false\n    | TStr -> assert false\n    | TApp { ident = `Concrete ident; args = [] } ->\n        doit_concrete_ident fmt ident\n    | TApp { ident = `Concrete ident; args } ->\n        
Stdlib.Format.fprintf fmt \"(%a) %a\"\n          (Stdlib.Format.pp_print_list\n             ~pp_sep:(fun fmt () -> Stdlib.Format.fprintf fmt \", \")\n             doit_type_arg)\n          args doit_concrete_ident ident\n    | TApp _ -> assert false\n    | TArray _ -> assert false\n    | TParam _ -> assert false\n    | TArrow (_, _) -> assert false\n    | TAssociatedType _ -> assert false\n    | TOpaque _ -> assert false\n    | _ -> .\n  and doit_type_arg (fmt : Formatter.t) (tyarg : generic_value) =\n    match tyarg with GType ty -> doit_type fmt ty | _ -> assert false\n  and doit_stmt (fmt : Formatter.t) (expr : expr) =\n    let foo () =\n      Stdlib.Format.eprintf \"%a@.@.\" pp_expr expr;\n      assert false\n    in\n\n    match expr.e with\n    | If { cond; then_; else_ = None } ->\n        Stdlib.Format.fprintf fmt \"@[<v>if (%a) {@,  @[<v>%a@]@,}@]\" doit_expr\n          cond doit_stmt then_\n    | If _ -> assert false\n    | Let\n        {\n          lhs =\n            {\n              p =\n                PBinding\n                  {\n                    mut = _;\n                    mode = ByValue;\n                    var = { name; _ };\n                    subpat = None;\n                    _;\n                  };\n              _;\n            };\n          rhs;\n          body;\n          monadic = None;\n        } ->\n        Stdlib.Format.fprintf fmt \"%s <- %a;@,\" name doit_expr rhs;\n        Stdlib.Format.fprintf fmt \"%a\" doit_stmt body\n    | Let\n        {\n          lhs = { p = PWild; typ = TApp { ident = `TupleType 0; args = [] }; _ };\n          rhs;\n          body;\n          monadic = None;\n        } ->\n        Stdlib.Format.fprintf fmt \"%a@,\" doit_stmt rhs;\n        Stdlib.Format.fprintf fmt \"%a\" doit_stmt body\n    | Let _ -> foo ()\n    | Assign { lhs; e; _ } ->\n        Stdlib.Format.fprintf fmt \"%a <- %a;\" doit_lhs lhs doit_expr e\n    | Match _ -> foo ()\n    | Loop\n        {\n          body;\n          kind = 
ForIndexLoop { start; end_; var = { name; _ }; _ };\n          state = None;\n          _;\n        } ->\n        let _ = match start.typ with TInt kind -> kind | _ -> assert false in\n\n        Stdlib.Format.fprintf fmt \"%s <- %a;@,\" name doit_expr start;\n        Stdlib.Format.fprintf fmt \"@[<v>while (%s < %a) {@,  @[<v>%a%t@]@,}@]\"\n          name doit_expr end_ doit_stmt body (fun fmt ->\n            Stdlib.Format.fprintf fmt \"%s <- %s + 1;@,\" name name)\n    | Loop _ -> foo ()\n    | MacroInvokation _ -> foo ()\n    | GlobalVar (`TupleCons 0) -> ()\n    | Ascription _ | Array _ | Closure _ -> assert false\n    | App _ | Literal _ | Construct _ | LocalVar _ | GlobalVar _ ->\n        Stdlib.Format.fprintf fmt \"return %a;\" doit_expr expr\n    | _ -> .\n  and doit_lhs (fmt : Formatter.t) (lhs : lhs) =\n    match lhs with\n    | LhsFieldAccessor _ | LhsVecRef _ | LhsArrayAccessor { e = LhsVecRef _; _ }\n      ->\n        assert false\n    | LhsArrayAccessor\n        { e = LhsLocalVar { var = { name; _ }; _ }; index; typ = _; _ } ->\n        Stdlib.Format.fprintf fmt \"%s.[%a]\" name doit_expr index\n    | LhsLocalVar { var = { name; _ }; _ } ->\n        Stdlib.Format.fprintf fmt \"%s\" name\n    | _ -> .\n  and doit_expr (fmt : Formatter.t) (expr : expr) =\n    match expr.e with\n    | If _ -> assert false\n    | App { f = { e = GlobalVar ident; _ }; args = [ a; i ]; _ }\n      when Ast.Global_ident.eq_name Core__ops__index__Index__index ident ->\n        Stdlib.Format.fprintf fmt \"(%a).[%a]\" doit_expr a doit_expr i\n    | App { f = { e = GlobalVar (`Concrete op); _ }; args = [ e1; e2 ]; _ }\n      when Concrete_ident.(\n             eq_name Core__ops__bit__BitXor__bitxor op\n             || eq_name Core__ops__bit__BitAnd__bitand op\n             || eq_name Core__ops__bit__BitOr__bitor op\n             || eq_name Core__ops__arith__Add__add op\n             || eq_name Core__ops__arith__Mul__mul op\n             || eq_name Core__cmp__PartialEq__ne op\n      
       || eq_name Core__cmp__PartialEq__eq op) ->\n        Stdlib.Format.fprintf fmt \"(%a) %s (%a)\" doit_expr e1\n          (match (RenderId.render op).name with\n          | \"bitxor\" -> \"^\"\n          | \"bitand\" -> \"&\"\n          | \"bitor\" -> \"|\"\n          | \"add\" -> \"+\"\n          | \"mul\" -> \"*\"\n          | \"eq\" -> \"=\"\n          | \"ne\" -> \"<>\"\n          | _ -> assert false)\n          doit_expr e2\n    | App { f = { e = GlobalVar (`Concrete ident); _ }; args = []; _ } ->\n        Stdlib.Format.fprintf fmt \"%a\" doit_concrete_ident ident\n    | App { f = { e = GlobalVar (`Concrete ident); _ }; args; _ } ->\n        Stdlib.Format.fprintf fmt \"%a %a\" doit_concrete_ident ident\n          (Stdlib.Format.pp_print_list\n             ~pp_sep:(fun fmt () -> Stdlib.Format.fprintf fmt \" \")\n             (fun fmt e -> Stdlib.Format.fprintf fmt \"(%a)\" doit_expr e))\n          args\n    | App _ ->\n        Stdlib.Format.eprintf \"%a@.@.\" pp_expr expr;\n        assert false\n    | Literal (Int { value; kind; _ }) ->\n        Stdlib.Format.fprintf fmt \"%s.ofint %a\" (intmodule_of_kind kind)\n          String.pp value\n    | Literal _ -> assert false\n    | Array _ -> assert false\n    | Construct\n        {\n          constructor = `Concrete ident;\n          is_record = false;\n          is_struct = false;\n          base = None;\n          fields = _;\n        } ->\n        Stdlib.Format.eprintf \"%a.\" doit_concrete_ident ident\n    | Construct _ -> assert false\n    | Match _ -> assert false\n    | Let _ -> assert false\n    | LocalVar { name; _ } -> Stdlib.Format.fprintf fmt \"%s\" name\n    | GlobalVar _ -> assert false\n    | Ascription _ -> assert false\n    | MacroInvokation _ -> assert false\n    | Assign _ -> assert false\n    | Loop _ -> assert false\n    (* | ForLoop _ -> assert false *)\n    | Closure _ -> assert false\n    | _ -> .\n  in\n\n  doit Stdlib.Format.err_formatter items;\n  []\n\nlet translate _ (bo : 
BackendOptions.t) ~(bundles : AST.item list list)\n    (items : AST.item list) : Types.file list =\n  try translate' bo items\n  with Assert_failure (file, line, col) ->\n    Diagnostics.failure ~context:(Backend FStar) ~span:(Span.dummy ())\n      (AssertionFailure\n         {\n           details =\n             \"Assertion failed in \" ^ file ^ \":\" ^ Int.to_string line ^ \":\"\n             ^ Int.to_string col;\n         })\n\nopen Phase_utils\n\nmodule TransformToInputLanguage =\n  [%functor_application\n  Phases.Reject.RawOrMutPointer Features.Rust |> Phases.Reject.Unsafe\n  |> Phases.And_mut_defsite |> Phases.Reconstruct_asserts\n  |> Phases.Reconstruct_for_loops |> Phases.Direct_and_mut |> Phases.Drop_blocks\n  |> Phases.Reject.Continue |> Phases.Drop_references |> Phases.Bundle_cycles\n  |> Phases.Sort_items_namespace_wise |> RejectNotEC]\n\nlet apply_phases (_bo : BackendOptions.t) (items : Ast.Rust.item list) :\n    AST.item list =\n  TransformToInputLanguage.ditems items\n"
  },
  {
    "path": "engine/backends/easycrypt/easycrypt_backend.mli",
    "content": "open Hax_engine.Backend\ninclude T with module BackendOptions = UnitBackendOptions\n"
  },
  {
    "path": "engine/backends/fstar/dune",
    "content": "(library\n (name fstar_backend)\n (package hax-engine)\n (wrapped false)\n (libraries hax_engine base fstar_surface_ast hacspeclib_macro_parser)\n (preprocess\n  (pps\n   ppx_yojson_conv\n   ppx_sexp_conv\n   ppx_compare\n   ppx_hash\n   ppx_deriving.show\n   ppx_deriving.eq\n   ppx_inline\n   ppx_functor_application\n   ppx_matches)))\n\n(env\n (_\n  (flags\n   (:standard -w -A))))\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/.gitignore",
    "content": "_build\nresult\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/.ocamlformat-ignore",
    "content": "*\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_BaseTypes.ml",
    "content": "type char   = FStar_Char.char[@@deriving yojson,show]\ntype float  = Base.Float.t\ntype double = Base.Float.t\ntype byte   = Base.Int.t\ntype int8   = Stdint.Int8.t\ntype uint8  = Stdint.Uint8.t\ntype int16   = Stdint.Int16.t\ntype uint16  = Stdint.Uint16.t\ntype int32  = Stdint.Int32.t\ntype int64  = Stdint.Int64.t\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Char.ml",
    "content": "module UChar = BatUChar\ntype char = int[@@deriving yojson,show]\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Compiler_Effect.ml",
    "content": "let op_Bar_Greater (x : 'a) (f : ('a -> 'b)) : 'b = f x\nlet op_Less_Bar  (f : ('a -> 'b)) (x : 'a) : 'b = f x\n\ntype 'a ref' = 'a ref[@@deriving yojson,show]\ntype 'a ref = 'a ref'[@@deriving yojson,show]\n\nlet op_Bang (r:'a ref) = !r\nlet op_Colon_Equals x y = x := y\nlet alloc x = ref x\nlet raise = raise\nlet exit i = exit (Z.to_int i)\nlet try_with f1 f2 = try f1 () with | e -> f2 e\nexception Failure = Failure\nlet failwith x = raise (Failure x)\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Compiler_List.ml",
    "content": "(* We give an implementation here using OCaml's BatList,\n   which provides tail-recursive versions of most functions *)\ninclude FStar_List\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Compiler_Range.ml",
    "content": "open Prims\ntype file_name = Prims.string[@@deriving yojson,show]\ntype pos = {\n  line: Prims.int ;\n  col: Prims.int }[@@deriving yojson,show,yojson,show]\nlet (__proj__Mkpos__item__line : pos -> Prims.int) =\n  fun projectee -> match projectee with | { line; col;_} -> line\nlet (__proj__Mkpos__item__col : pos -> Prims.int) =\n  fun projectee -> match projectee with | { line; col;_} -> col\nlet (max : Prims.int -> Prims.int -> Prims.int) =\n  fun i -> fun j -> if i < j then j else i\nlet (pos_geq : pos -> pos -> Prims.bool) =\n  fun p1 ->\n    fun p2 ->\n      (p1.line > p2.line) || ((p1.line = p2.line) && (p1.col >= p2.col))\ntype rng = {\n  file_name: file_name ;\n  start_pos: pos ;\n  end_pos: pos }[@@deriving yojson,show,yojson,show]\nlet (__proj__Mkrng__item__file_name : rng -> file_name) =\n  fun projectee ->\n    match projectee with\n    | { file_name = file_name1; start_pos; end_pos;_} -> file_name1\nlet (__proj__Mkrng__item__start_pos : rng -> pos) =\n  fun projectee ->\n    match projectee with\n    | { file_name = file_name1; start_pos; end_pos;_} -> start_pos\nlet (__proj__Mkrng__item__end_pos : rng -> pos) =\n  fun projectee ->\n    match projectee with\n    | { file_name = file_name1; start_pos; end_pos;_} -> end_pos\ntype range = {\n  def_range: rng ;\n  use_range: rng }[@@deriving yojson,show,yojson,show]\nlet (__proj__Mkrange__item__def_range : range -> rng) =\n  fun projectee ->\n    match projectee with | { def_range; use_range;_} -> def_range\nlet (__proj__Mkrange__item__use_range : range -> rng) =\n  fun projectee ->\n    match projectee with | { def_range; use_range;_} -> use_range\nlet (dummy_pos : pos) = { line = Prims.int_zero; col = Prims.int_zero }\nlet (dummy_rng : rng) =\n  { file_name = \" dummy\"; start_pos = dummy_pos; end_pos = dummy_pos }\nlet (dummyRange : range) = { def_range = dummy_rng; use_range = dummy_rng }\nlet (use_range : range -> rng) = fun r -> r.use_range\nlet (def_range : range -> rng) = fun r -> 
r.def_range\nlet (range_of_rng : rng -> rng -> range) =\n  fun d -> fun u -> { def_range = d; use_range = u }\nlet (set_use_range : range -> rng -> range) =\n  fun r2 ->\n    fun use_rng ->\n      if use_rng <> dummy_rng\n      then { def_range = (r2.def_range); use_range = use_rng }\n      else r2\nlet (set_def_range : range -> rng -> range) =\n  fun r2 ->\n    fun def_rng ->\n      if def_rng <> dummy_rng\n      then { def_range = def_rng; use_range = (r2.use_range) }\n      else r2\nlet (mk_pos : Prims.int -> Prims.int -> pos) =\n  fun l ->\n    fun c -> { line = (max Prims.int_zero l); col = (max Prims.int_zero c) }\nlet (mk_rng : file_name -> pos -> pos -> rng) =\n  fun file_name1 ->\n    fun start_pos ->\n      fun end_pos -> { file_name = file_name1; start_pos; end_pos }\nlet (mk_range : Prims.string -> pos -> pos -> range) =\n  fun f -> fun b -> fun e -> let r = mk_rng f b e in range_of_rng r r\nlet (union_rng : rng -> rng -> rng) =\n  fun r1 ->\n    fun r2 ->\n      if r1.file_name <> r2.file_name\n      then r2\n      else\n        (let start_pos =\n           if pos_geq r1.start_pos r2.start_pos\n           then r2.start_pos\n           else r1.start_pos in\n         let end_pos =\n           if pos_geq r1.end_pos r2.end_pos then r1.end_pos else r2.end_pos in\n         mk_rng r1.file_name start_pos end_pos)\nlet (union_ranges : range -> range -> range) =\n  fun r1 ->\n    fun r2 ->\n      let uu___ = union_rng r1.def_range r2.def_range in\n      let uu___1 = union_rng r1.use_range r2.use_range in\n      { def_range = uu___; use_range = uu___1 }\nlet (rng_included : rng -> rng -> Prims.bool) =\n  fun r1 ->\n    fun r2 ->\n      if r1.file_name <> r2.file_name\n      then false\n      else\n        (pos_geq r1.start_pos r2.start_pos) &&\n          (pos_geq r2.end_pos r1.end_pos)\nlet (string_of_pos : pos -> Prims.string) =\n  fun pos1 ->\n    let uu___ = FStar_Compiler_Util.string_of_int pos1.line in\n    let uu___1 = FStar_Compiler_Util.string_of_int 
pos1.col in\n    FStar_Compiler_Util.format2 \"%s,%s\" uu___ uu___1\nlet (string_of_file_name : Prims.string -> Prims.string) =\n  fun f ->\n    f\nlet (file_of_range : range -> Prims.string) =\n  fun r -> let f = (r.def_range).file_name in string_of_file_name f\nlet (set_file_of_range : range -> Prims.string -> range) =\n  fun r ->\n    fun f ->\n      {\n        def_range =\n          (let uu___ = r.def_range in\n           {\n             file_name = f;\n             start_pos = (uu___.start_pos);\n             end_pos = (uu___.end_pos)\n           });\n        use_range = (r.use_range)\n      }\nlet (string_of_rng : rng -> Prims.string) =\n  fun r ->\n    let uu___ = string_of_file_name r.file_name in\n    let uu___1 = string_of_pos r.start_pos in\n    let uu___2 = string_of_pos r.end_pos in\n    FStar_Compiler_Util.format3 \"%s(%s-%s)\" uu___ uu___1 uu___2\nlet (string_of_def_range : range -> Prims.string) =\n  fun r -> string_of_rng r.def_range\nlet (string_of_use_range : range -> Prims.string) =\n  fun r -> string_of_rng r.use_range\nlet (string_of_range : range -> Prims.string) =\n  fun r -> string_of_def_range r\nlet (start_of_range : range -> pos) = fun r -> (r.def_range).start_pos\nlet (end_of_range : range -> pos) = fun r -> (r.def_range).end_pos\nlet (file_of_use_range : range -> Prims.string) =\n  fun r -> (r.use_range).file_name\nlet (start_of_use_range : range -> pos) = fun r -> (r.use_range).start_pos\nlet (end_of_use_range : range -> pos) = fun r -> (r.use_range).end_pos\nlet (line_of_pos : pos -> Prims.int) = fun p -> p.line\nlet (col_of_pos : pos -> Prims.int) = fun p -> p.col\nlet (end_range : range -> range) =\n  fun r ->\n    mk_range (r.def_range).file_name (r.def_range).end_pos\n      (r.def_range).end_pos\nlet (compare_rng : rng -> rng -> Prims.int) =\n  fun r1 ->\n    fun r2 ->\n      let fcomp = FStar_String.compare r1.file_name r2.file_name in\n      if fcomp = Prims.int_zero\n      then\n        let start1 = r1.start_pos in\n        
let start2 = r2.start_pos in\n        let lcomp = start1.line - start2.line in\n        (if lcomp = Prims.int_zero then start1.col - start2.col else lcomp)\n      else fcomp\nlet (compare : range -> range -> Prims.int) =\n  fun r1 -> fun r2 -> compare_rng r1.def_range r2.def_range\nlet (compare_use_range : range -> range -> Prims.int) =\n  fun r1 -> fun r2 -> compare_rng r1.use_range r2.use_range\nlet (range_before_pos : range -> pos -> Prims.bool) =\n  fun m1 -> fun p -> let uu___ = end_of_range m1 in pos_geq p uu___\nlet (end_of_line : pos -> pos) =\n  fun p -> { line = (p.line); col = FStar_Compiler_Util.max_int }\nlet (extend_to_end_of_line : range -> range) =\n  fun r ->\n    let uu___ = file_of_range r in\n    let uu___1 = start_of_range r in\n    let uu___2 = let uu___3 = end_of_range r in end_of_line uu___3 in\n    mk_range uu___ uu___1 uu___2\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Compiler_Util.ml",
    "content": "let ensure_decimal s = Z.to_string (Z.of_string s)\n\n\nlet max_int = Z.of_int max_int\nlet is_letter c = if c > 255 then false else BatChar.is_letter (BatChar.chr c)\nlet is_digit  c = if c > 255 then false else BatChar.is_digit  (BatChar.chr c)\nlet is_letter_or_digit c = is_letter c || is_digit c\nlet is_symbol c = if c > 255 then false else BatChar.is_symbol (BatChar.chr c)\n\n(* Modeled after: Char.IsPunctuation in .NET\n   (http://www.dotnetperls.com/char-ispunctuation)\n*)\nlet is_punctuation c = List.mem c [33; 34; 35; 37; 38; 39; 40; 41; 42; 44; 45; 46; 47; 58; 59; 63; 64; 91; 92; 93; 95; 123; 125]\n(*'!','\"','#','%','&','\\'','(',')','*',',','-','.','/',':',';','?','@','[','\\\\',']','_','{','}'*)\n\nlet return_all x = x\n\nlet get_file_last_modification_time f = (BatUnix.stat f).BatUnix.st_mtime\nlet is_before t1 t2 = compare t1 t2 < 0\nlet string_of_time = string_of_float\n\nexception Impos\n\nlet cur_sigint_handler : Sys.signal_behavior ref =\n  ref Sys.Signal_default\n\nexception SigInt\ntype sigint_handler = Sys.signal_behavior\n\nlet sigint_ignore: sigint_handler =\n  Sys.Signal_ignore\n\nlet sigint_delay = ref 0\nlet sigint_pending = ref false\n\nlet raise_sigint _ =\n  sigint_pending := false;\n  raise SigInt\n\nlet raise_sigint_maybe_delay _ =\n  (* This function should not do anything complicated, lest it cause deadlocks.\n   * Calling print_string, for example, can cause a deadlock (print_string →\n   * caml_flush → process_pending_signals → caml_execute_signal → raise_sigint →\n   * print_string → caml_io_mutex_lock ⇒ deadlock) *)\n  if !sigint_delay = 0\n  then raise_sigint ()\n  else sigint_pending := true\n\nlet sigint_raise: sigint_handler =\n  Sys.Signal_handle raise_sigint_maybe_delay\n\nlet set_sigint_handler sigint_handler =\n  cur_sigint_handler := sigint_handler;\n  Sys.set_signal Sys.sigint !cur_sigint_handler\n\nlet with_sigint_handler handler f =\n  let original_handler = !cur_sigint_handler in\n  
BatPervasives.finally\n    (fun () -> Sys.set_signal Sys.sigint original_handler)\n    (fun () -> set_sigint_handler handler; f ())\n    ()\n\nlet get_file_extension (fn:string) : string = snd (BatString.rsplit fn \".\")\nlet is_path_absolute path_str =\n  let open Batteries.Incubator in\n  let open BatPathGen.OfString in\n  let path_str' = of_string path_str in\n  is_absolute path_str'\nlet join_paths path_str0 path_str1 =\n  let open Batteries.Incubator in\n  let open BatPathGen.OfString in\n  let open BatPathGen.OfString.Operators in\n  to_string ((of_string path_str0) //@ (of_string path_str1))\n\nlet normalize_file_path (path_str:string) =\n  let open Batteries.Incubator in\n  let open BatPathGen.OfString in\n  let open BatPathGen.OfString.Operators in\n  to_string\n    (normalize_in_tree\n       (let path = of_string path_str in\n         if is_absolute path then\n           path\n         else\n           let pwd = of_string (BatSys.getcwd ()) in\n           pwd //@ path))\n\ntype stream_reader = BatIO.input\nlet open_stdin () = BatIO.stdin\nlet read_line s =\n  try\n    Some (BatIO.read_line s)\n  with\n    _ -> None\nlet nread (s:stream_reader) (n:Z.t) =\n  try\n    Some (BatIO.nread s (Z.to_int n))\n  with\n    _ -> None\n\nlet poll_stdin (f:float) =\n    try \n      let ready_fds, _, _ = Unix.select [Unix.stdin] [] [] f in\n      match ready_fds with\n      | [] -> false\n      | _ -> true\n    with\n    | _ -> false\n\ntype string_builder = BatBuffer.t\nlet new_string_builder () = BatBuffer.create 256\nlet clear_string_builder b = BatBuffer.clear b\nlet string_of_string_builder b = BatBuffer.contents b\nlet string_builder_append b s = BatBuffer.add_string b s\n\nlet message_of_exn (e:exn) = Printexc.to_string e\nlet trace_of_exn (e:exn) = Printexc.get_backtrace ()\n\ntype 'a set = ('a list) * ('a -> 'a -> bool)\n[@@deriving show]\nlet set_to_yojson _ _ = `Null\nlet set_of_yojson _ _ = failwith \"cannot readback\"\n\nlet set_is_empty ((s, _):'a set) =\n  
match s with\n  | [] -> true\n  | _ -> false\n\nlet as_set (l:'a list) (cmp:('a -> 'a -> Z.t)) = (l, fun x y -> cmp x y = Z.zero)\nlet new_set (cmp:'a -> 'a -> Z.t) : 'a set = as_set [] cmp\n\nlet set_elements ((s1, eq):'a set) : 'a list =\n  let rec aux out = function\n    | [] -> BatList.rev_append out []\n    | hd::tl ->\n       if BatList.exists (eq hd) out then\n         aux out tl\n       else\n         aux (hd::out) tl in\n  aux [] s1\n\nlet set_add a ((s, b):'a set) = (s@[a], b)\nlet set_remove x ((s1, eq):'a set) =\n  (BatList.filter (fun y -> not (eq x y)) s1, eq)\nlet set_mem a ((s, b):'a set) = BatList.exists (b a) s\nlet set_union ((s1, b):'a set) ((s2, _):'a set) = (s1@s2, b)\nlet set_intersect ((s1, eq):'a set) ((s2, _):'a set) =\n  (BatList.filter (fun y -> BatList.exists (eq y) s2) s1, eq)\nlet set_is_subset_of ((s1, eq):'a set) ((s2, _):'a set) =\n  BatList.for_all (fun y -> BatList.exists (eq y) s2) s1\nlet set_count ((s1, _):'a set) = Z.of_int (BatList.length s1)\nlet set_difference ((s1, eq):'a set) ((s2, _):'a set) : 'a set =\n  (BatList.filter (fun y -> not (BatList.exists (eq y) s2)) s1, eq)\nlet set_symmetric_difference ((s1, eq):'a set) ((s2, _):'a set) : 'a set =\n  set_union (set_difference (s1, eq) (s2, eq))\n            (set_difference (s2, eq) (s1, eq))\nlet set_eq ((s1, eq):'a set) ((s2, _):'a set) : bool =\n  set_is_empty (set_symmetric_difference (s1, eq) (s2, eq))\n\n(* module StringOps = *)\n(*   struct *)\n(*     type t = string *)\n(*     let equal (x:t) (y:t) = x=y *)\n(*     let compare (x:t) (y:t) = BatString.compare x y *)\n(*     let hash (x:t) = BatHashtbl.hash x *)\n(*   end *)\n\n(* module StringHashtbl = BatHashtbl.Make(StringOps) *)\n(* module StringMap = BatMap.Make(StringOps) *)\n\n(* type 'value smap = 'value StringHashtbl.t *)\n(* let smap_create (i:Z.t) : 'value smap = StringHashtbl.create (Z.to_int i) *)\n(* let smap_clear (s:('value smap)) = StringHashtbl.clear s *)\n(* let smap_add (m:'value smap) k (v:'value) 
= StringHashtbl.replace m k v *)\n(* let smap_of_list (l: (string * 'value) list) = *)\n(*   let s = StringHashtbl.create (BatList.length l) in *)\n(*   FStar_List.iter (fun (x,y) -> smap_add s x y) l; *)\n(*   s *)\n(* let smap_try_find (m:'value smap) k = StringHashtbl.find_option m k *)\n(* let smap_fold (m:'value smap) f a = StringHashtbl.fold f m a *)\n(* let smap_remove (m:'value smap) k = StringHashtbl.remove m k *)\n(* let smap_keys (m:'value smap) = smap_fold m (fun k _ acc -> k::acc) [] *)\n(* let smap_copy (m:'value smap) = StringHashtbl.copy m *)\n(* let smap_size (m:'value smap) = StringHashtbl.length m *)\n(* let smap_iter (m:'value smap) f = StringHashtbl.iter f m *)\n\n(* exception PSMap_Found *)\n(* type 'value psmap = 'value StringMap.t *)\n(* let psmap_empty (_: unit) : 'value psmap = StringMap.empty *)\n(* let psmap_add (map: 'value psmap) (key: string) (value: 'value) = StringMap.add key value map *)\n(* let psmap_find_default (map: 'value psmap) (key: string) (dflt: 'value) = *)\n(*   StringMap.find_default dflt key map *)\n(* let psmap_try_find (map: 'value psmap) (key: string) = *)\n(*   StringMap.Exceptionless.find key map *)\n(* let psmap_fold (m:'value psmap) f a = StringMap.fold f m a *)\n(* let psmap_find_map (m:'value psmap) f = *)\n(*   let res = ref None in *)\n(*   let upd k v = *)\n(*     let r = f k v in *)\n(*     if r <> None then (res := r; raise PSMap_Found) in *)\n(*   (try StringMap.iter upd m with PSMap_Found -> ()); *)\n(*   !res *)\n(* let psmap_modify (m: 'value psmap) (k: string) (upd: 'value option -> 'value) = *)\n(*   StringMap.modify_opt k (fun vopt -> Some (upd vopt)) m *)\n\n(* let psmap_merge (m1: 'value psmap) (m2: 'value psmap) : 'value psmap = *)\n(*   psmap_fold m1 (fun k v m -> psmap_add m k v) m2 *)\n\n(* module ZHashtbl = BatHashtbl.Make(Z) *)\n(* module ZMap = BatMap.Make(Z) *)\n\n(* type 'value imap = 'value ZHashtbl.t *)\n(* let imap_create (i:Z.t) : 'value imap = ZHashtbl.create (Z.to_int i) *)\n(* let 
imap_clear (s:('value imap)) = ZHashtbl.clear s *)\n(* let imap_add (m:'value imap) k (v:'value) = ZHashtbl.replace m k v *)\n(* let imap_of_list (l: (Z.t * 'value) list) = *)\n(*   let s = ZHashtbl.create (BatList.length l) in *)\n(*   FStar_List.iter (fun (x,y) -> imap_add s x y) l; *)\n(*   s *)\n(* let imap_try_find (m:'value imap) k = ZHashtbl.find_option m k *)\n(* let imap_fold (m:'value imap) f a = ZHashtbl.fold f m a *)\n(* let imap_remove (m:'value imap) k = ZHashtbl.remove m k *)\n(* let imap_keys (m:'value imap) = imap_fold m (fun k _ acc -> k::acc) [] *)\n(* let imap_copy (m:'value imap) = ZHashtbl.copy m *)\n\n(* type 'value pimap = 'value ZMap.t *)\n(* let pimap_empty (_: unit) : 'value pimap = ZMap.empty *)\n(* let pimap_add (map: 'value pimap) (key: Z.t) (value: 'value) = ZMap.add key value map *)\n(* let pimap_find_default (map: 'value pimap) (key: Z.t) (dflt: 'value) = *)\n(*   ZMap.find_default dflt key map *)\n(* let pimap_try_find (map: 'value pimap) (key: Z.t) = *)\n(*   ZMap.Exceptionless.find key map *)\n(* let pimap_fold (m:'value pimap) f a = ZMap.fold f m a *)\n\n(* restore pre-2.11 BatString.nsplit behavior,\n   see https://github.com/ocaml-batteries-team/batteries-included/issues/845 *)\nlet batstring_nsplit s t =\n  if s = \"\" then [] else BatString.split_on_string t s\n\nlet format (fmt:string) (args:string list) =\n  let frags = batstring_nsplit fmt \"%s\" in\n  if BatList.length frags <> BatList.length args + 1 then\n    failwith (\"Not enough arguments to format string \" ^fmt^ \" : expected \" ^ (Stdlib.string_of_int (BatList.length frags)) ^ \" got [\" ^ (BatString.concat \", \" args) ^ \"] frags are [\" ^ (BatString.concat \", \" frags) ^ \"]\")\n  else\n    let args = args@[\"\"] in\n    BatList.fold_left2 (fun out frag arg -> out ^ frag ^ arg) \"\" frags args\n\nlet format1 f a = format f [a]\nlet format2 f a b = format f [a;b]\nlet format3 f a b c = format f [a;b;c]\nlet format4 f a b c d = format f [a;b;c;d]\nlet format5 f 
a b c d e = format f [a;b;c;d;e]\nlet format6 f a b c d e g = format f [a;b;c;d;e;g]\n\nlet flush_stdout () = flush stdout\n\nlet stdout_isatty () = Some (Unix.isatty Unix.stdout)\n\nlet colorize s colors =\n  match colors with\n  | (c1,c2) ->\n     match stdout_isatty () with\n     | Some true -> format3 \"%s%s%s\" c1 s c2\n     | _ -> s\n\nlet colorize_bold s =\n  match stdout_isatty () with\n  | Some true -> format3 \"%s%s%s\" \"\\x1b[39;1m\" s \"\\x1b[0m\"\n  | _ -> s\n\nlet colorize_red s =\n  match stdout_isatty () with\n  | Some true -> format3 \"%s%s%s\" \"\\x1b[31;1m\" s \"\\x1b[0m\"\n  | _ -> s\n\nlet colorize_cyan s =\n  match stdout_isatty () with\n  | Some true -> format3 \"%s%s%s\" \"\\x1b[36;1m\" s \"\\x1b[0m\"\n  | _ -> s\n\nlet pr  = Printf.printf\nlet spr = Printf.sprintf\nlet fpr = Printf.fprintf\n\ntype json =\n| JsonNull\n| JsonBool of bool\n| JsonInt of Z.t\n| JsonStr of string\n| JsonList of json list\n| JsonAssoc of (string * json) list\n\ntype printer = {\n  printer_prinfo: string -> unit;\n  printer_prwarning: string -> unit;\n  printer_prerror: string -> unit;\n  printer_prgeneric: string -> (unit -> string) -> (unit -> json) -> unit\n}\n\nlet default_printer =\n  { printer_prinfo = (fun s -> pr \"%s\" s; flush stdout);\n    printer_prwarning = (fun s -> fpr stderr \"%s\" (colorize_cyan s); flush stdout; flush stderr);\n    printer_prerror = (fun s -> fpr stderr \"%s\" (colorize_red s); flush stdout; flush stderr);\n    printer_prgeneric = fun label get_string get_json -> pr \"%s: %s\" label (get_string ())}\n\nlet current_printer = ref default_printer\nlet set_printer printer = current_printer := printer\n\nlet print_raw s = set_binary_mode_out stdout true; pr \"%s\" s; flush stdout\nlet print_string s = (!current_printer).printer_prinfo s\nlet print_generic label to_string to_json a = (!current_printer).printer_prgeneric label (fun () -> to_string a) (fun () -> to_json a)\nlet print_any s = (!current_printer).printer_prinfo 
(Marshal.to_string s [])\nlet strcat s1 s2 = s1 ^ s2\nlet concat_l sep (l:string list) = BatString.concat sep l\n\nlet string_of_unicode (bytes:int array) =\n  BatArray.fold_left (fun acc b -> acc^(BatUTF8.init 1 (fun _ -> BatUChar.of_int b))) \"\" bytes\nlet unicode_of_string (string:string) =\n  let n = BatUTF8.length string in\n  let t = Array.make n 0 in\n  let i = ref 0 in\n  BatUTF8.iter (fun c -> t.(!i) <- BatUChar.code c; incr i) string;\n  t\nlet base64_encode s = BatBase64.str_encode s\nlet base64_decode s = BatBase64.str_decode s\nlet char_of_int i = Z.to_int i\nlet int_of_string = Z.of_string\nlet safe_int_of_string x = try Some (int_of_string x) with Invalid_argument _ -> None\nlet int_of_char x = Z.of_int x\nlet int_of_byte x = x\nlet int_of_uint8 x = Z.of_int (Char.code x)\nlet uint16_of_int i = Z.to_int i\nlet byte_of_char c = c\n\nlet float_of_string s = float_of_string s\nlet float_of_byte b = float_of_int (Char.code b)\nlet float_of_int32 = float_of_int\nlet float_of_int64 = BatInt64.to_float\n\nlet int_of_int32 i = i\nlet int32_of_int i = BatInt32.of_int i\n\nlet string_of_int = Z.to_string\nlet string_of_bool = string_of_bool\nlet string_of_int32 = BatInt32.to_string\nlet string_of_int64 = BatInt64.to_string\nlet string_of_float = string_of_float\nlet string_of_char i = BatUTF8.init 1 (fun _ -> BatUChar.chr i)\nlet hex_string_of_byte (i:int) =\n  let hs = spr \"%x\" i in\n  if (String.length hs = 1) then \"0\" ^ hs\n  else hs\nlet string_of_bytes = string_of_unicode\nlet bytes_of_string = unicode_of_string\nlet starts_with = BatString.starts_with\nlet trim_string = BatString.trim\nlet ends_with = BatString.ends_with\nlet char_at s index = BatUChar.code (BatUTF8.get s (Z.to_int index))\nlet is_upper c = 65 <= c && c <= 90\nlet contains (s1:string) (s2:string) = BatString.exists s1 s2\nlet substring_from s index = BatString.tail s (Z.to_int index)\nlet substring s i j = BatString.sub s (Z.to_int i) (Z.to_int j)\nlet replace_char (s:string) c1 c2 
=\n  let c1, c2 = BatUChar.chr c1, BatUChar.chr c2 in\n  BatUTF8.map (fun x -> if x = c1 then c2 else x) s\nlet replace_chars (s:string) c (by:string) =\n  BatString.replace_chars (fun x -> if x = Char.chr c then by else BatString.of_char x) s\n(* let hashcode s = Z.of_int (StringOps.hash s) *)\nlet compare s1 s2 = Z.of_int (BatString.compare s1 s2)\nlet split s sep = BatString.split_on_string sep s\nlet splitlines s = split s \"\\n\"\n\nlet iof = int_of_float\nlet foi = float_of_int\n\nlet print1 a b = print_string (format1 a b)\nlet print2 a b c = print_string (format2 a b c)\nlet print3 a b c d = print_string (format3 a b c d)\nlet print4 a b c d e = print_string (format4 a b c d e)\nlet print5 a b c d e f = print_string (format5 a b c d e f)\nlet print6 a b c d e f g = print_string (format6 a b c d e f g)\nlet print fmt args = print_string (format fmt args)\n\nlet print_error s = (!current_printer).printer_prerror s\nlet print1_error a b = print_error (format1 a b)\nlet print2_error a b c = print_error (format2 a b c)\nlet print3_error a b c d = print_error (format3 a b c d)\n\nlet print_warning s = (!current_printer).printer_prwarning s\nlet print1_warning a b = print_warning (format1 a b)\nlet print2_warning a b c = print_warning (format2 a b c)\nlet print3_warning a b c d = print_warning (format3 a b c d)\n\nlet stderr = stderr\nlet stdout = stdout\n\nlet fprint oc fmt args = Printf.fprintf oc \"%s\" (format fmt args)\n\n[@@deriving yojson,show]\n\nlet is_left = function\n  | FStar_Pervasives.Inl _ -> true\n  | _ -> false\n\nlet is_right = function\n  | FStar_Pervasives.Inr _ -> true\n  | _ -> false\n\nlet left = function\n  | FStar_Pervasives.Inl x -> x\n  | _ -> failwith \"Not in left\"\nlet right = function\n  | FStar_Pervasives.Inr x -> x\n  | _ -> failwith \"Not in right\"\n\nlet (-<-) f g x = f (g x)\n\nlet find_dup f l =\n  let rec aux = function\n    | hd::tl ->\n       let hds, tl' = BatList.partition (f hd) tl in\n       (match hds with\n        | 
[] -> aux tl'\n        | _ -> Some hd)\n    | _ -> None in\n  aux l\n\nlet nodups f l = match find_dup f l with | None -> true | _ -> false\n\nlet remove_dups f l =\n  let rec aux out = function\n    | hd::tl -> let _, tl' = BatList.partition (f hd) tl in aux (hd::out) tl'\n    | _ -> out in\n  aux [] l\n\nlet must = function\n  | Some x -> x\n  | None -> failwith \"Empty option\"\n\nlet dflt x = function\n  | None   -> x\n  | Some x -> x\n\nlet bind_opt opt f =\n  match opt with\n  | None -> None\n  | Some x -> f x\n\nlet map_opt opt f =\n  match opt with\n  | None -> None\n  | Some x -> Some (f x)\n\nlet try_find f l = BatList.find_opt f l\n\nlet for_all f l = BatList.for_all f l\nlet for_some f l = BatList.exists f l\n\nlet first_N n l =\n  let n = Z.to_int n in\n  let rec f acc i l =\n    if i = n then BatList.rev acc,l else\n      match l with\n      | h::tl -> f (h::acc) (i+1) tl\n      | _     -> failwith \"firstN\"\n  in\n  f [] 0 l\n\nlet nth_tail n l =\n  let rec aux n l =\n    if n=0 then l else aux (n - 1) (BatList.tl l)\n  in\n  aux (Z.to_int n) l\n\nlet prefix l =\n  match BatList.rev l with\n  | hd::tl -> BatList.rev tl, hd\n  | _ -> failwith \"impossible\"\n\nlet mk_ref a = ref a\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Const.ml",
    "content": "open Prims\ntype signedness =\n  | Unsigned \n  | Signed [@@deriving yojson,show]\ntype width =\n  | Int8 \n  | Int16 \n  | Int32 \n  | Int64 \n  | Sizet [@@deriving yojson,show]\ntype sconst =\n  | Const_effect \n  | Const_unit \n  | Const_bool of Prims.bool \n  | Const_int of (Prims.string * (signedness * width)\n  FStar_Pervasives_Native.option) \n  | Const_char of FStar_BaseTypes.char \n  | Const_real of Prims.string \n  | Const_string of (Prims.string * FStar_Compiler_Range.range) \n  | Const_range_of \n  | Const_set_range_of \n  | Const_range of FStar_Compiler_Range.range \n  | Const_reify of FStar_Ident.lid FStar_Pervasives_Native.option \n  | Const_reflect of FStar_Ident.lid [@@deriving yojson,show]\nlet (eq_const : sconst -> sconst -> Prims.bool) =\n  fun c1 ->\n    fun c2 ->\n      match (c1, c2) with\n      | (Const_int (s1, o1), Const_int (s2, o2)) ->\n          (let uu___ = FStar_Compiler_Util.ensure_decimal s1 in\n           let uu___1 = FStar_Compiler_Util.ensure_decimal s2 in\n           uu___ = uu___1) && (o1 = o2)\n      | (Const_string (a, uu___), Const_string (b, uu___1)) -> a = b\n      | (Const_reflect l1, Const_reflect l2) -> FStar_Ident.lid_equals l1 l2\n      | (Const_reify uu___, Const_reify uu___1) -> true\n      | uu___ -> c1 = c2\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Errors.ml",
    "content": "open Prims\nexception Invalid_warn_error_setting of Prims.string \nlet (uu___is_Invalid_warn_error_setting : Prims.exn -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Invalid_warn_error_setting uu___ -> true\n    | uu___ -> false\nlet (__proj__Invalid_warn_error_setting__item__uu___ :\n  Prims.exn -> Prims.string) =\n  fun projectee ->\n    match projectee with | Invalid_warn_error_setting uu___ -> uu___\nlet lookup_error :\n  'uuuuu 'uuuuu1 'uuuuu2 .\n    ('uuuuu * 'uuuuu1 * 'uuuuu2) Prims.list ->\n      'uuuuu -> ('uuuuu * 'uuuuu1 * 'uuuuu2)\n  =\n  fun settings ->\n    fun e ->\n      let uu___ =\n        FStar_Compiler_Util.try_find\n          (fun uu___1 -> match uu___1 with | (v, uu___2, i) -> e = v)\n          settings in\n      match uu___ with\n      | FStar_Pervasives_Native.Some i -> i\n      | FStar_Pervasives_Native.None ->\n          failwith \"Impossible: unrecognized error\"\nlet lookup_error_range :\n  'uuuuu 'uuuuu1 .\n    ('uuuuu * 'uuuuu1 * Prims.int) Prims.list ->\n      (Prims.int * Prims.int) -> ('uuuuu * 'uuuuu1 * Prims.int) Prims.list\n  =\n  fun settings ->\n    fun uu___ ->\n      match uu___ with\n      | (l, h) ->\n          let uu___1 =\n            FStar_Compiler_List.partition\n              (fun uu___2 ->\n                 match uu___2 with\n                 | (uu___3, uu___4, i) -> (l <= i) && (i <= h)) settings in\n          (match uu___1 with | (matches, uu___2) -> matches)\nlet (error_number : FStar_Errors_Codes.error_setting -> Prims.int) =\n  fun uu___ -> match uu___ with | (uu___1, uu___2, i) -> i\nlet (errno : FStar_Errors_Codes.raw_error -> Prims.int) =\n  fun e ->\n    let uu___ = lookup_error FStar_Errors_Codes.default_settings e in\n    error_number uu___\nlet (warn_on_use_errno : Prims.int) =\n  errno FStar_Errors_Codes.Warning_WarnOnUse\nlet (defensive_errno : Prims.int) =\n  errno FStar_Errors_Codes.Warning_Defensive\nlet (call_to_erased_errno : Prims.int) =\n  errno 
FStar_Errors_Codes.Error_CallToErased\nlet (update_flags :\n  (FStar_Errors_Codes.error_flag * Prims.string) Prims.list ->\n    FStar_Errors_Codes.error_setting Prims.list)\n  =\n  fun l ->\n    let set_one_flag i flag default_flag =\n      match (flag, default_flag) with\n      | (FStar_Errors_Codes.CWarning, FStar_Errors_Codes.CAlwaysError) ->\n          let uu___ =\n            let uu___1 =\n              let uu___2 = FStar_Compiler_Util.string_of_int i in\n              FStar_Compiler_Util.format1 \"cannot turn error %s into warning\"\n                uu___2 in\n            Invalid_warn_error_setting uu___1 in\n          FStar_Compiler_Effect.raise uu___\n      | (FStar_Errors_Codes.CError, FStar_Errors_Codes.CAlwaysError) ->\n          let uu___ =\n            let uu___1 =\n              let uu___2 = FStar_Compiler_Util.string_of_int i in\n              FStar_Compiler_Util.format1 \"cannot turn error %s into warning\"\n                uu___2 in\n            Invalid_warn_error_setting uu___1 in\n          FStar_Compiler_Effect.raise uu___\n      | (FStar_Errors_Codes.CSilent, FStar_Errors_Codes.CAlwaysError) ->\n          let uu___ =\n            let uu___1 =\n              let uu___2 = FStar_Compiler_Util.string_of_int i in\n              FStar_Compiler_Util.format1 \"cannot silence error %s\" uu___2 in\n            Invalid_warn_error_setting uu___1 in\n          FStar_Compiler_Effect.raise uu___\n      | (uu___, FStar_Errors_Codes.CFatal) ->\n          let uu___1 =\n            let uu___2 =\n              let uu___3 = FStar_Compiler_Util.string_of_int i in\n              FStar_Compiler_Util.format1\n                \"cannot change the error level of fatal error %s\" uu___3 in\n            Invalid_warn_error_setting uu___2 in\n          FStar_Compiler_Effect.raise uu___1\n      | uu___ -> flag in\n    let set_flag_for_range uu___ =\n      match uu___ with\n      | (flag, range) ->\n          let errs =\n            lookup_error_range 
FStar_Errors_Codes.default_settings range in\n          FStar_Compiler_List.map\n            (fun uu___1 ->\n               match uu___1 with\n               | (v, default_flag, i) ->\n                   let uu___2 = set_one_flag i flag default_flag in\n                   (v, uu___2, i)) errs in\n    let compute_range uu___ =\n      match uu___ with\n      | (flag, s) ->\n          let r = FStar_Compiler_Util.split s \"..\" in\n          let uu___1 =\n            match r with\n            | r1::r2::[] ->\n                let uu___2 = FStar_Compiler_Util.int_of_string r1 in\n                let uu___3 = FStar_Compiler_Util.int_of_string r2 in\n                (uu___2, uu___3)\n            | uu___2 ->\n                let uu___3 =\n                  let uu___4 =\n                    FStar_Compiler_Util.format1\n                      \"Malformed warn-error range %s\" s in\n                  Invalid_warn_error_setting uu___4 in\n                FStar_Compiler_Effect.raise uu___3 in\n          (match uu___1 with | (l1, h) -> (flag, (l1, h))) in\n    let error_range_settings = FStar_Compiler_List.map compute_range l in\n    let uu___ =\n      FStar_Compiler_List.collect set_flag_for_range error_range_settings in\n    FStar_Compiler_List.op_At uu___ FStar_Errors_Codes.default_settings\ntype error =\n  (FStar_Errors_Codes.raw_error * Prims.string * FStar_Compiler_Range.range *\n    Prims.string Prims.list)\ntype issue_level =\n  | ENotImplemented \n  | EInfo \n  | EWarning \n  | EError \nlet (uu___is_ENotImplemented : issue_level -> Prims.bool) =\n  fun projectee ->\n    match projectee with | ENotImplemented -> true | uu___ -> false\nlet (uu___is_EInfo : issue_level -> Prims.bool) =\n  fun projectee -> match projectee with | EInfo -> true | uu___ -> false\nlet (uu___is_EWarning : issue_level -> Prims.bool) =\n  fun projectee -> match projectee with | EWarning -> true | uu___ -> false\nlet (uu___is_EError : issue_level -> Prims.bool) =\n  fun projectee -> match projectee 
with | EError -> true | uu___ -> false\ntype issue =\n  {\n  issue_msg: Prims.string ;\n  issue_level: issue_level ;\n  issue_range: FStar_Compiler_Range.range FStar_Pervasives_Native.option ;\n  issue_number: Prims.int FStar_Pervasives_Native.option ;\n  issue_ctx: Prims.string Prims.list }\nlet (__proj__Mkissue__item__issue_msg : issue -> Prims.string) =\n  fun projectee ->\n    match projectee with\n    | { issue_msg; issue_level = issue_level1; issue_range; issue_number;\n        issue_ctx;_} -> issue_msg\nlet (__proj__Mkissue__item__issue_level : issue -> issue_level) =\n  fun projectee ->\n    match projectee with\n    | { issue_msg; issue_level = issue_level1; issue_range; issue_number;\n        issue_ctx;_} -> issue_level1\nlet (__proj__Mkissue__item__issue_range :\n  issue -> FStar_Compiler_Range.range FStar_Pervasives_Native.option) =\n  fun projectee ->\n    match projectee with\n    | { issue_msg; issue_level = issue_level1; issue_range; issue_number;\n        issue_ctx;_} -> issue_range\nlet (__proj__Mkissue__item__issue_number :\n  issue -> Prims.int FStar_Pervasives_Native.option) =\n  fun projectee ->\n    match projectee with\n    | { issue_msg; issue_level = issue_level1; issue_range; issue_number;\n        issue_ctx;_} -> issue_number\nlet (__proj__Mkissue__item__issue_ctx : issue -> Prims.string Prims.list) =\n  fun projectee ->\n    match projectee with\n    | { issue_msg; issue_level = issue_level1; issue_range; issue_number;\n        issue_ctx;_} -> issue_ctx\ntype error_handler =\n  {\n  eh_add_one: issue -> unit ;\n  eh_count_errors: unit -> Prims.int ;\n  eh_report: unit -> issue Prims.list ;\n  eh_clear: unit -> unit }\nlet (__proj__Mkerror_handler__item__eh_add_one :\n  error_handler -> issue -> unit) =\n  fun projectee ->\n    match projectee with\n    | { eh_add_one; eh_count_errors; eh_report; eh_clear;_} -> eh_add_one\nlet (__proj__Mkerror_handler__item__eh_count_errors :\n  error_handler -> unit -> Prims.int) =\n  fun projectee ->\n 
   match projectee with\n    | { eh_add_one; eh_count_errors; eh_report; eh_clear;_} ->\n        eh_count_errors\nlet (__proj__Mkerror_handler__item__eh_report :\n  error_handler -> unit -> issue Prims.list) =\n  fun projectee ->\n    match projectee with\n    | { eh_add_one; eh_count_errors; eh_report; eh_clear;_} -> eh_report\nlet (__proj__Mkerror_handler__item__eh_clear : error_handler -> unit -> unit)\n  =\n  fun projectee ->\n    match projectee with\n    | { eh_add_one; eh_count_errors; eh_report; eh_clear;_} -> eh_clear\nexception Error of error \nlet (uu___is_Error : Prims.exn -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error uu___ -> true | uu___ -> false\nlet (__proj__Error__item__uu___ : Prims.exn -> error) =\n  fun projectee -> match projectee with | Error uu___ -> uu___\nexception Err of (FStar_Errors_Codes.raw_error * Prims.string * Prims.string\n  Prims.list) \nlet (uu___is_Err : Prims.exn -> Prims.bool) =\n  fun projectee -> match projectee with | Err uu___ -> true | uu___ -> false\nlet (__proj__Err__item__uu___ :\n  Prims.exn ->\n    (FStar_Errors_Codes.raw_error * Prims.string * Prims.string Prims.list))\n  = fun projectee -> match projectee with | Err uu___ -> uu___\nexception Warning of error \nlet (uu___is_Warning : Prims.exn -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning uu___ -> true | uu___ -> false\nlet (__proj__Warning__item__uu___ : Prims.exn -> error) =\n  fun projectee -> match projectee with | Warning uu___ -> uu___\nexception Stop \nlet (uu___is_Stop : Prims.exn -> Prims.bool) =\n  fun projectee -> match projectee with | Stop -> true | uu___ -> false\nexception Empty_frag \nlet (uu___is_Empty_frag : Prims.exn -> Prims.bool) =\n  fun projectee -> match projectee with | Empty_frag -> true | uu___ -> false\nlet (ctx_string : Prims.string Prims.list -> Prims.string) =\n  fun ctx -> \"\"\nlet (issue_message : issue -> Prims.string) =\n  fun i ->\n    let uu___ = ctx_string i.issue_ctx in\n    
FStar_String.op_Hat i.issue_msg uu___\nlet (format_issue : issue -> Prims.string) =\n  fun issue1 ->\n    let level_header =\n      match issue1.issue_level with\n      | EInfo -> \"Info\"\n      | EWarning -> \"Warning\"\n      | EError -> \"Error\"\n      | ENotImplemented -> \"Feature not yet implemented: \" in\n    let uu___ =\n      match issue1.issue_range with\n      | FStar_Pervasives_Native.None -> (\"\", \"\")\n      | FStar_Pervasives_Native.Some r when\n          r = FStar_Compiler_Range.dummyRange ->\n          let uu___1 =\n            let uu___2 =\n              let uu___3 = FStar_Compiler_Range.def_range r in\n              let uu___4 =\n                FStar_Compiler_Range.def_range\n                  FStar_Compiler_Range.dummyRange in\n              uu___3 = uu___4 in\n            if uu___2\n            then \"\"\n            else\n              (let uu___4 = FStar_Compiler_Range.string_of_range r in\n               FStar_Compiler_Util.format1 \" (see also %s)\" uu___4) in\n          (\"\", uu___1)\n      | FStar_Pervasives_Native.Some r ->\n          let uu___1 =\n            let uu___2 = FStar_Compiler_Range.string_of_use_range r in\n            FStar_Compiler_Util.format1 \"%s: \" uu___2 in\n          let uu___2 =\n            let uu___3 =\n              (let uu___4 = FStar_Compiler_Range.use_range r in\n               let uu___5 = FStar_Compiler_Range.def_range r in\n               uu___4 = uu___5) ||\n                (let uu___4 = FStar_Compiler_Range.def_range r in\n                 let uu___5 =\n                   FStar_Compiler_Range.def_range\n                     FStar_Compiler_Range.dummyRange in\n                 uu___4 = uu___5) in\n            if uu___3\n            then \"\"\n            else\n              (let uu___5 = FStar_Compiler_Range.string_of_range r in\n               FStar_Compiler_Util.format1 \" (see also %s)\" uu___5) in\n          (uu___1, uu___2) in\n    match uu___ with\n    | (range_str, see_also_str) ->\n        
let issue_number =\n          match issue1.issue_number with\n          | FStar_Pervasives_Native.None -> \"\"\n          | FStar_Pervasives_Native.Some n ->\n              let uu___1 = FStar_Compiler_Util.string_of_int n in\n              FStar_Compiler_Util.format1 \" %s\" uu___1 in\n        let uu___1 = issue_message issue1 in\n        FStar_Compiler_Util.format5 \"%s(%s%s) %s%s\" range_str level_header\n          issue_number uu___1 see_also_str\nlet (print_issue : issue -> unit) =\n  fun issue1 ->\n    let printer =\n      match issue1.issue_level with\n      | EInfo -> FStar_Compiler_Util.print_string\n      | EWarning -> FStar_Compiler_Util.print_warning\n      | EError -> FStar_Compiler_Util.print_error\n      | ENotImplemented -> FStar_Compiler_Util.print_error in\n    let uu___ =\n      let uu___1 = format_issue issue1 in FStar_String.op_Hat uu___1 \"\\n\" in\n    printer uu___\nlet (compare_issues : issue -> issue -> Prims.int) =\n  fun i1 ->\n    fun i2 ->\n      match ((i1.issue_range), (i2.issue_range)) with\n      | (FStar_Pervasives_Native.None, FStar_Pervasives_Native.None) ->\n          Prims.int_zero\n      | (FStar_Pervasives_Native.None, FStar_Pervasives_Native.Some uu___) ->\n          ~- Prims.int_one\n      | (FStar_Pervasives_Native.Some uu___, FStar_Pervasives_Native.None) ->\n          Prims.int_one\n      | (FStar_Pervasives_Native.Some r1, FStar_Pervasives_Native.Some r2) ->\n          FStar_Compiler_Range.compare_use_range r1 r2\nlet (mk_default_handler : Prims.bool -> error_handler) =\n  fun print ->\n    let issues = FStar_Compiler_Util.mk_ref [] in\n    let err_count = FStar_Compiler_Util.mk_ref Prims.int_zero in\n    let add_one e =\n      if e.issue_level = EError\n      then\n        (let uu___1 =\n           let uu___2 = FStar_Compiler_Effect.op_Bang err_count in\n           Prims.int_one + uu___2 in\n         FStar_Compiler_Effect.op_Colon_Equals err_count uu___1)\n      else ();\n      (match e.issue_level with\n       | EInfo 
-> print_issue e\n       | uu___2 ->\n           let uu___3 =\n             let uu___4 = FStar_Compiler_Effect.op_Bang issues in e :: uu___4 in\n           FStar_Compiler_Effect.op_Colon_Equals issues uu___3);\n      (let uu___3 =\n         (false) &&\n           (e.issue_number = (FStar_Pervasives_Native.Some defensive_errno)) in\n       if uu___3 then failwith \"Aborting due to --defensive abort\" else ()) in\n    let count_errors uu___ = FStar_Compiler_Effect.op_Bang err_count in\n    let report uu___ =\n      let unique_issues =\n        let uu___1 = FStar_Compiler_Effect.op_Bang issues in\n        FStar_Compiler_Util.remove_dups (fun i0 -> fun i1 -> i0 = i1) uu___1 in\n      let sorted_unique_issues =\n        FStar_Compiler_List.sortWith compare_issues unique_issues in\n      if print\n      then FStar_Compiler_List.iter print_issue sorted_unique_issues\n      else ();\n      sorted_unique_issues in\n    let clear uu___ =\n      FStar_Compiler_Effect.op_Colon_Equals issues [];\n      FStar_Compiler_Effect.op_Colon_Equals err_count Prims.int_zero in\n    {\n      eh_add_one = add_one;\n      eh_count_errors = count_errors;\n      eh_report = report;\n      eh_clear = clear\n    }\nlet (default_handler : error_handler) = mk_default_handler true\nlet (current_handler : error_handler FStar_Compiler_Effect.ref) =\n  FStar_Compiler_Util.mk_ref default_handler\nlet (mk_issue :\n  issue_level ->\n    FStar_Compiler_Range.range FStar_Pervasives_Native.option ->\n      Prims.string ->\n        Prims.int FStar_Pervasives_Native.option ->\n          Prims.string Prims.list -> issue)\n  =\n  fun level ->\n    fun range ->\n      fun msg ->\n        fun n ->\n          fun ctx ->\n            {\n              issue_msg = msg;\n              issue_level = level;\n              issue_range = range;\n              issue_number = n;\n              issue_ctx = ctx\n            }\nlet (get_err_count : unit -> Prims.int) =\n  fun uu___ ->\n    let uu___1 = 
FStar_Compiler_Effect.op_Bang current_handler in\n    uu___1.eh_count_errors ()\nlet (wrapped_eh_add_one : error_handler -> issue -> unit) =\n  fun h ->\n    fun issue1 ->\n      h.eh_add_one issue1;\n      ()\nlet (add_one : issue -> unit) =\n  fun issue1 ->\n      (\n         let uu___1 = FStar_Compiler_Effect.op_Bang current_handler in\n         wrapped_eh_add_one uu___1 issue1)\nlet (add_many : issue Prims.list -> unit) =\n  fun issues ->\n      (\n         let uu___1 =\n           let uu___2 = FStar_Compiler_Effect.op_Bang current_handler in\n           wrapped_eh_add_one uu___2 in\n         FStar_Compiler_List.iter uu___1 issues)\nlet (report_all : unit -> issue Prims.list) =\n  fun uu___ ->\n    let uu___1 = FStar_Compiler_Effect.op_Bang current_handler in\n    uu___1.eh_report ()\nlet (clear : unit -> unit) =\n  fun uu___ ->\n    let uu___1 = FStar_Compiler_Effect.op_Bang current_handler in\n    uu___1.eh_clear ()\nlet (set_handler : error_handler -> unit) =\n  fun handler ->\n    let issues = report_all () in\n    clear ();\n    FStar_Compiler_Effect.op_Colon_Equals current_handler handler;\n    add_many issues\ntype error_context_t =\n  {\n  push: Prims.string -> unit ;\n  pop: unit -> Prims.string ;\n  clear: unit -> unit ;\n  get: unit -> Prims.string Prims.list ;\n  set: Prims.string Prims.list -> unit }\nlet (__proj__Mkerror_context_t__item__push :\n  error_context_t -> Prims.string -> unit) =\n  fun projectee ->\n    match projectee with | { push; pop; clear = clear1; get; set;_} -> push\nlet (__proj__Mkerror_context_t__item__pop :\n  error_context_t -> unit -> Prims.string) =\n  fun projectee ->\n    match projectee with | { push; pop; clear = clear1; get; set;_} -> pop\nlet (__proj__Mkerror_context_t__item__clear :\n  error_context_t -> unit -> unit) =\n  fun projectee ->\n    match projectee with | { push; pop; clear = clear1; get; set;_} -> clear1\nlet (__proj__Mkerror_context_t__item__get :\n  error_context_t -> unit -> Prims.string Prims.list) 
=\n  fun projectee ->\n    match projectee with | { push; pop; clear = clear1; get; set;_} -> get\nlet (__proj__Mkerror_context_t__item__set :\n  error_context_t -> Prims.string Prims.list -> unit) =\n  fun projectee ->\n    match projectee with | { push; pop; clear = clear1; get; set;_} -> set\nlet (error_context : error_context_t) =\n  let ctxs = FStar_Compiler_Util.mk_ref [] in\n  let push s =\n    let uu___ =\n      let uu___1 = FStar_Compiler_Effect.op_Bang ctxs in s :: uu___1 in\n    FStar_Compiler_Effect.op_Colon_Equals ctxs uu___ in\n  let pop s =\n    let uu___ = FStar_Compiler_Effect.op_Bang ctxs in\n    match uu___ with\n    | h::t -> (FStar_Compiler_Effect.op_Colon_Equals ctxs t; h)\n    | uu___1 -> failwith \"cannot pop error prefix...\" in\n  let clear1 uu___ = FStar_Compiler_Effect.op_Colon_Equals ctxs [] in\n  let get uu___ = FStar_Compiler_Effect.op_Bang ctxs in\n  let set c = FStar_Compiler_Effect.op_Colon_Equals ctxs c in\n  { push; pop; clear = clear1; get; set }\nlet (get_ctx : unit -> Prims.string Prims.list) =\n  fun uu___ -> error_context.get ()\nlet (diag : FStar_Compiler_Range.range -> Prims.string -> unit) =\n  fun r ->\n    fun msg ->\n      if false\n      then\n        add_one\n          (mk_issue EInfo (FStar_Pervasives_Native.Some r) msg\n             FStar_Pervasives_Native.None [])\n      else ()\nlet (warn_unsafe_options :\n  FStar_Compiler_Range.range FStar_Pervasives_Native.option ->\n    Prims.string -> unit)\n  =\n  fun rng_opt ->\n    fun msg -> ()\nlet (set_option_warning_callback_range :\n  FStar_Compiler_Range.range FStar_Pervasives_Native.option -> unit) =\n  fun ropt ->\n    ()\n    (* FStar_Options.set_option_warning_callback (warn_unsafe_options ropt) *)\nlet (uu___279 :\n  (((Prims.string -> FStar_Errors_Codes.error_setting Prims.list) -> unit) *\n    (unit -> FStar_Errors_Codes.error_setting Prims.list)))\n  =\n  let parser_callback =\n    FStar_Compiler_Util.mk_ref FStar_Pervasives_Native.None in\n  (* let 
error_flags = FStar_Compiler_Util.smap_create (Prims.of_int (10)) in *)\n  let set_error_flags uu___ = () in\n  let get_error_flags uu___ =\n     FStar_Errors_Codes.default_settings in\n  let set_callbacks f =\n    FStar_Compiler_Effect.op_Colon_Equals parser_callback\n      (FStar_Pervasives_Native.Some f)\n    (* FStar_Options.set_option_warning_callback *)\n    (*   (warn_unsafe_options FStar_Pervasives_Native.None) *)\n  in\n  (set_callbacks, get_error_flags)\nlet (t_set_parse_warn_error :\n  (Prims.string -> FStar_Errors_Codes.error_setting Prims.list) -> unit) =\n  match uu___279 with\n  | (t_set_parse_warn_error1, error_flags) -> t_set_parse_warn_error1\nlet (error_flags : unit -> FStar_Errors_Codes.error_setting Prims.list) =\n  match uu___279 with\n  | (t_set_parse_warn_error1, error_flags1) -> error_flags1\nlet (set_parse_warn_error :\n  (Prims.string -> FStar_Errors_Codes.error_setting Prims.list) -> unit) =\n  t_set_parse_warn_error\nlet (lookup :\n  FStar_Errors_Codes.raw_error -> FStar_Errors_Codes.error_setting) =\n  fun err ->\n    let flags = error_flags () in\n    let uu___ = lookup_error flags err in\n    match uu___ with\n    | (v, level, i) ->\n        let with_level level1 = (v, level1, i) in\n        (match v with\n         | uu___1 -> with_level level)\n\nlet raise_error :\n  'a .\n    (FStar_Errors_Codes.raw_error * Prims.string) ->\n      FStar_Compiler_Range.range -> 'a\n  =\n  fun uu___ ->\n    fun r ->\n      match uu___ with\n      | (e, msg) ->\n          let uu___1 =\n            let uu___2 =\n              let uu___3 = error_context.get () in (e, msg, r, uu___3) in\n            Error uu___2 in\n          FStar_Compiler_Effect.raise uu___1\nlet raise_err : 'a . 
(FStar_Errors_Codes.raw_error * Prims.string) -> 'a =\n  fun uu___ ->\n    match uu___ with\n    | (e, msg) ->\n        let uu___1 =\n          let uu___2 = let uu___3 = error_context.get () in (e, msg, uu___3) in\n          Err uu___2 in\n        FStar_Compiler_Effect.raise uu___1\n\nlet (log_issue_ctx :\n  FStar_Compiler_Range.range ->\n    (FStar_Errors_Codes.raw_error * Prims.string) ->\n      Prims.string Prims.list -> unit)\n  =\n  fun r ->\n    fun uu___ ->\n      fun ctx ->\n        match uu___ with\n        | (e, msg) ->\n            let uu___1 = lookup e in\n            (match uu___1 with\n             | (uu___2, FStar_Errors_Codes.CAlwaysError, errno1) ->\n                 add_one\n                   (mk_issue EError (FStar_Pervasives_Native.Some r) msg\n                      (FStar_Pervasives_Native.Some errno1) ctx)\n             | (uu___2, FStar_Errors_Codes.CError, errno1) ->\n                 add_one\n                   (mk_issue EError (FStar_Pervasives_Native.Some r) msg\n                      (FStar_Pervasives_Native.Some errno1) ctx)\n             | (uu___2, FStar_Errors_Codes.CWarning, errno1) ->\n                 add_one\n                   (mk_issue EWarning (FStar_Pervasives_Native.Some r) msg\n                      (FStar_Pervasives_Native.Some errno1) ctx)\n             | (uu___2, FStar_Errors_Codes.CSilent, uu___3) -> ()\n             | (uu___2, FStar_Errors_Codes.CFatal, errno1) ->\n                 let i =\n                   mk_issue EError (FStar_Pervasives_Native.Some r) msg\n                     (FStar_Pervasives_Native.Some errno1) ctx in\n                 let uu___3 = false in\n                 if uu___3\n                 then add_one i\n                 else\n                   (let uu___5 =\n                      let uu___6 = format_issue i in\n                      FStar_String.op_Hat\n                        \"don't use log_issue to report fatal error, should use raise_error: \"\n                        uu___6 in\n             
       failwith uu___5))\nlet (log_issue :\n  FStar_Compiler_Range.range ->\n    (FStar_Errors_Codes.raw_error * Prims.string) -> unit)\n  =\n  fun r ->\n    fun uu___ ->\n      match uu___ with\n      | (e, msg) ->\n          let ctx = error_context.get () in log_issue_ctx r (e, msg) ctx\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Errors_Codes.ml",
    "content": "open Prims\ntype error_flag =\n  | CFatal \n  | CAlwaysError \n  | CError \n  | CWarning \n  | CSilent \nlet (uu___is_CFatal : error_flag -> Prims.bool) =\n  fun projectee -> match projectee with | CFatal -> true | uu___ -> false\nlet (uu___is_CAlwaysError : error_flag -> Prims.bool) =\n  fun projectee ->\n    match projectee with | CAlwaysError -> true | uu___ -> false\nlet (uu___is_CError : error_flag -> Prims.bool) =\n  fun projectee -> match projectee with | CError -> true | uu___ -> false\nlet (uu___is_CWarning : error_flag -> Prims.bool) =\n  fun projectee -> match projectee with | CWarning -> true | uu___ -> false\nlet (uu___is_CSilent : error_flag -> Prims.bool) =\n  fun projectee -> match projectee with | CSilent -> true | uu___ -> false\ntype raw_error =\n  | Error_DependencyAnalysisFailed \n  | Error_IDETooManyPops \n  | Error_IDEUnrecognized \n  | Error_InductiveTypeNotSatisfyPositivityCondition \n  | Error_InvalidUniverseVar \n  | Error_MissingFileName \n  | Error_ModuleFileNameMismatch \n  | Error_OpPlusInUniverse \n  | Error_OutOfRange \n  | Error_ProofObligationFailed \n  | Error_TooManyFiles \n  | Error_TypeCheckerFailToProve \n  | Error_TypeError \n  | Error_UncontrainedUnificationVar \n  | Error_UnexpectedGTotComputation \n  | Error_UnexpectedInstance \n  | Error_UnknownFatal_AssertionFailure \n  | Error_Z3InvocationError \n  | Error_IDEAssertionFailure \n  | Error_Z3SolverError \n  | Fatal_AbstractTypeDeclarationInInterface \n  | Fatal_ActionMustHaveFunctionType \n  | Fatal_AlreadyDefinedTopLevelDeclaration \n  | Fatal_ArgumentLengthMismatch \n  | Fatal_AssertionFailure \n  | Fatal_AssignToImmutableValues \n  | Fatal_AssumeValInInterface \n  | Fatal_BadlyInstantiatedSynthByTactic \n  | Fatal_BadSignatureShape \n  | Fatal_BinderAndArgsLengthMismatch \n  | Fatal_BothValAndLetInInterface \n  | Fatal_CardinalityConstraintViolated \n  | Fatal_ComputationNotTotal \n  | Fatal_ComputationTypeNotAllowed \n  | 
Fatal_ComputedTypeNotMatchAnnotation \n  | Fatal_ConstructorArgLengthMismatch \n  | Fatal_ConstructorFailedCheck \n  | Fatal_ConstructorNotFound \n  | Fatal_ConstsructorBuildWrongType \n  | Fatal_CycleInRecTypeAbbreviation \n  | Fatal_DataContructorNotFound \n  | Fatal_DefaultQualifierNotAllowedOnEffects \n  | Fatal_DefinitionNotFound \n  | Fatal_DisjuctivePatternVarsMismatch \n  | Fatal_DivergentComputationCannotBeIncludedInTotal \n  | Fatal_DuplicateInImplementation \n  | Fatal_DuplicateModuleOrInterface \n  | Fatal_DuplicateTopLevelNames \n  | Fatal_DuplicateTypeAnnotationAndValDecl \n  | Fatal_EffectCannotBeReified \n  | Fatal_EffectConstructorNotFullyApplied \n  | Fatal_EffectfulAndPureComputationMismatch \n  | Fatal_EffectNotFound \n  | Fatal_EffectsCannotBeComposed \n  | Fatal_ErrorInSolveDeferredConstraints \n  | Fatal_ErrorsReported \n  | Fatal_EscapedBoundVar \n  | Fatal_ExpectedArrowAnnotatedType \n  | Fatal_ExpectedGhostExpression \n  | Fatal_ExpectedPureExpression \n  | Fatal_ExpectNormalizedEffect \n  | Fatal_ExpectTermGotFunction \n  | Fatal_ExpectTrivialPreCondition \n  | Fatal_FailToCompileNativeTactic \n  | Fatal_FailToExtractNativeTactic \n  | Fatal_FailToProcessPragma \n  | Fatal_FailToResolveImplicitArgument \n  | Fatal_FailToSolveUniverseInEquality \n  | Fatal_FieldsNotBelongToSameRecordType \n  | Fatal_ForbiddenReferenceToCurrentModule \n  | Fatal_FreeVariables \n  | Fatal_FunctionTypeExpected \n  | Fatal_IdentifierNotFound \n  | Fatal_IllAppliedConstant \n  | Fatal_IllegalCharInByteArray \n  | Fatal_IllegalCharInOperatorName \n  | Fatal_IllTyped \n  | Fatal_ImpossibleAbbrevLidBundle \n  | Fatal_ImpossibleAbbrevRenameBundle \n  | Fatal_ImpossibleInductiveWithAbbrev \n  | Fatal_ImpossiblePrePostAbs \n  | Fatal_ImpossiblePrePostArrow \n  | Fatal_ImpossibleToGenerateDMEffect \n  | Fatal_ImpossibleTypeAbbrevBundle \n  | Fatal_ImpossibleTypeAbbrevSigeltBundle \n  | Fatal_IncludeModuleNotPrepared \n  | Fatal_IncoherentInlineUniverse \n  | 
Fatal_IncompatibleKinds \n  | Fatal_IncompatibleNumberOfTypes \n  | Fatal_IncompatibleSetOfUniverse \n  | Fatal_IncompatibleUniverse \n  | Fatal_InconsistentImplicitArgumentAnnotation \n  | Fatal_InconsistentImplicitQualifier \n  | Fatal_InconsistentQualifierAnnotation \n  | Fatal_InferredTypeCauseVarEscape \n  | Fatal_InlineRenamedAsUnfold \n  | Fatal_InsufficientPatternArguments \n  | Fatal_InterfaceAlreadyProcessed \n  | Fatal_InterfaceNotImplementedByModule \n  | Fatal_InterfaceWithTypeImplementation \n  | Fatal_InvalidFloatingPointNumber \n  | Fatal_InvalidFSDocKeyword \n  | Fatal_InvalidIdentifier \n  | Fatal_InvalidLemmaArgument \n  | Fatal_InvalidNumericLiteral \n  | Fatal_InvalidRedefinitionOfLexT \n  | Fatal_InvalidUnicodeInStringLiteral \n  | Fatal_InvalidUTF8Encoding \n  | Fatal_InvalidWarnErrorSetting \n  | Fatal_LetBoundMonadicMismatch \n  | Fatal_LetMutableForVariablesOnly \n  | Fatal_LetOpenModuleOnly \n  | Fatal_LetRecArgumentMismatch \n  | Fatal_MalformedActionDeclaration \n  | Fatal_MismatchedPatternType \n  | Fatal_MismatchUniversePolymorphic \n  | Fatal_MissingDataConstructor \n  | Fatal_MissingExposeInterfacesOption \n  | Fatal_MissingFieldInRecord \n  | Fatal_MissingImplementation \n  | Fatal_MissingImplicitArguments \n  | Fatal_MissingInterface \n  | Fatal_MissingNameInBinder \n  | Fatal_MissingPrimsModule \n  | Fatal_MissingQuantifierBinder \n  | Fatal_ModuleExpected \n  | Fatal_ModuleFileNotFound \n  | Fatal_ModuleFirstStatement \n  | Fatal_ModuleNotFound \n  | Fatal_ModuleOrFileNotFound \n  | Fatal_MonadAlreadyDefined \n  | Fatal_MoreThanOneDeclaration \n  | Fatal_MultipleLetBinding \n  | Fatal_NameNotFound \n  | Fatal_NameSpaceNotFound \n  | Fatal_NegativeUniverseConstFatal_NotSupported \n  | Fatal_NoFileProvided \n  | Fatal_NonInductiveInMutuallyDefinedType \n  | Fatal_NonLinearPatternNotPermitted \n  | Fatal_NonLinearPatternVars \n  | Fatal_NonSingletonTopLevel \n  | Fatal_NonSingletonTopLevelModule \n  | 
Error_NonTopRecFunctionNotFullyEncoded \n  | Fatal_NonTrivialPreConditionInPrims \n  | Fatal_NonVariableInductiveTypeParameter \n  | Fatal_NotApplicationOrFv \n  | Fatal_NotEnoughArgsToEffect \n  | Fatal_NotEnoughArgumentsForEffect \n  | Fatal_NotFunctionType \n  | Fatal_NotSupported \n  | Fatal_NotTopLevelModule \n  | Fatal_NotValidFStarFile \n  | Fatal_NotValidIncludeDirectory \n  | Fatal_OneModulePerFile \n  | Fatal_OpenGoalsInSynthesis \n  | Fatal_OptionsNotCompatible \n  | Fatal_OutOfOrder \n  | Fatal_ParseErrors \n  | Fatal_ParseItError \n  | Fatal_PolyTypeExpected \n  | Fatal_PossibleInfiniteTyp \n  | Fatal_PreModuleMismatch \n  | Fatal_QulifierListNotPermitted \n  | Fatal_RecursiveFunctionLiteral \n  | Fatal_ReflectOnlySupportedOnEffects \n  | Fatal_ReservedPrefix \n  | Fatal_SMTOutputParseError \n  | Fatal_SMTSolverError \n  | Fatal_SyntaxError \n  | Fatal_SynthByTacticError \n  | Fatal_TacticGotStuck \n  | Fatal_TcOneFragmentFailed \n  | Fatal_TermOutsideOfDefLanguage \n  | Fatal_ToManyArgumentToFunction \n  | Fatal_TooManyOrTooFewFileMatch \n  | Fatal_TooManyPatternArguments \n  | Fatal_TooManyUniverse \n  | Fatal_TypeMismatch \n  | Fatal_TypeWithinPatternsAllowedOnVariablesOnly \n  | Fatal_UnableToReadFile \n  | Fatal_UnepxectedOrUnboundOperator \n  | Fatal_UnexpectedBinder \n  | Fatal_UnexpectedBindShape \n  | Fatal_UnexpectedChar \n  | Fatal_UnexpectedComputationTypeForLetRec \n  | Fatal_UnexpectedConstructorType \n  | Fatal_UnexpectedDataConstructor \n  | Fatal_UnexpectedEffect \n  | Fatal_UnexpectedEmptyRecord \n  | Fatal_UnexpectedExpressionType \n  | Fatal_UnexpectedFunctionParameterType \n  | Fatal_UnexpectedGeneralizedUniverse \n  | Fatal_UnexpectedGTotForLetRec \n  | Fatal_UnexpectedGuard \n  | Fatal_UnexpectedIdentifier \n  | Fatal_UnexpectedImplicitArgument \n  | Fatal_UnexpectedImplictArgument \n  | Fatal_UnexpectedInductivetype \n  | Fatal_UnexpectedLetBinding \n  | Fatal_UnexpectedModuleDeclaration \n  | Fatal_UnexpectedNumberOfUniverse \n 
 | Fatal_UnexpectedNumericLiteral \n  | Fatal_UnexpectedPattern \n  | Fatal_UnexpectedPosition \n  | Fatal_UnExpectedPreCondition \n  | Fatal_UnexpectedReturnShape \n  | Fatal_UnexpectedSignatureForMonad \n  | Fatal_UnexpectedTerm \n  | Fatal_UnexpectedTermInUniverse \n  | Fatal_UnexpectedTermType \n  | Fatal_UnexpectedTermVQuote \n  | Fatal_UnexpectedUniversePolymorphicReturn \n  | Fatal_UnexpectedUniverseVariable \n  | Fatal_UnfoldableDeprecated \n  | Fatal_UnificationNotWellFormed \n  | Fatal_Uninstantiated \n  | Error_UninstantiatedUnificationVarInTactic \n  | Fatal_UninstantiatedVarInTactic \n  | Fatal_UniverseMightContainSumOfTwoUnivVars \n  | Fatal_UniversePolymorphicInnerLetBound \n  | Fatal_UnknownAttribute \n  | Fatal_UnknownToolForDep \n  | Fatal_UnrecognizedExtension \n  | Fatal_UnresolvedPatternVar \n  | Fatal_UnsupportedConstant \n  | Fatal_UnsupportedDisjuctivePatterns \n  | Fatal_UnsupportedQualifier \n  | Fatal_UserTacticFailure \n  | Fatal_ValueRestriction \n  | Fatal_VariableNotFound \n  | Fatal_WrongBodyTypeForReturnWP \n  | Fatal_WrongDataAppHeadFormat \n  | Fatal_WrongDefinitionOrder \n  | Fatal_WrongResultTypeAfterConstrutor \n  | Fatal_WrongTerm \n  | Fatal_WhenClauseNotSupported \n  | Unused01 \n  | Warning_AddImplicitAssumeNewQualifier \n  | Warning_AdmitWithoutDefinition \n  | Warning_CachedFile \n  | Warning_DefinitionNotTranslated \n  | Warning_DependencyFound \n  | Warning_DeprecatedEqualityOnBinder \n  | Warning_DeprecatedOpaqueQualifier \n  | Warning_DocOverwrite \n  | Warning_FileNotWritten \n  | Warning_Filtered \n  | Warning_FunctionLiteralPrecisionLoss \n  | Warning_FunctionNotExtacted \n  | Warning_HintFailedToReplayProof \n  | Warning_HitReplayFailed \n  | Warning_IDEIgnoreCodeGen \n  | Warning_IllFormedGoal \n  | Warning_InaccessibleArgument \n  | Warning_IncoherentImplicitQualifier \n  | Warning_IrrelevantQualifierOnArgumentToReflect \n  | Warning_IrrelevantQualifierOnArgumentToReify \n  | Warning_MalformedWarnErrorList \n  | 
Warning_MetaAlienNotATmUnknown \n  | Warning_MultipleAscriptions \n  | Warning_NondependentUserDefinedDataType \n  | Warning_NonListLiteralSMTPattern \n  | Warning_NormalizationFailure \n  | Warning_NotDependentArrow \n  | Warning_NotEmbedded \n  | Warning_PatternMissingBoundVar \n  | Warning_RecursiveDependency \n  | Warning_RedundantExplicitCurrying \n  | Warning_SMTPatTDeprecated \n  | Warning_SMTPatternIllFormed \n  | Warning_TopLevelEffect \n  | Warning_UnboundModuleReference \n  | Warning_UnexpectedFile \n  | Warning_UnexpectedFsTypApp \n  | Warning_UnexpectedZ3Output \n  | Warning_UnprotectedTerm \n  | Warning_UnrecognizedAttribute \n  | Warning_UpperBoundCandidateAlreadyVisited \n  | Warning_UseDefaultEffect \n  | Warning_WrongErrorLocation \n  | Warning_Z3InvocationWarning \n  | Warning_PluginNotImplemented \n  | Warning_MissingInterfaceOrImplementation \n  | Warning_ConstructorBuildsUnexpectedType \n  | Warning_ModuleOrFileNotFoundWarning \n  | Error_NoLetMutable \n  | Error_BadImplicit \n  | Warning_DeprecatedDefinition \n  | Fatal_SMTEncodingArityMismatch \n  | Warning_Defensive \n  | Warning_CantInspect \n  | Warning_NilGivenExplicitArgs \n  | Warning_ConsAppliedExplicitArgs \n  | Warning_UnembedBinderKnot \n  | Fatal_TacticProofRelevantGoal \n  | Warning_TacAdmit \n  | Fatal_IncoherentPatterns \n  | Error_NoSMTButNeeded \n  | Fatal_UnexpectedAntiquotation \n  | Fatal_SplicedUndef \n  | Fatal_SpliceUnembedFail \n  | Warning_ExtractionUnexpectedEffect \n  | Error_DidNotFail \n  | Warning_UnappliedFail \n  | Warning_QuantifierWithoutPattern \n  | Error_EmptyFailErrs \n  | Warning_logicqualifier \n  | Fatal_CyclicDependence \n  | Error_InductiveAnnotNotAType \n  | Fatal_FriendInterface \n  | Error_CannotRedefineConst \n  | Error_BadClassDecl \n  | Error_BadInductiveParam \n  | Error_FieldShadow \n  | Error_UnexpectedDM4FType \n  | Fatal_EffectAbbreviationResultTypeMismatch \n  | Error_AlreadyCachedAssertionFailure \n  | Error_MustEraseMissing \n  | 
Warning_EffectfulArgumentToErasedFunction \n  | Fatal_EmptySurfaceLet \n  | Warning_UnexpectedCheckedFile \n  | Fatal_ExtractionUnsupported \n  | Warning_SMTErrorReason \n  | Warning_CoercionNotFound \n  | Error_QuakeFailed \n  | Error_IllSMTPat \n  | Error_IllScopedTerm \n  | Warning_UnusedLetRec \n  | Fatal_Effects_Ordering_Coherence \n  | Warning_BleedingEdge_Feature \n  | Warning_IgnoredBinding \n  | Warning_CouldNotReadHints \n  | Fatal_BadUvar \n  | Warning_WarnOnUse \n  | Warning_DeprecatedAttributeSyntax \n  | Warning_DeprecatedGeneric \n  | Error_BadSplice \n  | Error_UnexpectedUnresolvedUvar \n  | Warning_UnfoldPlugin \n  | Error_LayeredMissingAnnot \n  | Error_CallToErased \n  | Error_ErasedCtor \n  | Error_RemoveUnusedTypeParameter \n  | Warning_NoMagicInFSharp \n  | Error_BadLetOpenRecord \n  | Error_UnexpectedTypeclassInstance \n  | Warning_AmbiguousResolveImplicitsHook \n  | Warning_SplitAndRetryQueries \n  | Warning_DeprecatedLightDoNotation \n  | Warning_FailedToCheckInitialTacticGoal \n  | Warning_Adhoc_IndexedEffect_Combinator \n  | Error_PluginDynlink \n  | Error_InternalQualifier \n  | Warning_NameEscape \nlet (uu___is_Error_DependencyAnalysisFailed : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_DependencyAnalysisFailed -> true\n    | uu___ -> false\nlet (uu___is_Error_IDETooManyPops : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_IDETooManyPops -> true | uu___ -> false\nlet (uu___is_Error_IDEUnrecognized : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_IDEUnrecognized -> true | uu___ -> false\nlet (uu___is_Error_InductiveTypeNotSatisfyPositivityCondition :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_InductiveTypeNotSatisfyPositivityCondition -> true\n    | uu___ -> false\nlet (uu___is_Error_InvalidUniverseVar : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | 
Error_InvalidUniverseVar -> true | uu___ -> false\nlet (uu___is_Error_MissingFileName : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_MissingFileName -> true | uu___ -> false\nlet (uu___is_Error_ModuleFileNameMismatch : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_ModuleFileNameMismatch -> true\n    | uu___ -> false\nlet (uu___is_Error_OpPlusInUniverse : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_OpPlusInUniverse -> true | uu___ -> false\nlet (uu___is_Error_OutOfRange : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_OutOfRange -> true | uu___ -> false\nlet (uu___is_Error_ProofObligationFailed : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_ProofObligationFailed -> true\n    | uu___ -> false\nlet (uu___is_Error_TooManyFiles : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_TooManyFiles -> true | uu___ -> false\nlet (uu___is_Error_TypeCheckerFailToProve : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_TypeCheckerFailToProve -> true\n    | uu___ -> false\nlet (uu___is_Error_TypeError : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_TypeError -> true | uu___ -> false\nlet (uu___is_Error_UncontrainedUnificationVar : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_UncontrainedUnificationVar -> true\n    | uu___ -> false\nlet (uu___is_Error_UnexpectedGTotComputation : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_UnexpectedGTotComputation -> true\n    | uu___ -> false\nlet (uu___is_Error_UnexpectedInstance : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_UnexpectedInstance -> true | uu___ -> false\nlet (uu___is_Error_UnknownFatal_AssertionFailure : raw_error -> Prims.bool) 
=\n  fun projectee ->\n    match projectee with\n    | Error_UnknownFatal_AssertionFailure -> true\n    | uu___ -> false\nlet (uu___is_Error_Z3InvocationError : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_Z3InvocationError -> true | uu___ -> false\nlet (uu___is_Error_IDEAssertionFailure : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_IDEAssertionFailure -> true | uu___ -> false\nlet (uu___is_Error_Z3SolverError : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_Z3SolverError -> true | uu___ -> false\nlet (uu___is_Fatal_AbstractTypeDeclarationInInterface :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_AbstractTypeDeclarationInInterface -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ActionMustHaveFunctionType : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ActionMustHaveFunctionType -> true\n    | uu___ -> false\nlet (uu___is_Fatal_AlreadyDefinedTopLevelDeclaration :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_AlreadyDefinedTopLevelDeclaration -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ArgumentLengthMismatch : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ArgumentLengthMismatch -> true\n    | uu___ -> false\nlet (uu___is_Fatal_AssertionFailure : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_AssertionFailure -> true | uu___ -> false\nlet (uu___is_Fatal_AssignToImmutableValues : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_AssignToImmutableValues -> true\n    | uu___ -> false\nlet (uu___is_Fatal_AssumeValInInterface : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_AssumeValInInterface -> true\n    | uu___ -> false\nlet (uu___is_Fatal_BadlyInstantiatedSynthByTactic : raw_error -> 
Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | Fatal_BadlyInstantiatedSynthByTactic -> true\n    | uu___ -> false\nlet (uu___is_Fatal_BadSignatureShape : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_BadSignatureShape -> true | uu___ -> false\nlet (uu___is_Fatal_BinderAndArgsLengthMismatch : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_BinderAndArgsLengthMismatch -> true\n    | uu___ -> false\nlet (uu___is_Fatal_BothValAndLetInInterface : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_BothValAndLetInInterface -> true\n    | uu___ -> false\nlet (uu___is_Fatal_CardinalityConstraintViolated : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_CardinalityConstraintViolated -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ComputationNotTotal : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_ComputationNotTotal -> true | uu___ -> false\nlet (uu___is_Fatal_ComputationTypeNotAllowed : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ComputationTypeNotAllowed -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ComputedTypeNotMatchAnnotation : raw_error -> Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | Fatal_ComputedTypeNotMatchAnnotation -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ConstructorArgLengthMismatch : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ConstructorArgLengthMismatch -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ConstructorFailedCheck : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ConstructorFailedCheck -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ConstructorNotFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_ConstructorNotFound -> true | uu___ -> false\nlet 
(uu___is_Fatal_ConstsructorBuildWrongType : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ConstsructorBuildWrongType -> true\n    | uu___ -> false\nlet (uu___is_Fatal_CycleInRecTypeAbbreviation : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_CycleInRecTypeAbbreviation -> true\n    | uu___ -> false\nlet (uu___is_Fatal_DataContructorNotFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_DataContructorNotFound -> true\n    | uu___ -> false\nlet (uu___is_Fatal_DefaultQualifierNotAllowedOnEffects :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_DefaultQualifierNotAllowedOnEffects -> true\n    | uu___ -> false\nlet (uu___is_Fatal_DefinitionNotFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_DefinitionNotFound -> true | uu___ -> false\nlet (uu___is_Fatal_DisjuctivePatternVarsMismatch : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_DisjuctivePatternVarsMismatch -> true\n    | uu___ -> false\nlet (uu___is_Fatal_DivergentComputationCannotBeIncludedInTotal :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_DivergentComputationCannotBeIncludedInTotal -> true\n    | uu___ -> false\nlet (uu___is_Fatal_DuplicateInImplementation : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_DuplicateInImplementation -> true\n    | uu___ -> false\nlet (uu___is_Fatal_DuplicateModuleOrInterface : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_DuplicateModuleOrInterface -> true\n    | uu___ -> false\nlet (uu___is_Fatal_DuplicateTopLevelNames : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_DuplicateTopLevelNames -> true\n    | uu___ -> false\nlet (uu___is_Fatal_DuplicateTypeAnnotationAndValDecl :\n  
raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_DuplicateTypeAnnotationAndValDecl -> true\n    | uu___ -> false\nlet (uu___is_Fatal_EffectCannotBeReified : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_EffectCannotBeReified -> true\n    | uu___ -> false\nlet (uu___is_Fatal_EffectConstructorNotFullyApplied :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_EffectConstructorNotFullyApplied -> true\n    | uu___ -> false\nlet (uu___is_Fatal_EffectfulAndPureComputationMismatch :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_EffectfulAndPureComputationMismatch -> true\n    | uu___ -> false\nlet (uu___is_Fatal_EffectNotFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_EffectNotFound -> true | uu___ -> false\nlet (uu___is_Fatal_EffectsCannotBeComposed : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_EffectsCannotBeComposed -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ErrorInSolveDeferredConstraints : raw_error -> Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | Fatal_ErrorInSolveDeferredConstraints -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ErrorsReported : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_ErrorsReported -> true | uu___ -> false\nlet (uu___is_Fatal_EscapedBoundVar : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_EscapedBoundVar -> true | uu___ -> false\nlet (uu___is_Fatal_ExpectedArrowAnnotatedType : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ExpectedArrowAnnotatedType -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ExpectedGhostExpression : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ExpectedGhostExpression -> true\n    | uu___ -> 
false\nlet (uu___is_Fatal_ExpectedPureExpression : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ExpectedPureExpression -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ExpectNormalizedEffect : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ExpectNormalizedEffect -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ExpectTermGotFunction : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ExpectTermGotFunction -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ExpectTrivialPreCondition : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ExpectTrivialPreCondition -> true\n    | uu___ -> false\nlet (uu___is_Fatal_FailToCompileNativeTactic : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_FailToCompileNativeTactic -> true\n    | uu___ -> false\nlet (uu___is_Fatal_FailToExtractNativeTactic : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_FailToExtractNativeTactic -> true\n    | uu___ -> false\nlet (uu___is_Fatal_FailToProcessPragma : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_FailToProcessPragma -> true | uu___ -> false\nlet (uu___is_Fatal_FailToResolveImplicitArgument : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_FailToResolveImplicitArgument -> true\n    | uu___ -> false\nlet (uu___is_Fatal_FailToSolveUniverseInEquality : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_FailToSolveUniverseInEquality -> true\n    | uu___ -> false\nlet (uu___is_Fatal_FieldsNotBelongToSameRecordType : raw_error -> Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | Fatal_FieldsNotBelongToSameRecordType -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ForbiddenReferenceToCurrentModule :\n  raw_error -> Prims.bool) =\n  fun projectee 
->\n    match projectee with\n    | Fatal_ForbiddenReferenceToCurrentModule -> true\n    | uu___ -> false\nlet (uu___is_Fatal_FreeVariables : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_FreeVariables -> true | uu___ -> false\nlet (uu___is_Fatal_FunctionTypeExpected : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_FunctionTypeExpected -> true\n    | uu___ -> false\nlet (uu___is_Fatal_IdentifierNotFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_IdentifierNotFound -> true | uu___ -> false\nlet (uu___is_Fatal_IllAppliedConstant : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_IllAppliedConstant -> true | uu___ -> false\nlet (uu___is_Fatal_IllegalCharInByteArray : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_IllegalCharInByteArray -> true\n    | uu___ -> false\nlet (uu___is_Fatal_IllegalCharInOperatorName : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_IllegalCharInOperatorName -> true\n    | uu___ -> false\nlet (uu___is_Fatal_IllTyped : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_IllTyped -> true | uu___ -> false\nlet (uu___is_Fatal_ImpossibleAbbrevLidBundle : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ImpossibleAbbrevLidBundle -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ImpossibleAbbrevRenameBundle : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ImpossibleAbbrevRenameBundle -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ImpossibleInductiveWithAbbrev : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ImpossibleInductiveWithAbbrev -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ImpossiblePrePostAbs : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    
| Fatal_ImpossiblePrePostAbs -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ImpossiblePrePostArrow : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ImpossiblePrePostArrow -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ImpossibleToGenerateDMEffect : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ImpossibleToGenerateDMEffect -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ImpossibleTypeAbbrevBundle : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ImpossibleTypeAbbrevBundle -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ImpossibleTypeAbbrevSigeltBundle :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ImpossibleTypeAbbrevSigeltBundle -> true\n    | uu___ -> false\nlet (uu___is_Fatal_IncludeModuleNotPrepared : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_IncludeModuleNotPrepared -> true\n    | uu___ -> false\nlet (uu___is_Fatal_IncoherentInlineUniverse : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_IncoherentInlineUniverse -> true\n    | uu___ -> false\nlet (uu___is_Fatal_IncompatibleKinds : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_IncompatibleKinds -> true | uu___ -> false\nlet (uu___is_Fatal_IncompatibleNumberOfTypes : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_IncompatibleNumberOfTypes -> true\n    | uu___ -> false\nlet (uu___is_Fatal_IncompatibleSetOfUniverse : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_IncompatibleSetOfUniverse -> true\n    | uu___ -> false\nlet (uu___is_Fatal_IncompatibleUniverse : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_IncompatibleUniverse -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InconsistentImplicitArgumentAnnotation 
:\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_InconsistentImplicitArgumentAnnotation -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InconsistentImplicitQualifier : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_InconsistentImplicitQualifier -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InconsistentQualifierAnnotation : raw_error -> Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | Fatal_InconsistentQualifierAnnotation -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InferredTypeCauseVarEscape : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_InferredTypeCauseVarEscape -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InlineRenamedAsUnfold : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_InlineRenamedAsUnfold -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InsufficientPatternArguments : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_InsufficientPatternArguments -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InterfaceAlreadyProcessed : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_InterfaceAlreadyProcessed -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InterfaceNotImplementedByModule : raw_error -> Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | Fatal_InterfaceNotImplementedByModule -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InterfaceWithTypeImplementation : raw_error -> Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | Fatal_InterfaceWithTypeImplementation -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InvalidFloatingPointNumber : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_InvalidFloatingPointNumber -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InvalidFSDocKeyword : raw_error -> Prims.bool) =\n  
fun projectee ->\n    match projectee with | Fatal_InvalidFSDocKeyword -> true | uu___ -> false\nlet (uu___is_Fatal_InvalidIdentifier : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_InvalidIdentifier -> true | uu___ -> false\nlet (uu___is_Fatal_InvalidLemmaArgument : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_InvalidLemmaArgument -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InvalidNumericLiteral : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_InvalidNumericLiteral -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InvalidRedefinitionOfLexT : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_InvalidRedefinitionOfLexT -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InvalidUnicodeInStringLiteral : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_InvalidUnicodeInStringLiteral -> true\n    | uu___ -> false\nlet (uu___is_Fatal_InvalidUTF8Encoding : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_InvalidUTF8Encoding -> true | uu___ -> false\nlet (uu___is_Fatal_InvalidWarnErrorSetting : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_InvalidWarnErrorSetting -> true\n    | uu___ -> false\nlet (uu___is_Fatal_LetBoundMonadicMismatch : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_LetBoundMonadicMismatch -> true\n    | uu___ -> false\nlet (uu___is_Fatal_LetMutableForVariablesOnly : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_LetMutableForVariablesOnly -> true\n    | uu___ -> false\nlet (uu___is_Fatal_LetOpenModuleOnly : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_LetOpenModuleOnly -> true | uu___ -> false\nlet (uu___is_Fatal_LetRecArgumentMismatch : raw_error -> Prims.bool) =\n  fun projectee ->\n 
   match projectee with\n    | Fatal_LetRecArgumentMismatch -> true\n    | uu___ -> false\nlet (uu___is_Fatal_MalformedActionDeclaration : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_MalformedActionDeclaration -> true\n    | uu___ -> false\nlet (uu___is_Fatal_MismatchedPatternType : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_MismatchedPatternType -> true\n    | uu___ -> false\nlet (uu___is_Fatal_MismatchUniversePolymorphic : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_MismatchUniversePolymorphic -> true\n    | uu___ -> false\nlet (uu___is_Fatal_MissingDataConstructor : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_MissingDataConstructor -> true\n    | uu___ -> false\nlet (uu___is_Fatal_MissingExposeInterfacesOption : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_MissingExposeInterfacesOption -> true\n    | uu___ -> false\nlet (uu___is_Fatal_MissingFieldInRecord : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_MissingFieldInRecord -> true\n    | uu___ -> false\nlet (uu___is_Fatal_MissingImplementation : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_MissingImplementation -> true\n    | uu___ -> false\nlet (uu___is_Fatal_MissingImplicitArguments : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_MissingImplicitArguments -> true\n    | uu___ -> false\nlet (uu___is_Fatal_MissingInterface : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_MissingInterface -> true | uu___ -> false\nlet (uu___is_Fatal_MissingNameInBinder : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_MissingNameInBinder -> true | uu___ -> false\nlet (uu___is_Fatal_MissingPrimsModule : raw_error -> Prims.bool) =\n  fun 
projectee ->\n    match projectee with | Fatal_MissingPrimsModule -> true | uu___ -> false\nlet (uu___is_Fatal_MissingQuantifierBinder : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_MissingQuantifierBinder -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ModuleExpected : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_ModuleExpected -> true | uu___ -> false\nlet (uu___is_Fatal_ModuleFileNotFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_ModuleFileNotFound -> true | uu___ -> false\nlet (uu___is_Fatal_ModuleFirstStatement : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ModuleFirstStatement -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ModuleNotFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_ModuleNotFound -> true | uu___ -> false\nlet (uu___is_Fatal_ModuleOrFileNotFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ModuleOrFileNotFound -> true\n    | uu___ -> false\nlet (uu___is_Fatal_MonadAlreadyDefined : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_MonadAlreadyDefined -> true | uu___ -> false\nlet (uu___is_Fatal_MoreThanOneDeclaration : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_MoreThanOneDeclaration -> true\n    | uu___ -> false\nlet (uu___is_Fatal_MultipleLetBinding : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_MultipleLetBinding -> true | uu___ -> false\nlet (uu___is_Fatal_NameNotFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_NameNotFound -> true | uu___ -> false\nlet (uu___is_Fatal_NameSpaceNotFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_NameSpaceNotFound -> true | uu___ -> false\nlet 
(uu___is_Fatal_NegativeUniverseConstFatal_NotSupported :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_NegativeUniverseConstFatal_NotSupported -> true\n    | uu___ -> false\nlet (uu___is_Fatal_NoFileProvided : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_NoFileProvided -> true | uu___ -> false\nlet (uu___is_Fatal_NonInductiveInMutuallyDefinedType :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_NonInductiveInMutuallyDefinedType -> true\n    | uu___ -> false\nlet (uu___is_Fatal_NonLinearPatternNotPermitted : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_NonLinearPatternNotPermitted -> true\n    | uu___ -> false\nlet (uu___is_Fatal_NonLinearPatternVars : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_NonLinearPatternVars -> true\n    | uu___ -> false\nlet (uu___is_Fatal_NonSingletonTopLevel : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_NonSingletonTopLevel -> true\n    | uu___ -> false\nlet (uu___is_Fatal_NonSingletonTopLevelModule : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_NonSingletonTopLevelModule -> true\n    | uu___ -> false\nlet (uu___is_Error_NonTopRecFunctionNotFullyEncoded :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_NonTopRecFunctionNotFullyEncoded -> true\n    | uu___ -> false\nlet (uu___is_Fatal_NonTrivialPreConditionInPrims : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_NonTrivialPreConditionInPrims -> true\n    | uu___ -> false\nlet (uu___is_Fatal_NonVariableInductiveTypeParameter :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_NonVariableInductiveTypeParameter -> true\n    | uu___ -> false\nlet (uu___is_Fatal_NotApplicationOrFv : raw_error -> 
Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_NotApplicationOrFv -> true | uu___ -> false\nlet (uu___is_Fatal_NotEnoughArgsToEffect : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_NotEnoughArgsToEffect -> true\n    | uu___ -> false\nlet (uu___is_Fatal_NotEnoughArgumentsForEffect : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_NotEnoughArgumentsForEffect -> true\n    | uu___ -> false\nlet (uu___is_Fatal_NotFunctionType : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_NotFunctionType -> true | uu___ -> false\nlet (uu___is_Fatal_NotSupported : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_NotSupported -> true | uu___ -> false\nlet (uu___is_Fatal_NotTopLevelModule : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_NotTopLevelModule -> true | uu___ -> false\nlet (uu___is_Fatal_NotValidFStarFile : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_NotValidFStarFile -> true | uu___ -> false\nlet (uu___is_Fatal_NotValidIncludeDirectory : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_NotValidIncludeDirectory -> true\n    | uu___ -> false\nlet (uu___is_Fatal_OneModulePerFile : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_OneModulePerFile -> true | uu___ -> false\nlet (uu___is_Fatal_OpenGoalsInSynthesis : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_OpenGoalsInSynthesis -> true\n    | uu___ -> false\nlet (uu___is_Fatal_OptionsNotCompatible : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_OptionsNotCompatible -> true\n    | uu___ -> false\nlet (uu___is_Fatal_OutOfOrder : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_OutOfOrder -> true | uu___ -> false\nlet 
(uu___is_Fatal_ParseErrors : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_ParseErrors -> true | uu___ -> false\nlet (uu___is_Fatal_ParseItError : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_ParseItError -> true | uu___ -> false\nlet (uu___is_Fatal_PolyTypeExpected : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_PolyTypeExpected -> true | uu___ -> false\nlet (uu___is_Fatal_PossibleInfiniteTyp : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_PossibleInfiniteTyp -> true | uu___ -> false\nlet (uu___is_Fatal_PreModuleMismatch : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_PreModuleMismatch -> true | uu___ -> false\nlet (uu___is_Fatal_QulifierListNotPermitted : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_QulifierListNotPermitted -> true\n    | uu___ -> false\nlet (uu___is_Fatal_RecursiveFunctionLiteral : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_RecursiveFunctionLiteral -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ReflectOnlySupportedOnEffects : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ReflectOnlySupportedOnEffects -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ReservedPrefix : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_ReservedPrefix -> true | uu___ -> false\nlet (uu___is_Fatal_SMTOutputParseError : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_SMTOutputParseError -> true | uu___ -> false\nlet (uu___is_Fatal_SMTSolverError : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_SMTSolverError -> true | uu___ -> false\nlet (uu___is_Fatal_SyntaxError : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_SyntaxError -> true | uu___ -> 
false\nlet (uu___is_Fatal_SynthByTacticError : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_SynthByTacticError -> true | uu___ -> false\nlet (uu___is_Fatal_TacticGotStuck : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_TacticGotStuck -> true | uu___ -> false\nlet (uu___is_Fatal_TcOneFragmentFailed : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_TcOneFragmentFailed -> true | uu___ -> false\nlet (uu___is_Fatal_TermOutsideOfDefLanguage : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_TermOutsideOfDefLanguage -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ToManyArgumentToFunction : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ToManyArgumentToFunction -> true\n    | uu___ -> false\nlet (uu___is_Fatal_TooManyOrTooFewFileMatch : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_TooManyOrTooFewFileMatch -> true\n    | uu___ -> false\nlet (uu___is_Fatal_TooManyPatternArguments : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_TooManyPatternArguments -> true\n    | uu___ -> false\nlet (uu___is_Fatal_TooManyUniverse : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_TooManyUniverse -> true | uu___ -> false\nlet (uu___is_Fatal_TypeMismatch : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_TypeMismatch -> true | uu___ -> false\nlet (uu___is_Fatal_TypeWithinPatternsAllowedOnVariablesOnly :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_TypeWithinPatternsAllowedOnVariablesOnly -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnableToReadFile : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnableToReadFile -> true | uu___ -> false\nlet (uu___is_Fatal_UnepxectedOrUnboundOperator : 
raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnepxectedOrUnboundOperator -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedBinder : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnexpectedBinder -> true | uu___ -> false\nlet (uu___is_Fatal_UnexpectedBindShape : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnexpectedBindShape -> true | uu___ -> false\nlet (uu___is_Fatal_UnexpectedChar : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnexpectedChar -> true | uu___ -> false\nlet (uu___is_Fatal_UnexpectedComputationTypeForLetRec :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedComputationTypeForLetRec -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedConstructorType : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedConstructorType -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedDataConstructor : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedDataConstructor -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedEffect : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnexpectedEffect -> true | uu___ -> false\nlet (uu___is_Fatal_UnexpectedEmptyRecord : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedEmptyRecord -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedExpressionType : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedExpressionType -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedFunctionParameterType : raw_error -> Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedFunctionParameterType -> true\n    | uu___ -> false\nlet 
(uu___is_Fatal_UnexpectedGeneralizedUniverse : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedGeneralizedUniverse -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedGTotForLetRec : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedGTotForLetRec -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedGuard : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnexpectedGuard -> true | uu___ -> false\nlet (uu___is_Fatal_UnexpectedIdentifier : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedIdentifier -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedImplicitArgument : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedImplicitArgument -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedImplictArgument : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedImplictArgument -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedInductivetype : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedInductivetype -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedLetBinding : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedLetBinding -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedModuleDeclaration : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedModuleDeclaration -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedNumberOfUniverse : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedNumberOfUniverse -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedNumericLiteral : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | 
Fatal_UnexpectedNumericLiteral -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedPattern : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnexpectedPattern -> true | uu___ -> false\nlet (uu___is_Fatal_UnexpectedPosition : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnexpectedPosition -> true | uu___ -> false\nlet (uu___is_Fatal_UnExpectedPreCondition : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnExpectedPreCondition -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedReturnShape : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedReturnShape -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedSignatureForMonad : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedSignatureForMonad -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedTerm : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnexpectedTerm -> true | uu___ -> false\nlet (uu___is_Fatal_UnexpectedTermInUniverse : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedTermInUniverse -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedTermType : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnexpectedTermType -> true | uu___ -> false\nlet (uu___is_Fatal_UnexpectedTermVQuote : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedTermVQuote -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedUniversePolymorphicReturn :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedUniversePolymorphicReturn -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnexpectedUniverseVariable : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | 
Fatal_UnexpectedUniverseVariable -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnfoldableDeprecated : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnfoldableDeprecated -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnificationNotWellFormed : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnificationNotWellFormed -> true\n    | uu___ -> false\nlet (uu___is_Fatal_Uninstantiated : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_Uninstantiated -> true | uu___ -> false\nlet (uu___is_Error_UninstantiatedUnificationVarInTactic :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_UninstantiatedUnificationVarInTactic -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UninstantiatedVarInTactic : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UninstantiatedVarInTactic -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UniverseMightContainSumOfTwoUnivVars :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UniverseMightContainSumOfTwoUnivVars -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UniversePolymorphicInnerLetBound :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UniversePolymorphicInnerLetBound -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnknownAttribute : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnknownAttribute -> true | uu___ -> false\nlet (uu___is_Fatal_UnknownToolForDep : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnknownToolForDep -> true | uu___ -> false\nlet (uu___is_Fatal_UnrecognizedExtension : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnrecognizedExtension -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnresolvedPatternVar : raw_error -> Prims.bool) =\n  
fun projectee ->\n    match projectee with\n    | Fatal_UnresolvedPatternVar -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnsupportedConstant : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UnsupportedConstant -> true | uu___ -> false\nlet (uu___is_Fatal_UnsupportedDisjuctivePatterns : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnsupportedDisjuctivePatterns -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UnsupportedQualifier : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnsupportedQualifier -> true\n    | uu___ -> false\nlet (uu___is_Fatal_UserTacticFailure : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_UserTacticFailure -> true | uu___ -> false\nlet (uu___is_Fatal_ValueRestriction : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_ValueRestriction -> true | uu___ -> false\nlet (uu___is_Fatal_VariableNotFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_VariableNotFound -> true | uu___ -> false\nlet (uu___is_Fatal_WrongBodyTypeForReturnWP : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_WrongBodyTypeForReturnWP -> true\n    | uu___ -> false\nlet (uu___is_Fatal_WrongDataAppHeadFormat : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_WrongDataAppHeadFormat -> true\n    | uu___ -> false\nlet (uu___is_Fatal_WrongDefinitionOrder : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_WrongDefinitionOrder -> true\n    | uu___ -> false\nlet (uu___is_Fatal_WrongResultTypeAfterConstrutor : raw_error -> Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | Fatal_WrongResultTypeAfterConstrutor -> true\n    | uu___ -> false\nlet (uu___is_Fatal_WrongTerm : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee 
with | Fatal_WrongTerm -> true | uu___ -> false\nlet (uu___is_Fatal_WhenClauseNotSupported : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_WhenClauseNotSupported -> true\n    | uu___ -> false\nlet (uu___is_Unused01 : raw_error -> Prims.bool) =\n  fun projectee -> match projectee with | Unused01 -> true | uu___ -> false\nlet (uu___is_Warning_AddImplicitAssumeNewQualifier : raw_error -> Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | Warning_AddImplicitAssumeNewQualifier -> true\n    | uu___ -> false\nlet (uu___is_Warning_AdmitWithoutDefinition : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_AdmitWithoutDefinition -> true\n    | uu___ -> false\nlet (uu___is_Warning_CachedFile : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_CachedFile -> true | uu___ -> false\nlet (uu___is_Warning_DefinitionNotTranslated : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_DefinitionNotTranslated -> true\n    | uu___ -> false\nlet (uu___is_Warning_DependencyFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_DependencyFound -> true | uu___ -> false\nlet (uu___is_Warning_DeprecatedEqualityOnBinder : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_DeprecatedEqualityOnBinder -> true\n    | uu___ -> false\nlet (uu___is_Warning_DeprecatedOpaqueQualifier : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_DeprecatedOpaqueQualifier -> true\n    | uu___ -> false\nlet (uu___is_Warning_DocOverwrite : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_DocOverwrite -> true | uu___ -> false\nlet (uu___is_Warning_FileNotWritten : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_FileNotWritten -> true | uu___ -> false\nlet 
(uu___is_Warning_Filtered : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_Filtered -> true | uu___ -> false\nlet (uu___is_Warning_FunctionLiteralPrecisionLoss : raw_error -> Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | Warning_FunctionLiteralPrecisionLoss -> true\n    | uu___ -> false\nlet (uu___is_Warning_FunctionNotExtacted : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_FunctionNotExtacted -> true\n    | uu___ -> false\nlet (uu___is_Warning_HintFailedToReplayProof : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_HintFailedToReplayProof -> true\n    | uu___ -> false\nlet (uu___is_Warning_HitReplayFailed : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_HitReplayFailed -> true | uu___ -> false\nlet (uu___is_Warning_IDEIgnoreCodeGen : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_IDEIgnoreCodeGen -> true | uu___ -> false\nlet (uu___is_Warning_IllFormedGoal : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_IllFormedGoal -> true | uu___ -> false\nlet (uu___is_Warning_InaccessibleArgument : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_InaccessibleArgument -> true\n    | uu___ -> false\nlet (uu___is_Warning_IncoherentImplicitQualifier : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_IncoherentImplicitQualifier -> true\n    | uu___ -> false\nlet (uu___is_Warning_IrrelevantQualifierOnArgumentToReflect :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_IrrelevantQualifierOnArgumentToReflect -> true\n    | uu___ -> false\nlet (uu___is_Warning_IrrelevantQualifierOnArgumentToReify :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | 
Warning_IrrelevantQualifierOnArgumentToReify -> true\n    | uu___ -> false\nlet (uu___is_Warning_MalformedWarnErrorList : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_MalformedWarnErrorList -> true\n    | uu___ -> false\nlet (uu___is_Warning_MetaAlienNotATmUnknown : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_MetaAlienNotATmUnknown -> true\n    | uu___ -> false\nlet (uu___is_Warning_MultipleAscriptions : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_MultipleAscriptions -> true\n    | uu___ -> false\nlet (uu___is_Warning_NondependentUserDefinedDataType :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_NondependentUserDefinedDataType -> true\n    | uu___ -> false\nlet (uu___is_Warning_NonListLiteralSMTPattern : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_NonListLiteralSMTPattern -> true\n    | uu___ -> false\nlet (uu___is_Warning_NormalizationFailure : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_NormalizationFailure -> true\n    | uu___ -> false\nlet (uu___is_Warning_NotDependentArrow : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_NotDependentArrow -> true | uu___ -> false\nlet (uu___is_Warning_NotEmbedded : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_NotEmbedded -> true | uu___ -> false\nlet (uu___is_Warning_PatternMissingBoundVar : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_PatternMissingBoundVar -> true\n    | uu___ -> false\nlet (uu___is_Warning_RecursiveDependency : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_RecursiveDependency -> true\n    | uu___ -> false\nlet (uu___is_Warning_RedundantExplicitCurrying : raw_error -> Prims.bool) =\n  fun 
projectee ->\n    match projectee with\n    | Warning_RedundantExplicitCurrying -> true\n    | uu___ -> false\nlet (uu___is_Warning_SMTPatTDeprecated : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_SMTPatTDeprecated -> true | uu___ -> false\nlet (uu___is_Warning_SMTPatternIllFormed : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_SMTPatternIllFormed -> true\n    | uu___ -> false\nlet (uu___is_Warning_TopLevelEffect : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_TopLevelEffect -> true | uu___ -> false\nlet (uu___is_Warning_UnboundModuleReference : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_UnboundModuleReference -> true\n    | uu___ -> false\nlet (uu___is_Warning_UnexpectedFile : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_UnexpectedFile -> true | uu___ -> false\nlet (uu___is_Warning_UnexpectedFsTypApp : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_UnexpectedFsTypApp -> true\n    | uu___ -> false\nlet (uu___is_Warning_UnexpectedZ3Output : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_UnexpectedZ3Output -> true\n    | uu___ -> false\nlet (uu___is_Warning_UnprotectedTerm : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_UnprotectedTerm -> true | uu___ -> false\nlet (uu___is_Warning_UnrecognizedAttribute : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_UnrecognizedAttribute -> true\n    | uu___ -> false\nlet (uu___is_Warning_UpperBoundCandidateAlreadyVisited :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_UpperBoundCandidateAlreadyVisited -> true\n    | uu___ -> false\nlet (uu___is_Warning_UseDefaultEffect : raw_error -> Prims.bool) =\n  fun projectee ->\n    match 
projectee with | Warning_UseDefaultEffect -> true | uu___ -> false\nlet (uu___is_Warning_WrongErrorLocation : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_WrongErrorLocation -> true\n    | uu___ -> false\nlet (uu___is_Warning_Z3InvocationWarning : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_Z3InvocationWarning -> true\n    | uu___ -> false\nlet (uu___is_Warning_PluginNotImplemented : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_PluginNotImplemented -> true\n    | uu___ -> false\nlet (uu___is_Warning_MissingInterfaceOrImplementation :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_MissingInterfaceOrImplementation -> true\n    | uu___ -> false\nlet (uu___is_Warning_ConstructorBuildsUnexpectedType :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_ConstructorBuildsUnexpectedType -> true\n    | uu___ -> false\nlet (uu___is_Warning_ModuleOrFileNotFoundWarning : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_ModuleOrFileNotFoundWarning -> true\n    | uu___ -> false\nlet (uu___is_Error_NoLetMutable : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_NoLetMutable -> true | uu___ -> false\nlet (uu___is_Error_BadImplicit : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_BadImplicit -> true | uu___ -> false\nlet (uu___is_Warning_DeprecatedDefinition : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_DeprecatedDefinition -> true\n    | uu___ -> false\nlet (uu___is_Fatal_SMTEncodingArityMismatch : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_SMTEncodingArityMismatch -> true\n    | uu___ -> false\nlet (uu___is_Warning_Defensive : raw_error -> Prims.bool) =\n  fun projectee ->\n    
match projectee with | Warning_Defensive -> true | uu___ -> false\nlet (uu___is_Warning_CantInspect : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_CantInspect -> true | uu___ -> false\nlet (uu___is_Warning_NilGivenExplicitArgs : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_NilGivenExplicitArgs -> true\n    | uu___ -> false\nlet (uu___is_Warning_ConsAppliedExplicitArgs : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_ConsAppliedExplicitArgs -> true\n    | uu___ -> false\nlet (uu___is_Warning_UnembedBinderKnot : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_UnembedBinderKnot -> true | uu___ -> false\nlet (uu___is_Fatal_TacticProofRelevantGoal : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_TacticProofRelevantGoal -> true\n    | uu___ -> false\nlet (uu___is_Warning_TacAdmit : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_TacAdmit -> true | uu___ -> false\nlet (uu___is_Fatal_IncoherentPatterns : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_IncoherentPatterns -> true | uu___ -> false\nlet (uu___is_Error_NoSMTButNeeded : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_NoSMTButNeeded -> true | uu___ -> false\nlet (uu___is_Fatal_UnexpectedAntiquotation : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_UnexpectedAntiquotation -> true\n    | uu___ -> false\nlet (uu___is_Fatal_SplicedUndef : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_SplicedUndef -> true | uu___ -> false\nlet (uu___is_Fatal_SpliceUnembedFail : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_SpliceUnembedFail -> true | uu___ -> false\nlet (uu___is_Warning_ExtractionUnexpectedEffect : raw_error -> 
Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_ExtractionUnexpectedEffect -> true\n    | uu___ -> false\nlet (uu___is_Error_DidNotFail : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_DidNotFail -> true | uu___ -> false\nlet (uu___is_Warning_UnappliedFail : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_UnappliedFail -> true | uu___ -> false\nlet (uu___is_Warning_QuantifierWithoutPattern : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_QuantifierWithoutPattern -> true\n    | uu___ -> false\nlet (uu___is_Error_EmptyFailErrs : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_EmptyFailErrs -> true | uu___ -> false\nlet (uu___is_Warning_logicqualifier : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_logicqualifier -> true | uu___ -> false\nlet (uu___is_Fatal_CyclicDependence : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_CyclicDependence -> true | uu___ -> false\nlet (uu___is_Error_InductiveAnnotNotAType : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_InductiveAnnotNotAType -> true\n    | uu___ -> false\nlet (uu___is_Fatal_FriendInterface : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_FriendInterface -> true | uu___ -> false\nlet (uu___is_Error_CannotRedefineConst : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_CannotRedefineConst -> true | uu___ -> false\nlet (uu___is_Error_BadClassDecl : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_BadClassDecl -> true | uu___ -> false\nlet (uu___is_Error_BadInductiveParam : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_BadInductiveParam -> true | uu___ -> false\nlet (uu___is_Error_FieldShadow : raw_error -> 
Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_FieldShadow -> true | uu___ -> false\nlet (uu___is_Error_UnexpectedDM4FType : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_UnexpectedDM4FType -> true | uu___ -> false\nlet (uu___is_Fatal_EffectAbbreviationResultTypeMismatch :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_EffectAbbreviationResultTypeMismatch -> true\n    | uu___ -> false\nlet (uu___is_Error_AlreadyCachedAssertionFailure : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_AlreadyCachedAssertionFailure -> true\n    | uu___ -> false\nlet (uu___is_Error_MustEraseMissing : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_MustEraseMissing -> true | uu___ -> false\nlet (uu___is_Warning_EffectfulArgumentToErasedFunction :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_EffectfulArgumentToErasedFunction -> true\n    | uu___ -> false\nlet (uu___is_Fatal_EmptySurfaceLet : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_EmptySurfaceLet -> true | uu___ -> false\nlet (uu___is_Warning_UnexpectedCheckedFile : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_UnexpectedCheckedFile -> true\n    | uu___ -> false\nlet (uu___is_Fatal_ExtractionUnsupported : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_ExtractionUnsupported -> true\n    | uu___ -> false\nlet (uu___is_Warning_SMTErrorReason : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_SMTErrorReason -> true | uu___ -> false\nlet (uu___is_Warning_CoercionNotFound : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_CoercionNotFound -> true | uu___ -> false\nlet (uu___is_Error_QuakeFailed : raw_error -> Prims.bool) =\n  fun projectee ->\n   
 match projectee with | Error_QuakeFailed -> true | uu___ -> false\nlet (uu___is_Error_IllSMTPat : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_IllSMTPat -> true | uu___ -> false\nlet (uu___is_Error_IllScopedTerm : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_IllScopedTerm -> true | uu___ -> false\nlet (uu___is_Warning_UnusedLetRec : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_UnusedLetRec -> true | uu___ -> false\nlet (uu___is_Fatal_Effects_Ordering_Coherence : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Fatal_Effects_Ordering_Coherence -> true\n    | uu___ -> false\nlet (uu___is_Warning_BleedingEdge_Feature : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_BleedingEdge_Feature -> true\n    | uu___ -> false\nlet (uu___is_Warning_IgnoredBinding : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_IgnoredBinding -> true | uu___ -> false\nlet (uu___is_Warning_CouldNotReadHints : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_CouldNotReadHints -> true | uu___ -> false\nlet (uu___is_Fatal_BadUvar : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Fatal_BadUvar -> true | uu___ -> false\nlet (uu___is_Warning_WarnOnUse : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_WarnOnUse -> true | uu___ -> false\nlet (uu___is_Warning_DeprecatedAttributeSyntax : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_DeprecatedAttributeSyntax -> true\n    | uu___ -> false\nlet (uu___is_Warning_DeprecatedGeneric : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_DeprecatedGeneric -> true | uu___ -> false\nlet (uu___is_Error_BadSplice : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | 
Error_BadSplice -> true | uu___ -> false\nlet (uu___is_Error_UnexpectedUnresolvedUvar : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_UnexpectedUnresolvedUvar -> true\n    | uu___ -> false\nlet (uu___is_Warning_UnfoldPlugin : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_UnfoldPlugin -> true | uu___ -> false\nlet (uu___is_Error_LayeredMissingAnnot : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_LayeredMissingAnnot -> true | uu___ -> false\nlet (uu___is_Error_CallToErased : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_CallToErased -> true | uu___ -> false\nlet (uu___is_Error_ErasedCtor : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_ErasedCtor -> true | uu___ -> false\nlet (uu___is_Error_RemoveUnusedTypeParameter : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_RemoveUnusedTypeParameter -> true\n    | uu___ -> false\nlet (uu___is_Warning_NoMagicInFSharp : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_NoMagicInFSharp -> true | uu___ -> false\nlet (uu___is_Error_BadLetOpenRecord : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_BadLetOpenRecord -> true | uu___ -> false\nlet (uu___is_Error_UnexpectedTypeclassInstance : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Error_UnexpectedTypeclassInstance -> true\n    | uu___ -> false\nlet (uu___is_Warning_AmbiguousResolveImplicitsHook : raw_error -> Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | Warning_AmbiguousResolveImplicitsHook -> true\n    | uu___ -> false\nlet (uu___is_Warning_SplitAndRetryQueries : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_SplitAndRetryQueries -> true\n    | uu___ -> false\nlet 
(uu___is_Warning_DeprecatedLightDoNotation : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_DeprecatedLightDoNotation -> true\n    | uu___ -> false\nlet (uu___is_Warning_FailedToCheckInitialTacticGoal :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_FailedToCheckInitialTacticGoal -> true\n    | uu___ -> false\nlet (uu___is_Warning_Adhoc_IndexedEffect_Combinator :\n  raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Warning_Adhoc_IndexedEffect_Combinator -> true\n    | uu___ -> false\nlet (uu___is_Error_PluginDynlink : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_PluginDynlink -> true | uu___ -> false\nlet (uu___is_Error_InternalQualifier : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Error_InternalQualifier -> true | uu___ -> false\nlet (uu___is_Warning_NameEscape : raw_error -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Warning_NameEscape -> true | uu___ -> false\ntype error_setting = (raw_error * error_flag * Prims.int)\nlet (default_settings : error_setting Prims.list) =\n  [(Error_DependencyAnalysisFailed, CAlwaysError, Prims.int_zero);\n  (Error_IDETooManyPops, CAlwaysError, Prims.int_one);\n  (Error_IDEUnrecognized, CAlwaysError, (Prims.of_int (2)));\n  (Error_InductiveTypeNotSatisfyPositivityCondition, CAlwaysError,\n    (Prims.of_int (3)));\n  (Error_InvalidUniverseVar, CAlwaysError, (Prims.of_int (4)));\n  (Error_MissingFileName, CAlwaysError, (Prims.of_int (5)));\n  (Error_ModuleFileNameMismatch, CAlwaysError, (Prims.of_int (6)));\n  (Error_OpPlusInUniverse, CAlwaysError, (Prims.of_int (7)));\n  (Error_OutOfRange, CAlwaysError, (Prims.of_int (8)));\n  (Error_ProofObligationFailed, CError, (Prims.of_int (9)));\n  (Error_TooManyFiles, CAlwaysError, (Prims.of_int (10)));\n  (Error_TypeCheckerFailToProve, CAlwaysError, (Prims.of_int (11)));\n  (Error_TypeError, 
CAlwaysError, (Prims.of_int (12)));\n  (Error_UncontrainedUnificationVar, CAlwaysError, (Prims.of_int (13)));\n  (Error_UnexpectedGTotComputation, CAlwaysError, (Prims.of_int (14)));\n  (Error_UnexpectedInstance, CAlwaysError, (Prims.of_int (15)));\n  (Error_UnknownFatal_AssertionFailure, CError, (Prims.of_int (16)));\n  (Error_Z3InvocationError, CAlwaysError, (Prims.of_int (17)));\n  (Error_IDEAssertionFailure, CAlwaysError, (Prims.of_int (18)));\n  (Error_Z3SolverError, CError, (Prims.of_int (19)));\n  (Fatal_AbstractTypeDeclarationInInterface, CFatal, (Prims.of_int (20)));\n  (Fatal_ActionMustHaveFunctionType, CFatal, (Prims.of_int (21)));\n  (Fatal_AlreadyDefinedTopLevelDeclaration, CFatal, (Prims.of_int (22)));\n  (Fatal_ArgumentLengthMismatch, CFatal, (Prims.of_int (23)));\n  (Fatal_AssertionFailure, CFatal, (Prims.of_int (24)));\n  (Fatal_AssignToImmutableValues, CFatal, (Prims.of_int (25)));\n  (Fatal_AssumeValInInterface, CFatal, (Prims.of_int (26)));\n  (Fatal_BadlyInstantiatedSynthByTactic, CFatal, (Prims.of_int (27)));\n  (Fatal_BadSignatureShape, CFatal, (Prims.of_int (28)));\n  (Fatal_BinderAndArgsLengthMismatch, CFatal, (Prims.of_int (29)));\n  (Fatal_BothValAndLetInInterface, CFatal, (Prims.of_int (30)));\n  (Fatal_CardinalityConstraintViolated, CFatal, (Prims.of_int (31)));\n  (Fatal_ComputationNotTotal, CFatal, (Prims.of_int (32)));\n  (Fatal_ComputationTypeNotAllowed, CFatal, (Prims.of_int (33)));\n  (Fatal_ComputedTypeNotMatchAnnotation, CFatal, (Prims.of_int (34)));\n  (Fatal_ConstructorArgLengthMismatch, CFatal, (Prims.of_int (35)));\n  (Fatal_ConstructorFailedCheck, CFatal, (Prims.of_int (36)));\n  (Fatal_ConstructorNotFound, CFatal, (Prims.of_int (37)));\n  (Fatal_ConstsructorBuildWrongType, CFatal, (Prims.of_int (38)));\n  (Fatal_CycleInRecTypeAbbreviation, CFatal, (Prims.of_int (39)));\n  (Fatal_DataContructorNotFound, CFatal, (Prims.of_int (40)));\n  (Fatal_DefaultQualifierNotAllowedOnEffects, CFatal, (Prims.of_int (41)));\n  
(Fatal_DefinitionNotFound, CFatal, (Prims.of_int (42)));\n  (Fatal_DisjuctivePatternVarsMismatch, CFatal, (Prims.of_int (43)));\n  (Fatal_DivergentComputationCannotBeIncludedInTotal, CFatal,\n    (Prims.of_int (44)));\n  (Fatal_DuplicateInImplementation, CFatal, (Prims.of_int (45)));\n  (Fatal_DuplicateModuleOrInterface, CFatal, (Prims.of_int (46)));\n  (Fatal_DuplicateTopLevelNames, CFatal, (Prims.of_int (47)));\n  (Fatal_DuplicateTypeAnnotationAndValDecl, CFatal, (Prims.of_int (48)));\n  (Fatal_EffectCannotBeReified, CFatal, (Prims.of_int (49)));\n  (Fatal_EffectConstructorNotFullyApplied, CFatal, (Prims.of_int (50)));\n  (Fatal_EffectfulAndPureComputationMismatch, CFatal, (Prims.of_int (51)));\n  (Fatal_EffectNotFound, CFatal, (Prims.of_int (52)));\n  (Fatal_EffectsCannotBeComposed, CFatal, (Prims.of_int (53)));\n  (Fatal_ErrorInSolveDeferredConstraints, CFatal, (Prims.of_int (54)));\n  (Fatal_ErrorsReported, CFatal, (Prims.of_int (55)));\n  (Fatal_EscapedBoundVar, CFatal, (Prims.of_int (56)));\n  (Fatal_ExpectedArrowAnnotatedType, CFatal, (Prims.of_int (57)));\n  (Fatal_ExpectedGhostExpression, CFatal, (Prims.of_int (58)));\n  (Fatal_ExpectedPureExpression, CFatal, (Prims.of_int (59)));\n  (Fatal_ExpectNormalizedEffect, CFatal, (Prims.of_int (60)));\n  (Fatal_ExpectTermGotFunction, CFatal, (Prims.of_int (61)));\n  (Fatal_ExpectTrivialPreCondition, CFatal, (Prims.of_int (62)));\n  (Fatal_FailToExtractNativeTactic, CFatal, (Prims.of_int (63)));\n  (Fatal_FailToCompileNativeTactic, CFatal, (Prims.of_int (64)));\n  (Fatal_FailToProcessPragma, CFatal, (Prims.of_int (65)));\n  (Fatal_FailToResolveImplicitArgument, CFatal, (Prims.of_int (66)));\n  (Fatal_FailToSolveUniverseInEquality, CFatal, (Prims.of_int (67)));\n  (Fatal_FieldsNotBelongToSameRecordType, CFatal, (Prims.of_int (68)));\n  (Fatal_ForbiddenReferenceToCurrentModule, CFatal, (Prims.of_int (69)));\n  (Fatal_FreeVariables, CFatal, (Prims.of_int (70)));\n  (Fatal_FunctionTypeExpected, CFatal, (Prims.of_int 
(71)));\n  (Fatal_IdentifierNotFound, CFatal, (Prims.of_int (72)));\n  (Fatal_IllAppliedConstant, CFatal, (Prims.of_int (73)));\n  (Fatal_IllegalCharInByteArray, CFatal, (Prims.of_int (74)));\n  (Fatal_IllegalCharInOperatorName, CFatal, (Prims.of_int (75)));\n  (Fatal_IllTyped, CFatal, (Prims.of_int (76)));\n  (Fatal_ImpossibleAbbrevLidBundle, CFatal, (Prims.of_int (77)));\n  (Fatal_ImpossibleAbbrevRenameBundle, CFatal, (Prims.of_int (78)));\n  (Fatal_ImpossibleInductiveWithAbbrev, CFatal, (Prims.of_int (79)));\n  (Fatal_ImpossiblePrePostAbs, CFatal, (Prims.of_int (80)));\n  (Fatal_ImpossiblePrePostArrow, CFatal, (Prims.of_int (81)));\n  (Fatal_ImpossibleToGenerateDMEffect, CFatal, (Prims.of_int (82)));\n  (Fatal_ImpossibleTypeAbbrevBundle, CFatal, (Prims.of_int (83)));\n  (Fatal_ImpossibleTypeAbbrevSigeltBundle, CFatal, (Prims.of_int (84)));\n  (Fatal_IncludeModuleNotPrepared, CFatal, (Prims.of_int (85)));\n  (Fatal_IncoherentInlineUniverse, CFatal, (Prims.of_int (86)));\n  (Fatal_IncompatibleKinds, CFatal, (Prims.of_int (87)));\n  (Fatal_IncompatibleNumberOfTypes, CFatal, (Prims.of_int (88)));\n  (Fatal_IncompatibleSetOfUniverse, CFatal, (Prims.of_int (89)));\n  (Fatal_IncompatibleUniverse, CFatal, (Prims.of_int (90)));\n  (Fatal_InconsistentImplicitArgumentAnnotation, CFatal, (Prims.of_int (91)));\n  (Fatal_InconsistentImplicitQualifier, CFatal, (Prims.of_int (92)));\n  (Fatal_InconsistentQualifierAnnotation, CFatal, (Prims.of_int (93)));\n  (Fatal_InferredTypeCauseVarEscape, CFatal, (Prims.of_int (94)));\n  (Fatal_InlineRenamedAsUnfold, CFatal, (Prims.of_int (95)));\n  (Fatal_InsufficientPatternArguments, CFatal, (Prims.of_int (96)));\n  (Fatal_InterfaceAlreadyProcessed, CFatal, (Prims.of_int (97)));\n  (Fatal_InterfaceNotImplementedByModule, CFatal, (Prims.of_int (98)));\n  (Fatal_InterfaceWithTypeImplementation, CFatal, (Prims.of_int (99)));\n  (Fatal_InvalidFloatingPointNumber, CFatal, (Prims.of_int (100)));\n  (Fatal_InvalidFSDocKeyword, CFatal, 
(Prims.of_int (101)));\n  (Fatal_InvalidIdentifier, CFatal, (Prims.of_int (102)));\n  (Fatal_InvalidLemmaArgument, CFatal, (Prims.of_int (103)));\n  (Fatal_InvalidNumericLiteral, CFatal, (Prims.of_int (104)));\n  (Fatal_InvalidRedefinitionOfLexT, CFatal, (Prims.of_int (105)));\n  (Fatal_InvalidUnicodeInStringLiteral, CFatal, (Prims.of_int (106)));\n  (Fatal_InvalidUTF8Encoding, CFatal, (Prims.of_int (107)));\n  (Fatal_InvalidWarnErrorSetting, CFatal, (Prims.of_int (108)));\n  (Fatal_LetBoundMonadicMismatch, CFatal, (Prims.of_int (109)));\n  (Fatal_LetMutableForVariablesOnly, CFatal, (Prims.of_int (110)));\n  (Fatal_LetOpenModuleOnly, CFatal, (Prims.of_int (111)));\n  (Fatal_LetRecArgumentMismatch, CFatal, (Prims.of_int (112)));\n  (Fatal_MalformedActionDeclaration, CFatal, (Prims.of_int (113)));\n  (Fatal_MismatchedPatternType, CFatal, (Prims.of_int (114)));\n  (Fatal_MismatchUniversePolymorphic, CFatal, (Prims.of_int (115)));\n  (Fatal_MissingDataConstructor, CFatal, (Prims.of_int (116)));\n  (Fatal_MissingExposeInterfacesOption, CFatal, (Prims.of_int (117)));\n  (Fatal_MissingFieldInRecord, CFatal, (Prims.of_int (118)));\n  (Fatal_MissingImplementation, CFatal, (Prims.of_int (119)));\n  (Fatal_MissingImplicitArguments, CFatal, (Prims.of_int (120)));\n  (Fatal_MissingInterface, CFatal, (Prims.of_int (121)));\n  (Fatal_MissingNameInBinder, CFatal, (Prims.of_int (122)));\n  (Fatal_MissingPrimsModule, CFatal, (Prims.of_int (123)));\n  (Fatal_MissingQuantifierBinder, CFatal, (Prims.of_int (124)));\n  (Fatal_ModuleExpected, CFatal, (Prims.of_int (125)));\n  (Fatal_ModuleFileNotFound, CFatal, (Prims.of_int (126)));\n  (Fatal_ModuleFirstStatement, CFatal, (Prims.of_int (127)));\n  (Fatal_ModuleNotFound, CFatal, (Prims.of_int (128)));\n  (Fatal_ModuleOrFileNotFound, CFatal, (Prims.of_int (129)));\n  (Fatal_MonadAlreadyDefined, CFatal, (Prims.of_int (130)));\n  (Fatal_MoreThanOneDeclaration, CFatal, (Prims.of_int (131)));\n  (Fatal_MultipleLetBinding, CFatal, (Prims.of_int 
(132)));\n  (Fatal_NameNotFound, CFatal, (Prims.of_int (133)));\n  (Fatal_NameSpaceNotFound, CFatal, (Prims.of_int (134)));\n  (Fatal_NegativeUniverseConstFatal_NotSupported, CFatal,\n    (Prims.of_int (135)));\n  (Fatal_NoFileProvided, CFatal, (Prims.of_int (136)));\n  (Fatal_NonInductiveInMutuallyDefinedType, CFatal, (Prims.of_int (137)));\n  (Fatal_NonLinearPatternNotPermitted, CFatal, (Prims.of_int (138)));\n  (Fatal_NonLinearPatternVars, CFatal, (Prims.of_int (139)));\n  (Fatal_NonSingletonTopLevel, CFatal, (Prims.of_int (140)));\n  (Fatal_NonSingletonTopLevelModule, CFatal, (Prims.of_int (141)));\n  (Error_NonTopRecFunctionNotFullyEncoded, CError, (Prims.of_int (142)));\n  (Fatal_NonTrivialPreConditionInPrims, CFatal, (Prims.of_int (143)));\n  (Fatal_NonVariableInductiveTypeParameter, CFatal, (Prims.of_int (144)));\n  (Fatal_NotApplicationOrFv, CFatal, (Prims.of_int (145)));\n  (Fatal_NotEnoughArgsToEffect, CFatal, (Prims.of_int (146)));\n  (Fatal_NotEnoughArgumentsForEffect, CFatal, (Prims.of_int (147)));\n  (Fatal_NotFunctionType, CFatal, (Prims.of_int (148)));\n  (Fatal_NotSupported, CFatal, (Prims.of_int (149)));\n  (Fatal_NotTopLevelModule, CFatal, (Prims.of_int (150)));\n  (Fatal_NotValidFStarFile, CFatal, (Prims.of_int (151)));\n  (Fatal_NotValidIncludeDirectory, CFatal, (Prims.of_int (152)));\n  (Fatal_OneModulePerFile, CFatal, (Prims.of_int (153)));\n  (Fatal_OpenGoalsInSynthesis, CFatal, (Prims.of_int (154)));\n  (Fatal_OptionsNotCompatible, CFatal, (Prims.of_int (155)));\n  (Fatal_OutOfOrder, CFatal, (Prims.of_int (156)));\n  (Fatal_ParseErrors, CFatal, (Prims.of_int (157)));\n  (Fatal_ParseItError, CFatal, (Prims.of_int (158)));\n  (Fatal_PolyTypeExpected, CFatal, (Prims.of_int (159)));\n  (Fatal_PossibleInfiniteTyp, CFatal, (Prims.of_int (160)));\n  (Fatal_PreModuleMismatch, CFatal, (Prims.of_int (161)));\n  (Fatal_QulifierListNotPermitted, CFatal, (Prims.of_int (162)));\n  (Fatal_RecursiveFunctionLiteral, CFatal, (Prims.of_int (163)));\n  
(Fatal_ReflectOnlySupportedOnEffects, CFatal, (Prims.of_int (164)));\n  (Fatal_ReservedPrefix, CFatal, (Prims.of_int (165)));\n  (Fatal_SMTOutputParseError, CFatal, (Prims.of_int (166)));\n  (Fatal_SMTSolverError, CFatal, (Prims.of_int (167)));\n  (Fatal_SyntaxError, CFatal, (Prims.of_int (168)));\n  (Fatal_SynthByTacticError, CFatal, (Prims.of_int (169)));\n  (Fatal_TacticGotStuck, CFatal, (Prims.of_int (170)));\n  (Fatal_TcOneFragmentFailed, CFatal, (Prims.of_int (171)));\n  (Fatal_TermOutsideOfDefLanguage, CFatal, (Prims.of_int (172)));\n  (Fatal_ToManyArgumentToFunction, CFatal, (Prims.of_int (173)));\n  (Fatal_TooManyOrTooFewFileMatch, CFatal, (Prims.of_int (174)));\n  (Fatal_TooManyPatternArguments, CFatal, (Prims.of_int (175)));\n  (Fatal_TooManyUniverse, CFatal, (Prims.of_int (176)));\n  (Fatal_TypeMismatch, CFatal, (Prims.of_int (177)));\n  (Fatal_TypeWithinPatternsAllowedOnVariablesOnly, CFatal,\n    (Prims.of_int (178)));\n  (Fatal_UnableToReadFile, CFatal, (Prims.of_int (179)));\n  (Fatal_UnepxectedOrUnboundOperator, CFatal, (Prims.of_int (180)));\n  (Fatal_UnexpectedBinder, CFatal, (Prims.of_int (181)));\n  (Fatal_UnexpectedBindShape, CFatal, (Prims.of_int (182)));\n  (Fatal_UnexpectedChar, CFatal, (Prims.of_int (183)));\n  (Fatal_UnexpectedComputationTypeForLetRec, CFatal, (Prims.of_int (184)));\n  (Fatal_UnexpectedConstructorType, CFatal, (Prims.of_int (185)));\n  (Fatal_UnexpectedDataConstructor, CFatal, (Prims.of_int (186)));\n  (Fatal_UnexpectedEffect, CFatal, (Prims.of_int (187)));\n  (Fatal_UnexpectedEmptyRecord, CFatal, (Prims.of_int (188)));\n  (Fatal_UnexpectedExpressionType, CFatal, (Prims.of_int (189)));\n  (Fatal_UnexpectedFunctionParameterType, CFatal, (Prims.of_int (190)));\n  (Fatal_UnexpectedGeneralizedUniverse, CFatal, (Prims.of_int (191)));\n  (Fatal_UnexpectedGTotForLetRec, CFatal, (Prims.of_int (192)));\n  (Fatal_UnexpectedGuard, CFatal, (Prims.of_int (193)));\n  (Fatal_UnexpectedIdentifier, CFatal, (Prims.of_int (194)));\n  
(Fatal_UnexpectedImplicitArgument, CFatal, (Prims.of_int (195)));\n  (Fatal_UnexpectedImplictArgument, CFatal, (Prims.of_int (196)));\n  (Fatal_UnexpectedInductivetype, CFatal, (Prims.of_int (197)));\n  (Fatal_UnexpectedLetBinding, CFatal, (Prims.of_int (198)));\n  (Fatal_UnexpectedModuleDeclaration, CFatal, (Prims.of_int (199)));\n  (Fatal_UnexpectedNumberOfUniverse, CFatal, (Prims.of_int (200)));\n  (Fatal_UnexpectedNumericLiteral, CFatal, (Prims.of_int (201)));\n  (Fatal_UnexpectedPattern, CFatal, (Prims.of_int (203)));\n  (Fatal_UnexpectedPosition, CFatal, (Prims.of_int (204)));\n  (Fatal_UnExpectedPreCondition, CFatal, (Prims.of_int (205)));\n  (Fatal_UnexpectedReturnShape, CFatal, (Prims.of_int (206)));\n  (Fatal_UnexpectedSignatureForMonad, CFatal, (Prims.of_int (207)));\n  (Fatal_UnexpectedTerm, CFatal, (Prims.of_int (208)));\n  (Fatal_UnexpectedTermInUniverse, CFatal, (Prims.of_int (209)));\n  (Fatal_UnexpectedTermType, CFatal, (Prims.of_int (210)));\n  (Fatal_UnexpectedTermVQuote, CFatal, (Prims.of_int (211)));\n  (Fatal_UnexpectedUniversePolymorphicReturn, CFatal, (Prims.of_int (212)));\n  (Fatal_UnexpectedUniverseVariable, CFatal, (Prims.of_int (213)));\n  (Fatal_UnfoldableDeprecated, CFatal, (Prims.of_int (214)));\n  (Fatal_UnificationNotWellFormed, CFatal, (Prims.of_int (215)));\n  (Fatal_Uninstantiated, CFatal, (Prims.of_int (216)));\n  (Error_UninstantiatedUnificationVarInTactic, CError, (Prims.of_int (217)));\n  (Fatal_UninstantiatedVarInTactic, CFatal, (Prims.of_int (218)));\n  (Fatal_UniverseMightContainSumOfTwoUnivVars, CFatal, (Prims.of_int (219)));\n  (Fatal_UniversePolymorphicInnerLetBound, CFatal, (Prims.of_int (220)));\n  (Fatal_UnknownAttribute, CFatal, (Prims.of_int (221)));\n  (Fatal_UnknownToolForDep, CFatal, (Prims.of_int (222)));\n  (Fatal_UnrecognizedExtension, CFatal, (Prims.of_int (223)));\n  (Fatal_UnresolvedPatternVar, CFatal, (Prims.of_int (224)));\n  (Fatal_UnsupportedConstant, CFatal, (Prims.of_int (225)));\n  
(Fatal_UnsupportedDisjuctivePatterns, CFatal, (Prims.of_int (226)));\n  (Fatal_UnsupportedQualifier, CFatal, (Prims.of_int (227)));\n  (Fatal_UserTacticFailure, CFatal, (Prims.of_int (228)));\n  (Fatal_ValueRestriction, CFatal, (Prims.of_int (229)));\n  (Fatal_VariableNotFound, CFatal, (Prims.of_int (230)));\n  (Fatal_WrongBodyTypeForReturnWP, CFatal, (Prims.of_int (231)));\n  (Fatal_WrongDataAppHeadFormat, CFatal, (Prims.of_int (232)));\n  (Fatal_WrongDefinitionOrder, CFatal, (Prims.of_int (233)));\n  (Fatal_WrongResultTypeAfterConstrutor, CFatal, (Prims.of_int (234)));\n  (Fatal_WrongTerm, CFatal, (Prims.of_int (235)));\n  (Fatal_WhenClauseNotSupported, CFatal, (Prims.of_int (236)));\n  (Unused01, CFatal, (Prims.of_int (237)));\n  (Warning_PluginNotImplemented, CError, (Prims.of_int (238)));\n  (Warning_AddImplicitAssumeNewQualifier, CWarning, (Prims.of_int (239)));\n  (Warning_AdmitWithoutDefinition, CWarning, (Prims.of_int (240)));\n  (Warning_CachedFile, CWarning, (Prims.of_int (241)));\n  (Warning_DefinitionNotTranslated, CWarning, (Prims.of_int (242)));\n  (Warning_DependencyFound, CWarning, (Prims.of_int (243)));\n  (Warning_DeprecatedEqualityOnBinder, CWarning, (Prims.of_int (244)));\n  (Warning_DeprecatedOpaqueQualifier, CWarning, (Prims.of_int (245)));\n  (Warning_DocOverwrite, CWarning, (Prims.of_int (246)));\n  (Warning_FileNotWritten, CWarning, (Prims.of_int (247)));\n  (Warning_Filtered, CWarning, (Prims.of_int (248)));\n  (Warning_FunctionLiteralPrecisionLoss, CWarning, (Prims.of_int (249)));\n  (Warning_FunctionNotExtacted, CWarning, (Prims.of_int (250)));\n  (Warning_HintFailedToReplayProof, CWarning, (Prims.of_int (251)));\n  (Warning_HitReplayFailed, CWarning, (Prims.of_int (252)));\n  (Warning_IDEIgnoreCodeGen, CWarning, (Prims.of_int (253)));\n  (Warning_IllFormedGoal, CWarning, (Prims.of_int (254)));\n  (Warning_InaccessibleArgument, CWarning, (Prims.of_int (255)));\n  (Warning_IncoherentImplicitQualifier, CWarning, (Prims.of_int (256)));\n  
(Warning_IrrelevantQualifierOnArgumentToReflect, CWarning,\n    (Prims.of_int (257)));\n  (Warning_IrrelevantQualifierOnArgumentToReify, CWarning,\n    (Prims.of_int (258)));\n  (Warning_MalformedWarnErrorList, CWarning, (Prims.of_int (259)));\n  (Warning_MetaAlienNotATmUnknown, CWarning, (Prims.of_int (260)));\n  (Warning_MultipleAscriptions, CWarning, (Prims.of_int (261)));\n  (Warning_NondependentUserDefinedDataType, CWarning, (Prims.of_int (262)));\n  (Warning_NonListLiteralSMTPattern, CWarning, (Prims.of_int (263)));\n  (Warning_NormalizationFailure, CWarning, (Prims.of_int (264)));\n  (Warning_NotDependentArrow, CWarning, (Prims.of_int (265)));\n  (Warning_NotEmbedded, CWarning, (Prims.of_int (266)));\n  (Warning_PatternMissingBoundVar, CWarning, (Prims.of_int (267)));\n  (Warning_RecursiveDependency, CWarning, (Prims.of_int (268)));\n  (Warning_RedundantExplicitCurrying, CWarning, (Prims.of_int (269)));\n  (Warning_SMTPatTDeprecated, CWarning, (Prims.of_int (270)));\n  (Warning_SMTPatternIllFormed, CWarning, (Prims.of_int (271)));\n  (Warning_TopLevelEffect, CWarning, (Prims.of_int (272)));\n  (Warning_UnboundModuleReference, CWarning, (Prims.of_int (273)));\n  (Warning_UnexpectedFile, CWarning, (Prims.of_int (274)));\n  (Warning_UnexpectedFsTypApp, CWarning, (Prims.of_int (275)));\n  (Warning_UnexpectedZ3Output, CError, (Prims.of_int (276)));\n  (Warning_UnprotectedTerm, CWarning, (Prims.of_int (277)));\n  (Warning_UnrecognizedAttribute, CWarning, (Prims.of_int (278)));\n  (Warning_UpperBoundCandidateAlreadyVisited, CWarning, (Prims.of_int (279)));\n  (Warning_UseDefaultEffect, CWarning, (Prims.of_int (280)));\n  (Warning_WrongErrorLocation, CWarning, (Prims.of_int (281)));\n  (Warning_Z3InvocationWarning, CWarning, (Prims.of_int (282)));\n  (Warning_MissingInterfaceOrImplementation, CWarning, (Prims.of_int (283)));\n  (Warning_ConstructorBuildsUnexpectedType, CWarning, (Prims.of_int (284)));\n  (Warning_ModuleOrFileNotFoundWarning, CWarning, (Prims.of_int 
(285)));\n  (Error_NoLetMutable, CAlwaysError, (Prims.of_int (286)));\n  (Error_BadImplicit, CAlwaysError, (Prims.of_int (287)));\n  (Warning_DeprecatedDefinition, CWarning, (Prims.of_int (288)));\n  (Fatal_SMTEncodingArityMismatch, CFatal, (Prims.of_int (289)));\n  (Warning_Defensive, CWarning, (Prims.of_int (290)));\n  (Warning_CantInspect, CWarning, (Prims.of_int (291)));\n  (Warning_NilGivenExplicitArgs, CWarning, (Prims.of_int (292)));\n  (Warning_ConsAppliedExplicitArgs, CWarning, (Prims.of_int (293)));\n  (Warning_UnembedBinderKnot, CWarning, (Prims.of_int (294)));\n  (Fatal_TacticProofRelevantGoal, CFatal, (Prims.of_int (295)));\n  (Warning_TacAdmit, CWarning, (Prims.of_int (296)));\n  (Fatal_IncoherentPatterns, CFatal, (Prims.of_int (297)));\n  (Error_NoSMTButNeeded, CAlwaysError, (Prims.of_int (298)));\n  (Fatal_UnexpectedAntiquotation, CFatal, (Prims.of_int (299)));\n  (Fatal_SplicedUndef, CFatal, (Prims.of_int (300)));\n  (Fatal_SpliceUnembedFail, CFatal, (Prims.of_int (301)));\n  (Warning_ExtractionUnexpectedEffect, CWarning, (Prims.of_int (302)));\n  (Error_DidNotFail, CError, (Prims.of_int (303)));\n  (Warning_UnappliedFail, CWarning, (Prims.of_int (304)));\n  (Warning_QuantifierWithoutPattern, CSilent, (Prims.of_int (305)));\n  (Error_EmptyFailErrs, CAlwaysError, (Prims.of_int (306)));\n  (Warning_logicqualifier, CWarning, (Prims.of_int (307)));\n  (Fatal_CyclicDependence, CFatal, (Prims.of_int (308)));\n  (Error_InductiveAnnotNotAType, CError, (Prims.of_int (309)));\n  (Fatal_FriendInterface, CFatal, (Prims.of_int (310)));\n  (Error_CannotRedefineConst, CError, (Prims.of_int (311)));\n  (Error_BadClassDecl, CError, (Prims.of_int (312)));\n  (Error_BadInductiveParam, CFatal, (Prims.of_int (313)));\n  (Error_FieldShadow, CFatal, (Prims.of_int (314)));\n  (Error_UnexpectedDM4FType, CFatal, (Prims.of_int (315)));\n  (Fatal_EffectAbbreviationResultTypeMismatch, CFatal, (Prims.of_int (316)));\n  (Error_AlreadyCachedAssertionFailure, CFatal, (Prims.of_int 
(317)));\n  (Error_MustEraseMissing, CWarning, (Prims.of_int (318)));\n  (Warning_EffectfulArgumentToErasedFunction, CWarning, (Prims.of_int (319)));\n  (Fatal_EmptySurfaceLet, CFatal, (Prims.of_int (320)));\n  (Warning_UnexpectedCheckedFile, CWarning, (Prims.of_int (321)));\n  (Fatal_ExtractionUnsupported, CFatal, (Prims.of_int (322)));\n  (Warning_SMTErrorReason, CWarning, (Prims.of_int (323)));\n  (Warning_CoercionNotFound, CWarning, (Prims.of_int (324)));\n  (Error_QuakeFailed, CError, (Prims.of_int (325)));\n  (Error_IllSMTPat, CError, (Prims.of_int (326)));\n  (Error_IllScopedTerm, CError, (Prims.of_int (327)));\n  (Warning_UnusedLetRec, CWarning, (Prims.of_int (328)));\n  (Fatal_Effects_Ordering_Coherence, CError, (Prims.of_int (329)));\n  (Warning_BleedingEdge_Feature, CWarning, (Prims.of_int (330)));\n  (Warning_IgnoredBinding, CWarning, (Prims.of_int (331)));\n  (Warning_CouldNotReadHints, CWarning, (Prims.of_int (333)));\n  (Fatal_BadUvar, CFatal, (Prims.of_int (334)));\n  (Warning_WarnOnUse, CSilent, (Prims.of_int (335)));\n  (Warning_DeprecatedAttributeSyntax, CSilent, (Prims.of_int (336)));\n  (Warning_DeprecatedGeneric, CWarning, (Prims.of_int (337)));\n  (Error_BadSplice, CError, (Prims.of_int (338)));\n  (Error_UnexpectedUnresolvedUvar, CAlwaysError, (Prims.of_int (339)));\n  (Warning_UnfoldPlugin, CWarning, (Prims.of_int (340)));\n  (Error_LayeredMissingAnnot, CAlwaysError, (Prims.of_int (341)));\n  (Error_CallToErased, CError, (Prims.of_int (342)));\n  (Error_ErasedCtor, CError, (Prims.of_int (343)));\n  (Error_RemoveUnusedTypeParameter, CWarning, (Prims.of_int (344)));\n  (Warning_NoMagicInFSharp, CWarning, (Prims.of_int (345)));\n  (Error_BadLetOpenRecord, CAlwaysError, (Prims.of_int (346)));\n  (Error_UnexpectedTypeclassInstance, CAlwaysError, (Prims.of_int (347)));\n  (Warning_AmbiguousResolveImplicitsHook, CWarning, (Prims.of_int (348)));\n  (Warning_SplitAndRetryQueries, CWarning, (Prims.of_int (349)));\n  
(Warning_DeprecatedLightDoNotation, CWarning, (Prims.of_int (350)));\n  (Warning_FailedToCheckInitialTacticGoal, CSilent, (Prims.of_int (351)));\n  (Warning_Adhoc_IndexedEffect_Combinator, CWarning, (Prims.of_int (352)));\n  (Error_PluginDynlink, CError, (Prims.of_int (353)));\n  (Error_InternalQualifier, CAlwaysError, (Prims.of_int (354)));\n  (Warning_NameEscape, CWarning, (Prims.of_int (355)))]"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Getopt.ml",
    "content": "let noshort = 0\ntype 'a opt_variant =\n  | ZeroArgs of (unit -> 'a)\n  | OneArg of (string -> 'a) * string\ntype 'a opt' = FStar_Char.char * string * 'a opt_variant * string\ntype opt = unit opt'\ntype parse_cmdline_res =\n  | Empty\n  | Help\n  | Error of string\n  | Success\n\nlet bind l f =\n    match l with\n    | Help\n    | Error _ -> l\n    | Success -> f ()\n    (* | Empty  *)\n    (* ^ Empty does not occur internally. *)\n\n(* Returns None if this wasn't an option arg (did not start with \"-\")\n * Otherwise, returns Some (o, s) where [s] is the trimmed option, and [o]\n * is the opt we found in specs (possibly None if not present, which should\n * trigger an error) *)\nlet find_matching_opt specs s : (opt option * string) option =\n  if String.length s < 2 then\n    None\n  else if String.sub s 0 2 = \"--\" then\n    (* long opts *)\n    let strim = String.sub s 2 ((String.length s) - 2) in\n    let o = FStar_List.tryFind (fun (_, option, _, _) -> option = strim) specs in\n    Some (o, strim)\n  else if String.sub s 0 1 = \"-\" then\n    (* short opts *)\n    let strim = String.sub s 1 ((String.length s) - 1) in\n    let o = FStar_List.tryFind (fun (shortoption, _, _, _) -> FStar_String.make Z.one shortoption = strim) specs in\n    Some (o, strim)\n  else\n    None\n\n(* remark: doesn't work with files starting with -- *)\nlet rec parse (opts:opt list) def ar ix max i : parse_cmdline_res =\n  if ix > max then Success\n  else\n    let arg = ar.(ix) in\n    let go_on () = bind (def arg) (fun _ -> parse opts def ar (ix + 1) max (i + 1)) in\n    match find_matching_opt opts arg with\n    | None -> go_on ()\n    | Some (None, _) -> Error (\"unrecognized option '\" ^ arg ^ \"'\\n\")\n    | Some (Some (_, _, p, _), argtrim) ->\n      begin match p with\n      | ZeroArgs f -> f (); parse opts def ar (ix + 1) max (i + 1)\n      | OneArg (f, _) ->\n         if ix + 1 > max\n         then Error (\"last option '\" ^ argtrim ^ \"' takes an argument 
but has none\\n\")\n         else\n           let r =\n               try (f (ar.(ix + 1)); Success)\n               with _ -> Error (\"wrong argument given to option `\" ^ argtrim ^ \"`\\n\")\n           in bind r (fun () -> parse opts def ar (ix + 2) max (i + 1))\n      end\n\nlet parse_array specs others args offset =\n  parse specs others args offset (Array.length args - 1) 0\n\nlet parse_cmdline specs others =\n  if Array.length Sys.argv = 1 then Empty\n  else parse_array specs others Sys.argv 1\n\nlet parse_string specs others (str:string) =\n    let split_spaces (str:string) =\n      let seps = [int_of_char ' '; int_of_char '\\t'] in\n      FStar_List.filter (fun s -> s != \"\") (FStar_String.split seps str)\n    in\n    (* to match the style of the F# code in FStar.GetOpt.fs *)\n    let index_of str c =\n      try\n        String.index str c\n      with Not_found -> -1\n    in\n    let substring_from s j =\n        let len = String.length s - j in\n        String.sub s j len\n    in\n    let rec split_quoted_fragments (str:string) =\n        let i = index_of str '\\'' in\n        if i < 0 then Some (split_spaces str)\n        else let prefix = String.sub str 0 i in\n             let suffix = substring_from str (i + 1) in\n             let j = index_of suffix '\\'' in\n             if j < 0 then None\n             else let quoted_frag = String.sub suffix 0 j in\n                  let rest = split_quoted_fragments (substring_from suffix (j + 1)) in\n                  match rest with\n                  | None -> None\n                  | Some rest -> Some (split_spaces prefix @ quoted_frag::rest)\n\n    in\n    match split_quoted_fragments str with\n    | None -> Error(\"Failed to parse options; unmatched quote \\\"'\\\"\")\n    | Some args ->\n      parse_array specs others (Array.of_list args) 0\n\nlet parse_list specs others lst =\n  parse_array specs others (Array.of_list lst) 0\n\nlet cmdline () =\n   Array.to_list (Sys.argv)\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Ident.ml",
    "content": "open Prims\ntype ident = {\n  idText: Prims.string ;\n  idRange: FStar_Compiler_Range.range }[@@deriving yojson,show,yojson,show]\nlet (__proj__Mkident__item__idText : ident -> Prims.string) =\n  fun projectee -> match projectee with | { idText; idRange;_} -> idText\nlet (__proj__Mkident__item__idRange : ident -> FStar_Compiler_Range.range) =\n  fun projectee -> match projectee with | { idText; idRange;_} -> idRange\ntype path = Prims.string Prims.list[@@deriving yojson,show]\ntype ipath = ident Prims.list[@@deriving yojson,show]\ntype lident =\n  {\n  ns: ipath ;\n  ident: ident ;\n  nsstr: Prims.string ;\n  str: Prims.string }[@@deriving yojson,show,yojson,show]\nlet (__proj__Mklident__item__ns : lident -> ipath) =\n  fun projectee ->\n    match projectee with | { ns; ident = ident1; nsstr; str;_} -> ns\nlet (__proj__Mklident__item__ident : lident -> ident) =\n  fun projectee ->\n    match projectee with | { ns; ident = ident1; nsstr; str;_} -> ident1\nlet (__proj__Mklident__item__nsstr : lident -> Prims.string) =\n  fun projectee ->\n    match projectee with | { ns; ident = ident1; nsstr; str;_} -> nsstr\nlet (__proj__Mklident__item__str : lident -> Prims.string) =\n  fun projectee ->\n    match projectee with | { ns; ident = ident1; nsstr; str;_} -> str\nlet (mk_ident : (Prims.string * FStar_Compiler_Range.range) -> ident) =\n  fun uu___ ->\n    match uu___ with | (text, range) -> { idText = text; idRange = range }\nlet (set_id_range : FStar_Compiler_Range.range -> ident -> ident) =\n  fun r -> fun i -> { idText = (i.idText); idRange = r }\nlet (reserved_prefix : Prims.string) = \"uu___\"\nlet (uu___32 :\n  (((unit -> Prims.int) * (unit -> unit)) * Prims.int\n    FStar_Compiler_Effect.ref))\n  =\n  let x = ref Prims.int_zero in\n  let next_id uu___ =\n    let v = FStar_Compiler_Effect.op_Bang x in\n    FStar_Compiler_Effect.op_Colon_Equals x (v + Prims.int_one); v in\n  let reset uu___ = FStar_Compiler_Effect.op_Colon_Equals x Prims.int_zero 
in\n  ((next_id, reset), x)\nlet (_gen : ((unit -> Prims.int) * (unit -> unit))) =\n  match uu___32 with | (_gen1, _secret_ref) -> _gen1\nlet (_secret_ref : Prims.int FStar_Compiler_Effect.ref) =\n  match uu___32 with | (_gen1, _secret_ref1) -> _secret_ref1\nlet (next_id : unit -> Prims.int) =\n  fun uu___ -> FStar_Pervasives_Native.fst _gen ()\nlet (reset_gensym : unit -> unit) =\n  fun uu___ -> FStar_Pervasives_Native.snd _gen ()\nlet with_frozen_gensym : 'a . (unit -> 'a) -> 'a =\n  fun f ->\n    let v = FStar_Compiler_Effect.op_Bang _secret_ref in\n    let r =\n      try (fun uu___ -> match () with | () -> f ()) ()\n      with\n      | uu___ ->\n          (FStar_Compiler_Effect.op_Colon_Equals _secret_ref v;\n           FStar_Compiler_Effect.raise uu___) in\n    FStar_Compiler_Effect.op_Colon_Equals _secret_ref v; r\nlet (gen' : Prims.string -> FStar_Compiler_Range.range -> ident) =\n  fun s ->\n    fun r ->\n      let i = next_id () in\n      mk_ident ((Prims.op_Hat s (Prims.string_of_int i)), r)\nlet (gen : FStar_Compiler_Range.range -> ident) =\n  fun r -> gen' reserved_prefix r\nlet (ident_of_lid : lident -> ident) = fun l -> l.ident\nlet (range_of_id : ident -> FStar_Compiler_Range.range) =\n  fun id -> id.idRange\nlet (id_of_text : Prims.string -> ident) =\n  fun str -> mk_ident (str, FStar_Compiler_Range.dummyRange)\nlet (string_of_id : ident -> Prims.string) = fun id -> id.idText\nlet (text_of_path : path -> Prims.string) =\n  fun path1 -> FStar_Compiler_Util.concat_l \".\" path1\nlet (path_of_text : Prims.string -> path) =\n  fun text -> FStar_String.split [46] text\nlet (path_of_ns : ipath -> path) =\n  fun ns -> FStar_Compiler_List.map string_of_id ns\nlet (path_of_lid : lident -> path) =\n  fun lid ->\n    FStar_Compiler_List.map string_of_id\n      (FStar_Compiler_List.op_At lid.ns [lid.ident])\nlet (ns_of_lid : lident -> ipath) = fun lid -> lid.ns\nlet (ids_of_lid : lident -> ipath) =\n  fun lid -> FStar_Compiler_List.op_At lid.ns [lid.ident]\nlet 
(lid_of_ns_and_id : ipath -> ident -> lident) =\n  fun ns ->\n    fun id ->\n      let nsstr =\n        let uu___ = FStar_Compiler_List.map string_of_id ns in\n        FStar_Compiler_Effect.op_Bar_Greater uu___ text_of_path in\n      {\n        ns;\n        ident = id;\n        nsstr;\n        str =\n          (if nsstr = \"\"\n           then id.idText\n           else Prims.op_Hat nsstr (Prims.op_Hat \".\" id.idText))\n      }\nlet (lid_of_ids : ipath -> lident) =\n  fun ids ->\n    let uu___ = FStar_Compiler_Util.prefix ids in\n    match uu___ with | (ns, id) -> lid_of_ns_and_id ns id\nlet (lid_of_str : Prims.string -> lident) =\n  fun str ->\n    let uu___ =\n      FStar_Compiler_List.map id_of_text (FStar_Compiler_Util.split str \".\") in\n    lid_of_ids uu___\nlet (lid_of_path : path -> FStar_Compiler_Range.range -> lident) =\n  fun path1 ->\n    fun pos ->\n      let ids = FStar_Compiler_List.map (fun s -> mk_ident (s, pos)) path1 in\n      lid_of_ids ids\nlet (text_of_lid : lident -> Prims.string) = fun lid -> lid.str\nlet (lid_equals : lident -> lident -> Prims.bool) =\n  fun l1 -> fun l2 -> l1.str = l2.str\nlet (ident_equals : ident -> ident -> Prims.bool) =\n  fun id1 -> fun id2 -> id1.idText = id2.idText\ntype lid = lident[@@deriving yojson,show]\nlet (range_of_lid : lident -> FStar_Compiler_Range.range) =\n  fun lid1 -> range_of_id lid1.ident\nlet (set_lid_range : lident -> FStar_Compiler_Range.range -> lident) =\n  fun l ->\n    fun r ->\n      {\n        ns = (l.ns);\n        ident =\n          (let uu___ = l.ident in { idText = (uu___.idText); idRange = r });\n        nsstr = (l.nsstr);\n        str = (l.str)\n      }\nlet (lid_add_suffix : lident -> Prims.string -> lident) =\n  fun l ->\n    fun s ->\n      let path1 = path_of_lid l in\n      let uu___ = range_of_lid l in\n      lid_of_path (FStar_Compiler_List.op_At path1 [s]) uu___\nlet (ml_path_of_lid : lident -> Prims.string) =\n  fun lid1 ->\n    let uu___ =\n      let uu___1 = path_of_ns 
lid1.ns in\n      let uu___2 = let uu___3 = string_of_id lid1.ident in [uu___3] in\n      FStar_Compiler_List.op_At uu___1 uu___2 in\n    FStar_Compiler_Effect.op_Less_Bar (FStar_String.concat \"_\") uu___\nlet (string_of_lid : lident -> Prims.string) = fun lid1 -> lid1.str\nlet (qual_id : lident -> ident -> lident) =\n  fun lid1 ->\n    fun id ->\n      let uu___ =\n        lid_of_ids (FStar_Compiler_List.op_At lid1.ns [lid1.ident; id]) in\n      let uu___1 = range_of_id id in set_lid_range uu___ uu___1\nlet (nsstr : lident -> Prims.string) = fun l -> l.nsstr\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_ImmutableArray_Base.ml",
    "content": "type 'a t = 'a array\n\nlet of_list (l:'a list) = Array.of_list l\n\nlet length (a: 'a t) = Z.of_int (Array.length a)\n\nlet index (a: 'a t) (i:Z.t) = Array.get a (Z.to_int i)\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_List.ml",
    "content": "(* We give an implementation here using OCaml's BatList,\n   which provides tail-recursive versions of most functions *)\nlet isEmpty l = l = []\nlet hd = BatList.hd\nlet tail = BatList.tl\nlet tl = BatList.tl\n\nlet rec last = function\n  | x :: [] -> x\n  | _ :: tl -> last tl\nlet length l = Z.of_int (BatList.length l)\nlet rev = BatList.rev\nlet append = BatList.append\nlet op_At = append\nlet flatten = BatList.flatten\nlet map = BatList.map\nlet mapi f l = BatList.mapi (fun i x -> f (Z.of_int i) x) l\nlet fold_left = BatList.fold_left\nlet fold_right = BatList.fold_right\nlet fold_left2 = BatList.fold_left2\nlet existsb f l = BatList.exists f l\nlet find f l = try Some (BatList.find f l) with | Not_found -> None\nlet filter = BatList.filter\nlet for_all = BatList.for_all\nlet collect f l = BatList.flatten (BatList.map f l)\nlet tryFind = find\nlet choose = BatList.filter_map\nlet partition = BatList.partition\nlet sortWith f l = BatList.sort (fun x y -> Z.to_int (f x y)) l\n\nlet isEmpty l = l = []\nlet singleton x = [x]\nlet mem = BatList.mem\nlet memT = mem\nlet hd = BatList.hd\nlet tl = BatList.tl\nlet tail = BatList.tl\nlet iter = BatList.iter\nlet forall2 = BatList.for_all2\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Parser_AST.ml",
    "content": "open Prims\ntype level =\n  | Un \n  | Expr \n  | Type_level \n  | Kind \n  | Formula \nlet (uu___is_Un : level -> Prims.bool) =\n  fun projectee -> match projectee with | Un -> true | uu___ -> false\nlet (uu___is_Expr : level -> Prims.bool) =\n  fun projectee -> match projectee with | Expr -> true | uu___ -> false\nlet (uu___is_Type_level : level -> Prims.bool) =\n  fun projectee -> match projectee with | Type_level -> true | uu___ -> false\nlet (uu___is_Kind : level -> Prims.bool) =\n  fun projectee -> match projectee with | Kind -> true | uu___ -> false\nlet (uu___is_Formula : level -> Prims.bool) =\n  fun projectee -> match projectee with | Formula -> true | uu___ -> false\ntype let_qualifier =\n  | NoLetQualifier \n  | Rec \nlet (uu___is_NoLetQualifier : let_qualifier -> Prims.bool) =\n  fun projectee ->\n    match projectee with | NoLetQualifier -> true | uu___ -> false\nlet (uu___is_Rec : let_qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Rec -> true | uu___ -> false\ntype quote_kind =\n  | Static \n  | Dynamic \nlet (uu___is_Static : quote_kind -> Prims.bool) =\n  fun projectee -> match projectee with | Static -> true | uu___ -> false\nlet (uu___is_Dynamic : quote_kind -> Prims.bool) =\n  fun projectee -> match projectee with | Dynamic -> true | uu___ -> false\ntype term' =\n  | Wild \n  | Const of FStar_Const.sconst \n  | Op of (FStar_Ident.ident * term Prims.list) \n  | Tvar of FStar_Ident.ident \n  | Uvar of FStar_Ident.ident \n  | Var of FStar_Ident.lid \n  | Name of FStar_Ident.lid \n  | Projector of (FStar_Ident.lid * FStar_Ident.ident) \n  | Construct of (FStar_Ident.lid * (term * imp) Prims.list) \n  | Abs of (pattern Prims.list * term) \n  | App of (term * term * imp) \n  | Let of (let_qualifier * (term Prims.list FStar_Pervasives_Native.option *\n  (pattern * term)) Prims.list * term) \n  | LetOperator of ((FStar_Ident.ident * pattern * term) Prims.list * term) \n  | LetOpen of (FStar_Ident.lid * term) \n  | 
LetOpenRecord of (term * term * term) \n  | Seq of (term * term) \n  | Bind of (FStar_Ident.ident * term * term) \n  | If of (term * FStar_Ident.ident FStar_Pervasives_Native.option *\n  (FStar_Ident.ident FStar_Pervasives_Native.option * term * Prims.bool)\n  FStar_Pervasives_Native.option * term * term) \n  | Match of (term * FStar_Ident.ident FStar_Pervasives_Native.option *\n  (FStar_Ident.ident FStar_Pervasives_Native.option * term * Prims.bool)\n  FStar_Pervasives_Native.option * (pattern * term\n  FStar_Pervasives_Native.option * term) Prims.list) \n  | TryWith of (term * (pattern * term FStar_Pervasives_Native.option * term)\n  Prims.list) \n  | Ascribed of (term * term * term FStar_Pervasives_Native.option *\n  Prims.bool) \n  | Record of (term FStar_Pervasives_Native.option * (FStar_Ident.lid * term)\n  Prims.list) \n  | Project of (term * FStar_Ident.lid) \n  | Product of (binder Prims.list * term) \n  | Sum of ((binder, term) FStar_Pervasives.either Prims.list * term) \n  | QForall of (binder Prims.list * (FStar_Ident.ident Prims.list * term\n  Prims.list Prims.list) * term) \n  | QExists of (binder Prims.list * (FStar_Ident.ident Prims.list * term\n  Prims.list Prims.list) * term) \n  | Refine of (binder * term) \n  | NamedTyp of (FStar_Ident.ident * term) \n  | Paren of term \n  | Requires of (term * Prims.string FStar_Pervasives_Native.option) \n  | Ensures of (term * Prims.string FStar_Pervasives_Native.option) \n  | LexList of term Prims.list \n  | WFOrder of (term * term) \n  | Decreases of (term * Prims.string FStar_Pervasives_Native.option) \n  | Labeled of (term * Prims.string * Prims.bool) \n  | Discrim of FStar_Ident.lid \n  | Attributes of term Prims.list \n  | Antiquote of term \n  | Quote of (term * quote_kind) \n  | VQuote of term \n  | CalcProof of (term * term * calc_step Prims.list) \n  | IntroForall of (binder Prims.list * term * term) \n  | IntroExists of (binder Prims.list * term * term Prims.list * term) \n  | IntroImplies of (term 
* term * binder * term) \n  | IntroOr of (Prims.bool * term * term * term) \n  | IntroAnd of (term * term * term * term) \n  | ElimForall of (binder Prims.list * term * term Prims.list) \n  | ElimExists of (binder Prims.list * term * term * binder * term) \n  | ElimImplies of (term * term * term) \n  | ElimOr of (term * term * term * binder * term * binder * term) \n  | ElimAnd of (term * term * term * binder * binder * term) \nand term = {\n  tm: term' ;\n  range: FStar_Compiler_Range.range ;\n  level: level }\nand calc_step =\n  | CalcStep of (term * term * term) \nand binder' =\n  | Variable of FStar_Ident.ident \n  | TVariable of FStar_Ident.ident \n  | Annotated of (FStar_Ident.ident * term) \n  | TAnnotated of (FStar_Ident.ident * term) \n  | NoName of term \nand binder =\n  {\n  b: binder' ;\n  brange: FStar_Compiler_Range.range ;\n  blevel: level ;\n  aqual: arg_qualifier FStar_Pervasives_Native.option ;\n  battributes: term Prims.list }\nand pattern' =\n  | PatWild of (arg_qualifier FStar_Pervasives_Native.option * term\n  Prims.list) \n  | PatConst of FStar_Const.sconst \n  | PatApp of (pattern * pattern Prims.list) \n  | PatVar of (FStar_Ident.ident * arg_qualifier\n  FStar_Pervasives_Native.option * term Prims.list) \n  | PatName of FStar_Ident.lid \n  | PatTvar of (FStar_Ident.ident * arg_qualifier\n  FStar_Pervasives_Native.option * term Prims.list) \n  | PatList of pattern Prims.list \n  | PatTuple of (pattern Prims.list * Prims.bool) \n  | PatRecord of (FStar_Ident.lid * pattern) Prims.list \n  | PatAscribed of (pattern * (term * term FStar_Pervasives_Native.option)) \n  | PatOr of pattern Prims.list \n  | PatOp of FStar_Ident.ident \n  | PatVQuote of term \nand pattern = {\n  pat: pattern' ;\n  prange: FStar_Compiler_Range.range }\nand arg_qualifier =\n  | Implicit \n  | Equality \n  | Meta of term \n  | TypeClassArg \nand imp =\n  | FsTypApp \n  | Hash \n  | UnivApp \n  | HashBrace of term \n  | Infix \n  | Nothing \nlet (uu___is_Wild : term' -> 
Prims.bool) =\n  fun projectee -> match projectee with | Wild -> true | uu___ -> false\nlet (uu___is_Const : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Const _0 -> true | uu___ -> false\nlet (__proj__Const__item___0 : term' -> FStar_Const.sconst) =\n  fun projectee -> match projectee with | Const _0 -> _0\nlet (uu___is_Op : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Op _0 -> true | uu___ -> false\nlet (__proj__Op__item___0 : term' -> (FStar_Ident.ident * term Prims.list)) =\n  fun projectee -> match projectee with | Op _0 -> _0\nlet (uu___is_Tvar : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Tvar _0 -> true | uu___ -> false\nlet (__proj__Tvar__item___0 : term' -> FStar_Ident.ident) =\n  fun projectee -> match projectee with | Tvar _0 -> _0\nlet (uu___is_Uvar : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Uvar _0 -> true | uu___ -> false\nlet (__proj__Uvar__item___0 : term' -> FStar_Ident.ident) =\n  fun projectee -> match projectee with | Uvar _0 -> _0\nlet (uu___is_Var : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Var _0 -> true | uu___ -> false\nlet (__proj__Var__item___0 : term' -> FStar_Ident.lid) =\n  fun projectee -> match projectee with | Var _0 -> _0\nlet (uu___is_Name : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Name _0 -> true | uu___ -> false\nlet (__proj__Name__item___0 : term' -> FStar_Ident.lid) =\n  fun projectee -> match projectee with | Name _0 -> _0\nlet (uu___is_Projector : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Projector _0 -> true | uu___ -> false\nlet (__proj__Projector__item___0 :\n  term' -> (FStar_Ident.lid * FStar_Ident.ident)) =\n  fun projectee -> match projectee with | Projector _0 -> _0\nlet (uu___is_Construct : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Construct _0 -> true | uu___ -> false\nlet (__proj__Construct__item___0 :\n  term' -> 
(FStar_Ident.lid * (term * imp) Prims.list)) =\n  fun projectee -> match projectee with | Construct _0 -> _0\nlet (uu___is_Abs : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Abs _0 -> true | uu___ -> false\nlet (__proj__Abs__item___0 : term' -> (pattern Prims.list * term)) =\n  fun projectee -> match projectee with | Abs _0 -> _0\nlet (uu___is_App : term' -> Prims.bool) =\n  fun projectee -> match projectee with | App _0 -> true | uu___ -> false\nlet (__proj__App__item___0 : term' -> (term * term * imp)) =\n  fun projectee -> match projectee with | App _0 -> _0\nlet (uu___is_Let : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Let _0 -> true | uu___ -> false\nlet (__proj__Let__item___0 :\n  term' ->\n    (let_qualifier * (term Prims.list FStar_Pervasives_Native.option *\n      (pattern * term)) Prims.list * term))\n  = fun projectee -> match projectee with | Let _0 -> _0\nlet (uu___is_LetOperator : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | LetOperator _0 -> true | uu___ -> false\nlet (__proj__LetOperator__item___0 :\n  term' -> ((FStar_Ident.ident * pattern * term) Prims.list * term)) =\n  fun projectee -> match projectee with | LetOperator _0 -> _0\nlet (uu___is_LetOpen : term' -> Prims.bool) =\n  fun projectee -> match projectee with | LetOpen _0 -> true | uu___ -> false\nlet (__proj__LetOpen__item___0 : term' -> (FStar_Ident.lid * term)) =\n  fun projectee -> match projectee with | LetOpen _0 -> _0\nlet (uu___is_LetOpenRecord : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | LetOpenRecord _0 -> true | uu___ -> false\nlet (__proj__LetOpenRecord__item___0 : term' -> (term * term * term)) =\n  fun projectee -> match projectee with | LetOpenRecord _0 -> _0\nlet (uu___is_Seq : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Seq _0 -> true | uu___ -> false\nlet (__proj__Seq__item___0 : term' -> (term * term)) =\n  fun projectee -> match projectee with | Seq _0 
-> _0\nlet (uu___is_Bind : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Bind _0 -> true | uu___ -> false\nlet (__proj__Bind__item___0 : term' -> (FStar_Ident.ident * term * term)) =\n  fun projectee -> match projectee with | Bind _0 -> _0\nlet (uu___is_If : term' -> Prims.bool) =\n  fun projectee -> match projectee with | If _0 -> true | uu___ -> false\nlet (__proj__If__item___0 :\n  term' ->\n    (term * FStar_Ident.ident FStar_Pervasives_Native.option *\n      (FStar_Ident.ident FStar_Pervasives_Native.option * term * Prims.bool)\n      FStar_Pervasives_Native.option * term * term))\n  = fun projectee -> match projectee with | If _0 -> _0\nlet (uu___is_Match : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Match _0 -> true | uu___ -> false\nlet (__proj__Match__item___0 :\n  term' ->\n    (term * FStar_Ident.ident FStar_Pervasives_Native.option *\n      (FStar_Ident.ident FStar_Pervasives_Native.option * term * Prims.bool)\n      FStar_Pervasives_Native.option * (pattern * term\n      FStar_Pervasives_Native.option * term) Prims.list))\n  = fun projectee -> match projectee with | Match _0 -> _0\nlet (uu___is_TryWith : term' -> Prims.bool) =\n  fun projectee -> match projectee with | TryWith _0 -> true | uu___ -> false\nlet (__proj__TryWith__item___0 :\n  term' ->\n    (term * (pattern * term FStar_Pervasives_Native.option * term)\n      Prims.list))\n  = fun projectee -> match projectee with | TryWith _0 -> _0\nlet (uu___is_Ascribed : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Ascribed _0 -> true | uu___ -> false\nlet (__proj__Ascribed__item___0 :\n  term' -> (term * term * term FStar_Pervasives_Native.option * Prims.bool))\n  = fun projectee -> match projectee with | Ascribed _0 -> _0\nlet (uu___is_Record : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Record _0 -> true | uu___ -> false\nlet (__proj__Record__item___0 :\n  term' ->\n    (term FStar_Pervasives_Native.option * 
(FStar_Ident.lid * term)\n      Prims.list))\n  = fun projectee -> match projectee with | Record _0 -> _0\nlet (uu___is_Project : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Project _0 -> true | uu___ -> false\nlet (__proj__Project__item___0 : term' -> (term * FStar_Ident.lid)) =\n  fun projectee -> match projectee with | Project _0 -> _0\nlet (uu___is_Product : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Product _0 -> true | uu___ -> false\nlet (__proj__Product__item___0 : term' -> (binder Prims.list * term)) =\n  fun projectee -> match projectee with | Product _0 -> _0\nlet (uu___is_Sum : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Sum _0 -> true | uu___ -> false\nlet (__proj__Sum__item___0 :\n  term' -> ((binder, term) FStar_Pervasives.either Prims.list * term)) =\n  fun projectee -> match projectee with | Sum _0 -> _0\nlet (uu___is_QForall : term' -> Prims.bool) =\n  fun projectee -> match projectee with | QForall _0 -> true | uu___ -> false\nlet (__proj__QForall__item___0 :\n  term' ->\n    (binder Prims.list * (FStar_Ident.ident Prims.list * term Prims.list\n      Prims.list) * term))\n  = fun projectee -> match projectee with | QForall _0 -> _0\nlet (uu___is_QExists : term' -> Prims.bool) =\n  fun projectee -> match projectee with | QExists _0 -> true | uu___ -> false\nlet (__proj__QExists__item___0 :\n  term' ->\n    (binder Prims.list * (FStar_Ident.ident Prims.list * term Prims.list\n      Prims.list) * term))\n  = fun projectee -> match projectee with | QExists _0 -> _0\nlet (uu___is_Refine : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Refine _0 -> true | uu___ -> false\nlet (__proj__Refine__item___0 : term' -> (binder * term)) =\n  fun projectee -> match projectee with | Refine _0 -> _0\nlet (uu___is_NamedTyp : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | NamedTyp _0 -> true | uu___ -> false\nlet (__proj__NamedTyp__item___0 : term' -> 
(FStar_Ident.ident * term)) =\n  fun projectee -> match projectee with | NamedTyp _0 -> _0\nlet (uu___is_Paren : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Paren _0 -> true | uu___ -> false\nlet (__proj__Paren__item___0 : term' -> term) =\n  fun projectee -> match projectee with | Paren _0 -> _0\nlet (uu___is_Requires : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Requires _0 -> true | uu___ -> false\nlet (__proj__Requires__item___0 :\n  term' -> (term * Prims.string FStar_Pervasives_Native.option)) =\n  fun projectee -> match projectee with | Requires _0 -> _0\nlet (uu___is_Ensures : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Ensures _0 -> true | uu___ -> false\nlet (__proj__Ensures__item___0 :\n  term' -> (term * Prims.string FStar_Pervasives_Native.option)) =\n  fun projectee -> match projectee with | Ensures _0 -> _0\nlet (uu___is_LexList : term' -> Prims.bool) =\n  fun projectee -> match projectee with | LexList _0 -> true | uu___ -> false\nlet (__proj__LexList__item___0 : term' -> term Prims.list) =\n  fun projectee -> match projectee with | LexList _0 -> _0\nlet (uu___is_WFOrder : term' -> Prims.bool) =\n  fun projectee -> match projectee with | WFOrder _0 -> true | uu___ -> false\nlet (__proj__WFOrder__item___0 : term' -> (term * term)) =\n  fun projectee -> match projectee with | WFOrder _0 -> _0\nlet (uu___is_Decreases : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Decreases _0 -> true | uu___ -> false\nlet (__proj__Decreases__item___0 :\n  term' -> (term * Prims.string FStar_Pervasives_Native.option)) =\n  fun projectee -> match projectee with | Decreases _0 -> _0\nlet (uu___is_Labeled : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Labeled _0 -> true | uu___ -> false\nlet (__proj__Labeled__item___0 : term' -> (term * Prims.string * Prims.bool))\n  = fun projectee -> match projectee with | Labeled _0 -> _0\nlet (uu___is_Discrim : term' -> 
Prims.bool) =\n  fun projectee -> match projectee with | Discrim _0 -> true | uu___ -> false\nlet (__proj__Discrim__item___0 : term' -> FStar_Ident.lid) =\n  fun projectee -> match projectee with | Discrim _0 -> _0\nlet (uu___is_Attributes : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Attributes _0 -> true | uu___ -> false\nlet (__proj__Attributes__item___0 : term' -> term Prims.list) =\n  fun projectee -> match projectee with | Attributes _0 -> _0\nlet (uu___is_Antiquote : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Antiquote _0 -> true | uu___ -> false\nlet (__proj__Antiquote__item___0 : term' -> term) =\n  fun projectee -> match projectee with | Antiquote _0 -> _0\nlet (uu___is_Quote : term' -> Prims.bool) =\n  fun projectee -> match projectee with | Quote _0 -> true | uu___ -> false\nlet (__proj__Quote__item___0 : term' -> (term * quote_kind)) =\n  fun projectee -> match projectee with | Quote _0 -> _0\nlet (uu___is_VQuote : term' -> Prims.bool) =\n  fun projectee -> match projectee with | VQuote _0 -> true | uu___ -> false\nlet (__proj__VQuote__item___0 : term' -> term) =\n  fun projectee -> match projectee with | VQuote _0 -> _0\nlet (uu___is_CalcProof : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | CalcProof _0 -> true | uu___ -> false\nlet (__proj__CalcProof__item___0 :\n  term' -> (term * term * calc_step Prims.list)) =\n  fun projectee -> match projectee with | CalcProof _0 -> _0\nlet (uu___is_IntroForall : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | IntroForall _0 -> true | uu___ -> false\nlet (__proj__IntroForall__item___0 :\n  term' -> (binder Prims.list * term * term)) =\n  fun projectee -> match projectee with | IntroForall _0 -> _0\nlet (uu___is_IntroExists : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | IntroExists _0 -> true | uu___ -> false\nlet (__proj__IntroExists__item___0 :\n  term' -> (binder Prims.list * term * 
term Prims.list * term)) =\n  fun projectee -> match projectee with | IntroExists _0 -> _0\nlet (uu___is_IntroImplies : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | IntroImplies _0 -> true | uu___ -> false\nlet (__proj__IntroImplies__item___0 : term' -> (term * term * binder * term))\n  = fun projectee -> match projectee with | IntroImplies _0 -> _0\nlet (uu___is_IntroOr : term' -> Prims.bool) =\n  fun projectee -> match projectee with | IntroOr _0 -> true | uu___ -> false\nlet (__proj__IntroOr__item___0 : term' -> (Prims.bool * term * term * term))\n  = fun projectee -> match projectee with | IntroOr _0 -> _0\nlet (uu___is_IntroAnd : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | IntroAnd _0 -> true | uu___ -> false\nlet (__proj__IntroAnd__item___0 : term' -> (term * term * term * term)) =\n  fun projectee -> match projectee with | IntroAnd _0 -> _0\nlet (uu___is_ElimForall : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | ElimForall _0 -> true | uu___ -> false\nlet (__proj__ElimForall__item___0 :\n  term' -> (binder Prims.list * term * term Prims.list)) =\n  fun projectee -> match projectee with | ElimForall _0 -> _0\nlet (uu___is_ElimExists : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | ElimExists _0 -> true | uu___ -> false\nlet (__proj__ElimExists__item___0 :\n  term' -> (binder Prims.list * term * term * binder * term)) =\n  fun projectee -> match projectee with | ElimExists _0 -> _0\nlet (uu___is_ElimImplies : term' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | ElimImplies _0 -> true | uu___ -> false\nlet (__proj__ElimImplies__item___0 : term' -> (term * term * term)) =\n  fun projectee -> match projectee with | ElimImplies _0 -> _0\nlet (uu___is_ElimOr : term' -> Prims.bool) =\n  fun projectee -> match projectee with | ElimOr _0 -> true | uu___ -> false\nlet (__proj__ElimOr__item___0 :\n  term' -> (term * term * term * binder * term * binder 
* term)) =\n  fun projectee -> match projectee with | ElimOr _0 -> _0\nlet (uu___is_ElimAnd : term' -> Prims.bool) =\n  fun projectee -> match projectee with | ElimAnd _0 -> true | uu___ -> false\nlet (__proj__ElimAnd__item___0 :\n  term' -> (term * term * term * binder * binder * term)) =\n  fun projectee -> match projectee with | ElimAnd _0 -> _0\nlet (__proj__Mkterm__item__tm : term -> term') =\n  fun projectee ->\n    match projectee with | { tm; range; level = level1;_} -> tm\nlet (__proj__Mkterm__item__range : term -> FStar_Compiler_Range.range) =\n  fun projectee ->\n    match projectee with | { tm; range; level = level1;_} -> range\nlet (__proj__Mkterm__item__level : term -> level) =\n  fun projectee ->\n    match projectee with | { tm; range; level = level1;_} -> level1\nlet (uu___is_CalcStep : calc_step -> Prims.bool) = fun projectee -> true\nlet (__proj__CalcStep__item___0 : calc_step -> (term * term * term)) =\n  fun projectee -> match projectee with | CalcStep _0 -> _0\nlet (uu___is_Variable : binder' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Variable _0 -> true | uu___ -> false\nlet (__proj__Variable__item___0 : binder' -> FStar_Ident.ident) =\n  fun projectee -> match projectee with | Variable _0 -> _0\nlet (uu___is_TVariable : binder' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | TVariable _0 -> true | uu___ -> false\nlet (__proj__TVariable__item___0 : binder' -> FStar_Ident.ident) =\n  fun projectee -> match projectee with | TVariable _0 -> _0\nlet (uu___is_Annotated : binder' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Annotated _0 -> true | uu___ -> false\nlet (__proj__Annotated__item___0 : binder' -> (FStar_Ident.ident * term)) =\n  fun projectee -> match projectee with | Annotated _0 -> _0\nlet (uu___is_TAnnotated : binder' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | TAnnotated _0 -> true | uu___ -> false\nlet (__proj__TAnnotated__item___0 : binder' -> 
(FStar_Ident.ident * term)) =\n  fun projectee -> match projectee with | TAnnotated _0 -> _0\nlet (uu___is_NoName : binder' -> Prims.bool) =\n  fun projectee -> match projectee with | NoName _0 -> true | uu___ -> false\nlet (__proj__NoName__item___0 : binder' -> term) =\n  fun projectee -> match projectee with | NoName _0 -> _0\nlet (__proj__Mkbinder__item__b : binder -> binder') =\n  fun projectee ->\n    match projectee with | { b; brange; blevel; aqual; battributes;_} -> b\nlet (__proj__Mkbinder__item__brange : binder -> FStar_Compiler_Range.range) =\n  fun projectee ->\n    match projectee with\n    | { b; brange; blevel; aqual; battributes;_} -> brange\nlet (__proj__Mkbinder__item__blevel : binder -> level) =\n  fun projectee ->\n    match projectee with\n    | { b; brange; blevel; aqual; battributes;_} -> blevel\nlet (__proj__Mkbinder__item__aqual :\n  binder -> arg_qualifier FStar_Pervasives_Native.option) =\n  fun projectee ->\n    match projectee with\n    | { b; brange; blevel; aqual; battributes;_} -> aqual\nlet (__proj__Mkbinder__item__battributes : binder -> term Prims.list) =\n  fun projectee ->\n    match projectee with\n    | { b; brange; blevel; aqual; battributes;_} -> battributes\nlet (uu___is_PatWild : pattern' -> Prims.bool) =\n  fun projectee -> match projectee with | PatWild _0 -> true | uu___ -> false\nlet (__proj__PatWild__item___0 :\n  pattern' ->\n    (arg_qualifier FStar_Pervasives_Native.option * term Prims.list))\n  = fun projectee -> match projectee with | PatWild _0 -> _0\nlet (uu___is_PatConst : pattern' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | PatConst _0 -> true | uu___ -> false\nlet (__proj__PatConst__item___0 : pattern' -> FStar_Const.sconst) =\n  fun projectee -> match projectee with | PatConst _0 -> _0\nlet (uu___is_PatApp : pattern' -> Prims.bool) =\n  fun projectee -> match projectee with | PatApp _0 -> true | uu___ -> false\nlet (__proj__PatApp__item___0 : pattern' -> (pattern * pattern Prims.list)) 
=\n  fun projectee -> match projectee with | PatApp _0 -> _0\nlet (uu___is_PatVar : pattern' -> Prims.bool) =\n  fun projectee -> match projectee with | PatVar _0 -> true | uu___ -> false\nlet (__proj__PatVar__item___0 :\n  pattern' ->\n    (FStar_Ident.ident * arg_qualifier FStar_Pervasives_Native.option * term\n      Prims.list))\n  = fun projectee -> match projectee with | PatVar _0 -> _0\nlet (uu___is_PatName : pattern' -> Prims.bool) =\n  fun projectee -> match projectee with | PatName _0 -> true | uu___ -> false\nlet (__proj__PatName__item___0 : pattern' -> FStar_Ident.lid) =\n  fun projectee -> match projectee with | PatName _0 -> _0\nlet (uu___is_PatTvar : pattern' -> Prims.bool) =\n  fun projectee -> match projectee with | PatTvar _0 -> true | uu___ -> false\nlet (__proj__PatTvar__item___0 :\n  pattern' ->\n    (FStar_Ident.ident * arg_qualifier FStar_Pervasives_Native.option * term\n      Prims.list))\n  = fun projectee -> match projectee with | PatTvar _0 -> _0\nlet (uu___is_PatList : pattern' -> Prims.bool) =\n  fun projectee -> match projectee with | PatList _0 -> true | uu___ -> false\nlet (__proj__PatList__item___0 : pattern' -> pattern Prims.list) =\n  fun projectee -> match projectee with | PatList _0 -> _0\nlet (uu___is_PatTuple : pattern' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | PatTuple _0 -> true | uu___ -> false\nlet (__proj__PatTuple__item___0 :\n  pattern' -> (pattern Prims.list * Prims.bool)) =\n  fun projectee -> match projectee with | PatTuple _0 -> _0\nlet (uu___is_PatRecord : pattern' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | PatRecord _0 -> true | uu___ -> false\nlet (__proj__PatRecord__item___0 :\n  pattern' -> (FStar_Ident.lid * pattern) Prims.list) =\n  fun projectee -> match projectee with | PatRecord _0 -> _0\nlet (uu___is_PatAscribed : pattern' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | PatAscribed _0 -> true | uu___ -> false\nlet (__proj__PatAscribed__item___0 
:\n  pattern' -> (pattern * (term * term FStar_Pervasives_Native.option))) =\n  fun projectee -> match projectee with | PatAscribed _0 -> _0\nlet (uu___is_PatOr : pattern' -> Prims.bool) =\n  fun projectee -> match projectee with | PatOr _0 -> true | uu___ -> false\nlet (__proj__PatOr__item___0 : pattern' -> pattern Prims.list) =\n  fun projectee -> match projectee with | PatOr _0 -> _0\nlet (uu___is_PatOp : pattern' -> Prims.bool) =\n  fun projectee -> match projectee with | PatOp _0 -> true | uu___ -> false\nlet (__proj__PatOp__item___0 : pattern' -> FStar_Ident.ident) =\n  fun projectee -> match projectee with | PatOp _0 -> _0\nlet (uu___is_PatVQuote : pattern' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | PatVQuote _0 -> true | uu___ -> false\nlet (__proj__PatVQuote__item___0 : pattern' -> term) =\n  fun projectee -> match projectee with | PatVQuote _0 -> _0\nlet (__proj__Mkpattern__item__pat : pattern -> pattern') =\n  fun projectee -> match projectee with | { pat; prange;_} -> pat\nlet (__proj__Mkpattern__item__prange : pattern -> FStar_Compiler_Range.range)\n  = fun projectee -> match projectee with | { pat; prange;_} -> prange\nlet (uu___is_Implicit : arg_qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Implicit -> true | uu___ -> false\nlet (uu___is_Equality : arg_qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Equality -> true | uu___ -> false\nlet (uu___is_Meta : arg_qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Meta _0 -> true | uu___ -> false\nlet (__proj__Meta__item___0 : arg_qualifier -> term) =\n  fun projectee -> match projectee with | Meta _0 -> _0\nlet (uu___is_TypeClassArg : arg_qualifier -> Prims.bool) =\n  fun projectee ->\n    match projectee with | TypeClassArg -> true | uu___ -> false\nlet (uu___is_FsTypApp : imp -> Prims.bool) =\n  fun projectee -> match projectee with | FsTypApp -> true | uu___ -> false\nlet (uu___is_Hash : imp -> Prims.bool) =\n  fun 
projectee -> match projectee with | Hash -> true | uu___ -> false\nlet (uu___is_UnivApp : imp -> Prims.bool) =\n  fun projectee -> match projectee with | UnivApp -> true | uu___ -> false\nlet (uu___is_HashBrace : imp -> Prims.bool) =\n  fun projectee ->\n    match projectee with | HashBrace _0 -> true | uu___ -> false\nlet (__proj__HashBrace__item___0 : imp -> term) =\n  fun projectee -> match projectee with | HashBrace _0 -> _0\nlet (uu___is_Infix : imp -> Prims.bool) =\n  fun projectee -> match projectee with | Infix -> true | uu___ -> false\nlet (uu___is_Nothing : imp -> Prims.bool) =\n  fun projectee -> match projectee with | Nothing -> true | uu___ -> false\ntype match_returns_annotation =\n  (FStar_Ident.ident FStar_Pervasives_Native.option * term * Prims.bool)\ntype patterns = (FStar_Ident.ident Prims.list * term Prims.list Prims.list)\ntype attributes_ = term Prims.list\ntype branch = (pattern * term FStar_Pervasives_Native.option * term)\ntype aqual = arg_qualifier FStar_Pervasives_Native.option\ntype knd = term\ntype typ = term\ntype expr = term\ntype tycon_record =\n  (FStar_Ident.ident * aqual * attributes_ * term) Prims.list\ntype constructor_payload =\n  | VpOfNotation of typ \n  | VpArbitrary of typ \n  | VpRecord of (tycon_record * typ FStar_Pervasives_Native.option) \nlet (uu___is_VpOfNotation : constructor_payload -> Prims.bool) =\n  fun projectee ->\n    match projectee with | VpOfNotation _0 -> true | uu___ -> false\nlet (__proj__VpOfNotation__item___0 : constructor_payload -> typ) =\n  fun projectee -> match projectee with | VpOfNotation _0 -> _0\nlet (uu___is_VpArbitrary : constructor_payload -> Prims.bool) =\n  fun projectee ->\n    match projectee with | VpArbitrary _0 -> true | uu___ -> false\nlet (__proj__VpArbitrary__item___0 : constructor_payload -> typ) =\n  fun projectee -> match projectee with | VpArbitrary _0 -> _0\nlet (uu___is_VpRecord : constructor_payload -> Prims.bool) =\n  fun projectee ->\n    match projectee with | VpRecord 
_0 -> true | uu___ -> false\nlet (__proj__VpRecord__item___0 :\n  constructor_payload -> (tycon_record * typ FStar_Pervasives_Native.option))\n  = fun projectee -> match projectee with | VpRecord _0 -> _0\ntype tycon =\n  | TyconAbstract of (FStar_Ident.ident * binder Prims.list * knd\n  FStar_Pervasives_Native.option) \n  | TyconAbbrev of (FStar_Ident.ident * binder Prims.list * knd\n  FStar_Pervasives_Native.option * term) \n  | TyconRecord of (FStar_Ident.ident * binder Prims.list * knd\n  FStar_Pervasives_Native.option * attributes_ * tycon_record) \n  | TyconVariant of (FStar_Ident.ident * binder Prims.list * knd\n  FStar_Pervasives_Native.option * (FStar_Ident.ident * constructor_payload\n  FStar_Pervasives_Native.option * attributes_) Prims.list) \nlet (uu___is_TyconAbstract : tycon -> Prims.bool) =\n  fun projectee ->\n    match projectee with | TyconAbstract _0 -> true | uu___ -> false\nlet (__proj__TyconAbstract__item___0 :\n  tycon ->\n    (FStar_Ident.ident * binder Prims.list * knd\n      FStar_Pervasives_Native.option))\n  = fun projectee -> match projectee with | TyconAbstract _0 -> _0\nlet (uu___is_TyconAbbrev : tycon -> Prims.bool) =\n  fun projectee ->\n    match projectee with | TyconAbbrev _0 -> true | uu___ -> false\nlet (__proj__TyconAbbrev__item___0 :\n  tycon ->\n    (FStar_Ident.ident * binder Prims.list * knd\n      FStar_Pervasives_Native.option * term))\n  = fun projectee -> match projectee with | TyconAbbrev _0 -> _0\nlet (uu___is_TyconRecord : tycon -> Prims.bool) =\n  fun projectee ->\n    match projectee with | TyconRecord _0 -> true | uu___ -> false\nlet (__proj__TyconRecord__item___0 :\n  tycon ->\n    (FStar_Ident.ident * binder Prims.list * knd\n      FStar_Pervasives_Native.option * attributes_ * tycon_record))\n  = fun projectee -> match projectee with | TyconRecord _0 -> _0\nlet (uu___is_TyconVariant : tycon -> Prims.bool) =\n  fun projectee ->\n    match projectee with | TyconVariant _0 -> true | uu___ -> false\nlet 
(__proj__TyconVariant__item___0 :\n  tycon ->\n    (FStar_Ident.ident * binder Prims.list * knd\n      FStar_Pervasives_Native.option * (FStar_Ident.ident *\n      constructor_payload FStar_Pervasives_Native.option * attributes_)\n      Prims.list))\n  = fun projectee -> match projectee with | TyconVariant _0 -> _0\ntype qualifier =\n  | Private \n  | Noeq \n  | Unopteq \n  | Assumption \n  | DefaultEffect \n  | TotalEffect \n  | Effect_qual \n  | New \n  | Inline \n  | Visible \n  | Unfold_for_unification_and_vcgen \n  | Inline_for_extraction \n  | Irreducible \n  | NoExtract \n  | Reifiable \n  | Reflectable \n  | Opaque \n  | Logic \nlet (uu___is_Private : qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Private -> true | uu___ -> false\nlet (uu___is_Noeq : qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Noeq -> true | uu___ -> false\nlet (uu___is_Unopteq : qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Unopteq -> true | uu___ -> false\nlet (uu___is_Assumption : qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Assumption -> true | uu___ -> false\nlet (uu___is_DefaultEffect : qualifier -> Prims.bool) =\n  fun projectee ->\n    match projectee with | DefaultEffect -> true | uu___ -> false\nlet (uu___is_TotalEffect : qualifier -> Prims.bool) =\n  fun projectee ->\n    match projectee with | TotalEffect -> true | uu___ -> false\nlet (uu___is_Effect_qual : qualifier -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Effect_qual -> true | uu___ -> false\nlet (uu___is_New : qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | New -> true | uu___ -> false\nlet (uu___is_Inline : qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Inline -> true | uu___ -> false\nlet (uu___is_Visible : qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Visible -> true | uu___ -> false\nlet (uu___is_Unfold_for_unification_and_vcgen : 
qualifier -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | Unfold_for_unification_and_vcgen -> true\n    | uu___ -> false\nlet (uu___is_Inline_for_extraction : qualifier -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Inline_for_extraction -> true | uu___ -> false\nlet (uu___is_Irreducible : qualifier -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Irreducible -> true | uu___ -> false\nlet (uu___is_NoExtract : qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | NoExtract -> true | uu___ -> false\nlet (uu___is_Reifiable : qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Reifiable -> true | uu___ -> false\nlet (uu___is_Reflectable : qualifier -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Reflectable -> true | uu___ -> false\nlet (uu___is_Opaque : qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Opaque -> true | uu___ -> false\nlet (uu___is_Logic : qualifier -> Prims.bool) =\n  fun projectee -> match projectee with | Logic -> true | uu___ -> false\ntype qualifiers = qualifier Prims.list\ntype decoration =\n  | Qualifier of qualifier \n  | DeclAttributes of term Prims.list \nlet (uu___is_Qualifier : decoration -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Qualifier _0 -> true | uu___ -> false\nlet (__proj__Qualifier__item___0 : decoration -> qualifier) =\n  fun projectee -> match projectee with | Qualifier _0 -> _0\nlet (uu___is_DeclAttributes : decoration -> Prims.bool) =\n  fun projectee ->\n    match projectee with | DeclAttributes _0 -> true | uu___ -> false\nlet (__proj__DeclAttributes__item___0 : decoration -> term Prims.list) =\n  fun projectee -> match projectee with | DeclAttributes _0 -> _0\ntype lift_op =\n  | NonReifiableLift of term \n  | ReifiableLift of (term * term) \n  | LiftForFree of term \nlet (uu___is_NonReifiableLift : lift_op -> Prims.bool) =\n  fun projectee ->\n    match projectee with | 
NonReifiableLift _0 -> true | uu___ -> false\nlet (__proj__NonReifiableLift__item___0 : lift_op -> term) =\n  fun projectee -> match projectee with | NonReifiableLift _0 -> _0\nlet (uu___is_ReifiableLift : lift_op -> Prims.bool) =\n  fun projectee ->\n    match projectee with | ReifiableLift _0 -> true | uu___ -> false\nlet (__proj__ReifiableLift__item___0 : lift_op -> (term * term)) =\n  fun projectee -> match projectee with | ReifiableLift _0 -> _0\nlet (uu___is_LiftForFree : lift_op -> Prims.bool) =\n  fun projectee ->\n    match projectee with | LiftForFree _0 -> true | uu___ -> false\nlet (__proj__LiftForFree__item___0 : lift_op -> term) =\n  fun projectee -> match projectee with | LiftForFree _0 -> _0\ntype lift =\n  {\n  msource: FStar_Ident.lid ;\n  mdest: FStar_Ident.lid ;\n  lift_op: lift_op ;\n  braced: Prims.bool }\nlet (__proj__Mklift__item__msource : lift -> FStar_Ident.lid) =\n  fun projectee ->\n    match projectee with\n    | { msource; mdest; lift_op = lift_op1; braced;_} -> msource\nlet (__proj__Mklift__item__mdest : lift -> FStar_Ident.lid) =\n  fun projectee ->\n    match projectee with\n    | { msource; mdest; lift_op = lift_op1; braced;_} -> mdest\nlet (__proj__Mklift__item__lift_op : lift -> lift_op) =\n  fun projectee ->\n    match projectee with\n    | { msource; mdest; lift_op = lift_op1; braced;_} -> lift_op1\nlet (__proj__Mklift__item__braced : lift -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { msource; mdest; lift_op = lift_op1; braced;_} -> braced\ntype pragma =\n  | SetOptions of Prims.string \n  | ResetOptions of Prims.string FStar_Pervasives_Native.option \n  | PushOptions of Prims.string FStar_Pervasives_Native.option \n  | PopOptions \n  | RestartSolver \n  | PrintEffectsGraph \nlet (uu___is_SetOptions : pragma -> Prims.bool) =\n  fun projectee ->\n    match projectee with | SetOptions _0 -> true | uu___ -> false\nlet (__proj__SetOptions__item___0 : pragma -> Prims.string) =\n  fun projectee -> match 
projectee with | SetOptions _0 -> _0\nlet (uu___is_ResetOptions : pragma -> Prims.bool) =\n  fun projectee ->\n    match projectee with | ResetOptions _0 -> true | uu___ -> false\nlet (__proj__ResetOptions__item___0 :\n  pragma -> Prims.string FStar_Pervasives_Native.option) =\n  fun projectee -> match projectee with | ResetOptions _0 -> _0\nlet (uu___is_PushOptions : pragma -> Prims.bool) =\n  fun projectee ->\n    match projectee with | PushOptions _0 -> true | uu___ -> false\nlet (__proj__PushOptions__item___0 :\n  pragma -> Prims.string FStar_Pervasives_Native.option) =\n  fun projectee -> match projectee with | PushOptions _0 -> _0\nlet (uu___is_PopOptions : pragma -> Prims.bool) =\n  fun projectee -> match projectee with | PopOptions -> true | uu___ -> false\nlet (uu___is_RestartSolver : pragma -> Prims.bool) =\n  fun projectee ->\n    match projectee with | RestartSolver -> true | uu___ -> false\nlet (uu___is_PrintEffectsGraph : pragma -> Prims.bool) =\n  fun projectee ->\n    match projectee with | PrintEffectsGraph -> true | uu___ -> false\ntype decl' =\n  | TopLevelModule of FStar_Ident.lid \n  | Open of FStar_Ident.lid \n  | Friend of FStar_Ident.lid \n  | Include of FStar_Ident.lid \n  | ModuleAbbrev of (FStar_Ident.ident * FStar_Ident.lid) \n  | TopLevelLet of (let_qualifier * (pattern * term) Prims.list) \n  | Tycon of (Prims.bool * Prims.bool * tycon Prims.list) \n  | Val of (FStar_Ident.ident * term) \n  | Exception of (FStar_Ident.ident * term FStar_Pervasives_Native.option) \n  | NewEffect of effect_decl \n  | LayeredEffect of effect_decl \n  | SubEffect of lift \n  | Polymonadic_bind of (FStar_Ident.lid * FStar_Ident.lid * FStar_Ident.lid\n  * term) \n  | Polymonadic_subcomp of (FStar_Ident.lid * FStar_Ident.lid * term) \n  | Pragma of pragma \n  | Assume of (FStar_Ident.ident * term) \n  | Splice of (FStar_Ident.ident Prims.list * term) \nand decl =\n  {\n  d: decl' ;\n  drange: FStar_Compiler_Range.range ;\n  quals: qualifiers ;\n  attrs: 
attributes_ }\nand effect_decl =\n  | DefineEffect of (FStar_Ident.ident * binder Prims.list * term * decl\n  Prims.list) \n  | RedefineEffect of (FStar_Ident.ident * binder Prims.list * term) \nlet (uu___is_TopLevelModule : decl' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | TopLevelModule _0 -> true | uu___ -> false\nlet (__proj__TopLevelModule__item___0 : decl' -> FStar_Ident.lid) =\n  fun projectee -> match projectee with | TopLevelModule _0 -> _0\nlet (uu___is_Open : decl' -> Prims.bool) =\n  fun projectee -> match projectee with | Open _0 -> true | uu___ -> false\nlet (__proj__Open__item___0 : decl' -> FStar_Ident.lid) =\n  fun projectee -> match projectee with | Open _0 -> _0\nlet (uu___is_Friend : decl' -> Prims.bool) =\n  fun projectee -> match projectee with | Friend _0 -> true | uu___ -> false\nlet (__proj__Friend__item___0 : decl' -> FStar_Ident.lid) =\n  fun projectee -> match projectee with | Friend _0 -> _0\nlet (uu___is_Include : decl' -> Prims.bool) =\n  fun projectee -> match projectee with | Include _0 -> true | uu___ -> false\nlet (__proj__Include__item___0 : decl' -> FStar_Ident.lid) =\n  fun projectee -> match projectee with | Include _0 -> _0\nlet (uu___is_ModuleAbbrev : decl' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | ModuleAbbrev _0 -> true | uu___ -> false\nlet (__proj__ModuleAbbrev__item___0 :\n  decl' -> (FStar_Ident.ident * FStar_Ident.lid)) =\n  fun projectee -> match projectee with | ModuleAbbrev _0 -> _0\nlet (uu___is_TopLevelLet : decl' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | TopLevelLet _0 -> true | uu___ -> false\nlet (__proj__TopLevelLet__item___0 :\n  decl' -> (let_qualifier * (pattern * term) Prims.list)) =\n  fun projectee -> match projectee with | TopLevelLet _0 -> _0\nlet (uu___is_Tycon : decl' -> Prims.bool) =\n  fun projectee -> match projectee with | Tycon _0 -> true | uu___ -> false\nlet (__proj__Tycon__item___0 :\n  decl' -> (Prims.bool * Prims.bool * 
tycon Prims.list)) =\n  fun projectee -> match projectee with | Tycon _0 -> _0\nlet (uu___is_Val : decl' -> Prims.bool) =\n  fun projectee -> match projectee with | Val _0 -> true | uu___ -> false\nlet (__proj__Val__item___0 : decl' -> (FStar_Ident.ident * term)) =\n  fun projectee -> match projectee with | Val _0 -> _0\nlet (uu___is_Exception : decl' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Exception _0 -> true | uu___ -> false\nlet (__proj__Exception__item___0 :\n  decl' -> (FStar_Ident.ident * term FStar_Pervasives_Native.option)) =\n  fun projectee -> match projectee with | Exception _0 -> _0\nlet (uu___is_NewEffect : decl' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | NewEffect _0 -> true | uu___ -> false\nlet (__proj__NewEffect__item___0 : decl' -> effect_decl) =\n  fun projectee -> match projectee with | NewEffect _0 -> _0\nlet (uu___is_LayeredEffect : decl' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | LayeredEffect _0 -> true | uu___ -> false\nlet (__proj__LayeredEffect__item___0 : decl' -> effect_decl) =\n  fun projectee -> match projectee with | LayeredEffect _0 -> _0\nlet (uu___is_SubEffect : decl' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | SubEffect _0 -> true | uu___ -> false\nlet (__proj__SubEffect__item___0 : decl' -> lift) =\n  fun projectee -> match projectee with | SubEffect _0 -> _0\nlet (uu___is_Polymonadic_bind : decl' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Polymonadic_bind _0 -> true | uu___ -> false\nlet (__proj__Polymonadic_bind__item___0 :\n  decl' -> (FStar_Ident.lid * FStar_Ident.lid * FStar_Ident.lid * term)) =\n  fun projectee -> match projectee with | Polymonadic_bind _0 -> _0\nlet (uu___is_Polymonadic_subcomp : decl' -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Polymonadic_subcomp _0 -> true | uu___ -> false\nlet (__proj__Polymonadic_subcomp__item___0 :\n  decl' -> (FStar_Ident.lid * FStar_Ident.lid * 
term)) =\n  fun projectee -> match projectee with | Polymonadic_subcomp _0 -> _0\nlet (uu___is_Pragma : decl' -> Prims.bool) =\n  fun projectee -> match projectee with | Pragma _0 -> true | uu___ -> false\nlet (__proj__Pragma__item___0 : decl' -> pragma) =\n  fun projectee -> match projectee with | Pragma _0 -> _0\nlet (uu___is_Assume : decl' -> Prims.bool) =\n  fun projectee -> match projectee with | Assume _0 -> true | uu___ -> false\nlet (__proj__Assume__item___0 : decl' -> (FStar_Ident.ident * term)) =\n  fun projectee -> match projectee with | Assume _0 -> _0\nlet (uu___is_Splice : decl' -> Prims.bool) =\n  fun projectee -> match projectee with | Splice _0 -> true | uu___ -> false\nlet (__proj__Splice__item___0 :\n  decl' -> (FStar_Ident.ident Prims.list * term)) =\n  fun projectee -> match projectee with | Splice _0 -> _0\nlet (__proj__Mkdecl__item__d : decl -> decl') =\n  fun projectee -> match projectee with | { d; drange; quals; attrs;_} -> d\nlet (__proj__Mkdecl__item__drange : decl -> FStar_Compiler_Range.range) =\n  fun projectee ->\n    match projectee with | { d; drange; quals; attrs;_} -> drange\nlet (__proj__Mkdecl__item__quals : decl -> qualifiers) =\n  fun projectee ->\n    match projectee with | { d; drange; quals; attrs;_} -> quals\nlet (__proj__Mkdecl__item__attrs : decl -> attributes_) =\n  fun projectee ->\n    match projectee with | { d; drange; quals; attrs;_} -> attrs\nlet (uu___is_DefineEffect : effect_decl -> Prims.bool) =\n  fun projectee ->\n    match projectee with | DefineEffect _0 -> true | uu___ -> false\nlet (__proj__DefineEffect__item___0 :\n  effect_decl ->\n    (FStar_Ident.ident * binder Prims.list * term * decl Prims.list))\n  = fun projectee -> match projectee with | DefineEffect _0 -> _0\nlet (uu___is_RedefineEffect : effect_decl -> Prims.bool) =\n  fun projectee ->\n    match projectee with | RedefineEffect _0 -> true | uu___ -> false\nlet (__proj__RedefineEffect__item___0 :\n  effect_decl -> (FStar_Ident.ident * binder 
Prims.list * term)) =\n  fun projectee -> match projectee with | RedefineEffect _0 -> _0\ntype modul =\n  | Module of (FStar_Ident.lid * decl Prims.list) \n  | Interface of (FStar_Ident.lid * decl Prims.list * Prims.bool) \nlet (uu___is_Module : modul -> Prims.bool) =\n  fun projectee -> match projectee with | Module _0 -> true | uu___ -> false\nlet (__proj__Module__item___0 : modul -> (FStar_Ident.lid * decl Prims.list))\n  = fun projectee -> match projectee with | Module _0 -> _0\nlet (uu___is_Interface : modul -> Prims.bool) =\n  fun projectee ->\n    match projectee with | Interface _0 -> true | uu___ -> false\nlet (__proj__Interface__item___0 :\n  modul -> (FStar_Ident.lid * decl Prims.list * Prims.bool)) =\n  fun projectee -> match projectee with | Interface _0 -> _0\ntype file = modul\ntype inputFragment = (file, decl Prims.list) FStar_Pervasives.either\nlet (decl_drange : decl -> FStar_Compiler_Range.range) =\n  fun decl1 -> decl1.drange\nlet (check_id : FStar_Ident.ident -> unit) =\n  fun id ->\n    let first_char =\n      let uu___ = FStar_Ident.string_of_id id in\n      FStar_String.substring uu___ Prims.int_zero Prims.int_one in\n    if (FStar_String.lowercase first_char) = first_char\n    then ()\n    else\n      (let uu___1 =\n         let uu___2 =\n           let uu___3 = FStar_Ident.string_of_id id in\n           FStar_Compiler_Util.format1\n             \"Invalid identifer '%s'; expected a symbol that begins with a lower-case character\"\n             uu___3 in\n         (FStar_Errors_Codes.Fatal_InvalidIdentifier, uu___2) in\n       let uu___2 = FStar_Ident.range_of_id id in\n       FStar_Errors.raise_error uu___1 uu___2)\nlet at_most_one :\n  'uuuuu .\n    Prims.string ->\n      FStar_Compiler_Range.range ->\n        'uuuuu Prims.list -> 'uuuuu FStar_Pervasives_Native.option\n  =\n  fun s ->\n    fun r ->\n      fun l ->\n        match l with\n        | x::[] -> FStar_Pervasives_Native.Some x\n        | [] -> FStar_Pervasives_Native.None\n        
| uu___ ->\n            let uu___1 =\n              let uu___2 =\n                FStar_Compiler_Util.format1\n                  \"At most one %s is allowed on declarations\" s in\n              (FStar_Errors_Codes.Fatal_MoreThanOneDeclaration, uu___2) in\n            FStar_Errors.raise_error uu___1 r\nlet (mk_decl :\n  decl' -> FStar_Compiler_Range.range -> decoration Prims.list -> decl) =\n  fun d ->\n    fun r ->\n      fun decorations ->\n        let attributes_1 =\n          let uu___ =\n            FStar_Compiler_List.choose\n              (fun uu___1 ->\n                 match uu___1 with\n                 | DeclAttributes a -> FStar_Pervasives_Native.Some a\n                 | uu___2 -> FStar_Pervasives_Native.None) decorations in\n          at_most_one \"attribute set\" r uu___ in\n        let attributes_2 = FStar_Compiler_Util.dflt [] attributes_1 in\n        let qualifiers1 =\n          FStar_Compiler_List.choose\n            (fun uu___ ->\n               match uu___ with\n               | Qualifier q -> FStar_Pervasives_Native.Some q\n               | uu___1 -> FStar_Pervasives_Native.None) decorations in\n        { d; drange = r; quals = qualifiers1; attrs = attributes_2 }\nlet (mk_binder_with_attrs :\n  binder' ->\n    FStar_Compiler_Range.range ->\n      level ->\n        arg_qualifier FStar_Pervasives_Native.option ->\n          term Prims.list -> binder)\n  =\n  fun b ->\n    fun r ->\n      fun l ->\n        fun i ->\n          fun attrs ->\n            { b; brange = r; blevel = l; aqual = i; battributes = attrs }\nlet (mk_binder :\n  binder' ->\n    FStar_Compiler_Range.range ->\n      level -> arg_qualifier FStar_Pervasives_Native.option -> binder)\n  = fun b -> fun r -> fun l -> fun i -> mk_binder_with_attrs b r l i []\nlet (mk_term : term' -> FStar_Compiler_Range.range -> level -> term) =\n  fun t -> fun r -> fun l -> { tm = t; range = r; level = l }\nlet (mk_uminus :\n  term ->\n    FStar_Compiler_Range.range -> FStar_Compiler_Range.range -> 
level -> term)\n  =\n  fun t ->\n    fun rminus ->\n      fun r ->\n        fun l ->\n          let t1 =\n            match t.tm with\n            | Const (FStar_Const.Const_int\n                (s, FStar_Pervasives_Native.Some (FStar_Const.Signed, width)))\n                ->\n                Const\n                  (FStar_Const.Const_int\n                     ((Prims.op_Hat \"-\" s),\n                       (FStar_Pervasives_Native.Some\n                          (FStar_Const.Signed, width))))\n            | uu___ ->\n                let uu___1 =\n                  let uu___2 = FStar_Ident.mk_ident (\"-\", rminus) in\n                  (uu___2, [t]) in\n                Op uu___1 in\n          mk_term t1 r l\nlet (mk_pattern : pattern' -> FStar_Compiler_Range.range -> pattern) =\n  fun p -> fun r -> { pat = p; prange = r }\nlet (un_curry_abs : pattern Prims.list -> term -> term') =\n  fun ps ->\n    fun body ->\n      match body.tm with\n      | Abs (p', body') -> Abs ((FStar_Compiler_List.op_At ps p'), body')\n      | uu___ -> Abs (ps, body)\nlet (mk_function :\n  (pattern * term FStar_Pervasives_Native.option * term) Prims.list ->\n    FStar_Compiler_Range.range -> FStar_Compiler_Range.range -> term)\n  =\n  fun branches ->\n    fun r1 ->\n      fun r2 ->\n        let x = FStar_Ident.gen r1 in\n        let uu___ =\n          let uu___1 =\n            let uu___2 =\n              let uu___3 =\n                let uu___4 =\n                  let uu___5 =\n                    let uu___6 =\n                      let uu___7 = FStar_Ident.lid_of_ids [x] in Var uu___7 in\n                    mk_term uu___6 r1 Expr in\n                  (uu___5, FStar_Pervasives_Native.None,\n                    FStar_Pervasives_Native.None, branches) in\n                Match uu___4 in\n              mk_term uu___3 r2 Expr in\n            ([mk_pattern (PatVar (x, FStar_Pervasives_Native.None, [])) r1],\n              uu___2) in\n          Abs uu___1 in\n        mk_term uu___ r2 
Expr\nlet (un_function :\n  pattern -> term -> (pattern * term) FStar_Pervasives_Native.option) =\n  fun p ->\n    fun tm ->\n      match ((p.pat), (tm.tm)) with\n      | (PatVar uu___, Abs (pats, body)) ->\n          FStar_Pervasives_Native.Some\n            ((mk_pattern (PatApp (p, pats)) p.prange), body)\n      | uu___ -> FStar_Pervasives_Native.None\nlet (lid_with_range :\n  FStar_Ident.lident -> FStar_Compiler_Range.range -> FStar_Ident.lident) =\n  fun lid ->\n    fun r ->\n      let uu___ = FStar_Ident.path_of_lid lid in\n      FStar_Ident.lid_of_path uu___ r\nlet (consPat : FStar_Compiler_Range.range -> pattern -> pattern -> pattern')\n  =\n  fun r ->\n    fun hd ->\n      fun tl ->\n        PatApp\n          ((mk_pattern (PatName FStar_Parser_Const.cons_lid) r), [hd; tl])\nlet (consTerm : FStar_Compiler_Range.range -> term -> term -> term) =\n  fun r ->\n    fun hd ->\n      fun tl ->\n        mk_term\n          (Construct\n             (FStar_Parser_Const.cons_lid, [(hd, Nothing); (tl, Nothing)])) r\n          Expr\nlet (mkConsList : FStar_Compiler_Range.range -> term Prims.list -> term) =\n  fun r ->\n    fun elts ->\n      let nil = mk_term (Construct (FStar_Parser_Const.nil_lid, [])) r Expr in\n      FStar_Compiler_List.fold_right (fun e -> fun tl -> consTerm r e tl)\n        elts nil\nlet (unit_const : FStar_Compiler_Range.range -> term) =\n  fun r -> mk_term (Const FStar_Const.Const_unit) r Expr\nlet (ml_comp : term -> term) =\n  fun t ->\n    let lid = FStar_Parser_Const.effect_ML_lid () in\n    let ml = mk_term (Name lid) t.range Expr in\n    let t1 = mk_term (App (ml, t, Nothing)) t.range Expr in t1\nlet (tot_comp : term -> term) =\n  fun t ->\n    let ml = mk_term (Name FStar_Parser_Const.effect_Tot_lid) t.range Expr in\n    let t1 = mk_term (App (ml, t, Nothing)) t.range Expr in t1\nlet (mkApp :\n  term -> (term * imp) Prims.list -> FStar_Compiler_Range.range -> term) =\n  fun t ->\n    fun args ->\n      fun r ->\n        match args with\n      
  | [] -> t\n        | uu___ ->\n            (match t.tm with\n             | Name s -> mk_term (Construct (s, args)) r Un\n             | uu___1 ->\n                 FStar_Compiler_List.fold_left\n                   (fun t1 ->\n                      fun uu___2 ->\n                        match uu___2 with\n                        | (a, imp1) -> mk_term (App (t1, a, imp1)) r Un) t\n                   args)\nlet (mkRefSet : FStar_Compiler_Range.range -> term Prims.list -> term) =\n  fun r ->\n    fun elts ->\n      let uu___ =\n        (FStar_Parser_Const.set_empty, FStar_Parser_Const.set_singleton,\n          FStar_Parser_Const.set_union, FStar_Parser_Const.heap_addr_of_lid) in\n      match uu___ with\n      | (empty_lid, singleton_lid, union_lid, addr_of_lid) ->\n          let empty =\n            let uu___1 =\n              let uu___2 = FStar_Ident.set_lid_range empty_lid r in\n              Var uu___2 in\n            mk_term uu___1 r Expr in\n          let addr_of =\n            let uu___1 =\n              let uu___2 = FStar_Ident.set_lid_range addr_of_lid r in\n              Var uu___2 in\n            mk_term uu___1 r Expr in\n          let singleton =\n            let uu___1 =\n              let uu___2 = FStar_Ident.set_lid_range singleton_lid r in\n              Var uu___2 in\n            mk_term uu___1 r Expr in\n          let union =\n            let uu___1 =\n              let uu___2 = FStar_Ident.set_lid_range union_lid r in\n              Var uu___2 in\n            mk_term uu___1 r Expr in\n          FStar_Compiler_List.fold_right\n            (fun e ->\n               fun tl ->\n                 let e1 = mkApp addr_of [(e, Nothing)] r in\n                 let single_e = mkApp singleton [(e1, Nothing)] r in\n                 mkApp union [(single_e, Nothing); (tl, Nothing)] r) elts\n            empty\nlet (mkExplicitApp :\n  term -> term Prims.list -> FStar_Compiler_Range.range -> term) =\n  fun t ->\n    fun args ->\n      fun r ->\n        match args 
with\n        | [] -> t\n        | uu___ ->\n            (match t.tm with\n             | Name s ->\n                 let uu___1 =\n                   let uu___2 =\n                     let uu___3 =\n                       FStar_Compiler_List.map (fun a -> (a, Nothing)) args in\n                     (s, uu___3) in\n                   Construct uu___2 in\n                 mk_term uu___1 r Un\n             | uu___1 ->\n                 FStar_Compiler_List.fold_left\n                   (fun t1 -> fun a -> mk_term (App (t1, a, Nothing)) r Un) t\n                   args)\nlet (mkAdmitMagic : FStar_Compiler_Range.range -> term) =\n  fun r ->\n    let admit =\n      let admit_name =\n        let uu___ =\n          let uu___1 =\n            FStar_Ident.set_lid_range FStar_Parser_Const.admit_lid r in\n          Var uu___1 in\n        mk_term uu___ r Expr in\n      mkExplicitApp admit_name [unit_const r] r in\n    let magic =\n      let magic_name =\n        let uu___ =\n          let uu___1 =\n            FStar_Ident.set_lid_range FStar_Parser_Const.magic_lid r in\n          Var uu___1 in\n        mk_term uu___ r Expr in\n      mkExplicitApp magic_name [unit_const r] r in\n    let admit_magic = mk_term (Seq (admit, magic)) r Expr in admit_magic\nlet mkWildAdmitMagic :\n  'uuuuu .\n    FStar_Compiler_Range.range ->\n      (pattern * 'uuuuu FStar_Pervasives_Native.option * term)\n  =\n  fun r ->\n    let uu___ = mkAdmitMagic r in\n    ((mk_pattern (PatWild (FStar_Pervasives_Native.None, [])) r),\n      FStar_Pervasives_Native.None, uu___)\nlet focusBranches :\n  'uuuuu .\n    (Prims.bool * (pattern * 'uuuuu FStar_Pervasives_Native.option * term))\n      Prims.list ->\n      FStar_Compiler_Range.range ->\n        (pattern * 'uuuuu FStar_Pervasives_Native.option * term) Prims.list\n  =\n  fun branches ->\n    fun r ->\n      let should_filter =\n        FStar_Compiler_Util.for_some FStar_Pervasives_Native.fst branches in\n      if should_filter\n      then\n        
(FStar_Errors.log_issue r\n           (FStar_Errors_Codes.Warning_Filtered,\n             \"Focusing on only some cases\");\n         (let focussed =\n            let uu___1 =\n              FStar_Compiler_List.filter FStar_Pervasives_Native.fst branches in\n            FStar_Compiler_Effect.op_Bar_Greater uu___1\n              (FStar_Compiler_List.map FStar_Pervasives_Native.snd) in\n          let uu___1 = let uu___2 = mkWildAdmitMagic r in [uu___2] in\n          FStar_Compiler_List.op_At focussed uu___1))\n      else\n        FStar_Compiler_Effect.op_Bar_Greater branches\n          (FStar_Compiler_List.map FStar_Pervasives_Native.snd)\nlet focusLetBindings :\n  'uuuuu .\n    (Prims.bool * ('uuuuu * term)) Prims.list ->\n      FStar_Compiler_Range.range -> ('uuuuu * term) Prims.list\n  =\n  fun lbs ->\n    fun r ->\n      let should_filter =\n        FStar_Compiler_Util.for_some FStar_Pervasives_Native.fst lbs in\n      if should_filter\n      then\n        (FStar_Errors.log_issue r\n           (FStar_Errors_Codes.Warning_Filtered,\n             \"Focusing on only some cases in this (mutually) recursive definition\");\n         FStar_Compiler_List.map\n           (fun uu___1 ->\n              match uu___1 with\n              | (f, lb) ->\n                  if f\n                  then lb\n                  else\n                    (let uu___3 = mkAdmitMagic r in\n                     ((FStar_Pervasives_Native.fst lb), uu___3))) lbs)\n      else\n        FStar_Compiler_Effect.op_Bar_Greater lbs\n          (FStar_Compiler_List.map FStar_Pervasives_Native.snd)\nlet focusAttrLetBindings :\n  'uuuuu 'uuuuu1 .\n    ('uuuuu * (Prims.bool * ('uuuuu1 * term))) Prims.list ->\n      FStar_Compiler_Range.range -> ('uuuuu * ('uuuuu1 * term)) Prims.list\n  =\n  fun lbs ->\n    fun r ->\n      let should_filter =\n        FStar_Compiler_Util.for_some\n          (fun uu___ -> match uu___ with | (attr, (focus, uu___1)) -> focus)\n          lbs in\n      if should_filter\n      
then\n        (FStar_Errors.log_issue r\n           (FStar_Errors_Codes.Warning_Filtered,\n             \"Focusing on only some cases in this (mutually) recursive definition\");\n         FStar_Compiler_List.map\n           (fun uu___1 ->\n              match uu___1 with\n              | (attr, (f, lb)) ->\n                  if f\n                  then (attr, lb)\n                  else\n                    (let uu___3 =\n                       let uu___4 = mkAdmitMagic r in\n                       ((FStar_Pervasives_Native.fst lb), uu___4) in\n                     (attr, uu___3))) lbs)\n      else\n        FStar_Compiler_Effect.op_Bar_Greater lbs\n          (FStar_Compiler_List.map\n             (fun uu___1 ->\n                match uu___1 with | (attr, (uu___2, lb)) -> (attr, lb)))\nlet (mkFsTypApp :\n  term -> term Prims.list -> FStar_Compiler_Range.range -> term) =\n  fun t ->\n    fun args ->\n      fun r ->\n        let uu___ = FStar_Compiler_List.map (fun a -> (a, FsTypApp)) args in\n        mkApp t uu___ r\nlet (mkTuple : term Prims.list -> FStar_Compiler_Range.range -> term) =\n  fun args ->\n    fun r ->\n      let cons =\n        FStar_Parser_Const.mk_tuple_data_lid\n          (FStar_Compiler_List.length args) r in\n      let uu___ = FStar_Compiler_List.map (fun x -> (x, Nothing)) args in\n      mkApp (mk_term (Name cons) r Expr) uu___ r\nlet (mkDTuple : term Prims.list -> FStar_Compiler_Range.range -> term) =\n  fun args ->\n    fun r ->\n      let cons =\n        FStar_Parser_Const.mk_dtuple_data_lid\n          (FStar_Compiler_List.length args) r in\n      let uu___ = FStar_Compiler_List.map (fun x -> (x, Nothing)) args in\n      mkApp (mk_term (Name cons) r Expr) uu___ r\nlet (mkRefinedBinder :\n  FStar_Ident.ident ->\n    term ->\n      Prims.bool ->\n        term FStar_Pervasives_Native.option ->\n          FStar_Compiler_Range.range ->\n            arg_qualifier FStar_Pervasives_Native.option ->\n              term Prims.list -> binder)\n  =\n  
fun id ->\n    fun t ->\n      fun should_bind_var ->\n        fun refopt ->\n          fun m ->\n            fun implicit ->\n              fun attrs ->\n                let b =\n                  mk_binder_with_attrs (Annotated (id, t)) m Type_level\n                    implicit attrs in\n                match refopt with\n                | FStar_Pervasives_Native.None -> b\n                | FStar_Pervasives_Native.Some phi ->\n                    if should_bind_var\n                    then\n                      mk_binder_with_attrs\n                        (Annotated\n                           (id, (mk_term (Refine (b, phi)) m Type_level))) m\n                        Type_level implicit attrs\n                    else\n                      (let x = FStar_Ident.gen t.range in\n                       let b1 =\n                         mk_binder_with_attrs (Annotated (x, t)) m Type_level\n                           implicit attrs in\n                       mk_binder_with_attrs\n                         (Annotated\n                            (id, (mk_term (Refine (b1, phi)) m Type_level)))\n                         m Type_level implicit attrs)\nlet (mkRefinedPattern :\n  pattern ->\n    term ->\n      Prims.bool ->\n        term FStar_Pervasives_Native.option ->\n          FStar_Compiler_Range.range -> FStar_Compiler_Range.range -> pattern)\n  =\n  fun pat ->\n    fun t ->\n      fun should_bind_pat ->\n        fun phi_opt ->\n          fun t_range ->\n            fun range ->\n              let t1 =\n                match phi_opt with\n                | FStar_Pervasives_Native.None -> t\n                | FStar_Pervasives_Native.Some phi ->\n                    if should_bind_pat\n                    then\n                      (match pat.pat with\n                       | PatVar (x, uu___, attrs) ->\n                           mk_term\n                             (Refine\n                                ((mk_binder_with_attrs (Annotated (x, t))\n            
                        t_range Type_level\n                                    FStar_Pervasives_Native.None attrs), phi))\n                             range Type_level\n                       | uu___ ->\n                           let x = FStar_Ident.gen t_range in\n                           let phi1 =\n                             let x_var =\n                               let uu___1 =\n                                 let uu___2 = FStar_Ident.lid_of_ids [x] in\n                                 Var uu___2 in\n                               mk_term uu___1 phi.range Formula in\n                             let pat_branch =\n                               (pat, FStar_Pervasives_Native.None, phi) in\n                             let otherwise_branch =\n                               let uu___1 =\n                                 let uu___2 =\n                                   let uu___3 =\n                                     FStar_Ident.lid_of_path [\"False\"]\n                                       phi.range in\n                                   Name uu___3 in\n                                 mk_term uu___2 phi.range Formula in\n                               ((mk_pattern\n                                   (PatWild\n                                      (FStar_Pervasives_Native.None, []))\n                                   phi.range), FStar_Pervasives_Native.None,\n                                 uu___1) in\n                             mk_term\n                               (Match\n                                  (x_var, FStar_Pervasives_Native.None,\n                                    FStar_Pervasives_Native.None,\n                                    [pat_branch; otherwise_branch]))\n                               phi.range Formula in\n                           mk_term\n                             (Refine\n                                ((mk_binder (Annotated (x, t)) t_range\n                                    Type_level 
FStar_Pervasives_Native.None),\n                                  phi1)) range Type_level)\n                    else\n                      (let x = FStar_Ident.gen t.range in\n                       mk_term\n                         (Refine\n                            ((mk_binder (Annotated (x, t)) t_range Type_level\n                                FStar_Pervasives_Native.None), phi)) range\n                         Type_level) in\n              mk_pattern\n                (PatAscribed (pat, (t1, FStar_Pervasives_Native.None))) range\nlet rec (extract_named_refinement :\n  term ->\n    (FStar_Ident.ident * term * term FStar_Pervasives_Native.option)\n      FStar_Pervasives_Native.option)\n  =\n  fun t1 ->\n    match t1.tm with\n    | NamedTyp (x, t) ->\n        FStar_Pervasives_Native.Some (x, t, FStar_Pervasives_Native.None)\n    | Refine\n        ({ b = Annotated (x, t); brange = uu___; blevel = uu___1;\n           aqual = uu___2; battributes = uu___3;_},\n         t')\n        ->\n        FStar_Pervasives_Native.Some\n          (x, t, (FStar_Pervasives_Native.Some t'))\n    | Paren t -> extract_named_refinement t\n    | uu___ -> FStar_Pervasives_Native.None\nlet rec (as_mlist :\n  ((FStar_Ident.lid * decl) * decl Prims.list) -> decl Prims.list -> modul) =\n  fun cur ->\n    fun ds ->\n      let uu___ = cur in\n      match uu___ with\n      | ((m_name, m_decl), cur1) ->\n          (match ds with\n           | [] ->\n               Module (m_name, (m_decl :: (FStar_Compiler_List.rev cur1)))\n           | d::ds1 ->\n               (match d.d with\n                | TopLevelModule m' ->\n                    FStar_Errors.raise_error\n                      (FStar_Errors_Codes.Fatal_UnexpectedModuleDeclaration,\n                        \"Unexpected module declaration\") d.drange\n                | uu___1 -> as_mlist ((m_name, m_decl), (d :: cur1)) ds1))\nlet (as_frag : decl Prims.list -> inputFragment) =\n  fun ds ->\n    let uu___ =\n      match ds with\n      | 
d::ds1 -> (d, ds1)\n      | [] -> FStar_Compiler_Effect.raise FStar_Errors.Empty_frag in\n    match uu___ with\n    | (d, ds1) ->\n        (match d.d with\n         | TopLevelModule m ->\n             let m1 = as_mlist ((m, d), []) ds1 in FStar_Pervasives.Inl m1\n         | uu___1 ->\n             let ds2 = d :: ds1 in\n             (FStar_Compiler_List.iter\n                (fun uu___3 ->\n                   match uu___3 with\n                   | { d = TopLevelModule uu___4; drange = r; quals = uu___5;\n                       attrs = uu___6;_} ->\n                       FStar_Errors.raise_error\n                         (FStar_Errors_Codes.Fatal_UnexpectedModuleDeclaration,\n                           \"Unexpected module declaration\") r\n                   | uu___4 -> ()) ds2;\n              FStar_Pervasives.Inr ds2))\nlet (strip_prefix :\n  Prims.string -> Prims.string -> Prims.string FStar_Pervasives_Native.option)\n  =\n  fun prefix ->\n    fun s ->\n      if FStar_Compiler_Util.starts_with s prefix\n      then\n        let uu___ =\n          FStar_Compiler_Util.substring_from s (FStar_String.length prefix) in\n        FStar_Pervasives_Native.Some uu___\n      else FStar_Pervasives_Native.None\nlet compile_op : 'uuuuu . 
Prims.int -> Prims.string -> 'uuuuu -> Prims.string\n  =\n  fun arity ->\n    fun s ->\n      fun r ->\n        let name_of_char uu___ =\n          match uu___ with\n          | 38 -> \"Amp\"\n          | 64 -> \"At\"\n          | 43 -> \"Plus\"\n          | 45 when arity = Prims.int_one -> \"Minus\"\n          | 45 -> \"Subtraction\"\n          | 126 -> \"Tilde\"\n          | 47 -> \"Slash\"\n          | 92 -> \"Backslash\"\n          | 60 -> \"Less\"\n          | 61 -> \"Equals\"\n          | 62 -> \"Greater\"\n          | 95 -> \"Underscore\"\n          | 124 -> \"Bar\"\n          | 33 -> \"Bang\"\n          | 94 -> \"Hat\"\n          | 37 -> \"Percent\"\n          | 42 -> \"Star\"\n          | 63 -> \"Question\"\n          | 58 -> \"Colon\"\n          | 36 -> \"Dollar\"\n          | 46 -> \"Dot\"\n          | c ->\n              let uu___1 =\n                FStar_Compiler_Util.string_of_int\n                  (FStar_Compiler_Util.int_of_char c) in\n              Prims.op_Hat \"u\" uu___1 in\n        match s with\n        | \".[]<-\" -> \"op_String_Assignment\"\n        | \".()<-\" -> \"op_Array_Assignment\"\n        | \".[||]<-\" -> \"op_Brack_Lens_Assignment\"\n        | \".(||)<-\" -> \"op_Lens_Assignment\"\n        | \".[]\" -> \"op_String_Access\"\n        | \".()\" -> \"op_Array_Access\"\n        | \".[||]\" -> \"op_Brack_Lens_Access\"\n        | \".(||)\" -> \"op_Lens_Access\"\n        | uu___ ->\n            let uu___1 =\n              if\n                (FStar_Compiler_Util.starts_with s \"let\") ||\n                  (FStar_Compiler_Util.starts_with s \"and\")\n              then\n                let uu___2 =\n                  let uu___3 =\n                    FStar_Compiler_Util.substring s Prims.int_zero\n                      (Prims.of_int (3)) in\n                  Prims.op_Hat uu___3 \"_\" in\n                let uu___3 =\n                  FStar_Compiler_Util.substring_from s (Prims.of_int (3)) in\n                (uu___2, uu___3)\n            
  else (\"\", s) in\n            (match uu___1 with\n             | (prefix, s1) ->\n                 let uu___2 =\n                   let uu___3 =\n                     let uu___4 =\n                       let uu___5 = FStar_String.list_of_string s1 in\n                       FStar_Compiler_List.map name_of_char uu___5 in\n                     FStar_String.concat \"_\" uu___4 in\n                   Prims.op_Hat prefix uu___3 in\n                 Prims.op_Hat \"op_\" uu___2)\nlet compile_op' : 'uuuuu . Prims.string -> 'uuuuu -> Prims.string =\n  fun s -> fun r -> compile_op (~- Prims.int_one) s r\nlet (string_to_op :\n  Prims.string ->\n    (Prims.string * Prims.int FStar_Pervasives_Native.option)\n      FStar_Pervasives_Native.option)\n  =\n  fun s ->\n    let name_of_op uu___ =\n      match uu___ with\n      | \"Amp\" ->\n          FStar_Pervasives_Native.Some (\"&\", FStar_Pervasives_Native.None)\n      | \"At\" ->\n          FStar_Pervasives_Native.Some (\"@\", FStar_Pervasives_Native.None)\n      | \"Plus\" ->\n          FStar_Pervasives_Native.Some (\"+\", FStar_Pervasives_Native.None)\n      | \"Minus\" ->\n          FStar_Pervasives_Native.Some (\"-\", FStar_Pervasives_Native.None)\n      | \"Subtraction\" ->\n          FStar_Pervasives_Native.Some\n            (\"-\", (FStar_Pervasives_Native.Some (Prims.of_int (2))))\n      | \"Tilde\" ->\n          FStar_Pervasives_Native.Some (\"~\", FStar_Pervasives_Native.None)\n      | \"Slash\" ->\n          FStar_Pervasives_Native.Some (\"/\", FStar_Pervasives_Native.None)\n      | \"Backslash\" ->\n          FStar_Pervasives_Native.Some (\"\\\\\", FStar_Pervasives_Native.None)\n      | \"Less\" ->\n          FStar_Pervasives_Native.Some (\"<\", FStar_Pervasives_Native.None)\n      | \"Equals\" ->\n          FStar_Pervasives_Native.Some (\"=\", FStar_Pervasives_Native.None)\n      | \"Greater\" ->\n          FStar_Pervasives_Native.Some (\">\", FStar_Pervasives_Native.None)\n      | \"Underscore\" ->\n          
FStar_Pervasives_Native.Some (\"_\", FStar_Pervasives_Native.None)\n      | \"Bar\" ->\n          FStar_Pervasives_Native.Some (\"|\", FStar_Pervasives_Native.None)\n      | \"Bang\" ->\n          FStar_Pervasives_Native.Some (\"!\", FStar_Pervasives_Native.None)\n      | \"Hat\" ->\n          FStar_Pervasives_Native.Some (\"^\", FStar_Pervasives_Native.None)\n      | \"Percent\" ->\n          FStar_Pervasives_Native.Some (\"%\", FStar_Pervasives_Native.None)\n      | \"Star\" ->\n          FStar_Pervasives_Native.Some (\"*\", FStar_Pervasives_Native.None)\n      | \"Question\" ->\n          FStar_Pervasives_Native.Some (\"?\", FStar_Pervasives_Native.None)\n      | \"Colon\" ->\n          FStar_Pervasives_Native.Some (\":\", FStar_Pervasives_Native.None)\n      | \"Dollar\" ->\n          FStar_Pervasives_Native.Some (\"$\", FStar_Pervasives_Native.None)\n      | \"Dot\" ->\n          FStar_Pervasives_Native.Some (\".\", FStar_Pervasives_Native.None)\n      | \"let\" ->\n          FStar_Pervasives_Native.Some (s, FStar_Pervasives_Native.None)\n      | \"and\" ->\n          FStar_Pervasives_Native.Some (s, FStar_Pervasives_Native.None)\n      | uu___1 -> FStar_Pervasives_Native.None in\n    match s with\n    | \"op_String_Assignment\" ->\n        FStar_Pervasives_Native.Some (\".[]<-\", FStar_Pervasives_Native.None)\n    | \"op_Array_Assignment\" ->\n        FStar_Pervasives_Native.Some (\".()<-\", FStar_Pervasives_Native.None)\n    | \"op_Brack_Lens_Assignment\" ->\n        FStar_Pervasives_Native.Some\n          (\".[||]<-\", FStar_Pervasives_Native.None)\n    | \"op_Lens_Assignment\" ->\n        FStar_Pervasives_Native.Some\n          (\".(||)<-\", FStar_Pervasives_Native.None)\n    | \"op_String_Access\" ->\n        FStar_Pervasives_Native.Some (\".[]\", FStar_Pervasives_Native.None)\n    | \"op_Array_Access\" ->\n        FStar_Pervasives_Native.Some (\".()\", FStar_Pervasives_Native.None)\n    | \"op_Brack_Lens_Access\" ->\n        FStar_Pervasives_Native.Some 
(\".[||]\", FStar_Pervasives_Native.None)\n    | \"op_Lens_Access\" ->\n        FStar_Pervasives_Native.Some (\".(||)\", FStar_Pervasives_Native.None)\n    | uu___ ->\n        if FStar_Compiler_Util.starts_with s \"op_\"\n        then\n          let s1 =\n            let uu___1 =\n              FStar_Compiler_Util.substring_from s\n                (FStar_String.length \"op_\") in\n            FStar_Compiler_Util.split uu___1 \"_\" in\n          (match s1 with\n           | op::[] ->\n               if FStar_Compiler_Util.starts_with op \"u\"\n               then\n                 let uu___1 =\n                   let uu___2 =\n                     FStar_Compiler_Util.substring_from op Prims.int_one in\n                   FStar_Compiler_Util.safe_int_of_string uu___2 in\n                 FStar_Compiler_Util.map_opt uu___1\n                   (fun op1 ->\n                      ((FStar_Compiler_Util.string_of_char\n                          (FStar_Compiler_Util.char_of_int op1)),\n                        FStar_Pervasives_Native.None))\n               else name_of_op op\n           | uu___1 ->\n               let maybeop =\n                 let uu___2 = FStar_Compiler_List.map name_of_op s1 in\n                 FStar_Compiler_List.fold_left\n                   (fun acc ->\n                      fun x ->\n                        match acc with\n                        | FStar_Pervasives_Native.None ->\n                            FStar_Pervasives_Native.None\n                        | FStar_Pervasives_Native.Some acc1 ->\n                            (match x with\n                             | FStar_Pervasives_Native.Some (op, uu___3) ->\n                                 FStar_Pervasives_Native.Some\n                                   (Prims.op_Hat acc1 op)\n                             | FStar_Pervasives_Native.None ->\n                                 FStar_Pervasives_Native.None))\n                   (FStar_Pervasives_Native.Some \"\") uu___2 in\n               
FStar_Compiler_Util.map_opt maybeop\n                 (fun o -> (o, FStar_Pervasives_Native.None)))\n        else FStar_Pervasives_Native.None\nlet (string_of_fsdoc :\n  (Prims.string * (Prims.string * Prims.string) Prims.list) -> Prims.string)\n  =\n  fun uu___ ->\n    match uu___ with\n    | (comment, keywords) ->\n        let uu___1 =\n          let uu___2 =\n            FStar_Compiler_List.map\n              (fun uu___3 ->\n                 match uu___3 with\n                 | (k, v) -> Prims.op_Hat k (Prims.op_Hat \"->\" v)) keywords in\n          FStar_String.concat \",\" uu___2 in\n        Prims.op_Hat comment uu___1\nlet (string_of_let_qualifier : let_qualifier -> Prims.string) =\n  fun uu___ -> match uu___ with | NoLetQualifier -> \"\" | Rec -> \"rec\"\nlet to_string_l :\n  'uuuuu .\n    Prims.string ->\n      ('uuuuu -> Prims.string) -> 'uuuuu Prims.list -> Prims.string\n  =\n  fun sep ->\n    fun f ->\n      fun l ->\n        let uu___ = FStar_Compiler_List.map f l in\n        FStar_String.concat sep uu___\nlet (imp_to_string : imp -> Prims.string) =\n  fun uu___ -> match uu___ with | Hash -> \"#\" | uu___1 -> \"\"\nlet rec (term_to_string : term -> Prims.string) =\n  fun x ->\n    match x.tm with\n    | Wild -> \"_\"\n    | LexList l ->\n        let uu___ =\n          match l with\n          | [] -> \" \"\n          | hd::tl ->\n              let uu___1 =\n                let uu___2 = term_to_string hd in\n                FStar_Compiler_List.fold_left\n                  (fun s ->\n                     fun t ->\n                       let uu___3 =\n                         let uu___4 = term_to_string t in\n                         Prims.op_Hat \"; \" uu___4 in\n                       Prims.op_Hat s uu___3) uu___2 in\n              FStar_Compiler_Effect.op_Bar_Greater tl uu___1 in\n        FStar_Compiler_Util.format1 \"%[%s]\" uu___\n    | Decreases (t, uu___) ->\n        let uu___1 = term_to_string t in\n        FStar_Compiler_Util.format1 \"(decreases 
%s)\" uu___1\n    | Requires (t, uu___) ->\n        let uu___1 = term_to_string t in\n        FStar_Compiler_Util.format1 \"(requires %s)\" uu___1\n    | Ensures (t, uu___) ->\n        let uu___1 = term_to_string t in\n        FStar_Compiler_Util.format1 \"(ensures %s)\" uu___1\n    | Labeled (t, l, uu___) ->\n        let uu___1 = term_to_string t in\n        FStar_Compiler_Util.format2 \"(labeled %s %s)\" l uu___1\n    | Const c -> FStar_Parser_Const.const_to_string c\n    | Op (s, xs) ->\n        let uu___ = FStar_Ident.string_of_id s in\n        let uu___1 =\n          let uu___2 =\n            FStar_Compiler_List.map\n              (fun x1 ->\n                 FStar_Compiler_Effect.op_Bar_Greater x1 term_to_string) xs in\n          FStar_String.concat \", \" uu___2 in\n        FStar_Compiler_Util.format2 \"%s(%s)\" uu___ uu___1\n    | Tvar id -> FStar_Ident.string_of_id id\n    | Uvar id -> FStar_Ident.string_of_id id\n    | Var l -> FStar_Ident.string_of_lid l\n    | Name l -> FStar_Ident.string_of_lid l\n    | Projector (rec_lid, field_id) ->\n        let uu___ = FStar_Ident.string_of_lid rec_lid in\n        let uu___1 = FStar_Ident.string_of_id field_id in\n        FStar_Compiler_Util.format2 \"%s?.%s\" uu___ uu___1\n    | Construct (l, args) ->\n        let uu___ = FStar_Ident.string_of_lid l in\n        let uu___1 =\n          to_string_l \" \"\n            (fun uu___2 ->\n               match uu___2 with\n               | (a, imp1) ->\n                   let uu___3 = term_to_string a in\n                   FStar_Compiler_Util.format2 \"%s%s\" (imp_to_string imp1)\n                     uu___3) args in\n        FStar_Compiler_Util.format2 \"(%s %s)\" uu___ uu___1\n    | Abs (pats, t) ->\n        let uu___ = to_string_l \" \" pat_to_string pats in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n        FStar_Compiler_Util.format2 \"(fun %s -> %s)\" uu___ uu___1\n    | App (t1, t2, imp1) ->\n        let uu___ = 
FStar_Compiler_Effect.op_Bar_Greater t1 term_to_string in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t2 term_to_string in\n        FStar_Compiler_Util.format3 \"%s %s%s\" uu___ (imp_to_string imp1)\n          uu___1\n    | Let (Rec, (a, (p, b))::lbs, body) ->\n        let uu___ = attrs_opt_to_string a in\n        let uu___1 =\n          let uu___2 = FStar_Compiler_Effect.op_Bar_Greater p pat_to_string in\n          let uu___3 = FStar_Compiler_Effect.op_Bar_Greater b term_to_string in\n          FStar_Compiler_Util.format2 \"%s=%s\" uu___2 uu___3 in\n        let uu___2 =\n          to_string_l \" \"\n            (fun uu___3 ->\n               match uu___3 with\n               | (a1, (p1, b1)) ->\n                   let uu___4 = attrs_opt_to_string a1 in\n                   let uu___5 =\n                     FStar_Compiler_Effect.op_Bar_Greater p1 pat_to_string in\n                   let uu___6 =\n                     FStar_Compiler_Effect.op_Bar_Greater b1 term_to_string in\n                   FStar_Compiler_Util.format3 \"%sand %s=%s\" uu___4 uu___5\n                     uu___6) lbs in\n        let uu___3 = FStar_Compiler_Effect.op_Bar_Greater body term_to_string in\n        FStar_Compiler_Util.format4 \"%slet rec %s%s in %s\" uu___ uu___1\n          uu___2 uu___3\n    | Let (q, (attrs, (pat, tm))::[], body) ->\n        let uu___ = attrs_opt_to_string attrs in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater pat pat_to_string in\n        let uu___2 = FStar_Compiler_Effect.op_Bar_Greater tm term_to_string in\n        let uu___3 = FStar_Compiler_Effect.op_Bar_Greater body term_to_string in\n        FStar_Compiler_Util.format5 \"%slet %s %s = %s in %s\" uu___\n          (string_of_let_qualifier q) uu___1 uu___2 uu___3\n    | Let (uu___, uu___1, uu___2) ->\n        FStar_Errors.raise_error\n          (FStar_Errors_Codes.Fatal_EmptySurfaceLet,\n            \"Internal error: found an invalid surface Let\") x.range\n    | LetOpen (lid, t) ->\n    
    let uu___ = FStar_Ident.string_of_lid lid in\n        let uu___1 = term_to_string t in\n        FStar_Compiler_Util.format2 \"let open %s in %s\" uu___ uu___1\n    | Seq (t1, t2) ->\n        let uu___ = FStar_Compiler_Effect.op_Bar_Greater t1 term_to_string in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t2 term_to_string in\n        FStar_Compiler_Util.format2 \"%s; %s\" uu___ uu___1\n    | Bind (id, t1, t2) ->\n        let uu___ = FStar_Ident.string_of_id id in\n        let uu___1 = term_to_string t1 in\n        let uu___2 = term_to_string t2 in\n        FStar_Compiler_Util.format3 \"%s <- %s; %s\" uu___ uu___1 uu___2\n    | If (t1, op_opt, ret_opt, t2, t3) ->\n        let uu___ =\n          match op_opt with\n          | FStar_Pervasives_Native.Some op -> FStar_Ident.string_of_id op\n          | FStar_Pervasives_Native.None -> \"\" in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t1 term_to_string in\n        let uu___2 =\n          match ret_opt with\n          | FStar_Pervasives_Native.None -> \"\"\n          | FStar_Pervasives_Native.Some (as_opt, ret, use_eq) ->\n              let s = if use_eq then \"returns$\" else \"returns\" in\n              let uu___3 =\n                match as_opt with\n                | FStar_Pervasives_Native.None -> \"\"\n                | FStar_Pervasives_Native.Some as_ident ->\n                    let uu___4 = FStar_Ident.string_of_id as_ident in\n                    FStar_Compiler_Util.format1 \" as %s \" uu___4 in\n              let uu___4 = term_to_string ret in\n              FStar_Compiler_Util.format3 \"%s%s %s \" uu___3 s uu___4 in\n        let uu___3 = FStar_Compiler_Effect.op_Bar_Greater t2 term_to_string in\n        let uu___4 = FStar_Compiler_Effect.op_Bar_Greater t3 term_to_string in\n        FStar_Compiler_Util.format5 \"if%s %s %sthen %s else %s\" uu___ uu___1\n          uu___2 uu___3 uu___4\n    | Match (t, op_opt, ret_opt, branches) ->\n        try_or_match_to_string x t branches 
op_opt ret_opt\n    | TryWith (t, branches) ->\n        try_or_match_to_string x t branches FStar_Pervasives_Native.None\n          FStar_Pervasives_Native.None\n    | Ascribed (t1, t2, FStar_Pervasives_Native.None, flag) ->\n        let s = if flag then \"$:\" else \"<:\" in\n        let uu___ = FStar_Compiler_Effect.op_Bar_Greater t1 term_to_string in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t2 term_to_string in\n        FStar_Compiler_Util.format3 \"(%s %s %s)\" uu___ s uu___1\n    | Ascribed (t1, t2, FStar_Pervasives_Native.Some tac, flag) ->\n        let s = if flag then \"$:\" else \"<:\" in\n        let uu___ = FStar_Compiler_Effect.op_Bar_Greater t1 term_to_string in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t2 term_to_string in\n        let uu___2 = FStar_Compiler_Effect.op_Bar_Greater tac term_to_string in\n        FStar_Compiler_Util.format4 \"(%s %s %s by %s)\" uu___ s uu___1 uu___2\n    | Record (FStar_Pervasives_Native.Some e, fields) ->\n        let uu___ = FStar_Compiler_Effect.op_Bar_Greater e term_to_string in\n        let uu___1 =\n          to_string_l \" \"\n            (fun uu___2 ->\n               match uu___2 with\n               | (l, e1) ->\n                   let uu___3 = FStar_Ident.string_of_lid l in\n                   let uu___4 =\n                     FStar_Compiler_Effect.op_Bar_Greater e1 term_to_string in\n                   FStar_Compiler_Util.format2 \"%s=%s\" uu___3 uu___4) fields in\n        FStar_Compiler_Util.format2 \"{%s with %s}\" uu___ uu___1\n    | Record (FStar_Pervasives_Native.None, fields) ->\n        let uu___ =\n          to_string_l \" \"\n            (fun uu___1 ->\n               match uu___1 with\n               | (l, e) ->\n                   let uu___2 = FStar_Ident.string_of_lid l in\n                   let uu___3 =\n                     FStar_Compiler_Effect.op_Bar_Greater e term_to_string in\n                   FStar_Compiler_Util.format2 \"%s=%s\" uu___2 uu___3) fields 
in\n        FStar_Compiler_Util.format1 \"{%s}\" uu___\n    | Project (e, l) ->\n        let uu___ = FStar_Compiler_Effect.op_Bar_Greater e term_to_string in\n        let uu___1 = FStar_Ident.string_of_lid l in\n        FStar_Compiler_Util.format2 \"%s.%s\" uu___ uu___1\n    | Product ([], t) -> term_to_string t\n    | Product (b::hd::tl, t) ->\n        term_to_string\n          (mk_term\n             (Product\n                ([b], (mk_term (Product ((hd :: tl), t)) x.range x.level)))\n             x.range x.level)\n    | Product (b::[], t) when x.level = Type_level ->\n        let uu___ = FStar_Compiler_Effect.op_Bar_Greater b binder_to_string in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n        FStar_Compiler_Util.format2 \"%s -> %s\" uu___ uu___1\n    | Product (b::[], t) when x.level = Kind ->\n        let uu___ = FStar_Compiler_Effect.op_Bar_Greater b binder_to_string in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n        FStar_Compiler_Util.format2 \"%s => %s\" uu___ uu___1\n    | Sum (binders, t) ->\n        let uu___ =\n          FStar_Compiler_Effect.op_Bar_Greater\n            (FStar_Compiler_List.op_At binders [FStar_Pervasives.Inr t])\n            (FStar_Compiler_List.map\n               (fun uu___1 ->\n                  match uu___1 with\n                  | FStar_Pervasives.Inl b -> binder_to_string b\n                  | FStar_Pervasives.Inr t1 -> term_to_string t1)) in\n        FStar_Compiler_Effect.op_Bar_Greater uu___\n          (FStar_String.concat \" & \")\n    | QForall (bs, (uu___, pats), t) ->\n        let uu___1 = to_string_l \" \" binder_to_string bs in\n        let uu___2 =\n          to_string_l \" \\\\/ \" (to_string_l \"; \" term_to_string) pats in\n        let uu___3 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n        FStar_Compiler_Util.format3 \"forall %s.{:pattern %s} %s\" uu___1\n          uu___2 uu___3\n    | QExists (bs, (uu___, pats), t) 
->\n        let uu___1 = to_string_l \" \" binder_to_string bs in\n        let uu___2 =\n          to_string_l \" \\\\/ \" (to_string_l \"; \" term_to_string) pats in\n        let uu___3 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n        FStar_Compiler_Util.format3 \"exists %s.{:pattern %s} %s\" uu___1\n          uu___2 uu___3\n    | Refine (b, t) ->\n        let uu___ = FStar_Compiler_Effect.op_Bar_Greater b binder_to_string in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n        FStar_Compiler_Util.format2 \"%s:{%s}\" uu___ uu___1\n    | NamedTyp (x1, t) ->\n        let uu___ = FStar_Ident.string_of_id x1 in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n        FStar_Compiler_Util.format2 \"%s:%s\" uu___ uu___1\n    | Paren t ->\n        let uu___ = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n        FStar_Compiler_Util.format1 \"(%s)\" uu___\n    | Product (bs, t) ->\n        let uu___ =\n          let uu___1 =\n            FStar_Compiler_Effect.op_Bar_Greater bs\n              (FStar_Compiler_List.map binder_to_string) in\n          FStar_Compiler_Effect.op_Bar_Greater uu___1\n            (FStar_String.concat \",\") in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n        FStar_Compiler_Util.format2 \"Unidentified product: [%s] %s\" uu___\n          uu___1\n    | Discrim lid ->\n        let uu___ = FStar_Ident.string_of_lid lid in\n        FStar_Compiler_Util.format1 \"%s?\" uu___\n    | Attributes ts ->\n        let uu___ =\n          let uu___1 = FStar_Compiler_List.map term_to_string ts in\n          FStar_Compiler_Effect.op_Less_Bar (FStar_String.concat \" \") uu___1 in\n        FStar_Compiler_Util.format1 \"(attributes %s)\" uu___\n    | Antiquote t ->\n        let uu___ = term_to_string t in\n        FStar_Compiler_Util.format1 \"(`#%s)\" uu___\n    | Quote (t, Static) ->\n        let uu___ = term_to_string t in\n        
FStar_Compiler_Util.format1 \"(`(%s))\" uu___\n    | Quote (t, Dynamic) ->\n        let uu___ = term_to_string t in\n        FStar_Compiler_Util.format1 \"quote (%s)\" uu___\n    | VQuote t ->\n        let uu___ = term_to_string t in\n        FStar_Compiler_Util.format1 \"`%%%s\" uu___\n    | CalcProof (rel, init, steps) ->\n        let uu___ = term_to_string rel in\n        let uu___1 = term_to_string init in\n        let uu___2 =\n          let uu___3 = FStar_Compiler_List.map calc_step_to_string steps in\n          FStar_Compiler_Effect.op_Less_Bar (FStar_String.concat \" \") uu___3 in\n        FStar_Compiler_Util.format3 \"calc (%s) { %s %s }\" uu___ uu___1 uu___2\n    | ElimForall (bs, t, vs) ->\n        let uu___ = binders_to_string \" \" bs in\n        let uu___1 = term_to_string t in\n        let uu___2 =\n          let uu___3 = FStar_Compiler_List.map term_to_string vs in\n          FStar_String.concat \" \" uu___3 in\n        FStar_Compiler_Util.format3 \"_elim_ forall %s. %s using %s\" uu___\n          uu___1 uu___2\n    | ElimExists (bs, p, q, b, e) ->\n        let uu___ = binders_to_string \" \" bs in\n        let uu___1 = term_to_string p in\n        let uu___2 = term_to_string q in\n        let uu___3 = binder_to_string b in\n        let uu___4 = term_to_string e in\n        FStar_Compiler_Util.format5\n          \"_elim_ exists %s. %s _to_ %s\\n\\\\with %s. 
%s\" uu___ uu___1 uu___2\n          uu___3 uu___4\n    | ElimImplies (p, q, e) ->\n        let uu___ = term_to_string p in\n        let uu___1 = term_to_string q in\n        let uu___2 = term_to_string e in\n        FStar_Compiler_Util.format3 \"_elim_ %s ==> %s with %s\" uu___ uu___1\n          uu___2\n    | ElimOr (p, q, r, x1, e, y, e') ->\n        let uu___ =\n          let uu___1 = term_to_string p in\n          let uu___2 =\n            let uu___3 = term_to_string q in\n            let uu___4 =\n              let uu___5 = term_to_string r in\n              let uu___6 =\n                let uu___7 = binder_to_string x1 in\n                let uu___8 =\n                  let uu___9 = term_to_string e in\n                  let uu___10 =\n                    let uu___11 = binder_to_string y in\n                    let uu___12 =\n                      let uu___13 = term_to_string e' in [uu___13] in\n                    uu___11 :: uu___12 in\n                  uu___9 :: uu___10 in\n                uu___7 :: uu___8 in\n              uu___5 :: uu___6 in\n            uu___3 :: uu___4 in\n          uu___1 :: uu___2 in\n        FStar_Compiler_Util.format\n          \"_elim_ %s \\\\/ %s _to_ %s\\n\\\\with %s. 
%s\\n\\\\and %s.%s\" uu___\n    | ElimAnd (p, q, r, x1, y, e) ->\n        let uu___ =\n          let uu___1 = term_to_string p in\n          let uu___2 =\n            let uu___3 = term_to_string q in\n            let uu___4 =\n              let uu___5 = term_to_string r in\n              let uu___6 =\n                let uu___7 = binder_to_string x1 in\n                let uu___8 =\n                  let uu___9 = binder_to_string y in\n                  let uu___10 = let uu___11 = term_to_string e in [uu___11] in\n                  uu___9 :: uu___10 in\n                uu___7 :: uu___8 in\n              uu___5 :: uu___6 in\n            uu___3 :: uu___4 in\n          uu___1 :: uu___2 in\n        FStar_Compiler_Util.format\n          \"_elim_ %s /\\\\ %s _to_ %s\\n\\\\with %s %s. %s\" uu___\n    | IntroForall (xs, p, e) ->\n        let uu___ = binders_to_string \" \" xs in\n        let uu___1 = term_to_string p in\n        let uu___2 = term_to_string e in\n        FStar_Compiler_Util.format3 \"_intro_ forall %s. %s with %s\" uu___\n          uu___1 uu___2\n    | IntroExists (xs, t, vs, e) ->\n        let uu___ = binders_to_string \" \" xs in\n        let uu___1 = term_to_string t in\n        let uu___2 =\n          let uu___3 = FStar_Compiler_List.map term_to_string vs in\n          FStar_String.concat \" \" uu___3 in\n        let uu___3 = term_to_string e in\n        FStar_Compiler_Util.format4 \"_intro_ exists %s. %s using %s with %s\"\n          uu___ uu___1 uu___2 uu___3\n    | IntroImplies (p, q, x1, e) ->\n        let uu___ = term_to_string p in\n        let uu___1 = term_to_string q in\n        let uu___2 = binder_to_string x1 in\n        let uu___3 = term_to_string p in\n        FStar_Compiler_Util.format4 \"_intro_ %s ==> %s with %s. 
%s\" uu___\n          uu___1 uu___2 uu___3\n    | IntroOr (b, p, q, r) ->\n        let uu___ = term_to_string p in\n        let uu___1 = term_to_string q in\n        let uu___2 = term_to_string r in\n        FStar_Compiler_Util.format4 \"_intro_ %s \\\\/ %s using %s with %s\"\n          uu___ uu___1 (if b then \"Left\" else \"Right\") uu___2\n    | IntroAnd (p, q, e1, e2) ->\n        let uu___ = term_to_string p in\n        let uu___1 = term_to_string q in\n        let uu___2 = term_to_string e1 in\n        let uu___3 = term_to_string e2 in\n        FStar_Compiler_Util.format4 \"_intro_ %s /\\\\ %s with %s and %s\" uu___\n          uu___1 uu___2 uu___3\nand (binders_to_string : Prims.string -> binder Prims.list -> Prims.string) =\n  fun sep ->\n    fun bs ->\n      let uu___ = FStar_Compiler_List.map binder_to_string bs in\n      FStar_Compiler_Effect.op_Bar_Greater uu___ (FStar_String.concat sep)\nand (try_or_match_to_string :\n  term ->\n    term ->\n      (pattern * term FStar_Pervasives_Native.option * term) Prims.list ->\n        FStar_Ident.ident FStar_Pervasives_Native.option ->\n          (FStar_Ident.ident FStar_Pervasives_Native.option * term *\n            Prims.bool) FStar_Pervasives_Native.option -> Prims.string)\n  =\n  fun x ->\n    fun scrutinee ->\n      fun branches ->\n        fun op_opt ->\n          fun ret_opt ->\n            let s =\n              match x.tm with\n              | Match uu___ -> \"match\"\n              | TryWith uu___ -> \"try\"\n              | uu___ -> failwith \"impossible\" in\n            let uu___ =\n              match op_opt with\n              | FStar_Pervasives_Native.Some op ->\n                  FStar_Ident.string_of_id op\n              | FStar_Pervasives_Native.None -> \"\" in\n            let uu___1 =\n              FStar_Compiler_Effect.op_Bar_Greater scrutinee term_to_string in\n            let uu___2 =\n              match ret_opt with\n              | FStar_Pervasives_Native.None -> \"\"\n              | 
FStar_Pervasives_Native.Some (as_opt, ret, use_eq) ->\n                  let s1 = if use_eq then \"returns$\" else \"returns\" in\n                  let uu___3 =\n                    match as_opt with\n                    | FStar_Pervasives_Native.None -> \"\"\n                    | FStar_Pervasives_Native.Some as_ident ->\n                        let uu___4 = FStar_Ident.string_of_id as_ident in\n                        FStar_Compiler_Util.format1 \"as %s \" uu___4 in\n                  let uu___4 = term_to_string ret in\n                  FStar_Compiler_Util.format3 \"%s%s %s \" s1 uu___3 uu___4 in\n            let uu___3 =\n              to_string_l \" | \"\n                (fun uu___4 ->\n                   match uu___4 with\n                   | (p, w, e) ->\n                       let uu___5 =\n                         FStar_Compiler_Effect.op_Bar_Greater p pat_to_string in\n                       let uu___6 =\n                         match w with\n                         | FStar_Pervasives_Native.None -> \"\"\n                         | FStar_Pervasives_Native.Some e1 ->\n                             let uu___7 = term_to_string e1 in\n                             FStar_Compiler_Util.format1 \"when %s\" uu___7 in\n                       let uu___7 =\n                         FStar_Compiler_Effect.op_Bar_Greater e\n                           term_to_string in\n                       FStar_Compiler_Util.format3 \"%s %s -> %s\" uu___5\n                         uu___6 uu___7) branches in\n            FStar_Compiler_Util.format5 \"%s%s %s %swith %s\" s uu___ uu___1\n              uu___2 uu___3\nand (calc_step_to_string : calc_step -> Prims.string) =\n  fun uu___ ->\n    match uu___ with\n    | CalcStep (rel, just, next) ->\n        let uu___1 = term_to_string rel in\n        let uu___2 = term_to_string just in\n        let uu___3 = term_to_string next in\n        FStar_Compiler_Util.format3 \"%s{ %s } %s\" uu___1 uu___2 uu___3\nand (binder_to_string : binder -> 
Prims.string) =\n  fun x ->\n    let pr x1 =\n      let s =\n        match x1.b with\n        | Variable i -> FStar_Ident.string_of_id i\n        | TVariable i ->\n            let uu___ = FStar_Ident.string_of_id i in\n            FStar_Compiler_Util.format1 \"%s:_\" uu___\n        | TAnnotated (i, t) ->\n            let uu___ = FStar_Ident.string_of_id i in\n            let uu___1 =\n              FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n            FStar_Compiler_Util.format2 \"%s:%s\" uu___ uu___1\n        | Annotated (i, t) ->\n            let uu___ = FStar_Ident.string_of_id i in\n            let uu___1 =\n              FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n            FStar_Compiler_Util.format2 \"%s:%s\" uu___ uu___1\n        | NoName t -> FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n      let uu___ = aqual_to_string x1.aqual in\n      let uu___1 = attr_list_to_string x1.battributes in\n      FStar_Compiler_Util.format3 \"%s%s%s\" uu___ uu___1 s in\n    match x.aqual with\n    | FStar_Pervasives_Native.Some (TypeClassArg) ->\n        let uu___ = let uu___1 = pr x in Prims.op_Hat uu___1 \" |}\" in\n        Prims.op_Hat \"{| \" uu___\n    | uu___ -> pr x\nand (aqual_to_string :\n  arg_qualifier FStar_Pervasives_Native.option -> Prims.string) =\n  fun uu___ ->\n    match uu___ with\n    | FStar_Pervasives_Native.Some (Equality) -> \"$\"\n    | FStar_Pervasives_Native.Some (Implicit) -> \"#\"\n    | FStar_Pervasives_Native.None -> \"\"\n    | FStar_Pervasives_Native.Some (Meta uu___1) ->\n        failwith \"aqual_to_strings: meta arg qualifier?\"\n    | FStar_Pervasives_Native.Some (TypeClassArg) ->\n        failwith \"aqual_to_strings: meta arg qualifier?\"\nand (attr_list_to_string : term Prims.list -> Prims.string) =\n  fun uu___ ->\n    match uu___ with\n    | [] -> \"\"\n    | l -> attrs_opt_to_string (FStar_Pervasives_Native.Some l)\nand (pat_to_string : pattern -> Prims.string) =\n  fun x ->\n    match 
x.pat with\n    | PatWild (FStar_Pervasives_Native.None, attrs) ->\n        let uu___ = attr_list_to_string attrs in Prims.op_Hat uu___ \"_\"\n    | PatWild (uu___, attrs) ->\n        let uu___1 =\n          let uu___2 = attr_list_to_string attrs in Prims.op_Hat uu___2 \"_\" in\n        Prims.op_Hat \"#\" uu___1\n    | PatConst c -> FStar_Parser_Const.const_to_string c\n    | PatVQuote t ->\n        let uu___ = term_to_string t in\n        FStar_Compiler_Util.format1 \"`%%%s\" uu___\n    | PatApp (p, ps) ->\n        let uu___ = FStar_Compiler_Effect.op_Bar_Greater p pat_to_string in\n        let uu___1 = to_string_l \" \" pat_to_string ps in\n        FStar_Compiler_Util.format2 \"(%s %s)\" uu___ uu___1\n    | PatTvar (i, aq, attrs) ->\n        let uu___ = aqual_to_string aq in\n        let uu___1 = attr_list_to_string attrs in\n        let uu___2 = FStar_Ident.string_of_id i in\n        FStar_Compiler_Util.format3 \"%s%s%s\" uu___ uu___1 uu___2\n    | PatVar (i, aq, attrs) ->\n        let uu___ = aqual_to_string aq in\n        let uu___1 = attr_list_to_string attrs in\n        let uu___2 = FStar_Ident.string_of_id i in\n        FStar_Compiler_Util.format3 \"%s%s%s\" uu___ uu___1 uu___2\n    | PatName l -> FStar_Ident.string_of_lid l\n    | PatList l ->\n        let uu___ = to_string_l \"; \" pat_to_string l in\n        FStar_Compiler_Util.format1 \"[%s]\" uu___\n    | PatTuple (l, false) ->\n        let uu___ = to_string_l \", \" pat_to_string l in\n        FStar_Compiler_Util.format1 \"(%s)\" uu___\n    | PatTuple (l, true) ->\n        let uu___ = to_string_l \", \" pat_to_string l in\n        FStar_Compiler_Util.format1 \"(|%s|)\" uu___\n    | PatRecord l ->\n        let uu___ =\n          to_string_l \"; \"\n            (fun uu___1 ->\n               match uu___1 with\n               | (f, e) ->\n                   let uu___2 = FStar_Ident.string_of_lid f in\n                   let uu___3 =\n                     FStar_Compiler_Effect.op_Bar_Greater e 
pat_to_string in\n                   FStar_Compiler_Util.format2 \"%s=%s\" uu___2 uu___3) l in\n        FStar_Compiler_Util.format1 \"{%s}\" uu___\n    | PatOr l -> to_string_l \"|\\n \" pat_to_string l\n    | PatOp op ->\n        let uu___ = FStar_Ident.string_of_id op in\n        FStar_Compiler_Util.format1 \"(%s)\" uu___\n    | PatAscribed (p, (t, FStar_Pervasives_Native.None)) ->\n        let uu___ = FStar_Compiler_Effect.op_Bar_Greater p pat_to_string in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n        FStar_Compiler_Util.format2 \"(%s:%s)\" uu___ uu___1\n    | PatAscribed (p, (t, FStar_Pervasives_Native.Some tac)) ->\n        let uu___ = FStar_Compiler_Effect.op_Bar_Greater p pat_to_string in\n        let uu___1 = FStar_Compiler_Effect.op_Bar_Greater t term_to_string in\n        let uu___2 = FStar_Compiler_Effect.op_Bar_Greater tac term_to_string in\n        FStar_Compiler_Util.format3 \"(%s:%s by %s)\" uu___ uu___1 uu___2\nand (attrs_opt_to_string :\n  term Prims.list FStar_Pervasives_Native.option -> Prims.string) =\n  fun uu___ ->\n    match uu___ with\n    | FStar_Pervasives_Native.None -> \"\"\n    | FStar_Pervasives_Native.Some attrs ->\n        let uu___1 =\n          let uu___2 = FStar_Compiler_List.map term_to_string attrs in\n          FStar_Compiler_Effect.op_Bar_Greater uu___2\n            (FStar_String.concat \"; \") in\n        FStar_Compiler_Util.format1 \"[@ %s]\" uu___1\nlet rec (head_id_of_pat : pattern -> FStar_Ident.lident Prims.list) =\n  fun p ->\n    match p.pat with\n    | PatName l -> [l]\n    | PatVar (i, uu___, uu___1) ->\n        let uu___2 = FStar_Ident.lid_of_ids [i] in [uu___2]\n    | PatApp (p1, uu___) -> head_id_of_pat p1\n    | PatAscribed (p1, uu___) -> head_id_of_pat p1\n    | uu___ -> []\nlet lids_of_let :\n  'uuuuu . 
(pattern * 'uuuuu) Prims.list -> FStar_Ident.lident Prims.list =\n  fun defs ->\n    FStar_Compiler_Effect.op_Bar_Greater defs\n      (FStar_Compiler_List.collect\n         (fun uu___ -> match uu___ with | (p, uu___1) -> head_id_of_pat p))\nlet (id_of_tycon : tycon -> Prims.string) =\n  fun uu___ ->\n    match uu___ with\n    | TyconAbstract (i, uu___1, uu___2) -> FStar_Ident.string_of_id i\n    | TyconAbbrev (i, uu___1, uu___2, uu___3) -> FStar_Ident.string_of_id i\n    | TyconRecord (i, uu___1, uu___2, uu___3, uu___4) ->\n        FStar_Ident.string_of_id i\n    | TyconVariant (i, uu___1, uu___2, uu___3) -> FStar_Ident.string_of_id i\nlet (string_of_pragma : pragma -> Prims.string) =\n  fun uu___ ->\n    match uu___ with\n    | SetOptions s -> FStar_Compiler_Util.format1 \"set-options \\\"%s\\\"\" s\n    | ResetOptions s ->\n        FStar_Compiler_Util.format1 \"reset-options \\\"%s\\\"\"\n          (FStar_Compiler_Util.dflt \"\" s)\n    | PushOptions s ->\n        FStar_Compiler_Util.format1 \"push-options \\\"%s\\\"\"\n          (FStar_Compiler_Util.dflt \"\" s)\n    | PopOptions -> \"pop-options\"\n    | RestartSolver -> \"restart-solver\"\n    | PrintEffectsGraph -> \"print-effects-graph\"\nlet (decl_to_string : decl -> Prims.string) =\n  fun d ->\n    match d.d with\n    | TopLevelModule l ->\n        let uu___ = FStar_Ident.string_of_lid l in\n        Prims.op_Hat \"module \" uu___\n    | Open l ->\n        let uu___ = FStar_Ident.string_of_lid l in Prims.op_Hat \"open \" uu___\n    | Friend l ->\n        let uu___ = FStar_Ident.string_of_lid l in\n        Prims.op_Hat \"friend \" uu___\n    | Include l ->\n        let uu___ = FStar_Ident.string_of_lid l in\n        Prims.op_Hat \"include \" uu___\n    | ModuleAbbrev (i, l) ->\n        let uu___ = FStar_Ident.string_of_id i in\n        let uu___1 = FStar_Ident.string_of_lid l in\n        FStar_Compiler_Util.format2 \"module %s = %s\" uu___ uu___1\n    | TopLevelLet (uu___, pats) ->\n        let uu___1 =\n    
      let uu___2 =\n            let uu___3 = lids_of_let pats in\n            FStar_Compiler_Effect.op_Bar_Greater uu___3\n              (FStar_Compiler_List.map (fun l -> FStar_Ident.string_of_lid l)) in\n          FStar_Compiler_Effect.op_Bar_Greater uu___2\n            (FStar_String.concat \", \") in\n        Prims.op_Hat \"let \" uu___1\n    | Assume (i, uu___) ->\n        let uu___1 = FStar_Ident.string_of_id i in\n        Prims.op_Hat \"assume \" uu___1\n    | Tycon (uu___, uu___1, tys) ->\n        let uu___2 =\n          let uu___3 =\n            FStar_Compiler_Effect.op_Bar_Greater tys\n              (FStar_Compiler_List.map id_of_tycon) in\n          FStar_Compiler_Effect.op_Bar_Greater uu___3\n            (FStar_String.concat \", \") in\n        Prims.op_Hat \"type \" uu___2\n    | Val (i, uu___) ->\n        let uu___1 = FStar_Ident.string_of_id i in Prims.op_Hat \"val \" uu___1\n    | Exception (i, uu___) ->\n        let uu___1 = FStar_Ident.string_of_id i in\n        Prims.op_Hat \"exception \" uu___1\n    | NewEffect (DefineEffect (i, uu___, uu___1, uu___2)) ->\n        let uu___3 = FStar_Ident.string_of_id i in\n        Prims.op_Hat \"new_effect \" uu___3\n    | NewEffect (RedefineEffect (i, uu___, uu___1)) ->\n        let uu___2 = FStar_Ident.string_of_id i in\n        Prims.op_Hat \"new_effect \" uu___2\n    | LayeredEffect (DefineEffect (i, uu___, uu___1, uu___2)) ->\n        let uu___3 = FStar_Ident.string_of_id i in\n        Prims.op_Hat \"layered_effect \" uu___3\n    | LayeredEffect (RedefineEffect (i, uu___, uu___1)) ->\n        let uu___2 = FStar_Ident.string_of_id i in\n        Prims.op_Hat \"layered_effect \" uu___2\n    | Polymonadic_bind (l1, l2, l3, uu___) ->\n        let uu___1 = FStar_Ident.string_of_lid l1 in\n        let uu___2 = FStar_Ident.string_of_lid l2 in\n        let uu___3 = FStar_Ident.string_of_lid l3 in\n        FStar_Compiler_Util.format3 \"polymonadic_bind (%s, %s) |> %s\" uu___1\n          uu___2 uu___3\n    | 
Polymonadic_subcomp (l1, l2, uu___) ->\n        let uu___1 = FStar_Ident.string_of_lid l1 in\n        let uu___2 = FStar_Ident.string_of_lid l2 in\n        FStar_Compiler_Util.format2 \"polymonadic_subcomp %s <: %s\" uu___1\n          uu___2\n    | Splice (ids, t) ->\n        let uu___ =\n          let uu___1 =\n            let uu___2 =\n              FStar_Compiler_List.map (fun i -> FStar_Ident.string_of_id i)\n                ids in\n            FStar_Compiler_Effect.op_Less_Bar (FStar_String.concat \";\")\n              uu___2 in\n          let uu___2 =\n            let uu___3 =\n              let uu___4 = term_to_string t in Prims.op_Hat uu___4 \")\" in\n            Prims.op_Hat \"] (\" uu___3 in\n          Prims.op_Hat uu___1 uu___2 in\n        Prims.op_Hat \"splice[\" uu___\n    | SubEffect uu___ -> \"sub_effect\"\n    | Pragma p ->\n        let uu___ = string_of_pragma p in Prims.op_Hat \"pragma #\" uu___\nlet (modul_to_string : modul -> Prims.string) =\n  fun m ->\n    match m with\n    | Module (uu___, decls) ->\n        let uu___1 =\n          FStar_Compiler_Effect.op_Bar_Greater decls\n            (FStar_Compiler_List.map decl_to_string) in\n        FStar_Compiler_Effect.op_Bar_Greater uu___1\n          (FStar_String.concat \"\\n\")\n    | Interface (uu___, decls, uu___1) ->\n        let uu___2 =\n          FStar_Compiler_Effect.op_Bar_Greater decls\n            (FStar_Compiler_List.map decl_to_string) in\n        FStar_Compiler_Effect.op_Bar_Greater uu___2\n          (FStar_String.concat \"\\n\")\nlet (decl_is_val : FStar_Ident.ident -> decl -> Prims.bool) =\n  fun id ->\n    fun decl1 ->\n      match decl1.d with\n      | Val (id', uu___) -> FStar_Ident.ident_equals id id'\n      | uu___ -> false\nlet (thunk : term -> term) =\n  fun ens ->\n    let wildpat =\n      mk_pattern (PatWild (FStar_Pervasives_Native.None, [])) ens.range in\n    mk_term (Abs ([wildpat], ens)) ens.range Expr\nlet (ident_of_binder :\n  FStar_Compiler_Range.range -> binder -> 
FStar_Ident.ident) =\n  fun r ->\n    fun b ->\n      match b.b with\n      | Variable i -> i\n      | TVariable i -> i\n      | Annotated (i, uu___) -> i\n      | TAnnotated (i, uu___) -> i\n      | NoName uu___ ->\n          FStar_Errors.raise_error\n            (FStar_Errors_Codes.Fatal_MissingQuantifierBinder,\n              \"Wildcard binders in quantifiers are not allowed\") r\nlet (idents_of_binders :\n  binder Prims.list ->\n    FStar_Compiler_Range.range -> FStar_Ident.ident Prims.list)\n  =\n  fun bs ->\n    fun r ->\n      FStar_Compiler_Effect.op_Bar_Greater bs\n        (FStar_Compiler_List.map (ident_of_binder r))\nlet (decl_syntax_is_delimited : decl -> Prims.bool) =\n  fun d ->\n    match d.d with\n    | Pragma (ResetOptions (FStar_Pervasives_Native.None)) -> false\n    | Pragma (PushOptions (FStar_Pervasives_Native.None)) -> false\n    | Pragma uu___ -> true\n    | NewEffect (DefineEffect uu___) -> true\n    | LayeredEffect (DefineEffect uu___) -> true\n    | SubEffect\n        { msource = uu___; mdest = uu___1; lift_op = uu___2; braced = true;_}\n        -> true\n    | Tycon (uu___, b, uu___1) -> b\n    | uu___ -> false"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Parser_AST_Util.ml",
    "content": "open Prims\nlet (eq_ident : FStar_Ident.ident -> FStar_Ident.ident -> Prims.bool) =\n  fun i1 -> fun i2 -> FStar_Ident.ident_equals i1 i2\nlet eq_list :\n  'a .\n    ('a -> 'a -> Prims.bool) -> 'a Prims.list -> 'a Prims.list -> Prims.bool\n  =\n  fun f ->\n    fun t1 ->\n      fun t2 ->\n        ((FStar_Compiler_List.length t1) = (FStar_Compiler_List.length t2))\n          && (FStar_Compiler_List.forall2 f t1 t2)\nlet eq_option :\n  'a .\n    ('a -> 'a -> Prims.bool) ->\n      'a FStar_Pervasives_Native.option ->\n        'a FStar_Pervasives_Native.option -> Prims.bool\n  =\n  fun f ->\n    fun t1 ->\n      fun t2 ->\n        match (t1, t2) with\n        | (FStar_Pervasives_Native.None, FStar_Pervasives_Native.None) ->\n            true\n        | (FStar_Pervasives_Native.Some t11, FStar_Pervasives_Native.Some\n           t21) -> f t11 t21\n        | uu___ -> false\nlet (eq_sconst : FStar_Const.sconst -> FStar_Const.sconst -> Prims.bool) =\n  fun c1 ->\n    fun c2 ->\n      match (c1, c2) with\n      | (FStar_Const.Const_effect, FStar_Const.Const_effect) -> true\n      | (FStar_Const.Const_unit, FStar_Const.Const_unit) -> true\n      | (FStar_Const.Const_bool b1, FStar_Const.Const_bool b2) -> b1 = b2\n      | (FStar_Const.Const_int (s1, sw1), FStar_Const.Const_int (s2, sw2)) ->\n          (s1 = s2) && (sw1 = sw2)\n      | (FStar_Const.Const_char c11, FStar_Const.Const_char c21) -> c11 = c21\n      | (FStar_Const.Const_string (s1, uu___), FStar_Const.Const_string\n         (s2, uu___1)) -> s1 = s2\n      | (FStar_Const.Const_real s1, FStar_Const.Const_real s2) -> s1 = s2\n      | (FStar_Const.Const_range r1, FStar_Const.Const_range r2) -> r1 = r2\n      | (FStar_Const.Const_reify uu___, FStar_Const.Const_reify uu___1) ->\n          true\n      | (FStar_Const.Const_reflect l1, FStar_Const.Const_reflect l2) ->\n          FStar_Ident.lid_equals l1 l2\n      | uu___ -> false\nlet rec (eq_term :\n  FStar_Parser_AST.term -> FStar_Parser_AST.term -> 
Prims.bool) =\n  fun t1 -> fun t2 -> eq_term' t1.FStar_Parser_AST.tm t2.FStar_Parser_AST.tm\nand (eq_terms :\n  FStar_Parser_AST.term Prims.list ->\n    FStar_Parser_AST.term Prims.list -> Prims.bool)\n  = fun t1 -> fun t2 -> eq_list eq_term t1 t2\nand (eq_arg :\n  (FStar_Parser_AST.term * FStar_Parser_AST.imp) ->\n    (FStar_Parser_AST.term * FStar_Parser_AST.imp) -> Prims.bool)\n  =\n  fun t1 ->\n    fun t2 ->\n      let uu___ = t1 in\n      match uu___ with\n      | (t11, a1) ->\n          let uu___1 = t2 in\n          (match uu___1 with\n           | (t21, a2) -> (eq_term t11 t21) && (eq_imp a1 a2))\nand (eq_imp : FStar_Parser_AST.imp -> FStar_Parser_AST.imp -> Prims.bool) =\n  fun i1 ->\n    fun i2 ->\n      match (i1, i2) with\n      | (FStar_Parser_AST.FsTypApp, FStar_Parser_AST.FsTypApp) -> true\n      | (FStar_Parser_AST.Hash, FStar_Parser_AST.Hash) -> true\n      | (FStar_Parser_AST.UnivApp, FStar_Parser_AST.UnivApp) -> true\n      | (FStar_Parser_AST.Infix, FStar_Parser_AST.Infix) -> true\n      | (FStar_Parser_AST.Nothing, FStar_Parser_AST.Nothing) -> true\n      | (FStar_Parser_AST.HashBrace t1, FStar_Parser_AST.HashBrace t2) ->\n          eq_term t1 t2\n      | uu___ -> false\nand (eq_args :\n  (FStar_Parser_AST.term * FStar_Parser_AST.imp) Prims.list ->\n    (FStar_Parser_AST.term * FStar_Parser_AST.imp) Prims.list -> Prims.bool)\n  = fun t1 -> fun t2 -> eq_list eq_arg t1 t2\nand (eq_arg_qualifier :\n  FStar_Parser_AST.arg_qualifier ->\n    FStar_Parser_AST.arg_qualifier -> Prims.bool)\n  =\n  fun arg_qualifier1 ->\n    fun arg_qualifier2 ->\n      match (arg_qualifier1, arg_qualifier2) with\n      | (FStar_Parser_AST.Implicit, FStar_Parser_AST.Implicit) -> true\n      | (FStar_Parser_AST.Equality, FStar_Parser_AST.Equality) -> true\n      | (FStar_Parser_AST.Meta t1, FStar_Parser_AST.Meta t2) -> eq_term t1 t2\n      | (FStar_Parser_AST.TypeClassArg, FStar_Parser_AST.TypeClassArg) ->\n          true\n      | uu___ -> false\nand (eq_pattern :\n  
FStar_Parser_AST.pattern -> FStar_Parser_AST.pattern -> Prims.bool) =\n  fun p1 ->\n    fun p2 -> eq_pattern' p1.FStar_Parser_AST.pat p2.FStar_Parser_AST.pat\nand (eq_aqual :\n  FStar_Parser_AST.arg_qualifier FStar_Pervasives_Native.option ->\n    FStar_Parser_AST.arg_qualifier FStar_Pervasives_Native.option ->\n      Prims.bool)\n  = fun a1 -> fun a2 -> eq_option eq_arg_qualifier a1 a2\nand (eq_pattern' :\n  FStar_Parser_AST.pattern' -> FStar_Parser_AST.pattern' -> Prims.bool) =\n  fun p1 ->\n    fun p2 ->\n      match (p1, p2) with\n      | (FStar_Parser_AST.PatWild (q1, a1), FStar_Parser_AST.PatWild\n         (q2, a2)) -> (eq_aqual q1 q2) && (eq_terms a1 a2)\n      | (FStar_Parser_AST.PatConst s1, FStar_Parser_AST.PatConst s2) ->\n          eq_sconst s1 s2\n      | (FStar_Parser_AST.PatApp (p11, ps1), FStar_Parser_AST.PatApp\n         (p21, ps2)) -> (eq_pattern p11 p21) && (eq_list eq_pattern ps1 ps2)\n      | (FStar_Parser_AST.PatTvar (i1, aq1, as1), FStar_Parser_AST.PatTvar\n         (i2, aq2, as2)) ->\n          ((FStar_Ident.ident_equals i1 i2) && (eq_aqual aq1 aq2)) &&\n            (eq_terms as1 as2)\n      | (FStar_Parser_AST.PatVar (i1, aq1, as1), FStar_Parser_AST.PatVar\n         (i2, aq2, as2)) ->\n          ((FStar_Ident.ident_equals i1 i2) && (eq_aqual aq1 aq2)) &&\n            (eq_terms as1 as2)\n      | (FStar_Parser_AST.PatName l1, FStar_Parser_AST.PatName l2) ->\n          FStar_Ident.lid_equals l1 l2\n      | (FStar_Parser_AST.PatOr ps1, FStar_Parser_AST.PatOr ps2) ->\n          eq_list eq_pattern ps1 ps2\n      | (FStar_Parser_AST.PatList ps1, FStar_Parser_AST.PatList ps2) ->\n          eq_list eq_pattern ps1 ps2\n      | (FStar_Parser_AST.PatTuple (ps1, b1), FStar_Parser_AST.PatTuple\n         (ps2, b2)) -> (eq_list eq_pattern ps1 ps2) && (b1 = b2)\n      | (FStar_Parser_AST.PatRecord ps1, FStar_Parser_AST.PatRecord ps2) ->\n          eq_list\n            (fun uu___ ->\n               fun uu___1 ->\n                 match (uu___, uu___1) with\n 
                | ((l1, p11), (l2, p21)) ->\n                     (FStar_Ident.lid_equals l1 l2) && (eq_pattern p11 p21))\n            ps1 ps2\n      | (FStar_Parser_AST.PatAscribed (p11, (t1, topt1)),\n         FStar_Parser_AST.PatAscribed (p21, (t2, topt2))) ->\n          ((eq_pattern p11 p21) && (eq_term t1 t2)) &&\n            (eq_option eq_term topt1 topt2)\n      | (FStar_Parser_AST.PatOp i1, FStar_Parser_AST.PatOp i2) ->\n          eq_ident i1 i2\n      | (FStar_Parser_AST.PatVQuote t1, FStar_Parser_AST.PatVQuote t2) ->\n          eq_term t1 t2\n      | uu___ -> false\nand (eq_term' :\n  FStar_Parser_AST.term' -> FStar_Parser_AST.term' -> Prims.bool) =\n  fun t1 ->\n    fun t2 ->\n      match (t1, t2) with\n      | (FStar_Parser_AST.Wild, FStar_Parser_AST.Wild) -> true\n      | (FStar_Parser_AST.Const s1, FStar_Parser_AST.Const s2) ->\n          FStar_Const.eq_const s1 s2\n      | (FStar_Parser_AST.Op (i1, ts1), FStar_Parser_AST.Op (i2, ts2)) ->\n          (eq_ident i1 i2) && (eq_terms ts1 ts2)\n      | (FStar_Parser_AST.Tvar i1, FStar_Parser_AST.Tvar i2) ->\n          eq_ident i1 i2\n      | (FStar_Parser_AST.Uvar i1, FStar_Parser_AST.Uvar i2) ->\n          eq_ident i1 i2\n      | (FStar_Parser_AST.Var l1, FStar_Parser_AST.Var l2) ->\n          FStar_Ident.lid_equals l1 l2\n      | (FStar_Parser_AST.Name l1, FStar_Parser_AST.Name l2) ->\n          FStar_Ident.lid_equals l1 l2\n      | (FStar_Parser_AST.Projector (l1, i1), FStar_Parser_AST.Projector\n         (l2, i2)) ->\n          (FStar_Ident.lid_equals l1 l2) && (FStar_Ident.ident_equals i1 i2)\n      | (FStar_Parser_AST.Construct (l1, args1), FStar_Parser_AST.Construct\n         (l2, args2)) ->\n          (FStar_Ident.lid_equals l1 l2) && (eq_args args1 args2)\n      | (FStar_Parser_AST.Abs (ps1, t11), FStar_Parser_AST.Abs (ps2, t21)) ->\n          (eq_list eq_pattern ps1 ps2) && (eq_term t11 t21)\n      | (FStar_Parser_AST.App (h1, t11, i1), FStar_Parser_AST.App\n         (h2, t21, i2)) ->\n          
((eq_term h1 h2) && (eq_term t11 t21)) && (eq_imp i1 i2)\n      | (FStar_Parser_AST.Let (lq1, defs1, t11), FStar_Parser_AST.Let\n         (lq2, defs2, t21)) ->\n          ((lq1 = lq2) &&\n             (eq_list\n                (fun uu___ ->\n                   fun uu___1 ->\n                     match (uu___, uu___1) with\n                     | ((o1, (p1, t12)), (o2, (p2, t22))) ->\n                         ((eq_option eq_terms o1 o2) && (eq_pattern p1 p2))\n                           && (eq_term t12 t22)) defs1 defs2))\n            && (eq_term t11 t21)\n      | (FStar_Parser_AST.LetOperator (defs1, t11),\n         FStar_Parser_AST.LetOperator (defs2, t21)) ->\n          (eq_list\n             (fun uu___ ->\n                fun uu___1 ->\n                  match (uu___, uu___1) with\n                  | ((i1, ps1, t12), (i2, ps2, t22)) ->\n                      ((eq_ident i1 i2) && (eq_pattern ps1 ps2)) &&\n                        (eq_term t12 t22)) defs1 defs2)\n            && (eq_term t11 t21)\n      | (FStar_Parser_AST.LetOpen (l1, t11), FStar_Parser_AST.LetOpen\n         (l2, t21)) -> (FStar_Ident.lid_equals l1 l2) && (eq_term t11 t21)\n      | (FStar_Parser_AST.LetOpenRecord (t11, t21, t3),\n         FStar_Parser_AST.LetOpenRecord (t4, t5, t6)) ->\n          ((eq_term t11 t4) && (eq_term t21 t5)) && (eq_term t3 t6)\n      | (FStar_Parser_AST.Seq (t11, t21), FStar_Parser_AST.Seq (t3, t4)) ->\n          (eq_term t11 t3) && (eq_term t21 t4)\n      | (FStar_Parser_AST.Bind (i1, t11, t21), FStar_Parser_AST.Bind\n         (i2, t3, t4)) ->\n          ((FStar_Ident.ident_equals i1 i2) && (eq_term t11 t3)) &&\n            (eq_term t21 t4)\n      | (FStar_Parser_AST.If (t11, i1, mra1, t21, t3), FStar_Parser_AST.If\n         (t4, i2, mra2, t5, t6)) ->\n          ((((eq_term t11 t4) && (eq_option eq_ident i1 i2)) &&\n              (eq_option eq_match_returns_annotation mra1 mra2))\n             && (eq_term t21 t5))\n            && (eq_term t3 t6)\n      | 
(FStar_Parser_AST.Match (t11, i1, mra1, bs1), FStar_Parser_AST.Match\n         (t21, i2, mra2, bs2)) ->\n          (((eq_term t11 t21) && (eq_option eq_ident i1 i2)) &&\n             (eq_option eq_match_returns_annotation mra1 mra2))\n            && (eq_list eq_branch bs1 bs2)\n      | (FStar_Parser_AST.TryWith (t11, bs1), FStar_Parser_AST.TryWith\n         (t21, bs2)) -> (eq_term t11 t21) && (eq_list eq_branch bs1 bs2)\n      | (FStar_Parser_AST.Ascribed (t11, t21, topt1, b1),\n         FStar_Parser_AST.Ascribed (t3, t4, topt2, b2)) ->\n          (((eq_term t11 t3) && (eq_term t21 t4)) &&\n             (eq_option eq_term topt1 topt2))\n            && (b1 = b2)\n      | (FStar_Parser_AST.Record (topt1, fs1), FStar_Parser_AST.Record\n         (topt2, fs2)) ->\n          (eq_option eq_term topt1 topt2) &&\n            (eq_list\n               (fun uu___ ->\n                  fun uu___1 ->\n                    match (uu___, uu___1) with\n                    | ((l1, t11), (l2, t21)) ->\n                        (FStar_Ident.lid_equals l1 l2) && (eq_term t11 t21))\n               fs1 fs2)\n      | (FStar_Parser_AST.Project (t11, l1), FStar_Parser_AST.Project\n         (t21, l2)) -> (eq_term t11 t21) && (FStar_Ident.lid_equals l1 l2)\n      | (FStar_Parser_AST.Product (bs1, t11), FStar_Parser_AST.Product\n         (bs2, t21)) -> (eq_list eq_binder bs1 bs2) && (eq_term t11 t21)\n      | (FStar_Parser_AST.Sum (bs1, t11), FStar_Parser_AST.Sum (bs2, t21)) ->\n          (eq_list\n             (fun b1 ->\n                fun b2 ->\n                  match (b1, b2) with\n                  | (FStar_Pervasives.Inl b11, FStar_Pervasives.Inl b21) ->\n                      eq_binder b11 b21\n                  | (FStar_Pervasives.Inr t12, FStar_Pervasives.Inr t22) ->\n                      eq_term t12 t22\n                  | (FStar_Pervasives.Inl uu___, FStar_Pervasives.Inr uu___1)\n                      -> false\n                  | (FStar_Pervasives.Inr uu___, FStar_Pervasives.Inl 
uu___1)\n                      -> false) bs1 bs2)\n            && (eq_term t11 t21)\n      | (FStar_Parser_AST.QForall (bs1, ps1, t11), FStar_Parser_AST.QForall\n         (bs2, ps2, t21)) ->\n          let eq_ps uu___ uu___1 =\n            match (uu___, uu___1) with\n            | ((is1, ts1), (is2, ts2)) ->\n                (eq_list eq_ident is1 is2) &&\n                  (eq_list (eq_list eq_term) ts1 ts2) in\n          ((eq_list eq_binder bs1 bs2) && (eq_ps ps1 ps2)) &&\n            (eq_term t11 t21)\n      | (FStar_Parser_AST.QExists (bs1, ps1, t11), FStar_Parser_AST.QExists\n         (bs2, ps2, t21)) ->\n          let eq_ps uu___ uu___1 =\n            match (uu___, uu___1) with\n            | ((is1, ts1), (is2, ts2)) ->\n                (eq_list eq_ident is1 is2) &&\n                  (eq_list (eq_list eq_term) ts1 ts2) in\n          ((eq_list eq_binder bs1 bs2) && (eq_ps ps1 ps2)) &&\n            (eq_term t11 t21)\n      | (FStar_Parser_AST.Refine (t11, t21), FStar_Parser_AST.Refine\n         (t3, t4)) -> (eq_binder t11 t3) && (eq_term t21 t4)\n      | (FStar_Parser_AST.NamedTyp (i1, t11), FStar_Parser_AST.NamedTyp\n         (i2, t21)) -> (eq_ident i1 i2) && (eq_term t11 t21)\n      | (FStar_Parser_AST.Paren t11, FStar_Parser_AST.Paren t21) ->\n          eq_term t11 t21\n      | (FStar_Parser_AST.Requires (t11, s1), FStar_Parser_AST.Requires\n         (t21, s2)) -> (eq_term t11 t21) && (eq_option (=) s1 s2)\n      | (FStar_Parser_AST.Ensures (t11, s1), FStar_Parser_AST.Ensures\n         (t21, s2)) -> (eq_term t11 t21) && (eq_option (=) s1 s2)\n      | (FStar_Parser_AST.LexList ts1, FStar_Parser_AST.LexList ts2) ->\n          eq_list eq_term ts1 ts2\n      | (FStar_Parser_AST.WFOrder (t11, t21), FStar_Parser_AST.WFOrder\n         (t3, t4)) -> (eq_term t11 t3) && (eq_term t21 t4)\n      | (FStar_Parser_AST.Decreases (t11, s1), FStar_Parser_AST.Decreases\n         (t21, s2)) -> (eq_term t11 t21) && (eq_option (=) s1 s2)\n      | (FStar_Parser_AST.Labeled (t11, 
s1, b1), FStar_Parser_AST.Labeled\n         (t21, s2, b2)) -> ((eq_term t11 t21) && (s1 = s2)) && (b1 = b2)\n      | (FStar_Parser_AST.Discrim l1, FStar_Parser_AST.Discrim l2) ->\n          FStar_Ident.lid_equals l1 l2\n      | (FStar_Parser_AST.Attributes ts1, FStar_Parser_AST.Attributes ts2) ->\n          eq_list eq_term ts1 ts2\n      | (FStar_Parser_AST.Antiquote t11, FStar_Parser_AST.Antiquote t21) ->\n          eq_term t11 t21\n      | (FStar_Parser_AST.Quote (t11, k1), FStar_Parser_AST.Quote (t21, k2))\n          -> (eq_term t11 t21) && (k1 = k2)\n      | (FStar_Parser_AST.VQuote t11, FStar_Parser_AST.VQuote t21) ->\n          eq_term t11 t21\n      | (FStar_Parser_AST.CalcProof (t11, t21, cs1),\n         FStar_Parser_AST.CalcProof (t3, t4, cs2)) ->\n          ((eq_term t11 t3) && (eq_term t21 t4)) &&\n            (eq_list eq_calc_step cs1 cs2)\n      | (FStar_Parser_AST.IntroForall (bs1, t11, t21),\n         FStar_Parser_AST.IntroForall (bs2, t3, t4)) ->\n          ((eq_list eq_binder bs1 bs2) && (eq_term t11 t3)) &&\n            (eq_term t21 t4)\n      | (FStar_Parser_AST.IntroExists (bs1, t11, ts1, t21),\n         FStar_Parser_AST.IntroExists (bs2, t3, ts2, t4)) ->\n          (((eq_list eq_binder bs1 bs2) && (eq_term t11 t3)) &&\n             (eq_list eq_term ts1 ts2))\n            && (eq_term t21 t4)\n      | (FStar_Parser_AST.IntroImplies (t11, t21, b1, t3),\n         FStar_Parser_AST.IntroImplies (t4, t5, b2, t6)) ->\n          (((eq_term t11 t4) && (eq_term t21 t5)) && (eq_binder b1 b2)) &&\n            (eq_term t3 t6)\n      | (FStar_Parser_AST.IntroOr (b1, t11, t21, t3),\n         FStar_Parser_AST.IntroOr (b2, t4, t5, t6)) ->\n          (((b1 = b2) && (eq_term t11 t4)) && (eq_term t21 t5)) &&\n            (eq_term t3 t6)\n      | (FStar_Parser_AST.IntroAnd (t11, t21, t3, t4),\n         FStar_Parser_AST.IntroAnd (t5, t6, t7, t8)) ->\n          (((eq_term t11 t5) && (eq_term t21 t6)) && (eq_term t3 t7)) &&\n            (eq_term t4 t8)\n      | 
(FStar_Parser_AST.ElimForall (bs1, t11, ts1),\n         FStar_Parser_AST.ElimForall (bs2, t21, ts2)) ->\n          ((eq_list eq_binder bs1 bs2) && (eq_term t11 t21)) &&\n            (eq_list eq_term ts1 ts2)\n      | (FStar_Parser_AST.ElimExists (bs1, t11, t21, b1, t3),\n         FStar_Parser_AST.ElimExists (bs2, t4, t5, b2, t6)) ->\n          ((((eq_list eq_binder bs1 bs2) && (eq_term t11 t4)) &&\n              (eq_term t21 t5))\n             && (eq_binder b1 b2))\n            && (eq_term t3 t6)\n      | (FStar_Parser_AST.ElimImplies (t11, t21, t3),\n         FStar_Parser_AST.ElimImplies (t4, t5, t6)) ->\n          ((eq_term t11 t4) && (eq_term t21 t5)) && (eq_term t3 t6)\n      | (FStar_Parser_AST.ElimOr (t11, t21, t3, b1, t4, b2, t5),\n         FStar_Parser_AST.ElimOr (t6, t7, t8, b3, t9, b4, t10)) ->\n          ((((((eq_term t11 t6) && (eq_term t21 t7)) && (eq_term t3 t8)) &&\n               (eq_binder b1 b3))\n              && (eq_term t4 t9))\n             && (eq_binder b2 b4))\n            && (eq_term t5 t10)\n      | (FStar_Parser_AST.ElimAnd (t11, t21, t3, b1, b2, t4),\n         FStar_Parser_AST.ElimAnd (t5, t6, t7, b3, b4, t8)) ->\n          (((((eq_term t11 t5) && (eq_term t21 t6)) && (eq_term t3 t7)) &&\n              (eq_binder b1 b3))\n             && (eq_binder b2 b4))\n            && (eq_term t4 t8)\n      | uu___ -> false\nand (eq_calc_step :\n  FStar_Parser_AST.calc_step -> FStar_Parser_AST.calc_step -> Prims.bool) =\n  fun uu___ ->\n    fun uu___1 ->\n      match (uu___, uu___1) with\n      | (FStar_Parser_AST.CalcStep (t1, t2, t3), FStar_Parser_AST.CalcStep\n         (t4, t5, t6)) ->\n          ((eq_term t1 t4) && (eq_term t2 t5)) && (eq_term t3 t6)\nand (eq_binder :\n  FStar_Parser_AST.binder -> FStar_Parser_AST.binder -> Prims.bool) =\n  fun b1 ->\n    fun b2 ->\n      ((eq_binder' b1.FStar_Parser_AST.b b2.FStar_Parser_AST.b) &&\n         (eq_aqual b1.FStar_Parser_AST.aqual b2.FStar_Parser_AST.aqual))\n        &&\n        (eq_list eq_term 
b1.FStar_Parser_AST.battributes\n           b2.FStar_Parser_AST.battributes)\nand (eq_binder' :\n  FStar_Parser_AST.binder' -> FStar_Parser_AST.binder' -> Prims.bool) =\n  fun b1 ->\n    fun b2 ->\n      match (b1, b2) with\n      | (FStar_Parser_AST.Variable i1, FStar_Parser_AST.Variable i2) ->\n          eq_ident i1 i2\n      | (FStar_Parser_AST.TVariable i1, FStar_Parser_AST.TVariable i2) ->\n          eq_ident i1 i2\n      | (FStar_Parser_AST.Annotated (i1, t1), FStar_Parser_AST.Annotated\n         (i2, t2)) -> (eq_ident i1 i2) && (eq_term t1 t2)\n      | (FStar_Parser_AST.TAnnotated (i1, t1), FStar_Parser_AST.TAnnotated\n         (i2, t2)) -> (eq_ident i1 i2) && (eq_term t1 t2)\n      | (FStar_Parser_AST.NoName t1, FStar_Parser_AST.NoName t2) ->\n          eq_term t1 t2\n      | uu___ -> false\nand (eq_match_returns_annotation :\n  (FStar_Ident.ident FStar_Pervasives_Native.option * FStar_Parser_AST.term *\n    Prims.bool) ->\n    (FStar_Ident.ident FStar_Pervasives_Native.option * FStar_Parser_AST.term\n      * Prims.bool) -> Prims.bool)\n  =\n  fun uu___ ->\n    fun uu___1 ->\n      match (uu___, uu___1) with\n      | ((i1, t1, b1), (i2, t2, b2)) ->\n          ((eq_option eq_ident i1 i2) && (eq_term t1 t2)) && (b1 = b2)\nand (eq_branch :\n  (FStar_Parser_AST.pattern * FStar_Parser_AST.term\n    FStar_Pervasives_Native.option * FStar_Parser_AST.term) ->\n    (FStar_Parser_AST.pattern * FStar_Parser_AST.term\n      FStar_Pervasives_Native.option * FStar_Parser_AST.term) -> Prims.bool)\n  =\n  fun uu___ ->\n    fun uu___1 ->\n      match (uu___, uu___1) with\n      | ((p1, o1, t1), (p2, o2, t2)) ->\n          ((eq_pattern p1 p2) && (eq_option eq_term o1 o2)) &&\n            (eq_term t1 t2)\nlet (eq_tycon_record :\n  FStar_Parser_AST.tycon_record ->\n    FStar_Parser_AST.tycon_record -> Prims.bool)\n  =\n  fun t1 ->\n    fun t2 ->\n      eq_list\n        (fun uu___ ->\n           fun uu___1 ->\n             match (uu___, uu___1) with\n             | ((i1, a1, 
a2, t11), (i2, a3, a4, t21)) ->\n                 (((eq_ident i1 i2) && (eq_aqual a1 a3)) &&\n                    (eq_list eq_term a2 a4))\n                   && (eq_term t11 t21)) t1 t2\nlet (eq_constructor_payload :\n  FStar_Parser_AST.constructor_payload ->\n    FStar_Parser_AST.constructor_payload -> Prims.bool)\n  =\n  fun t1 ->\n    fun t2 ->\n      match (t1, t2) with\n      | (FStar_Parser_AST.VpOfNotation t11, FStar_Parser_AST.VpOfNotation\n         t21) -> eq_term t11 t21\n      | (FStar_Parser_AST.VpArbitrary t11, FStar_Parser_AST.VpArbitrary t21)\n          -> eq_term t11 t21\n      | (FStar_Parser_AST.VpRecord (r1, k1), FStar_Parser_AST.VpRecord\n         (r2, k2)) -> (eq_tycon_record r1 r2) && (eq_option eq_term k1 k2)\n      | uu___ -> false\nlet (eq_tycon :\n  FStar_Parser_AST.tycon -> FStar_Parser_AST.tycon -> Prims.bool) =\n  fun t1 ->\n    fun t2 ->\n      match (t1, t2) with\n      | (FStar_Parser_AST.TyconAbstract (i1, bs1, k1),\n         FStar_Parser_AST.TyconAbstract (i2, bs2, k2)) ->\n          ((eq_ident i1 i2) && (eq_list eq_binder bs1 bs2)) &&\n            (eq_option eq_term k1 k2)\n      | (FStar_Parser_AST.TyconAbbrev (i1, bs1, k1, t11),\n         FStar_Parser_AST.TyconAbbrev (i2, bs2, k2, t21)) ->\n          (((eq_ident i1 i2) && (eq_list eq_binder bs1 bs2)) &&\n             (eq_option eq_term k1 k2))\n            && (eq_term t11 t21)\n      | (FStar_Parser_AST.TyconRecord (i1, bs1, k1, a1, r1),\n         FStar_Parser_AST.TyconRecord (i2, bs2, k2, a2, r2)) ->\n          ((((eq_ident i1 i2) && (eq_list eq_binder bs1 bs2)) &&\n              (eq_option eq_term k1 k2))\n             && (eq_list eq_term a1 a2))\n            && (eq_tycon_record r1 r2)\n      | (FStar_Parser_AST.TyconVariant (i1, bs1, k1, cs1),\n         FStar_Parser_AST.TyconVariant (i2, bs2, k2, cs2)) ->\n          (((eq_ident i1 i2) && (eq_list eq_binder bs1 bs2)) &&\n             (eq_option eq_term k1 k2))\n            &&\n            (eq_list\n               (fun uu___ 
->\n                  fun uu___1 ->\n                    match (uu___, uu___1) with\n                    | ((i11, o1, a1), (i21, o2, a2)) ->\n                        ((eq_ident i11 i21) &&\n                           (eq_option eq_constructor_payload o1 o2))\n                          && (eq_list eq_term a1 a2)) cs1 cs2)\n      | uu___ -> false\nlet (eq_lid : FStar_Ident.lident -> FStar_Ident.lident -> Prims.bool) =\n  FStar_Ident.lid_equals\nlet (eq_lift : FStar_Parser_AST.lift -> FStar_Parser_AST.lift -> Prims.bool)\n  =\n  fun t1 ->\n    fun t2 ->\n      ((eq_lid t1.FStar_Parser_AST.msource t2.FStar_Parser_AST.msource) &&\n         (eq_lid t1.FStar_Parser_AST.mdest t2.FStar_Parser_AST.mdest))\n        &&\n        (match ((t1.FStar_Parser_AST.lift_op), (t2.FStar_Parser_AST.lift_op))\n         with\n         | (FStar_Parser_AST.NonReifiableLift t11,\n            FStar_Parser_AST.NonReifiableLift t21) -> eq_term t11 t21\n         | (FStar_Parser_AST.ReifiableLift (t11, t21),\n            FStar_Parser_AST.ReifiableLift (t3, t4)) ->\n             (eq_term t11 t3) && (eq_term t21 t4)\n         | (FStar_Parser_AST.LiftForFree t11, FStar_Parser_AST.LiftForFree\n            t21) -> eq_term t11 t21\n         | uu___ -> false)\nlet (eq_pragma :\n  FStar_Parser_AST.pragma -> FStar_Parser_AST.pragma -> Prims.bool) =\n  fun t1 ->\n    fun t2 ->\n      match (t1, t2) with\n      | (FStar_Parser_AST.SetOptions s1, FStar_Parser_AST.SetOptions s2) ->\n          s1 = s2\n      | (FStar_Parser_AST.ResetOptions s1, FStar_Parser_AST.ResetOptions s2)\n          -> eq_option (fun s11 -> fun s21 -> s11 = s21) s1 s2\n      | (FStar_Parser_AST.PushOptions s1, FStar_Parser_AST.PushOptions s2) ->\n          eq_option (fun s11 -> fun s21 -> s11 = s21) s1 s2\n      | (FStar_Parser_AST.PopOptions, FStar_Parser_AST.PopOptions) -> true\n      | (FStar_Parser_AST.RestartSolver, FStar_Parser_AST.RestartSolver) ->\n          true\n      | (FStar_Parser_AST.PrintEffectsGraph,\n         
FStar_Parser_AST.PrintEffectsGraph) -> true\n      | uu___ -> false\nlet (eq_qualifier :\n  FStar_Parser_AST.qualifier -> FStar_Parser_AST.qualifier -> Prims.bool) =\n  fun t1 ->\n    fun t2 ->\n      match (t1, t2) with\n      | (FStar_Parser_AST.Private, FStar_Parser_AST.Private) -> true\n      | (FStar_Parser_AST.Noeq, FStar_Parser_AST.Noeq) -> true\n      | (FStar_Parser_AST.Unopteq, FStar_Parser_AST.Unopteq) -> true\n      | (FStar_Parser_AST.Assumption, FStar_Parser_AST.Assumption) -> true\n      | (FStar_Parser_AST.DefaultEffect, FStar_Parser_AST.DefaultEffect) ->\n          true\n      | (FStar_Parser_AST.TotalEffect, FStar_Parser_AST.TotalEffect) -> true\n      | (FStar_Parser_AST.Effect_qual, FStar_Parser_AST.Effect_qual) -> true\n      | (FStar_Parser_AST.New, FStar_Parser_AST.New) -> true\n      | (FStar_Parser_AST.Inline, FStar_Parser_AST.Inline) -> true\n      | (FStar_Parser_AST.Visible, FStar_Parser_AST.Visible) -> true\n      | (FStar_Parser_AST.Unfold_for_unification_and_vcgen,\n         FStar_Parser_AST.Unfold_for_unification_and_vcgen) -> true\n      | (FStar_Parser_AST.Inline_for_extraction,\n         FStar_Parser_AST.Inline_for_extraction) -> true\n      | (FStar_Parser_AST.Irreducible, FStar_Parser_AST.Irreducible) -> true\n      | (FStar_Parser_AST.NoExtract, FStar_Parser_AST.NoExtract) -> true\n      | (FStar_Parser_AST.Reifiable, FStar_Parser_AST.Reifiable) -> true\n      | (FStar_Parser_AST.Reflectable, FStar_Parser_AST.Reflectable) -> true\n      | (FStar_Parser_AST.Opaque, FStar_Parser_AST.Opaque) -> true\n      | (FStar_Parser_AST.Logic, FStar_Parser_AST.Logic) -> true\n      | uu___ -> false\nlet (eq_qualifiers :\n  FStar_Parser_AST.qualifiers -> FStar_Parser_AST.qualifiers -> Prims.bool) =\n  fun t1 -> fun t2 -> eq_list eq_qualifier t1 t2\nlet rec (eq_decl' :\n  FStar_Parser_AST.decl' -> FStar_Parser_AST.decl' -> Prims.bool) =\n  fun d1 ->\n    fun d2 ->\n      match (d1, d2) with\n      | (FStar_Parser_AST.TopLevelModule lid1,\n     
    FStar_Parser_AST.TopLevelModule lid2) -> eq_lid lid1 lid2\n      | (FStar_Parser_AST.Open lid1, FStar_Parser_AST.Open lid2) ->\n          eq_lid lid1 lid2\n      | (FStar_Parser_AST.Friend lid1, FStar_Parser_AST.Friend lid2) ->\n          eq_lid lid1 lid2\n      | (FStar_Parser_AST.Include lid1, FStar_Parser_AST.Include lid2) ->\n          eq_lid lid1 lid2\n      | (FStar_Parser_AST.ModuleAbbrev (i1, lid1),\n         FStar_Parser_AST.ModuleAbbrev (i2, lid2)) ->\n          (eq_ident i1 i2) && (eq_lid lid1 lid2)\n      | (FStar_Parser_AST.TopLevelLet (lq1, pats1),\n         FStar_Parser_AST.TopLevelLet (lq2, pats2)) ->\n          (lq1 = lq2) &&\n            (eq_list\n               (fun uu___ ->\n                  fun uu___1 ->\n                    match (uu___, uu___1) with\n                    | ((p1, t1), (p2, t2)) ->\n                        (eq_pattern p1 p2) && (eq_term t1 t2)) pats1 pats2)\n      | (FStar_Parser_AST.Tycon (b1, b2, tcs1), FStar_Parser_AST.Tycon\n         (b3, b4, tcs2)) ->\n          ((b1 = b3) && (b2 = b4)) && (eq_list eq_tycon tcs1 tcs2)\n      | (FStar_Parser_AST.Val (i1, t1), FStar_Parser_AST.Val (i2, t2)) ->\n          (eq_ident i1 i2) && (eq_term t1 t2)\n      | (FStar_Parser_AST.Exception (i1, t1), FStar_Parser_AST.Exception\n         (i2, t2)) -> (eq_ident i1 i2) && (eq_option eq_term t1 t2)\n      | (FStar_Parser_AST.NewEffect ed1, FStar_Parser_AST.NewEffect ed2) ->\n          eq_effect_decl ed1 ed2\n      | (FStar_Parser_AST.LayeredEffect ed1, FStar_Parser_AST.LayeredEffect\n         ed2) -> eq_effect_decl ed1 ed2\n      | (FStar_Parser_AST.SubEffect l1, FStar_Parser_AST.SubEffect l2) ->\n          eq_lift l1 l2\n      | (FStar_Parser_AST.Polymonadic_bind (lid1, lid2, lid3, t1),\n         FStar_Parser_AST.Polymonadic_bind (lid4, lid5, lid6, t2)) ->\n          (((eq_lid lid1 lid4) && (eq_lid lid2 lid5)) && (eq_lid lid3 lid6))\n            && (eq_term t1 t2)\n      | (FStar_Parser_AST.Polymonadic_subcomp (lid1, lid2, t1),\n         
FStar_Parser_AST.Polymonadic_subcomp (lid3, lid4, t2)) ->\n          ((eq_lid lid1 lid3) && (eq_lid lid2 lid4)) && (eq_term t1 t2)\n      | (FStar_Parser_AST.Pragma p1, FStar_Parser_AST.Pragma p2) ->\n          eq_pragma p1 p2\n      | (FStar_Parser_AST.Assume (i1, t1), FStar_Parser_AST.Assume (i2, t2))\n          -> (eq_ident i1 i2) && (eq_term t1 t2)\n      | (FStar_Parser_AST.Splice (is1, t1), FStar_Parser_AST.Splice\n         (is2, t2)) -> (eq_list eq_ident is1 is2) && (eq_term t1 t2)\n      | uu___ -> false\nand (eq_effect_decl :\n  FStar_Parser_AST.effect_decl -> FStar_Parser_AST.effect_decl -> Prims.bool)\n  =\n  fun t1 ->\n    fun t2 ->\n      match (t1, t2) with\n      | (FStar_Parser_AST.DefineEffect (i1, bs1, t11, ds1),\n         FStar_Parser_AST.DefineEffect (i2, bs2, t21, ds2)) ->\n          (((eq_ident i1 i2) && (eq_list eq_binder bs1 bs2)) &&\n             (eq_term t11 t21))\n            && (eq_list eq_decl ds1 ds2)\n      | (FStar_Parser_AST.RedefineEffect (i1, bs1, t11),\n         FStar_Parser_AST.RedefineEffect (i2, bs2, t21)) ->\n          ((eq_ident i1 i2) && (eq_list eq_binder bs1 bs2)) &&\n            (eq_term t11 t21)\n      | uu___ -> false\nand (eq_decl : FStar_Parser_AST.decl -> FStar_Parser_AST.decl -> Prims.bool)\n  =\n  fun d1 ->\n    fun d2 ->\n      ((eq_decl' d1.FStar_Parser_AST.d d2.FStar_Parser_AST.d) &&\n         (eq_list eq_qualifier d1.FStar_Parser_AST.quals\n            d2.FStar_Parser_AST.quals))\n        &&\n        (eq_list eq_term d1.FStar_Parser_AST.attrs d2.FStar_Parser_AST.attrs)\nlet concat_map :\n  'uuuuu 'uuuuu1 .\n    unit ->\n      ('uuuuu -> 'uuuuu1 Prims.list) ->\n        'uuuuu Prims.list -> 'uuuuu1 Prims.list\n  = fun uu___ -> FStar_Compiler_List.collect\nlet opt_map :\n  'uuuuu 'a .\n    ('a -> 'uuuuu Prims.list) ->\n      'a FStar_Pervasives_Native.option -> 'uuuuu Prims.list\n  =\n  fun f ->\n    fun x ->\n      match x with\n      | FStar_Pervasives_Native.None -> []\n      | FStar_Pervasives_Native.Some x1 
-> f x1\nlet rec (lidents_of_term :\n  FStar_Parser_AST.term -> FStar_Ident.lident Prims.list) =\n  fun t -> lidents_of_term' t.FStar_Parser_AST.tm\nand (lidents_of_term' :\n  FStar_Parser_AST.term' -> FStar_Ident.lident Prims.list) =\n  fun t ->\n    match t with\n    | FStar_Parser_AST.Wild -> []\n    | FStar_Parser_AST.Const uu___ -> []\n    | FStar_Parser_AST.Op (s, ts) -> (concat_map ()) lidents_of_term ts\n    | FStar_Parser_AST.Tvar uu___ -> []\n    | FStar_Parser_AST.Uvar uu___ -> []\n    | FStar_Parser_AST.Var lid -> [lid]\n    | FStar_Parser_AST.Name lid -> [lid]\n    | FStar_Parser_AST.Projector (lid, uu___) -> [lid]\n    | FStar_Parser_AST.Construct (lid, ts) ->\n        let uu___ =\n          (concat_map ())\n            (fun uu___1 ->\n               match uu___1 with | (t1, uu___2) -> lidents_of_term t1) ts in\n        lid :: uu___\n    | FStar_Parser_AST.Abs (ps, t1) ->\n        let uu___ = (concat_map ()) lidents_of_pattern ps in\n        let uu___1 = lidents_of_term t1 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.App (t1, t2, uu___) ->\n        let uu___1 = lidents_of_term t1 in\n        let uu___2 = lidents_of_term t2 in\n        FStar_Compiler_List.op_At uu___1 uu___2\n    | FStar_Parser_AST.Let (uu___, lbs, t1) ->\n        let uu___1 =\n          (concat_map ())\n            (fun uu___2 ->\n               match uu___2 with\n               | (uu___3, (p, t2)) ->\n                   let uu___4 = lidents_of_pattern p in\n                   let uu___5 = lidents_of_term t2 in\n                   FStar_Compiler_List.op_At uu___4 uu___5) lbs in\n        let uu___2 = lidents_of_term t1 in\n        FStar_Compiler_List.op_At uu___1 uu___2\n    | FStar_Parser_AST.LetOperator (lbs, t1) ->\n        let uu___ =\n          (concat_map ())\n            (fun uu___1 ->\n               match uu___1 with\n               | (uu___2, p, t2) ->\n                   let uu___3 = lidents_of_pattern p in\n                   let uu___4 = 
lidents_of_term t2 in\n                   FStar_Compiler_List.op_At uu___3 uu___4) lbs in\n        let uu___1 = lidents_of_term t1 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.LetOpen (lid, t1) ->\n        let uu___ = lidents_of_term t1 in lid :: uu___\n    | FStar_Parser_AST.LetOpenRecord (t1, t2, t3) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 =\n          let uu___2 = lidents_of_term t2 in\n          let uu___3 = lidents_of_term t3 in\n          FStar_Compiler_List.op_At uu___2 uu___3 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.Seq (t1, t2) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 = lidents_of_term t2 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.Bind (uu___, t1, t2) ->\n        let uu___1 = lidents_of_term t1 in\n        let uu___2 = lidents_of_term t2 in\n        FStar_Compiler_List.op_At uu___1 uu___2\n    | FStar_Parser_AST.If (t1, uu___, uu___1, t2, t3) ->\n        let uu___2 = lidents_of_term t1 in\n        let uu___3 =\n          let uu___4 = lidents_of_term t2 in\n          let uu___5 = lidents_of_term t3 in\n          FStar_Compiler_List.op_At uu___4 uu___5 in\n        FStar_Compiler_List.op_At uu___2 uu___3\n    | FStar_Parser_AST.Match (t1, uu___, uu___1, bs) ->\n        let uu___2 = lidents_of_term t1 in\n        let uu___3 = (concat_map ()) lidents_of_branch bs in\n        FStar_Compiler_List.op_At uu___2 uu___3\n    | FStar_Parser_AST.TryWith (t1, bs) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 = (concat_map ()) lidents_of_branch bs in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.Ascribed (t1, t2, uu___, uu___1) ->\n        let uu___2 = lidents_of_term t1 in\n        let uu___3 = lidents_of_term t2 in\n        FStar_Compiler_List.op_At uu___2 uu___3\n    | FStar_Parser_AST.Record (t1, ts) ->\n        let uu___ =\n          (concat_map ())\n            (fun uu___1 
->\n               match uu___1 with | (uu___2, t2) -> lidents_of_term t2) ts in\n        let uu___1 = opt_map lidents_of_term t1 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.Project (t1, uu___) -> lidents_of_term t1\n    | FStar_Parser_AST.Product (ts, t1) ->\n        let uu___ = (concat_map ()) lidents_of_binder ts in\n        let uu___1 = lidents_of_term t1 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.Sum (ts, t1) ->\n        let uu___ =\n          (concat_map ())\n            (fun uu___1 ->\n               match uu___1 with\n               | FStar_Pervasives.Inl b -> lidents_of_binder b\n               | FStar_Pervasives.Inr t2 -> lidents_of_term t2) ts in\n        let uu___1 = lidents_of_term t1 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.QForall (bs, _pats, t1) -> lidents_of_term t1\n    | FStar_Parser_AST.QExists (bs, _pats, t1) -> lidents_of_term t1\n    | FStar_Parser_AST.Refine (b, t1) -> lidents_of_term t1\n    | FStar_Parser_AST.NamedTyp (i, t1) -> lidents_of_term t1\n    | FStar_Parser_AST.Paren t1 -> lidents_of_term t1\n    | FStar_Parser_AST.Requires (t1, uu___) -> lidents_of_term t1\n    | FStar_Parser_AST.Ensures (t1, uu___) -> lidents_of_term t1\n    | FStar_Parser_AST.LexList ts -> (concat_map ()) lidents_of_term ts\n    | FStar_Parser_AST.WFOrder (t1, t2) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 = lidents_of_term t2 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.Decreases (t1, uu___) -> lidents_of_term t1\n    | FStar_Parser_AST.Labeled (t1, uu___, uu___1) -> lidents_of_term t1\n    | FStar_Parser_AST.Discrim lid -> [lid]\n    | FStar_Parser_AST.Attributes ts -> (concat_map ()) lidents_of_term ts\n    | FStar_Parser_AST.Antiquote t1 -> lidents_of_term t1\n    | FStar_Parser_AST.Quote (t1, uu___) -> lidents_of_term t1\n    | FStar_Parser_AST.VQuote t1 -> lidents_of_term t1\n    | 
FStar_Parser_AST.CalcProof (t1, t2, ts) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 =\n          let uu___2 = lidents_of_term t2 in\n          let uu___3 = (concat_map ()) lidents_of_calc_step ts in\n          FStar_Compiler_List.op_At uu___2 uu___3 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.IntroForall (bs, t1, t2) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 = lidents_of_term t2 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.IntroExists (bs, t1, ts, t2) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 =\n          let uu___2 = (concat_map ()) lidents_of_term ts in\n          let uu___3 = lidents_of_term t2 in\n          FStar_Compiler_List.op_At uu___2 uu___3 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.IntroImplies (t1, t2, b, t3) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 =\n          let uu___2 = lidents_of_term t2 in\n          let uu___3 = lidents_of_term t3 in\n          FStar_Compiler_List.op_At uu___2 uu___3 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.IntroOr (b, t1, t2, t3) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 =\n          let uu___2 = lidents_of_term t2 in\n          let uu___3 = lidents_of_term t3 in\n          FStar_Compiler_List.op_At uu___2 uu___3 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.IntroAnd (t1, t2, t3, t4) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 =\n          let uu___2 = lidents_of_term t2 in\n          let uu___3 =\n            let uu___4 = lidents_of_term t3 in\n            let uu___5 = lidents_of_term t4 in\n            FStar_Compiler_List.op_At uu___4 uu___5 in\n          FStar_Compiler_List.op_At uu___2 uu___3 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.ElimForall (bs, t1, ts) ->\n        let uu___ = (concat_map ()) 
lidents_of_binder bs in\n        let uu___1 =\n          let uu___2 = lidents_of_term t1 in\n          let uu___3 = (concat_map ()) lidents_of_term ts in\n          FStar_Compiler_List.op_At uu___2 uu___3 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.ElimExists (bs, t1, t2, b, t3) ->\n        let uu___ = (concat_map ()) lidents_of_binder bs in\n        let uu___1 =\n          let uu___2 = lidents_of_term t1 in\n          let uu___3 =\n            let uu___4 = lidents_of_term t2 in\n            let uu___5 = lidents_of_term t3 in\n            FStar_Compiler_List.op_At uu___4 uu___5 in\n          FStar_Compiler_List.op_At uu___2 uu___3 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.ElimImplies (t1, t2, t3) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 =\n          let uu___2 = lidents_of_term t2 in\n          let uu___3 = lidents_of_term t3 in\n          FStar_Compiler_List.op_At uu___2 uu___3 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.ElimOr (t1, t2, t3, b1, t4, b2, t5) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 =\n          let uu___2 = lidents_of_term t2 in\n          let uu___3 =\n            let uu___4 = lidents_of_term t3 in\n            let uu___5 =\n              let uu___6 = lidents_of_term t4 in\n              let uu___7 = lidents_of_term t5 in\n              FStar_Compiler_List.op_At uu___6 uu___7 in\n            FStar_Compiler_List.op_At uu___4 uu___5 in\n          FStar_Compiler_List.op_At uu___2 uu___3 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.ElimAnd (t1, t2, t3, b1, b2, t4) ->\n        let uu___ = lidents_of_term t1 in\n        let uu___1 =\n          let uu___2 = lidents_of_term t2 in\n          let uu___3 =\n            let uu___4 = lidents_of_term t3 in\n            let uu___5 = lidents_of_term t4 in\n            FStar_Compiler_List.op_At uu___4 uu___5 in\n          
FStar_Compiler_List.op_At uu___2 uu___3 in\n        FStar_Compiler_List.op_At uu___ uu___1\nand (lidents_of_branch :\n  (FStar_Parser_AST.pattern * FStar_Parser_AST.term\n    FStar_Pervasives_Native.option * FStar_Parser_AST.term) ->\n    FStar_Ident.lident Prims.list)\n  =\n  fun uu___ ->\n    match uu___ with\n    | (p, uu___1, t) ->\n        let uu___2 = lidents_of_pattern p in\n        let uu___3 = lidents_of_term t in\n        FStar_Compiler_List.op_At uu___2 uu___3\nand (lidents_of_calc_step :\n  FStar_Parser_AST.calc_step -> FStar_Ident.lident Prims.list) =\n  fun uu___ ->\n    match uu___ with\n    | FStar_Parser_AST.CalcStep (t1, t2, t3) ->\n        let uu___1 = lidents_of_term t1 in\n        let uu___2 =\n          let uu___3 = lidents_of_term t2 in\n          let uu___4 = lidents_of_term t3 in\n          FStar_Compiler_List.op_At uu___3 uu___4 in\n        FStar_Compiler_List.op_At uu___1 uu___2\nand (lidents_of_pattern :\n  FStar_Parser_AST.pattern -> FStar_Ident.lident Prims.list) =\n  fun p ->\n    match p.FStar_Parser_AST.pat with\n    | FStar_Parser_AST.PatWild uu___ -> []\n    | FStar_Parser_AST.PatConst uu___ -> []\n    | FStar_Parser_AST.PatApp (p1, ps) ->\n        let uu___ = lidents_of_pattern p1 in\n        let uu___1 = (concat_map ()) lidents_of_pattern ps in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.PatVar (i, uu___, uu___1) ->\n        let uu___2 = FStar_Ident.lid_of_ids [i] in [uu___2]\n    | FStar_Parser_AST.PatName lid -> [lid]\n    | FStar_Parser_AST.PatTvar (i, uu___, uu___1) -> []\n    | FStar_Parser_AST.PatList ps -> (concat_map ()) lidents_of_pattern ps\n    | FStar_Parser_AST.PatTuple (ps, uu___) ->\n        (concat_map ()) lidents_of_pattern ps\n    | FStar_Parser_AST.PatRecord ps ->\n        (concat_map ())\n          (fun uu___ ->\n             match uu___ with | (uu___1, p1) -> lidents_of_pattern p1) ps\n    | FStar_Parser_AST.PatAscribed (p1, (t1, t2)) ->\n        let uu___ = lidents_of_pattern p1 
in\n        let uu___1 =\n          let uu___2 = lidents_of_term t1 in\n          let uu___3 = opt_map lidents_of_term t2 in\n          FStar_Compiler_List.op_At uu___2 uu___3 in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | FStar_Parser_AST.PatOr ps -> (concat_map ()) lidents_of_pattern ps\n    | FStar_Parser_AST.PatOp uu___ -> []\n    | FStar_Parser_AST.PatVQuote t -> lidents_of_term t\nand (lidents_of_binder :\n  FStar_Parser_AST.binder -> FStar_Ident.lident Prims.list) =\n  fun b ->\n    match b.FStar_Parser_AST.b with\n    | FStar_Parser_AST.Annotated (uu___, t) -> lidents_of_term t\n    | FStar_Parser_AST.TAnnotated (uu___, t) -> lidents_of_term t\n    | FStar_Parser_AST.NoName t -> lidents_of_term t\n    | uu___ -> []\nlet lidents_of_tycon_record :\n  'uuuuu 'uuuuu1 'uuuuu2 .\n    ('uuuuu * 'uuuuu1 * 'uuuuu2 * FStar_Parser_AST.term) ->\n      FStar_Ident.lident Prims.list\n  =\n  fun uu___ ->\n    match uu___ with | (uu___1, uu___2, uu___3, t) -> lidents_of_term t\nlet (lidents_of_constructor_payload :\n  FStar_Parser_AST.constructor_payload -> FStar_Ident.lident Prims.list) =\n  fun t ->\n    match t with\n    | FStar_Parser_AST.VpOfNotation t1 -> lidents_of_term t1\n    | FStar_Parser_AST.VpArbitrary t1 -> lidents_of_term t1\n    | FStar_Parser_AST.VpRecord (tc, FStar_Pervasives_Native.None) ->\n        (concat_map ()) lidents_of_tycon_record tc\n    | FStar_Parser_AST.VpRecord (tc, FStar_Pervasives_Native.Some t1) ->\n        let uu___ = (concat_map ()) lidents_of_tycon_record tc in\n        let uu___1 = lidents_of_term t1 in\n        FStar_Compiler_List.op_At uu___ uu___1\nlet (lidents_of_tycon_variant :\n  (FStar_Ident.ident * FStar_Parser_AST.constructor_payload\n    FStar_Pervasives_Native.option * FStar_Parser_AST.attributes_) ->\n    FStar_Ident.lident Prims.list)\n  =\n  fun tc ->\n    match tc with\n    | (uu___, FStar_Pervasives_Native.None, uu___1) -> []\n    | (uu___, FStar_Pervasives_Native.Some t, uu___1) ->\n        
lidents_of_constructor_payload t\nlet (lidents_of_tycon :\n  FStar_Parser_AST.tycon -> FStar_Ident.lident Prims.list) =\n  fun tc ->\n    match tc with\n    | FStar_Parser_AST.TyconAbstract (uu___, bs, k) ->\n        let uu___1 = (concat_map ()) lidents_of_binder bs in\n        let uu___2 = opt_map lidents_of_term k in\n        FStar_Compiler_List.op_At uu___1 uu___2\n    | FStar_Parser_AST.TyconAbbrev (uu___, bs, k, t) ->\n        let uu___1 = (concat_map ()) lidents_of_binder bs in\n        let uu___2 =\n          let uu___3 = opt_map lidents_of_term k in\n          let uu___4 = lidents_of_term t in\n          FStar_Compiler_List.op_At uu___3 uu___4 in\n        FStar_Compiler_List.op_At uu___1 uu___2\n    | FStar_Parser_AST.TyconRecord (uu___, bs, k, uu___1, tcs) ->\n        let uu___2 = (concat_map ()) lidents_of_binder bs in\n        let uu___3 =\n          let uu___4 = opt_map lidents_of_term k in\n          let uu___5 = (concat_map ()) lidents_of_tycon_record tcs in\n          FStar_Compiler_List.op_At uu___4 uu___5 in\n        FStar_Compiler_List.op_At uu___2 uu___3\n    | FStar_Parser_AST.TyconVariant (uu___, bs, k, tcs) ->\n        let uu___1 = (concat_map ()) lidents_of_binder bs in\n        let uu___2 =\n          let uu___3 = opt_map lidents_of_term k in\n          let uu___4 = (concat_map ()) lidents_of_tycon_variant tcs in\n          FStar_Compiler_List.op_At uu___3 uu___4 in\n        FStar_Compiler_List.op_At uu___1 uu___2\nlet (lidents_of_lift :\n  FStar_Parser_AST.lift -> FStar_Ident.lident Prims.list) =\n  fun l ->\n    let uu___ =\n      match l.FStar_Parser_AST.lift_op with\n      | FStar_Parser_AST.NonReifiableLift t -> lidents_of_term t\n      | FStar_Parser_AST.ReifiableLift (t1, t2) ->\n          let uu___1 = lidents_of_term t1 in\n          let uu___2 = lidents_of_term t2 in\n          FStar_Compiler_List.op_At uu___1 uu___2\n      | FStar_Parser_AST.LiftForFree t -> lidents_of_term t in\n    FStar_Compiler_List.op_At\n      
[l.FStar_Parser_AST.msource; l.FStar_Parser_AST.mdest] uu___\nlet rec (lidents_of_decl :\n  FStar_Parser_AST.decl -> FStar_Ident.lident Prims.list) =\n  fun d ->\n    match d.FStar_Parser_AST.d with\n    | FStar_Parser_AST.TopLevelModule uu___ -> []\n    | FStar_Parser_AST.Open l -> [l]\n    | FStar_Parser_AST.Friend l -> [l]\n    | FStar_Parser_AST.Include l -> [l]\n    | FStar_Parser_AST.ModuleAbbrev (uu___, l) -> [l]\n    | FStar_Parser_AST.TopLevelLet (_q, lbs) ->\n        (concat_map ())\n          (fun uu___ ->\n             match uu___ with\n             | (p, t) ->\n                 let uu___1 = lidents_of_pattern p in\n                 let uu___2 = lidents_of_term t in\n                 FStar_Compiler_List.op_At uu___1 uu___2) lbs\n    | FStar_Parser_AST.Tycon (uu___, uu___1, tcs) ->\n        (concat_map ()) lidents_of_tycon tcs\n    | FStar_Parser_AST.Val (uu___, t) -> lidents_of_term t\n    | FStar_Parser_AST.Exception (uu___, FStar_Pervasives_Native.None) -> []\n    | FStar_Parser_AST.Exception (uu___, FStar_Pervasives_Native.Some t) ->\n        lidents_of_term t\n    | FStar_Parser_AST.NewEffect ed -> lidents_of_effect_decl ed\n    | FStar_Parser_AST.LayeredEffect ed -> lidents_of_effect_decl ed\n    | FStar_Parser_AST.SubEffect lift -> lidents_of_lift lift\n    | FStar_Parser_AST.Polymonadic_bind (l0, l1, l2, t) ->\n        let uu___ =\n          let uu___1 = let uu___2 = lidents_of_term t in l2 :: uu___2 in l1\n            :: uu___1 in\n        l0 :: uu___\n    | FStar_Parser_AST.Polymonadic_subcomp (l0, l1, t) ->\n        let uu___ = let uu___1 = lidents_of_term t in l1 :: uu___1 in l0 ::\n          uu___\n    | FStar_Parser_AST.Pragma uu___ -> []\n    | FStar_Parser_AST.Assume (uu___, t) -> lidents_of_term t\n    | FStar_Parser_AST.Splice (uu___, t) -> lidents_of_term t\nand (lidents_of_effect_decl :\n  FStar_Parser_AST.effect_decl -> FStar_Ident.lident Prims.list) =\n  fun ed ->\n    match ed with\n    | FStar_Parser_AST.DefineEffect (uu___, bs, 
t, ds) ->\n        let uu___1 = (concat_map ()) lidents_of_binder bs in\n        let uu___2 =\n          let uu___3 = lidents_of_term t in\n          let uu___4 = (concat_map ()) lidents_of_decl ds in\n          FStar_Compiler_List.op_At uu___3 uu___4 in\n        FStar_Compiler_List.op_At uu___1 uu___2\n    | FStar_Parser_AST.RedefineEffect (uu___, bs, t) ->\n        let uu___1 = (concat_map ()) lidents_of_binder bs in\n        let uu___2 = lidents_of_term t in\n        FStar_Compiler_List.op_At uu___1 uu___2"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Parser_Const.ml",
    "content": "open Prims\nlet (p2l : FStar_Ident.path -> FStar_Ident.lident) =\n  fun l -> FStar_Ident.lid_of_path l FStar_Compiler_Range.dummyRange\nlet (pconst : Prims.string -> FStar_Ident.lident) = fun s -> p2l [\"Prims\"; s]\nlet (psconst : Prims.string -> FStar_Ident.lident) =\n  fun s -> p2l [\"FStar\"; \"Pervasives\"; s]\nlet (psnconst : Prims.string -> FStar_Ident.lident) =\n  fun s -> p2l [\"FStar\"; \"Pervasives\"; \"Native\"; s]\nlet (prims_lid : FStar_Ident.lident) = p2l [\"Prims\"]\nlet (pervasives_native_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Pervasives\"; \"Native\"]\nlet (pervasives_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Pervasives\"]\nlet (fstar_ns_lid : FStar_Ident.lident) = p2l [\"FStar\"]\nlet (bool_lid : FStar_Ident.lident) = pconst \"bool\"\nlet (unit_lid : FStar_Ident.lident) = pconst \"unit\"\nlet (squash_lid : FStar_Ident.lident) = pconst \"squash\"\nlet (auto_squash_lid : FStar_Ident.lident) = pconst \"auto_squash\"\nlet (string_lid : FStar_Ident.lident) = pconst \"string\"\nlet (bytes_lid : FStar_Ident.lident) = pconst \"bytes\"\nlet (int_lid : FStar_Ident.lident) = pconst \"int\"\nlet (exn_lid : FStar_Ident.lident) = pconst \"exn\"\nlet (list_lid : FStar_Ident.lident) = pconst \"list\"\nlet (immutable_array_t_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"ImmutableArray\"; \"Base\"; \"t\"]\nlet (immutable_array_of_list_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"ImmutableArray\"; \"Base\"; \"of_list\"]\nlet (immutable_array_length_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"ImmutableArray\"; \"Base\"; \"length\"]\nlet (immutable_array_index_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"ImmutableArray\"; \"Base\"; \"index\"]\nlet (eqtype_lid : FStar_Ident.lident) = pconst \"eqtype\"\nlet (option_lid : FStar_Ident.lident) = psnconst \"option\"\nlet (either_lid : FStar_Ident.lident) = psconst \"either\"\nlet (pattern_lid : FStar_Ident.lident) = psconst \"pattern\"\nlet (lex_t_lid : FStar_Ident.lident) = 
pconst \"lex_t\"\nlet (precedes_lid : FStar_Ident.lident) = pconst \"precedes\"\nlet (smtpat_lid : FStar_Ident.lident) = psconst \"smt_pat\"\nlet (smtpatOr_lid : FStar_Ident.lident) = psconst \"smt_pat_or\"\nlet (monadic_lid : FStar_Ident.lident) = pconst \"M\"\nlet (spinoff_lid : FStar_Ident.lident) = psconst \"spinoff\"\nlet (inl_lid : FStar_Ident.lident) = psconst \"Inl\"\nlet (inr_lid : FStar_Ident.lident) = psconst \"Inr\"\nlet (int8_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Int8\"; \"t\"]\nlet (uint8_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"UInt8\"; \"t\"]\nlet (int16_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Int16\"; \"t\"]\nlet (uint16_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"UInt16\"; \"t\"]\nlet (int32_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Int32\"; \"t\"]\nlet (uint32_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"UInt32\"; \"t\"]\nlet (int64_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Int64\"; \"t\"]\nlet (uint64_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"UInt64\"; \"t\"]\nlet (salloc_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"ST\"; \"salloc\"]\nlet (swrite_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"ST\"; \"op_Colon_Equals\"]\nlet (sread_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"ST\"; \"op_Bang\"]\nlet (max_lid : FStar_Ident.lident) = p2l [\"max\"]\nlet (real_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Real\"; \"real\"]\nlet (float_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Float\"; \"float\"]\nlet (char_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Char\"; \"char\"]\nlet (heap_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Heap\"; \"heap\"]\nlet (logical_lid : FStar_Ident.lident) = pconst \"logical\"\nlet (smt_theory_symbol_attr_lid : FStar_Ident.lident) =\n  pconst \"smt_theory_symbol\"\nlet (true_lid : FStar_Ident.lident) = pconst \"l_True\"\nlet (false_lid : FStar_Ident.lident) = pconst \"l_False\"\nlet (and_lid : FStar_Ident.lident) = pconst \"l_and\"\nlet (or_lid : FStar_Ident.lident) = pconst 
\"l_or\"\nlet (not_lid : FStar_Ident.lident) = pconst \"l_not\"\nlet (imp_lid : FStar_Ident.lident) = pconst \"l_imp\"\nlet (iff_lid : FStar_Ident.lident) = pconst \"l_iff\"\nlet (ite_lid : FStar_Ident.lident) = pconst \"l_ITE\"\nlet (exists_lid : FStar_Ident.lident) = pconst \"l_Exists\"\nlet (forall_lid : FStar_Ident.lident) = pconst \"l_Forall\"\nlet (haseq_lid : FStar_Ident.lident) = pconst \"hasEq\"\nlet (b2t_lid : FStar_Ident.lident) = pconst \"b2t\"\nlet (admit_lid : FStar_Ident.lident) = pconst \"admit\"\nlet (magic_lid : FStar_Ident.lident) = pconst \"magic\"\nlet (has_type_lid : FStar_Ident.lident) = pconst \"has_type\"\nlet (c_true_lid : FStar_Ident.lident) = pconst \"trivial\"\nlet (empty_type_lid : FStar_Ident.lident) = pconst \"empty\"\nlet (c_and_lid : FStar_Ident.lident) = pconst \"pair\"\nlet (c_or_lid : FStar_Ident.lident) = pconst \"sum\"\nlet (dtuple2_lid : FStar_Ident.lident) = pconst \"dtuple2\"\nlet (eq2_lid : FStar_Ident.lident) = pconst \"eq2\"\nlet (eq3_lid : FStar_Ident.lident) = pconst \"op_Equals_Equals_Equals\"\nlet (c_eq2_lid : FStar_Ident.lident) = pconst \"equals\"\nlet (cons_lid : FStar_Ident.lident) = pconst \"Cons\"\nlet (nil_lid : FStar_Ident.lident) = pconst \"Nil\"\nlet (some_lid : FStar_Ident.lident) = psnconst \"Some\"\nlet (none_lid : FStar_Ident.lident) = psnconst \"None\"\nlet (assume_lid : FStar_Ident.lident) = pconst \"_assume\"\nlet (assert_lid : FStar_Ident.lident) = pconst \"_assert\"\nlet (pure_wp_lid : FStar_Ident.lident) = pconst \"pure_wp\"\nlet (pure_wp_monotonic_lid : FStar_Ident.lident) = pconst \"pure_wp_monotonic\"\nlet (pure_wp_monotonic0_lid : FStar_Ident.lident) =\n  pconst \"pure_wp_monotonic0\"\nlet (trivial_pure_post_lid : FStar_Ident.lident) =\n  psconst \"trivial_pure_post\"\nlet (pure_assert_wp_lid : FStar_Ident.lident) = pconst \"pure_assert_wp0\"\nlet (pure_assume_wp_lid : FStar_Ident.lident) = pconst \"pure_assume_wp0\"\nlet (assert_norm_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; 
\"Pervasives\"; \"assert_norm\"]\nlet (list_append_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"List\"; \"append\"]\nlet (list_tot_append_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"List\"; \"Tot\"; \"Base\"; \"append\"]\nlet (id_lid : FStar_Ident.lident) = psconst \"id\"\nlet (c2l : Prims.string -> FStar_Ident.lident) =\n  fun s -> p2l [\"FStar\"; \"Char\"; s]\nlet (char_u32_of_char : FStar_Ident.lident) = c2l \"u32_of_char\"\nlet (s2l : Prims.string -> FStar_Ident.lident) =\n  fun n -> p2l [\"FStar\"; \"String\"; n]\nlet (string_list_of_string_lid : FStar_Ident.lident) = s2l \"list_of_string\"\nlet (string_string_of_list_lid : FStar_Ident.lident) = s2l \"string_of_list\"\nlet (string_make_lid : FStar_Ident.lident) = s2l \"make\"\nlet (string_split_lid : FStar_Ident.lident) = s2l \"split\"\nlet (string_concat_lid : FStar_Ident.lident) = s2l \"concat\"\nlet (string_compare_lid : FStar_Ident.lident) = s2l \"compare\"\nlet (string_lowercase_lid : FStar_Ident.lident) = s2l \"lowercase\"\nlet (string_uppercase_lid : FStar_Ident.lident) = s2l \"uppercase\"\nlet (string_index_lid : FStar_Ident.lident) = s2l \"index\"\nlet (string_index_of_lid : FStar_Ident.lident) = s2l \"index_of\"\nlet (string_sub_lid : FStar_Ident.lident) = s2l \"sub\"\nlet (prims_strcat_lid : FStar_Ident.lident) = pconst \"strcat\"\nlet (prims_op_Hat_lid : FStar_Ident.lident) = pconst \"op_Hat\"\nlet (let_in_typ : FStar_Ident.lident) = p2l [\"Prims\"; \"Let\"]\nlet (string_of_int_lid : FStar_Ident.lident) = p2l [\"Prims\"; \"string_of_int\"]\nlet (string_of_bool_lid : FStar_Ident.lident) =\n  p2l [\"Prims\"; \"string_of_bool\"]\nlet (string_compare : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"String\"; \"compare\"]\nlet (order_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Order\"; \"order\"]\nlet (vconfig_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"VConfig\"; \"vconfig\"]\nlet (mkvconfig_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"VConfig\"; \"Mkvconfig\"]\nlet (op_Eq : 
FStar_Ident.lident) = pconst \"op_Equality\"\nlet (op_notEq : FStar_Ident.lident) = pconst \"op_disEquality\"\nlet (op_LT : FStar_Ident.lident) = pconst \"op_LessThan\"\nlet (op_LTE : FStar_Ident.lident) = pconst \"op_LessThanOrEqual\"\nlet (op_GT : FStar_Ident.lident) = pconst \"op_GreaterThan\"\nlet (op_GTE : FStar_Ident.lident) = pconst \"op_GreaterThanOrEqual\"\nlet (op_Subtraction : FStar_Ident.lident) = pconst \"op_Subtraction\"\nlet (op_Minus : FStar_Ident.lident) = pconst \"op_Minus\"\nlet (op_Addition : FStar_Ident.lident) = pconst \"op_Addition\"\nlet (op_Multiply : FStar_Ident.lident) = pconst \"op_Multiply\"\nlet (op_Division : FStar_Ident.lident) = pconst \"op_Division\"\nlet (op_Modulus : FStar_Ident.lident) = pconst \"op_Modulus\"\nlet (op_And : FStar_Ident.lident) = pconst \"op_AmpAmp\"\nlet (op_Or : FStar_Ident.lident) = pconst \"op_BarBar\"\nlet (op_Negation : FStar_Ident.lident) = pconst \"op_Negation\"\nlet (real_const : Prims.string -> FStar_Ident.lident) =\n  fun s -> p2l [\"FStar\"; \"Real\"; s]\nlet (real_op_LT : FStar_Ident.lident) = real_const \"op_Less_Dot\"\nlet (real_op_LTE : FStar_Ident.lident) = real_const \"op_Less_Equals_Dot\"\nlet (real_op_GT : FStar_Ident.lident) = real_const \"op_Greater_Dot\"\nlet (real_op_GTE : FStar_Ident.lident) = real_const \"op_Greater_Equals_Dot\"\nlet (real_op_Subtraction : FStar_Ident.lident) =\n  real_const \"op_Subtraction_Dot\"\nlet (real_op_Addition : FStar_Ident.lident) = real_const \"op_Plus_Dot\"\nlet (real_op_Multiply : FStar_Ident.lident) = real_const \"op_Star_Dot\"\nlet (real_op_Division : FStar_Ident.lident) = real_const \"op_Slash_Dot\"\nlet (real_of_int : FStar_Ident.lident) = real_const \"of_int\"\nlet (bvconst : Prims.string -> FStar_Ident.lident) =\n  fun s -> p2l [\"FStar\"; \"BV\"; s]\nlet (bv_t_lid : FStar_Ident.lident) = bvconst \"bv_t\"\nlet (nat_to_bv_lid : FStar_Ident.lident) = bvconst \"int2bv\"\nlet (bv_to_nat_lid : FStar_Ident.lident) = bvconst \"bv2int\"\nlet (bv_and_lid : 
FStar_Ident.lident) = bvconst \"bvand\"\nlet (bv_xor_lid : FStar_Ident.lident) = bvconst \"bvxor\"\nlet (bv_or_lid : FStar_Ident.lident) = bvconst \"bvor\"\nlet (bv_add_lid : FStar_Ident.lident) = bvconst \"bvadd\"\nlet (bv_sub_lid : FStar_Ident.lident) = bvconst \"bvsub\"\nlet (bv_shift_left_lid : FStar_Ident.lident) = bvconst \"bvshl\"\nlet (bv_shift_right_lid : FStar_Ident.lident) = bvconst \"bvshr\"\nlet (bv_udiv_lid : FStar_Ident.lident) = bvconst \"bvdiv\"\nlet (bv_mod_lid : FStar_Ident.lident) = bvconst \"bvmod\"\nlet (bv_mul_lid : FStar_Ident.lident) = bvconst \"bvmul\"\nlet (bv_ult_lid : FStar_Ident.lident) = bvconst \"bvult\"\nlet (bv_uext_lid : FStar_Ident.lident) = bvconst \"bv_uext\"\nlet (array_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Array\"; \"array\"]\nlet (array_of_list_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Array\"; \"of_list\"]\nlet (st_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"ST\"]\nlet (write_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"ST\"; \"write\"]\nlet (read_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"ST\"; \"read\"]\nlet (alloc_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"ST\"; \"alloc\"]\nlet (op_ColonEq : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"ST\"; \"op_Colon_Equals\"]\nlet (ref_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Heap\"; \"ref\"]\nlet (heap_addr_of_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Heap\"; \"addr_of\"]\nlet (set_empty : FStar_Ident.lident) = p2l [\"FStar\"; \"Set\"; \"empty\"]\nlet (set_singleton : FStar_Ident.lident) = p2l [\"FStar\"; \"Set\"; \"singleton\"]\nlet (set_union : FStar_Ident.lident) = p2l [\"FStar\"; \"Set\"; \"union\"]\nlet (fstar_hyperheap_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"HyperHeap\"]\nlet (rref_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"HyperHeap\"; \"rref\"]\nlet (erased_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Ghost\"; \"erased\"]\nlet (effect_PURE_lid : FStar_Ident.lident) = pconst \"PURE\"\nlet (effect_Pure_lid : FStar_Ident.lident) = 
pconst \"Pure\"\nlet (effect_Tot_lid : FStar_Ident.lident) = pconst \"Tot\"\nlet (effect_Lemma_lid : FStar_Ident.lident) = psconst \"Lemma\"\nlet (effect_GTot_lid : FStar_Ident.lident) = pconst \"GTot\"\nlet (effect_GHOST_lid : FStar_Ident.lident) = pconst \"GHOST\"\nlet (effect_Ghost_lid : FStar_Ident.lident) = pconst \"Ghost\"\nlet (effect_DIV_lid : FStar_Ident.lident) = psconst \"DIV\"\nlet (effect_Div_lid : FStar_Ident.lident) = psconst \"Div\"\nlet (effect_Dv_lid : FStar_Ident.lident) = psconst \"Dv\"\nlet (compiler_effect_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Compiler\"; \"Effect\"]\nlet (compiler_effect_ALL_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Compiler\"; \"Effect\"; \"ALL\"]\nlet (compiler_effect_ML_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Compiler\"; \"Effect\"; \"ML\"]\nlet (compiler_effect_failwith_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Compiler\"; \"Effect\"; \"failwith\"]\nlet (compiler_effect_try_with_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Compiler\"; \"Effect\"; \"try_with\"]\nlet (all_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"All\"]\nlet (all_ALL_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"All\"; \"All\"]\nlet (all_ML_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"All\"; \"ML\"]\nlet (all_failwith_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"All\"; \"failwith\"]\nlet (all_try_with_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"All\"; \"try_with\"]\nlet (effect_ALL_lid : unit -> FStar_Ident.lident) =\n  fun uu___ ->\n    let uu___1 = false in\n    if uu___1 then compiler_effect_ALL_lid else all_lid\nlet (effect_ML_lid : unit -> FStar_Ident.lident) =\n  fun uu___ ->\n    let uu___1 = false in\n    if uu___1 then compiler_effect_ML_lid else all_ML_lid\nlet (failwith_lid : unit -> FStar_Ident.lident) =\n  fun uu___ ->\n    let uu___1 = false in\n    if uu___1 then compiler_effect_failwith_lid else all_failwith_lid\nlet (try_with_lid : unit -> FStar_Ident.lident) =\n  fun uu___ ->\n    let 
uu___1 = false in\n    if uu___1 then compiler_effect_try_with_lid else all_try_with_lid\nlet (as_requires : FStar_Ident.lident) = pconst \"as_requires\"\nlet (as_ensures : FStar_Ident.lident) = pconst \"as_ensures\"\nlet (decreases_lid : FStar_Ident.lident) = pconst \"decreases\"\nlet (inspect : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Tactics\"; \"Builtins\"; \"inspect\"]\nlet (pack : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Tactics\"; \"Builtins\"; \"pack\"]\nlet (binder_to_term : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Tactics\"; \"Derived\"; \"binder_to_term\"]\nlet (reveal : FStar_Ident.lident) = p2l [\"FStar\"; \"Ghost\"; \"reveal\"]\nlet (hide : FStar_Ident.lident) = p2l [\"FStar\"; \"Ghost\"; \"hide\"]\nlet (term_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Reflection\"; \"Types\"; \"term\"]\nlet (term_view_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Reflection\"; \"Data\"; \"term_view\"]\nlet (decls_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Reflection\"; \"Data\"; \"decls\"]\nlet (ctx_uvar_and_subst_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Reflection\"; \"Types\"; \"ctx_uvar_and_subst\"]\nlet (universe_uvar_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Reflection\"; \"Types\"; \"universe_uvar\"]\nlet (range_lid : FStar_Ident.lident) = pconst \"range\"\nlet (range_of_lid : FStar_Ident.lident) = pconst \"range_of\"\nlet (labeled_lid : FStar_Ident.lident) = pconst \"labeled\"\nlet (range_0 : FStar_Ident.lident) = pconst \"range_0\"\nlet (guard_free : FStar_Ident.lident) = pconst \"guard_free\"\nlet (inversion_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Pervasives\"; \"inversion\"]\nlet (normalize : FStar_Ident.lident) = psconst \"normalize\"\nlet (normalize_term : FStar_Ident.lident) = psconst \"normalize_term\"\nlet (norm : FStar_Ident.lident) = psconst \"norm\"\nlet (steps_simpl : FStar_Ident.lident) = psconst \"simplify\"\nlet (steps_weak : FStar_Ident.lident) = psconst \"weak\"\nlet (steps_hnf : FStar_Ident.lident) 
= psconst \"hnf\"\nlet (steps_primops : FStar_Ident.lident) = psconst \"primops\"\nlet (steps_zeta : FStar_Ident.lident) = psconst \"zeta\"\nlet (steps_zeta_full : FStar_Ident.lident) = psconst \"zeta_full\"\nlet (steps_iota : FStar_Ident.lident) = psconst \"iota\"\nlet (steps_delta : FStar_Ident.lident) = psconst \"delta\"\nlet (steps_reify : FStar_Ident.lident) = psconst \"reify_\"\nlet (steps_unfoldonly : FStar_Ident.lident) = psconst \"delta_only\"\nlet (steps_unfoldfully : FStar_Ident.lident) = psconst \"delta_fully\"\nlet (steps_unfoldattr : FStar_Ident.lident) = psconst \"delta_attr\"\nlet (steps_unfoldqual : FStar_Ident.lident) = psconst \"delta_qualifier\"\nlet (steps_unfoldnamespace : FStar_Ident.lident) = psconst \"delta_namespace\"\nlet (steps_unascribe : FStar_Ident.lident) = psconst \"unascribe\"\nlet (steps_nbe : FStar_Ident.lident) = psconst \"nbe\"\nlet (steps_unmeta : FStar_Ident.lident) = psconst \"unmeta\"\nlet (deprecated_attr : FStar_Ident.lident) = pconst \"deprecated\"\nlet (warn_on_use_attr : FStar_Ident.lident) = pconst \"warn_on_use\"\nlet (inline_let_attr : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Pervasives\"; \"inline_let\"]\nlet (rename_let_attr : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Pervasives\"; \"rename_let\"]\nlet (plugin_attr : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Pervasives\"; \"plugin\"]\nlet (tcnorm_attr : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Pervasives\"; \"tcnorm\"]\nlet (dm4f_bind_range_attr : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Pervasives\"; \"dm4f_bind_range\"]\nlet (must_erase_for_extraction_attr : FStar_Ident.lident) =\n  psconst \"must_erase_for_extraction\"\nlet (strict_on_arguments_attr : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Pervasives\"; \"strict_on_arguments\"]\nlet (resolve_implicits_attr_string : Prims.string) =\n  \"FStar.Pervasives.resolve_implicits\"\nlet (override_resolve_implicits_handler_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Pervasives\"; 
\"override_resolve_implicits_handler\"]\nlet (handle_smt_goals_attr : FStar_Ident.lident) = psconst \"handle_smt_goals\"\nlet (handle_smt_goals_attr_string : Prims.string) =\n  \"FStar.Pervasives.handle_smt_goals\"\nlet (erasable_attr : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Pervasives\"; \"erasable\"]\nlet (comment_attr : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Pervasives\"; \"Comment\"]\nlet (fail_attr : FStar_Ident.lident) = psconst \"expect_failure\"\nlet (fail_lax_attr : FStar_Ident.lident) = psconst \"expect_lax_failure\"\nlet (tcdecltime_attr : FStar_Ident.lident) = psconst \"tcdecltime\"\nlet (noextract_to_attr : FStar_Ident.lident) = psconst \"noextract_to\"\nlet (unifier_hint_injective_lid : FStar_Ident.lident) =\n  psconst \"unifier_hint_injective\"\nlet (normalize_for_extraction_lid : FStar_Ident.lident) =\n  psconst \"normalize_for_extraction\"\nlet (postprocess_with : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Tactics\"; \"Effect\"; \"postprocess_with\"]\nlet (preprocess_with : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Tactics\"; \"Effect\"; \"preprocess_with\"]\nlet (postprocess_extr_with : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Tactics\"; \"Effect\"; \"postprocess_for_extraction_with\"]\nlet (check_with_lid : FStar_Ident.lident) =\n  FStar_Ident.lid_of_path [\"FStar\"; \"Reflection\"; \"Builtins\"; \"check_with\"]\n    FStar_Compiler_Range.dummyRange\nlet (commute_nested_matches_lid : FStar_Ident.lident) =\n  psconst \"commute_nested_matches\"\nlet (remove_unused_type_parameters_lid : FStar_Ident.lident) =\n  psconst \"remove_unused_type_parameters\"\nlet (ite_soundness_by_attr : FStar_Ident.lident) = psconst \"ite_soundness_by\"\nlet (default_effect_attr : FStar_Ident.lident) = psconst \"default_effect\"\nlet (top_level_effect_attr : FStar_Ident.lident) = psconst \"top_level_effect\"\nlet (effect_parameter_attr : FStar_Ident.lident) = psconst \"effect_param\"\nlet (bind_has_range_args_attr : FStar_Ident.lident) =\n  psconst 
\"bind_has_range_args\"\nlet (primitive_extraction_attr : FStar_Ident.lident) =\n  psconst \"primitive_extraction\"\nlet (binder_strictly_positive_attr : FStar_Ident.lident) =\n  psconst \"strictly_positive\"\nlet (no_auto_projectors_attr : FStar_Ident.lident) =\n  psconst \"no_auto_projectors\"\nlet (no_subtping_attr_lid : FStar_Ident.lident) = psconst \"no_subtyping\"\nlet (attr_substitute_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Pervasives\"; \"Substitute\"]\nlet (well_founded_relation_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"WellFounded\"; \"well_founded_relation\"]\n\nlet (sli : FStar_Ident.lident -> Prims.string) =\n  fun l ->\n    let uu___ = false in\n    if uu___\n    then FStar_Ident.string_of_lid l\n    else\n      (let uu___2 = FStar_Ident.ident_of_lid l in\n       FStar_Ident.string_of_id uu___2)\nlet (const_to_string : FStar_Const.sconst -> Prims.string) =\n  fun x ->\n    match x with\n    | FStar_Const.Const_effect -> \"Effect\"\n    | FStar_Const.Const_unit -> \"()\"\n    | FStar_Const.Const_bool b -> if b then \"true\" else \"false\"\n    | FStar_Const.Const_real r -> FStar_String.op_Hat r \"R\"\n    | FStar_Const.Const_string (s, uu___) ->\n        FStar_Compiler_Util.format1 \"\\\"%s\\\"\" s\n    | FStar_Const.Const_int (x1, uu___) -> x1\n    | FStar_Const.Const_char c ->\n        let uu___ =\n          FStar_String.op_Hat (FStar_Compiler_Util.string_of_char c) \"'\" in\n        FStar_String.op_Hat \"'\" uu___\n    | FStar_Const.Const_range r -> FStar_Compiler_Range.string_of_range r\n    | FStar_Const.Const_range_of -> \"range_of\"\n    | FStar_Const.Const_set_range_of -> \"set_range_of\"\n    | FStar_Const.Const_reify lopt ->\n        let uu___ =\n          match lopt with\n          | FStar_Pervasives_Native.None -> \"\"\n          | FStar_Pervasives_Native.Some l ->\n              let uu___1 = FStar_Ident.string_of_lid l in\n              FStar_Compiler_Util.format1 \"<%s>\" uu___1 in\n        FStar_Compiler_Util.format1 
\"reify%s\" uu___\n    | FStar_Const.Const_reflect l ->\n        let uu___ = sli l in\n        FStar_Compiler_Util.format1 \"[[%s.reflect]]\" uu___\nlet (mk_tuple_lid :\n  Prims.int -> FStar_Compiler_Range.range -> FStar_Ident.lident) =\n  fun n ->\n    fun r ->\n      let t =\n        let uu___ = FStar_Compiler_Util.string_of_int n in\n        FStar_Compiler_Util.format1 \"tuple%s\" uu___ in\n      let uu___ = psnconst t in FStar_Ident.set_lid_range uu___ r\nlet (lid_tuple2 : FStar_Ident.lident) =\n  mk_tuple_lid (Prims.of_int (2)) FStar_Compiler_Range.dummyRange\nlet (lid_tuple3 : FStar_Ident.lident) =\n  mk_tuple_lid (Prims.of_int (3)) FStar_Compiler_Range.dummyRange\nlet (is_tuple_constructor_string : Prims.string -> Prims.bool) =\n  fun s -> FStar_Compiler_Util.starts_with s \"FStar.Pervasives.Native.tuple\"\nlet (is_tuple_constructor_id : FStar_Ident.ident -> Prims.bool) =\n  fun id ->\n    let uu___ = FStar_Ident.string_of_id id in\n    is_tuple_constructor_string uu___\nlet (is_tuple_constructor_lid : FStar_Ident.lident -> Prims.bool) =\n  fun lid ->\n    let uu___ = FStar_Ident.string_of_lid lid in\n    is_tuple_constructor_string uu___\nlet (mk_tuple_data_lid :\n  Prims.int -> FStar_Compiler_Range.range -> FStar_Ident.lident) =\n  fun n ->\n    fun r ->\n      let t =\n        let uu___ = FStar_Compiler_Util.string_of_int n in\n        FStar_Compiler_Util.format1 \"Mktuple%s\" uu___ in\n      let uu___ = psnconst t in FStar_Ident.set_lid_range uu___ r\nlet (lid_Mktuple2 : FStar_Ident.lident) =\n  mk_tuple_data_lid (Prims.of_int (2)) FStar_Compiler_Range.dummyRange\nlet (lid_Mktuple3 : FStar_Ident.lident) =\n  mk_tuple_data_lid (Prims.of_int (3)) FStar_Compiler_Range.dummyRange\nlet (is_tuple_datacon_string : Prims.string -> Prims.bool) =\n  fun s ->\n    FStar_Compiler_Util.starts_with s \"FStar.Pervasives.Native.Mktuple\"\nlet (is_tuple_datacon_id : FStar_Ident.ident -> Prims.bool) =\n  fun id ->\n    let uu___ = FStar_Ident.string_of_id id in 
is_tuple_datacon_string uu___\nlet (is_tuple_datacon_lid : FStar_Ident.lident -> Prims.bool) =\n  fun lid ->\n    let uu___ = FStar_Ident.string_of_lid lid in\n    is_tuple_datacon_string uu___\nlet (is_tuple_data_lid : FStar_Ident.lident -> Prims.int -> Prims.bool) =\n  fun f ->\n    fun n ->\n      let uu___ = mk_tuple_data_lid n FStar_Compiler_Range.dummyRange in\n      FStar_Ident.lid_equals f uu___\nlet (is_tuple_data_lid' : FStar_Ident.lident -> Prims.bool) =\n  fun f ->\n    let uu___ = FStar_Ident.string_of_lid f in is_tuple_datacon_string uu___\nlet (mod_prefix_dtuple : Prims.int -> Prims.string -> FStar_Ident.lident) =\n  fun n -> if n = (Prims.of_int (2)) then pconst else psconst\nlet (mk_dtuple_lid :\n  Prims.int -> FStar_Compiler_Range.range -> FStar_Ident.lident) =\n  fun n ->\n    fun r ->\n      let t =\n        let uu___ = FStar_Compiler_Util.string_of_int n in\n        FStar_Compiler_Util.format1 \"dtuple%s\" uu___ in\n      let uu___ = let uu___1 = mod_prefix_dtuple n in uu___1 t in\n      FStar_Ident.set_lid_range uu___ r\nlet (is_dtuple_constructor_string : Prims.string -> Prims.bool) =\n  fun s ->\n    (s = \"Prims.dtuple2\") ||\n      (FStar_Compiler_Util.starts_with s \"FStar.Pervasives.dtuple\")\nlet (is_dtuple_constructor_lid : FStar_Ident.lident -> Prims.bool) =\n  fun lid ->\n    let uu___ = FStar_Ident.string_of_lid lid in\n    is_dtuple_constructor_string uu___\nlet (mk_dtuple_data_lid :\n  Prims.int -> FStar_Compiler_Range.range -> FStar_Ident.lident) =\n  fun n ->\n    fun r ->\n      let t =\n        let uu___ = FStar_Compiler_Util.string_of_int n in\n        FStar_Compiler_Util.format1 \"Mkdtuple%s\" uu___ in\n      let uu___ = let uu___1 = mod_prefix_dtuple n in uu___1 t in\n      FStar_Ident.set_lid_range uu___ r\nlet (is_dtuple_datacon_string : Prims.string -> Prims.bool) =\n  fun s ->\n    (s = \"Prims.Mkdtuple2\") ||\n      (FStar_Compiler_Util.starts_with s \"FStar.Pervasives.Mkdtuple\")\nlet (is_dtuple_data_lid : 
FStar_Ident.lident -> Prims.int -> Prims.bool) =\n  fun f ->\n    fun n ->\n      let uu___ = mk_dtuple_data_lid n FStar_Compiler_Range.dummyRange in\n      FStar_Ident.lid_equals f uu___\nlet (is_dtuple_data_lid' : FStar_Ident.lident -> Prims.bool) =\n  fun f ->\n    let uu___ = FStar_Ident.string_of_lid f in is_dtuple_datacon_string uu___\nlet (is_name : FStar_Ident.lident -> Prims.bool) =\n  fun lid ->\n    let c =\n      let uu___ =\n        let uu___1 = FStar_Ident.ident_of_lid lid in\n        FStar_Ident.string_of_id uu___1 in\n      FStar_Compiler_Util.char_at uu___ Prims.int_zero in\n    FStar_Compiler_Util.is_upper c\nlet (fstar_tactics_lid' : Prims.string Prims.list -> FStar_Ident.lid) =\n  fun s ->\n    FStar_Ident.lid_of_path\n      (FStar_Compiler_List.op_At [\"FStar\"; \"Tactics\"] s)\n      FStar_Compiler_Range.dummyRange\nlet (fstar_tactics_lid : Prims.string -> FStar_Ident.lid) =\n  fun s -> fstar_tactics_lid' [s]\nlet (tac_lid : FStar_Ident.lid) = fstar_tactics_lid' [\"Effect\"; \"tac\"]\nlet (tactic_lid : FStar_Ident.lid) = fstar_tactics_lid' [\"Effect\"; \"tactic\"]\nlet (mk_class_lid : FStar_Ident.lid) =\n  fstar_tactics_lid' [\"Typeclasses\"; \"mk_class\"]\nlet (tcresolve_lid : FStar_Ident.lid) =\n  fstar_tactics_lid' [\"Typeclasses\"; \"tcresolve\"]\nlet (solve_lid : FStar_Ident.lid) =\n  fstar_tactics_lid' [\"Typeclasses\"; \"solve\"]\nlet (tcclass_lid : FStar_Ident.lid) =\n  fstar_tactics_lid' [\"Typeclasses\"; \"tcclass\"]\nlet (tcinstance_lid : FStar_Ident.lid) =\n  fstar_tactics_lid' [\"Typeclasses\"; \"tcinstance\"]\nlet (no_method_lid : FStar_Ident.lid) =\n  fstar_tactics_lid' [\"Typeclasses\"; \"no_method\"]\nlet (effect_TAC_lid : FStar_Ident.lid) = fstar_tactics_lid' [\"Effect\"; \"TAC\"]\nlet (effect_Tac_lid : FStar_Ident.lid) = fstar_tactics_lid' [\"Effect\"; \"Tac\"]\nlet (by_tactic_lid : FStar_Ident.lid) =\n  fstar_tactics_lid' [\"Effect\"; \"with_tactic\"]\nlet (rewrite_by_tactic_lid : FStar_Ident.lid) =\n  fstar_tactics_lid' 
[\"Effect\"; \"rewrite_with_tactic\"]\nlet (synth_lid : FStar_Ident.lid) =\n  fstar_tactics_lid' [\"Effect\"; \"synth_by_tactic\"]\nlet (assert_by_tactic_lid : FStar_Ident.lid) =\n  fstar_tactics_lid' [\"Effect\"; \"assert_by_tactic\"]\nlet (fstar_syntax_syntax_term : FStar_Ident.lident) =\n  FStar_Ident.lid_of_str \"FStar.Syntax.Syntax.term\"\nlet (binder_lid : FStar_Ident.lident) =\n  FStar_Ident.lid_of_path [\"FStar\"; \"Reflection\"; \"Types\"; \"binder\"]\n    FStar_Compiler_Range.dummyRange\nlet (binders_lid : FStar_Ident.lident) =\n  FStar_Ident.lid_of_path [\"FStar\"; \"Reflection\"; \"Types\"; \"binders\"]\n    FStar_Compiler_Range.dummyRange\nlet (bv_lid : FStar_Ident.lident) =\n  FStar_Ident.lid_of_path [\"FStar\"; \"Reflection\"; \"Types\"; \"bv\"]\n    FStar_Compiler_Range.dummyRange\nlet (fv_lid : FStar_Ident.lident) =\n  FStar_Ident.lid_of_path [\"FStar\"; \"Reflection\"; \"Types\"; \"fv\"]\n    FStar_Compiler_Range.dummyRange\nlet (norm_step_lid : FStar_Ident.lident) = psconst \"norm_step\"\nlet (calc_lid : Prims.string -> FStar_Ident.lid) =\n  fun i ->\n    FStar_Ident.lid_of_path [\"FStar\"; \"Calc\"; i]\n      FStar_Compiler_Range.dummyRange\nlet (calc_init_lid : FStar_Ident.lid) = calc_lid \"calc_init\"\nlet (calc_step_lid : FStar_Ident.lid) = calc_lid \"calc_step\"\nlet (calc_finish_lid : FStar_Ident.lid) = calc_lid \"calc_finish\"\nlet (calc_push_impl_lid : FStar_Ident.lid) = calc_lid \"calc_push_impl\"\nlet (classical_sugar_lid : Prims.string -> FStar_Ident.lid) =\n  fun i ->\n    FStar_Ident.lid_of_path [\"FStar\"; \"Classical\"; \"Sugar\"; i]\n      FStar_Compiler_Range.dummyRange\nlet (forall_intro_lid : FStar_Ident.lid) = classical_sugar_lid \"forall_intro\"\nlet (exists_intro_lid : FStar_Ident.lid) = classical_sugar_lid \"exists_intro\"\nlet (implies_intro_lid : FStar_Ident.lid) =\n  classical_sugar_lid \"implies_intro\"\nlet (or_intro_left_lid : FStar_Ident.lid) =\n  classical_sugar_lid \"or_intro_left\"\nlet (or_intro_right_lid : 
FStar_Ident.lid) =\n  classical_sugar_lid \"or_intro_right\"\nlet (and_intro_lid : FStar_Ident.lid) = classical_sugar_lid \"and_intro\"\nlet (forall_elim_lid : FStar_Ident.lid) = classical_sugar_lid \"forall_elim\"\nlet (exists_elim_lid : FStar_Ident.lid) = classical_sugar_lid \"exists_elim\"\nlet (implies_elim_lid : FStar_Ident.lid) = classical_sugar_lid \"implies_elim\"\nlet (or_elim_lid : FStar_Ident.lid) = classical_sugar_lid \"or_elim\"\nlet (and_elim_lid : FStar_Ident.lid) = classical_sugar_lid \"and_elim\"\nlet (match_returns_def_name : Prims.string) =\n  FStar_String.op_Hat FStar_Ident.reserved_prefix \"_ret_\"\nlet (steel_memory_inv_lid : FStar_Ident.lident) =\n  FStar_Ident.lid_of_path [\"Steel\"; \"Memory\"; \"inv\"]\n    FStar_Compiler_Range.dummyRange\nlet (steel_new_invariant_lid : FStar_Ident.lident) =\n  FStar_Ident.lid_of_path [\"Steel\"; \"Effect\"; \"Atomic\"; \"new_invariant\"]\n    FStar_Compiler_Range.dummyRange\nlet (steel_st_new_invariant_lid : FStar_Ident.lident) =\n  FStar_Ident.lid_of_path [\"Steel\"; \"ST\"; \"Util\"; \"new_invariant\"]\n    FStar_Compiler_Range.dummyRange\nlet (steel_with_invariant_g_lid : FStar_Ident.lident) =\n  FStar_Ident.lid_of_path [\"Steel\"; \"Effect\"; \"Atomic\"; \"with_invariant_g\"]\n    FStar_Compiler_Range.dummyRange\nlet (steel_st_with_invariant_g_lid : FStar_Ident.lident) =\n  FStar_Ident.lid_of_path [\"Steel\"; \"ST\"; \"Util\"; \"with_invariant_g\"]\n    FStar_Compiler_Range.dummyRange\nlet (steel_with_invariant_lid : FStar_Ident.lident) =\n  FStar_Ident.lid_of_path [\"Steel\"; \"Effect\"; \"Atomic\"; \"with_invariant\"]\n    FStar_Compiler_Range.dummyRange\nlet (steel_st_with_invariant_lid : FStar_Ident.lident) =\n  FStar_Ident.lid_of_path [\"Steel\"; \"ST\"; \"Util\"; \"with_invariant\"]\n    FStar_Compiler_Range.dummyRange\nlet (fext_lid : Prims.string -> FStar_Ident.lident) =\n  fun s ->\n    FStar_Ident.lid_of_path [\"FStar\"; \"FunctionalExtensionality\"; s]\n      
FStar_Compiler_Range.dummyRange\nlet (fext_on_domain_lid : FStar_Ident.lident) = fext_lid \"on_domain\"\nlet (fext_on_dom_lid : FStar_Ident.lident) = fext_lid \"on_dom\"\nlet (fext_on_domain_g_lid : FStar_Ident.lident) = fext_lid \"on_domain_g\"\nlet (fext_on_dom_g_lid : FStar_Ident.lident) = fext_lid \"on_dom_g\"\nlet (sealed_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Sealed\"; \"sealed\"]\nlet (seal_lid : FStar_Ident.lident) = p2l [\"FStar\"; \"Sealed\"; \"seal\"]\nlet (unseal_lid : FStar_Ident.lident) =\n  p2l [\"FStar\"; \"Tactics\"; \"Builtins\"; \"unseal\"]\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Parser_Driver.ml",
    "content": "open Prims\nlet (is_cache_file : Prims.string -> Prims.bool) =\n  fun fn ->\n    let uu___ = FStar_Compiler_Util.get_file_extension fn in uu___ = \".cache\"\ntype fragment =\n  | Empty \n  | Modul of FStar_Parser_AST.modul \n  | Decls of FStar_Parser_AST.decl Prims.list \n  | DeclsWithContent of (FStar_Parser_AST.decl *\n  FStar_Parser_ParseIt.code_fragment) Prims.list \nlet (uu___is_Empty : fragment -> Prims.bool) =\n  fun projectee -> match projectee with | Empty -> true | uu___ -> false\nlet (uu___is_Modul : fragment -> Prims.bool) =\n  fun projectee -> match projectee with | Modul _0 -> true | uu___ -> false\nlet (__proj__Modul__item___0 : fragment -> FStar_Parser_AST.modul) =\n  fun projectee -> match projectee with | Modul _0 -> _0\nlet (uu___is_Decls : fragment -> Prims.bool) =\n  fun projectee -> match projectee with | Decls _0 -> true | uu___ -> false\nlet (__proj__Decls__item___0 : fragment -> FStar_Parser_AST.decl Prims.list)\n  = fun projectee -> match projectee with | Decls _0 -> _0\nlet (uu___is_DeclsWithContent : fragment -> Prims.bool) =\n  fun projectee ->\n    match projectee with | DeclsWithContent _0 -> true | uu___ -> false\nlet (__proj__DeclsWithContent__item___0 :\n  fragment ->\n    (FStar_Parser_AST.decl * FStar_Parser_ParseIt.code_fragment) Prims.list)\n  = fun projectee -> match projectee with | DeclsWithContent _0 -> _0\nlet (parse_fragment : FStar_Parser_ParseIt.input_frag -> fragment) =\n  fun frag ->\n    let uu___ =\n      FStar_Parser_ParseIt.parse (FStar_Parser_ParseIt.Toplevel frag) in\n    match uu___ with\n    | FStar_Parser_ParseIt.ASTFragment (FStar_Pervasives.Inl modul, uu___1)\n        -> Modul modul\n    | FStar_Parser_ParseIt.ASTFragment (FStar_Pervasives.Inr [], uu___1) ->\n        Empty\n    | FStar_Parser_ParseIt.ASTFragment (FStar_Pervasives.Inr decls, uu___1)\n        -> Decls decls\n    | FStar_Parser_ParseIt.IncrementalFragment (decls, uu___1, uu___2) ->\n        DeclsWithContent decls\n    | 
FStar_Parser_ParseIt.ParseError (e, msg, r) ->\n        FStar_Errors.raise_error (e, msg) r\n    | FStar_Parser_ParseIt.Term uu___1 ->\n        failwith\n          \"Impossible: parsing a Toplevel always results in an ASTFragment\"\nlet (parse_file :\n  Prims.string ->\n    (FStar_Parser_AST.file * (Prims.string * FStar_Compiler_Range.range)\n      Prims.list))\n  =\n  fun fn ->\n    let uu___ = FStar_Parser_ParseIt.parse (FStar_Parser_ParseIt.Filename fn) in\n    match uu___ with\n    | FStar_Parser_ParseIt.ASTFragment (FStar_Pervasives.Inl ast, comments)\n        -> (ast, comments)\n    | FStar_Parser_ParseIt.ASTFragment (FStar_Pervasives.Inr uu___1, uu___2)\n        ->\n        let msg = FStar_Compiler_Util.format1 \"%s: expected a module\\n\" fn in\n        let r = FStar_Compiler_Range.dummyRange in\n        FStar_Errors.raise_error\n          (FStar_Errors_Codes.Fatal_ModuleExpected, msg) r\n    | FStar_Parser_ParseIt.ParseError (e, msg, r) ->\n        FStar_Errors.raise_error (e, msg) r\n    | FStar_Parser_ParseIt.Term uu___1 ->\n        failwith\n          \"Impossible: parsing a Filename always results in an ASTFragment\""
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Parser_LexFStar.ml",
    "content": "open FStar_Parser_Parse\nopen FStar_Parser_Util\n\nmodule Option  = BatOption\nmodule String  = BatString\nmodule Hashtbl = BatHashtbl\nmodule Sedlexing = FStar_Sedlexing\nmodule L = Sedlexing\nmodule E = FStar_Errors\nmodule Codes = FStar_Errors_Codes\n\nlet ba_of_string s = Array.init (String.length s) (fun i -> Char.code (String.get s i))\nlet array_trim_both a n m = Array.sub a n (Array.length a - n - m)\nlet string_trim_both s n m = BatString.sub s n (String.length s - (n+m))\nlet trim_both   lexbuf n m = string_trim_both (L.lexeme lexbuf) n m\nlet utrim_both  lexbuf n m = array_trim_both (L.ulexeme lexbuf) n m\nlet trim_right  lexbuf n = trim_both lexbuf 0 n\nlet trim_left   lexbuf n = trim_both lexbuf n 0\n\nlet unescape (a:int array) : int =\n  match a.(0) with\n  | 92 (* \\ *) ->\n    (match a.(1) with\n    | 48  (*0*) -> 0\n    | 98  (*b*) -> 8\n    | 116 (*t*) -> 9\n    | 110 (*n*) -> 10\n    | 118 (*v*) -> 11\n    | 102 (*f*) -> 12\n    | 114 (*r*) -> 13\n    | 117 (*u*) ->\n      let s = FStar_Parser_Utf8.from_int_array a 2 4 in\n      int_of_string (\"0x\"^s)\n    | 120 (*x*) ->\n      let s = FStar_Parser_Utf8.from_int_array a 2 2 in\n      int_of_string (\"0x\"^s)\n    | c -> c)\n  | c -> c\n\nlet keywords = Hashtbl.create 0\nlet constructors = Hashtbl.create 0\nlet operators = Hashtbl.create 0\n\nlet () =\n  Hashtbl.add keywords \"attributes\"    ATTRIBUTES  ;\n  Hashtbl.add keywords \"noeq\"          NOEQUALITY  ;\n  Hashtbl.add keywords \"unopteq\"       UNOPTEQUALITY  ;\n  Hashtbl.add keywords \"and\"           AND         ;\n  Hashtbl.add keywords \"assert\"        ASSERT      ;\n  Hashtbl.add keywords \"assume\"        ASSUME      ;\n  Hashtbl.add keywords \"begin\"         BEGIN       ;\n  Hashtbl.add keywords \"by\"            BY          ;\n  Hashtbl.add keywords \"calc\"          CALC        ;\n  Hashtbl.add keywords \"class\"         CLASS       ;\n  Hashtbl.add keywords \"default\"       DEFAULT     ;\n  Hashtbl.add 
keywords \"decreases\"     DECREASES   ;\n  Hashtbl.add keywords \"effect\"        EFFECT      ;\n  Hashtbl.add keywords \"eliminate\"     ELIM;\n  Hashtbl.add keywords \"else\"          ELSE        ;\n  Hashtbl.add keywords \"end\"           END         ;\n  Hashtbl.add keywords \"ensures\"       ENSURES     ;\n  Hashtbl.add keywords \"exception\"     EXCEPTION   ;\n  Hashtbl.add keywords \"exists\"        EXISTS      ;\n  Hashtbl.add keywords \"false\"         FALSE       ;\n  Hashtbl.add keywords \"friend\"        FRIEND      ;\n  Hashtbl.add keywords \"forall\"        FORALL      ;\n  Hashtbl.add keywords \"fun\"           FUN         ;\n  Hashtbl.add keywords \"λ\"             FUN         ;\n  Hashtbl.add keywords \"function\"      FUNCTION    ;\n  Hashtbl.add keywords \"if\"            IF          ;\n  Hashtbl.add keywords \"in\"            IN          ;\n  Hashtbl.add keywords \"include\"       INCLUDE     ;\n  Hashtbl.add keywords \"inline\"        INLINE      ;\n  Hashtbl.add keywords \"inline_for_extraction\"        INLINE_FOR_EXTRACTION      ;\n  Hashtbl.add keywords \"instance\"      INSTANCE    ;\n  Hashtbl.add keywords \"introduce\"     INTRO ;\n  Hashtbl.add keywords \"irreducible\"   IRREDUCIBLE ;\n  Hashtbl.add keywords \"let\"           (LET false) ;\n  Hashtbl.add keywords \"logic\"         LOGIC       ;\n  Hashtbl.add keywords \"match\"         MATCH       ;\n  Hashtbl.add keywords \"returns\"       RETURNS     ;\n  Hashtbl.add keywords \"as\"            AS          ;\n  Hashtbl.add keywords \"module\"        MODULE      ;\n  Hashtbl.add keywords \"new\"           NEW         ;\n  Hashtbl.add keywords \"new_effect\"    NEW_EFFECT  ;\n  Hashtbl.add keywords \"layered_effect\"               LAYERED_EFFECT             ;\n  Hashtbl.add keywords \"polymonadic_bind\"             POLYMONADIC_BIND           ;\n  Hashtbl.add keywords \"polymonadic_subcomp\"          POLYMONADIC_SUBCOMP        ;\n  Hashtbl.add keywords \"noextract\"     NOEXTRACT   ;\n  
Hashtbl.add keywords \"of\"            OF          ;\n  Hashtbl.add keywords \"open\"          OPEN        ;\n  Hashtbl.add keywords \"opaque\"        OPAQUE      ;\n  Hashtbl.add keywords \"private\"       PRIVATE     ;\n  Hashtbl.add keywords \"quote\"         QUOTE       ;\n  Hashtbl.add keywords \"range_of\"      RANGE_OF    ;\n  Hashtbl.add keywords \"rec\"           REC         ;\n  Hashtbl.add keywords \"reifiable\"     REIFIABLE   ;\n  Hashtbl.add keywords \"reify\"         REIFY       ;\n  Hashtbl.add keywords \"reflectable\"   REFLECTABLE ;\n  Hashtbl.add keywords \"requires\"      REQUIRES    ;\n  Hashtbl.add keywords \"set_range_of\"  SET_RANGE_OF;\n  Hashtbl.add keywords \"sub_effect\"    SUB_EFFECT  ;\n  Hashtbl.add keywords \"synth\"         SYNTH       ;\n  Hashtbl.add keywords \"then\"          THEN        ;\n  Hashtbl.add keywords \"total\"         TOTAL       ;\n  Hashtbl.add keywords \"true\"          TRUE        ;\n  Hashtbl.add keywords \"try\"           TRY         ;\n  Hashtbl.add keywords \"type\"          TYPE        ;\n  Hashtbl.add keywords \"unfold\"        UNFOLD      ;\n  Hashtbl.add keywords \"unfoldable\"    UNFOLDABLE  ;\n  Hashtbl.add keywords \"val\"           VAL         ;\n  Hashtbl.add keywords \"when\"          WHEN        ;\n  Hashtbl.add keywords \"with\"          WITH        ;\n  Hashtbl.add keywords \"_\"             UNDERSCORE  ;\n  Hashtbl.add keywords \"α\"             (TVAR \"a\")  ;\n  Hashtbl.add keywords \"β\"             (TVAR \"b\")  ;\n  Hashtbl.add keywords \"γ\"             (TVAR \"c\")  ;\n  Hashtbl.add keywords \"δ\"             (TVAR \"d\")  ;\n  Hashtbl.add keywords \"ε\"             (TVAR \"e\")  ;\n  Hashtbl.add keywords \"φ\"             (TVAR \"f\")  ;\n  Hashtbl.add keywords \"χ\"             (TVAR \"g\")  ;\n  Hashtbl.add keywords \"η\"             (TVAR \"h\")  ;\n  Hashtbl.add keywords \"ι\"             (TVAR \"i\")  ;\n  Hashtbl.add keywords \"κ\"             (TVAR \"k\")  ;\n  Hashtbl.add 
keywords \"μ\"             (TVAR \"m\")  ;\n  Hashtbl.add keywords \"ν\"             (TVAR \"n\")  ;\n  Hashtbl.add keywords \"π\"             (TVAR \"p\")  ;\n  Hashtbl.add keywords \"θ\"             (TVAR \"q\")  ;\n  Hashtbl.add keywords \"ρ\"             (TVAR \"r\")  ;\n  Hashtbl.add keywords \"σ\"             (TVAR \"s\")  ;\n  Hashtbl.add keywords \"τ\"             (TVAR \"t\")  ;\n  Hashtbl.add keywords \"ψ\"             (TVAR \"u\")  ;\n  Hashtbl.add keywords \"ω\"             (TVAR \"w\")  ;\n  Hashtbl.add keywords \"ξ\"             (TVAR \"x\")  ;\n  Hashtbl.add keywords \"ζ\"             (TVAR \"z\")  ;\n  Hashtbl.add constructors \"ℕ\"         (IDENT \"nat\");\n  Hashtbl.add constructors \"ℤ\"         (IDENT \"int\");\n  Hashtbl.add constructors \"𝔹\"         (IDENT \"bool\");\n  let l =\n  [\"~\", TILDE \"~\";\n   \"-\", MINUS;\n   \"/\\\\\", CONJUNCTION;\n   \"\\\\/\", DISJUNCTION;\n   \"<:\", SUBTYPE;\n   \"$:\", EQUALTYPE;\n   \"<@\", SUBKIND;\n   \"(|\", LENS_PAREN_LEFT;\n   \"|)\", LENS_PAREN_RIGHT;\n   \"#\", HASH;\n   \"u#\", UNIV_HASH;\n   \"&\", AMP;\n   \"()\", LPAREN_RPAREN;\n   \"(\", LPAREN;\n   \")\", RPAREN;\n   \",\", COMMA;\n   \"~>\", SQUIGGLY_RARROW;\n   \"->\", RARROW;\n   \"<--\", LONG_LEFT_ARROW;\n   \"<-\", LARROW;\n   \"<==>\", IFF;\n   \"==>\", IMPLIES;\n   \".\", DOT;\n   \"?.\", QMARK_DOT;\n   \"?\", QMARK;\n   \".[\", DOT_LBRACK;\n   \".(|\", DOT_LENS_PAREN_LEFT;\n   \".(\", DOT_LPAREN;\n   \".[|\", DOT_LBRACK_BAR;\n   \"{:pattern\", LBRACE_COLON_PATTERN;\n   \"{:well-founded\", LBRACE_COLON_WELL_FOUNDED;\n   \"returns$\", RETURNS_EQ;\n   \":\", COLON;\n   \"::\", COLON_COLON;\n   \":=\", COLON_EQUALS;\n   \";\", SEMICOLON;\n   \"=\", EQUALS;\n   \"%[\", PERCENT_LBRACK;\n   \"!{\", BANG_LBRACE;\n   \"[@@@\", LBRACK_AT_AT_AT;\n   \"[@@\", LBRACK_AT_AT;\n   \"[@\", LBRACK_AT;\n   \"[\", LBRACK;\n   \"[|\", LBRACK_BAR;\n   \"{|\", LBRACE_BAR;\n   \"|>\", PIPE_RIGHT;\n   \"]\", RBRACK;\n   \"|]\", BAR_RBRACK;\n   \"|}\", 
BAR_RBRACE;\n   \"{\", LBRACE;\n   \"|\", BAR;\n   \"}\", RBRACE;\n   \"$\", DOLLAR;\n     (* New Unicode equivalents *)\n   \"∀\", FORALL;\n   \"∃\", EXISTS;\n   \"⊤\", NAME \"True\";\n   \"⊥\", NAME \"False\";\n   \"⟹\", IMPLIES;\n   \"⟺\", IFF;\n   \"→\", RARROW;\n   \"←\", LARROW;\n   \"⟵\", LONG_LEFT_ARROW;\n   \"↝\", SQUIGGLY_RARROW;\n   \"≔\", COLON_EQUALS;\n   \"∧\", CONJUNCTION;\n   \"∨\", DISJUNCTION;\n   \"¬\", TILDE \"~\";\n   \"⸬\", COLON_COLON;\n   \"▹\", PIPE_RIGHT;\n   \"÷\", OPINFIX3 \"÷\";\n   \"‖\", OPINFIX0a \"||\";\n   \"×\", IDENT \"op_Multiply\";\n   \"∗\", OPINFIX3 \"*\";\n   \"⇒\", OPINFIX0c \"=>\";\n   \"≥\", OPINFIX0c \">=\";\n   \"≤\", OPINFIX0c \"<=\";\n   \"≠\", OPINFIX0c \"<>\";\n   \"≪\", OPINFIX0c \"<<\";\n   \"◃\", OPINFIX0c \"<|\";\n   \"±\", OPPREFIX \"±\";\n   \"∁\", OPPREFIX \"∁\";\n   \"∂\", OPPREFIX \"∂\";\n   \"√\", OPPREFIX \"√\";\n    ] in\n   List.iter (fun (k,v) -> Hashtbl.add operators k v) l\n\nlet current_range lexbuf =\n    FStar_Parser_Util.mksyn_range (fst (L.range lexbuf)) (snd (L.range lexbuf))\n\nlet fail lexbuf (e, msg) =\n     let m = current_range lexbuf in\n     E.raise_error (e, msg) m\n\ntype delimiters = { angle:int ref; paren:int ref; }\nlet n_typ_apps = ref 0\n\nlet is_typ_app_gt () =\n  if !n_typ_apps > 0\n  then (decr n_typ_apps; true)\n  else false\n\nlet rec mknewline n lexbuf =\n  if n = 0 then ()\n  else (L.new_line lexbuf; mknewline (n-1) lexbuf)\n\nlet clean_number x = String.strip ~chars:\"uzyslLUnIN\" x\n\n(* Try to trim each line of [comment] by the ammount of space\n    on the first line of the comment if possible *)\n(* TODO : apply this to FSDOC too *)\nlet maybe_trim_lines start_column comment =\n  if start_column = 0 then comment\n  else\n    let comment_lines = String.split_on_char '\\n' comment in\n    let ensures_empty_prefix k s =\n      let j = min k (String.length s - 1) in\n      let rec aux i = if i > j then k else if s.[i] <> ' ' then i else aux (i+1) in\n      aux 0 in\n    let 
trim_width = List.fold_left ensures_empty_prefix start_column comment_lines in\n    String.concat \"\\n\" (List.map (fun s -> String.tail s trim_width) comment_lines)\n\nlet comment_buffer = Buffer.create 128\n\nlet start_comment lexbuf =\n  Buffer.add_string comment_buffer \"(*\" ;\n  (false, comment_buffer, fst (L.range lexbuf))\n\nlet terminate_comment buffer startpos lexbuf =\n  let endpos = snd (L.range lexbuf) in\n  Buffer.add_string buffer \"*)\" ;\n  let comment = Buffer.contents buffer in\n  let comment = maybe_trim_lines (startpos.Lexing.pos_cnum - startpos.Lexing.pos_bol) comment in\n  Buffer.clear buffer;\n  add_comment (comment, FStar_Parser_Util.mksyn_range startpos endpos)\n\nlet push_one_line_comment pre lexbuf =\n  let startpos, endpos = L.range lexbuf in\n  assert (startpos.Lexing.pos_lnum = endpos.Lexing.pos_lnum);\n  add_comment (pre ^ L.lexeme lexbuf, FStar_Parser_Util.mksyn_range startpos endpos)\n\n(** Unicode class definitions\n  Auto-generated from http:/ /www.unicode.org/Public/8.0.0/ucd/UnicodeData.txt **)\n(** Ll **)\nlet u_lower = [%sedlex.regexp? ll]\n(** Lu *)\nlet u_upper = [%sedlex.regexp? lu]\n(** Lo *)\nlet u_other = [%sedlex.regexp? lo]\n(** Lm *)\nlet u_modifier = [%sedlex.regexp? lm]\n(** Lt *)\nlet u_title = [%sedlex.regexp? lt]\n(** Zs *)\nlet u_space = [%sedlex.regexp? zs]\n(** These are not unicode spaces but we accept as whitespace in F* source (e.g. tab and BOM) *)\nlet u_space_extra = [%sedlex.regexp? '\\t' | '\\x0B' | '\\x0C' | '\\xA0' | 0xfeff]\n(** Zl and Zp *)\nlet u_line_sep = [%sedlex.regexp? zl]\nlet u_par_sep = [%sedlex.regexp? zp]\n(** Sm math symbols *)\nlet u_math = [%sedlex.regexp? sm]\nlet u_math_ascii = [%sedlex.regexp? 0x002b | 0x003c .. 0x003e | 0x007c | 0x007e]\nlet u_math_nonascii = [%sedlex.regexp? Sub(u_math, u_math_ascii)]\n(** Sc currency *)\nlet u_currency = [%sedlex.regexp? sc]\n(** Sk *)\nlet u_modifier_symbol = [%sedlex.regexp? sk]\n(** So *)\nlet u_other_symbol = [%sedlex.regexp? 
so]\n(** Nd *)\nlet u_decimal_digit = [%sedlex.regexp? nd]\n(** Nl *)\nlet u_digit_letter = [%sedlex.regexp? nl]\n(** No *)\nlet u_other_digit = [%sedlex.regexp? no]\n(** Pd *)\nlet u_punct_hyphen = [%sedlex.regexp? pd]\n(** Ps *)\nlet u_punct_obra = [%sedlex.regexp? ps]\n(** Pe *)\nlet u_punct_cbra = [%sedlex.regexp? pe]\n(** Pi *)\nlet u_punct_oquot = [%sedlex.regexp? pi]\n(** Pf *)\nlet u_punct_cquot = [%sedlex.regexp? pf]\n(** Pc *)\nlet u_punct_connect = [%sedlex.regexp? pc]\n(** Po *)\nlet u_punct_other = [%sedlex.regexp? po]\n(** Mn *)\nlet u_mod_nospace = [%sedlex.regexp? mn]\n(** Mc *)\nlet u_mod = [%sedlex.regexp? mc]\n(** Me *)\nlet u_mod_enclose = [%sedlex.regexp? me]\n(** Cc *)\nlet u_ascii_control = [%sedlex.regexp? cc]\n(** Cf *)\nlet u_format_control = [%sedlex.regexp? cf]\n(** Co *)\nlet u_private_use = [%sedlex.regexp? co]\n(** Cs *)\nlet u_surrogate = [%sedlex.regexp? cs]\n\n(* -------------------------------------------------------------------- *)\nlet lower  = [%sedlex.regexp? u_lower]\nlet upper  = [%sedlex.regexp? u_upper | u_title]\nlet letter = [%sedlex.regexp? u_lower | u_upper | u_other | u_modifier]\nlet digit  = [%sedlex.regexp? '0'..'9']\nlet hex    = [%sedlex.regexp? '0'..'9' | 'A'..'F' | 'a'..'f']\n\n(* -------------------------------------------------------------------- *)\nlet anywhite  = [%sedlex.regexp? u_space | u_space_extra]\nlet newline   = [%sedlex.regexp? \"\\r\\n\" | 10 | 13 | 0x2028 | 0x2029]\n\n(* -------------------------------------------------------------------- *)\nlet op_char = [%sedlex.regexp? Chars \"!$%&*+-.<>=?^|~:@#\\\\/\"]\n\n(* op_token must be splt into seperate regular expressions to prevent\n   compliation from hanging *)\nlet op_token_1 = [%sedlex.regexp? \"~\" | \"-\" | \"/\\\\\" | \"\\\\/\" | \"<:\" | \"$:\" | \"<@\" | \"(|\" | \"|)\" | \"#\" ]\nlet op_token_2 = [%sedlex.regexp? \"u#\" | \"&\" | \"()\" | \"(\" | \")\" | \",\" | \"~>\" | \"->\" | \"<--\" ]\nlet op_token_3 = [%sedlex.regexp? 
\"<-\" | \"<==>\" | \"==>\" | \".\" | \"?.\" | \"?\" | \".[|\" | \".[\" | \".(|\" | \".(\" ]\nlet op_token_4 = [%sedlex.regexp? \"$\" | \"{:pattern\" | \"{:well-founded\" | \":\" | \"::\" | \":=\" | \";;\" | \";\" | \"=\" | \"%[\" | \"returns$\" ]\nlet op_token_5 = [%sedlex.regexp? \"!{\" | \"[@@@\" | \"[@@\" | \"[@\" | \"[|\" | \"{|\" | \"[\" | \"|>\" | \"]\" | \"|]\" | \"|}\" | \"{\" | \"|\" | \"}\" ]\n\n(* -------------------------------------------------------------------- *)\nlet xinteger =\n  [%sedlex.regexp?\n  (  '0', ('x'| 'X'), Plus hex\n   | '0', ('o'| 'O'), Plus ('0' .. '7')\n   | '0', ('b'| 'B'), Plus ('0' .. '1') )]\nlet integer = [%sedlex.regexp? Plus digit]\nlet any_integer = [%sedlex.regexp? xinteger | integer]\nlet unsigned = [%sedlex.regexp? Chars \"uU\"]\nlet int8 = [%sedlex.regexp? any_integer, 'y']\nlet uint8 = [%sedlex.regexp? any_integer, unsigned, 'y']\nlet int16 = [%sedlex.regexp? any_integer, 's']\nlet uint16 = [%sedlex.regexp? any_integer, unsigned, 's']\nlet int32 = [%sedlex.regexp? any_integer, 'l']\nlet uint32 = [%sedlex.regexp? any_integer, unsigned, 'l']\nlet int64 = [%sedlex.regexp? any_integer, 'L']\nlet uint64 = [%sedlex.regexp? any_integer, unsigned, 'L']\nlet char8 = [%sedlex.regexp? any_integer, 'z']\nlet sizet = [%sedlex.regexp? any_integer, \"sz\"]\n\nlet floatp     = [%sedlex.regexp? Plus digit, '.', Star digit]\nlet floate     = [%sedlex.regexp? Plus digit, Opt ('.', Star digit), Chars \"eE\", Opt (Chars \"+-\"), Plus digit]\nlet real       = [%sedlex.regexp? floatp, 'R']\nlet ieee64     = [%sedlex.regexp? floatp | floate]\nlet xieee64    = [%sedlex.regexp? xinteger, 'L', 'F']\nlet range      = [%sedlex.regexp? Plus digit, '.', '.', Plus digit]\n\nlet op_prefix  = [%sedlex.regexp? Chars \"!~?\"]\nlet op_infix0a = [%sedlex.regexp? Chars \"|\"] (* left *)\nlet op_infix0b = [%sedlex.regexp? Chars \"&\"] (* left *)\nlet op_infix0c = [%sedlex.regexp? Chars \"=<>\"] (* left *)\nlet op_infix0c_nogt = [%sedlex.regexp? 
Chars \"=<\"] (* left *)\nlet op_infix0d = [%sedlex.regexp? Chars \"$\"] (* left *)\n\nlet op_infix0  = [%sedlex.regexp? op_infix0a | op_infix0b | op_infix0c | op_infix0d]\nlet op_infix1  = [%sedlex.regexp? Chars \"@^\"] (* right *)\nlet op_infix2  = [%sedlex.regexp? Chars \"+-\"] (* left *)\nlet op_infix3  = [%sedlex.regexp? Chars \"*/%\"] (* left *)\nlet symbolchar = [%sedlex.regexp? op_prefix | op_infix0 | op_infix1 | op_infix2 | op_infix3 | Chars \".:\"]\nlet uoperator  = [%sedlex.regexp? u_math_nonascii]\n\n(* -------------------------------------------------------------------- *)\nlet escape_char = [%sedlex.regexp? '\\\\', (Chars \"\\\\\\\"'bfntrv0\" | \"x\", hex, hex | \"u\", hex, hex, hex, hex)]\nlet char        = [%sedlex.regexp? Compl '\\\\' | escape_char]\n\n(* -------------------------------------------------------------------- *)\nlet constructor_start_char = [%sedlex.regexp? upper]\nlet ident_start_char       = [%sedlex.regexp? lower  | '_']\nlet ident_char             = [%sedlex.regexp? letter | digit | '\\'' | '_']\nlet tvar_char              = [%sedlex.regexp? letter | digit | '\\'' | '_']\n\nlet constructor = [%sedlex.regexp? constructor_start_char, Star ident_char]\nlet ident       = [%sedlex.regexp? ident_start_char, Star ident_char]\nlet tvar        = [%sedlex.regexp? '\\'', (ident_start_char | constructor_start_char), Star tvar_char]\n\n(* [ensure_no_comment lexbuf next] takes a [lexbuf] and [next], a\n   continuation. It is to be called after a regexp was matched, to\n   ensure match text does not contain any comment start.\n\n   If the match [s] contains a comment start (an occurence of [//])\n   then we place the lexer at that comment start.  
We continue with\n   [next s], [s] being either the whole match, or the chunk before\n   [//].\n*)\nlet ensure_no_comment lexbuf (next: string -> token): token =\n  let s = L.lexeme lexbuf in\n  next (try let before, _after = BatString.split s \"//\" in\n            (* rollback to the begining of the match *)\n            L.rollback lexbuf;\n            (* skip [n] characters in the lexer, with [n] being [hd]'s len *)\n            BatString.iter (fun _ -> let _ = L.next lexbuf in ()) before;\n            before with | Not_found -> s)\n\nlet rec token lexbuf =\nmatch%sedlex lexbuf with\n | \"%splice\" -> SPLICE\n | \"`%\" -> BACKTICK_PERC\n | \"`#\" -> BACKTICK_HASH\n | \"`@\" -> BACKTICK_AT\n | \"#set-options\" -> PRAGMA_SET_OPTIONS\n | \"#reset-options\" -> PRAGMA_RESET_OPTIONS\n | \"#push-options\" -> PRAGMA_PUSH_OPTIONS\n | \"#pop-options\" -> PRAGMA_POP_OPTIONS\n | \"#restart-solver\" -> PRAGMA_RESTART_SOLVER\n | \"#print-effects-graph\" -> PRAGMA_PRINT_EFFECTS_GRAPH\n | \"__SOURCE_FILE__\" -> STRING (L.source_file lexbuf)\n | \"__LINE__\" -> INT (string_of_int (L.current_line lexbuf), false)\n\n | Plus anywhite -> token lexbuf\n | newline -> L.new_line lexbuf; token lexbuf\n\n (* Must appear before tvar to avoid 'a <-> 'a' conflict *)\n | ('\\'', char, '\\'') -> CHAR (unescape (utrim_both lexbuf 1 1))\n | ('\\'', char, '\\'', 'B') -> CHAR (unescape (utrim_both lexbuf 1 2))\n | '`' -> BACKTICK\n\n | \"match\", Plus op_char ->\n    ensure_no_comment lexbuf (fun s ->\n        match BatString.lchop ~n:5 s with\n        | \"\" -> MATCH\n        | s  -> MATCH_OP s\n      )\n\n | \"if\", Plus op_char ->\n    ensure_no_comment lexbuf (fun s ->\n        match BatString.lchop ~n:2 s with\n        | \"\" -> IF\n        | s  -> IF_OP s\n      )\n\n | \"let\", Plus op_char ->\n    ensure_no_comment lexbuf (fun s ->\n        match BatString.lchop ~n:3 s with\n        | \"\" -> LET false\n        | s  -> LET_OP s\n      )\n\n | \"and\", Plus op_char ->\n    ensure_no_comment 
lexbuf (fun s ->\n        match BatString.lchop ~n:3 s with\n        | \"\" -> AND\n        | s  -> AND_OP s\n      )\n\n | \";\", Plus op_char ->\n    ensure_no_comment lexbuf (fun s ->\n        match BatString.lchop ~n:1 s with\n        | \"\" -> SEMICOLON\n        | s  -> SEMICOLON_OP (Some s)\n      )\n\n | \";;\" -> SEMICOLON_OP None\n\n | ident -> let id = L.lexeme lexbuf in\n   if FStar_Compiler_Util.starts_with id FStar_Ident.reserved_prefix\n   then FStar_Errors.raise_error\n                    (Codes.Fatal_ReservedPrefix,\n                     FStar_Ident.reserved_prefix  ^ \" is a reserved prefix for an identifier\")\n                    (current_range lexbuf);\n   Hashtbl.find_option keywords id |> Option.default (IDENT id)\n | constructor -> let id = L.lexeme lexbuf in\n   Hashtbl.find_option constructors id |> Option.default (NAME id)\n\n | tvar -> TVAR (L.lexeme lexbuf)\n | (integer | xinteger) -> INT (clean_number (L.lexeme lexbuf), false)\n | (uint8 | char8) ->\n   let c = clean_number (L.lexeme lexbuf) in\n   let cv = int_of_string c in\n   if cv < 0 || cv > 255 then fail lexbuf (Codes.Fatal_SyntaxError, \"Out-of-range character literal\")\n   else UINT8 (c)\n | int8 -> INT8 (clean_number (L.lexeme lexbuf), false)\n | uint16 -> UINT16 (clean_number (L.lexeme lexbuf))\n | int16 -> INT16 (clean_number (L.lexeme lexbuf), false)\n | uint32 -> UINT32 (clean_number (L.lexeme lexbuf))\n | int32 -> INT32 (clean_number (L.lexeme lexbuf), false)\n | uint64 -> UINT64 (clean_number (L.lexeme lexbuf))\n | int64 -> INT64 (clean_number (L.lexeme lexbuf), false)\n | sizet -> SIZET (clean_number (L.lexeme lexbuf))\n | range -> RANGE (L.lexeme lexbuf)\n | real -> REAL(trim_right lexbuf 1)\n | (integer | xinteger | ieee64 | xieee64), Plus ident_char ->\n   fail lexbuf (Codes.Fatal_SyntaxError, \"This is not a valid numeric literal: \" ^ L.lexeme lexbuf)\n\n | \"(*\" ->\n   let inner, buffer, startpos = start_comment lexbuf in\n   comment inner buffer startpos 
lexbuf\n\n | \"// IN F*:\" -> token lexbuf\n | \"//\" ->\n     (* Only match on \"//\" to allow the longest-match rule to catch IN F*. This\n      * creates a lexing conflict with op_infix3 which is caught below. *)\n     one_line_comment (L.lexeme lexbuf) lexbuf\n\n | '\"' -> string (Buffer.create 0) lexbuf.Sedlexing.start_p lexbuf\n\n | '`', '`', (Plus (Compl ('`' | 10 | 13 | 0x2028 | 0x2029) | '`', Compl ('`' | 10 | 13 | 0x2028 | 0x2029))), '`', '`' ->\n   IDENT (trim_both lexbuf 2 2)\n\n | op_token_1\n | op_token_2\n | op_token_3\n | op_token_4\n | op_token_5 -> L.lexeme lexbuf |> Hashtbl.find operators\n\n | \"<\" -> OPINFIX0c(\"<\")\n | \">\" -> if is_typ_app_gt ()\n          then TYP_APP_GREATER\n          else begin match%sedlex lexbuf with\n               | Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0c (\">\" ^ s))\n               | _ -> assert false end\n\n (* Operators. *)\n | op_prefix,  Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPPREFIX  s)\n | op_infix0a, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0a s)\n | op_infix0b, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0b s)\n | op_infix0c_nogt, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0c s)\n | op_infix0d, Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX0d s)\n | op_infix1,  Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX1  s)\n | op_infix2,  Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX2  s)\n | op_infix3,  Star symbolchar -> ensure_no_comment lexbuf (function\n                                      | \"\" -> one_line_comment \"\" lexbuf\n                                      | s  -> OPINFIX3 s\n                                    )\n | \"**\"     ,  Star symbolchar -> ensure_no_comment lexbuf (fun s -> OPINFIX4  s)\n\n (* Unicode Operators *)\n | uoperator -> let id = L.lexeme lexbuf in\n   Hashtbl.find_option operators id |> Option.default (OPINFIX4 id)\n\n | \".[]<-\"            
     -> OP_MIXFIX_ASSIGNMENT (L.lexeme lexbuf)\n | \".()<-\"                 -> OP_MIXFIX_ASSIGNMENT (L.lexeme lexbuf)\n | \".(||)<-\"                -> OP_MIXFIX_ASSIGNMENT (L.lexeme lexbuf)\n | \".[||]<-\"                 -> OP_MIXFIX_ASSIGNMENT (L.lexeme lexbuf)\n | \".[]\"                  -> OP_MIXFIX_ACCESS (L.lexeme lexbuf)\n | \".()\"                  -> OP_MIXFIX_ACCESS (L.lexeme lexbuf)\n | \".(||)\"                 -> OP_MIXFIX_ACCESS (L.lexeme lexbuf)\n | \".[||]\"                  -> OP_MIXFIX_ACCESS (L.lexeme lexbuf)\n\n | eof -> EOF\n | _ -> fail lexbuf (Codes.Fatal_SyntaxError, \"unexpected char\")\n\nand one_line_comment pre lexbuf =\nmatch%sedlex lexbuf with\n | Star (Compl (10 | 13 | 0x2028 | 0x2029)) -> push_one_line_comment pre lexbuf; token lexbuf\n | _ -> assert false\n\nand string buffer start_pos lexbuf =\nmatch%sedlex lexbuf with\n | '\\\\', newline, Star anywhite -> L.new_line lexbuf; string buffer start_pos lexbuf\n | newline ->\n   Buffer.add_string buffer (L.lexeme lexbuf);\n   L.new_line lexbuf; string buffer start_pos lexbuf\n | escape_char ->\n   Buffer.add_string buffer (BatUTF8.init 1 (fun _ -> unescape (L.ulexeme lexbuf) |> BatUChar.chr));\n   string buffer start_pos lexbuf\n | '\"' ->\n   (* position info must be set since the start of the string *)\n   lexbuf.Sedlexing.start_p <- start_pos;\n   STRING (Buffer.contents buffer)\n | eof -> fail lexbuf (Codes.Fatal_SyntaxError, \"unterminated string\")\n | any ->\n  Buffer.add_string buffer (L.lexeme lexbuf);\n  string buffer start_pos lexbuf\n | _ -> assert false\n\nand comment inner buffer startpos lexbuf =\nmatch%sedlex lexbuf with\n | \"(*\" ->\n   Buffer.add_string buffer \"(*\" ;\n   let _ = comment true buffer startpos lexbuf in\n   comment inner buffer startpos lexbuf\n | newline ->\n   L.new_line lexbuf;\n   Buffer.add_string buffer (L.lexeme lexbuf);\n   comment inner buffer startpos lexbuf\n | \"*)\" ->\n   terminate_comment buffer startpos lexbuf;\n   if inner then EOF 
else token lexbuf\n | eof ->\n   terminate_comment buffer startpos lexbuf; EOF\n | any ->\n   Buffer.add_string buffer (L.lexeme lexbuf);\n   comment inner buffer startpos lexbuf\n | _ -> assert false\n\nand ignore_endline lexbuf =\nmatch%sedlex lexbuf with\n | Star ' ', newline -> token lexbuf\n | _ -> assert false\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Parser_Parse.ml",
    "content": "open Prims\nopen FStar_Errors\nopen FStar_Compiler_List\nopen FStar_Compiler_Util\nopen FStar_Compiler_Range\n(* open FStar_Options *)\nopen FStar_Parser_Const\nopen FStar_Parser_AST\nopen FStar_Parser_Util\nopen FStar_Const\nopen FStar_Ident\nopen FStar_String\ntype token =\n  | AMP\n  | AND\n  | AND_OP of (string)\n  | AS\n  | ASSERT\n  | ASSUME\n  | ATTRIBUTES\n  | BACKTICK\n  | BACKTICK_AT\n  | BACKTICK_HASH\n  | BACKTICK_PERC\n  | BANG_LBRACE\n  | BAR\n  | BAR_RBRACE\n  | BAR_RBRACK\n  | BEGIN\n  | BY\n  | CALC\n  | CHAR of (char)\n  | CLASS\n  | COLON\n  | COLON_COLON\n  | COLON_EQUALS\n  | COMMA\n  | CONJUNCTION\n  | DECREASES\n  | DEFAULT\n  | DISJUNCTION\n  | DOLLAR\n  | DOT\n  | DOT_LBRACK\n  | DOT_LBRACK_BAR\n  | DOT_LENS_PAREN_LEFT\n  | DOT_LPAREN\n  | EFFECT\n  | ELIM\n  | ELSE\n  | END\n  | ENSURES\n  | EOF\n  | EQUALS\n  | EQUALTYPE\n  | EXCEPTION\n  | EXISTS\n  | FALSE\n  | FORALL\n  | FRIEND\n  | FUN\n  | FUNCTION\n  | HASH\n  | IDENT of (string)\n  | IF\n  | IFF\n  | IF_OP of (string)\n  | IMPLIES\n  | IN\n  | INCLUDE\n  | INLINE\n  | INLINE_FOR_EXTRACTION\n  | INSTANCE\n  | INT of (string * bool)\n  | INT16 of (string * bool)\n  | INT32 of (string * bool)\n  | INT64 of (string * bool)\n  | INT8 of (string * bool)\n  | INTRO\n  | IRREDUCIBLE\n  | LARROW\n  | LAYERED_EFFECT\n  | LBRACE\n  | LBRACE_BAR\n  | LBRACE_COLON_PATTERN\n  | LBRACE_COLON_WELL_FOUNDED\n  | LBRACK\n  | LBRACK_AT\n  | LBRACK_AT_AT\n  | LBRACK_AT_AT_AT\n  | LBRACK_BAR\n  | LENS_PAREN_LEFT\n  | LENS_PAREN_RIGHT\n  | LET of (bool)\n  | LET_OP of (string)\n  | LOGIC\n  | LONG_LEFT_ARROW\n  | LPAREN\n  | LPAREN_RPAREN\n  | MATCH\n  | MATCH_OP of (string)\n  | MINUS\n  | MODULE\n  | NAME of (string)\n  | NEW\n  | NEW_EFFECT\n  | NOEQUALITY\n  | NOEXTRACT\n  | OF\n  | OPAQUE\n  | OPEN\n  | OPINFIX0a of (string)\n  | OPINFIX0b of (string)\n  | OPINFIX0c of (string)\n  | OPINFIX0d of (string)\n  | OPINFIX1 of (string)\n  | OPINFIX2 of (string)\n  | OPINFIX3 of (string)\n 
 | OPINFIX4 of (string)\n  | OPPREFIX of (string)\n  | OP_MIXFIX_ACCESS of (string)\n  | OP_MIXFIX_ASSIGNMENT of (string)\n  | PERCENT_LBRACK\n  | PIPE_RIGHT\n  | POLYMONADIC_BIND\n  | POLYMONADIC_SUBCOMP\n  | PRAGMA_POP_OPTIONS\n  | PRAGMA_PRINT_EFFECTS_GRAPH\n  | PRAGMA_PUSH_OPTIONS\n  | PRAGMA_RESET_OPTIONS\n  | PRAGMA_RESTART_SOLVER\n  | PRAGMA_SET_OPTIONS\n  | PRIVATE\n  | QMARK\n  | QMARK_DOT\n  | QUOTE\n  | RANGE of (string)\n  | RANGE_OF\n  | RARROW\n  | RBRACE\n  | RBRACK\n  | REAL of (string)\n  | REC\n  | REFLECTABLE\n  | REIFIABLE\n  | REIFY\n  | REQUIRES\n  | RETURNS\n  | RETURNS_EQ\n  | RPAREN\n  | SEMICOLON\n  | SEMICOLON_OP of (string option)\n  | SET_RANGE_OF\n  | SIZET of (string)\n  | SPLICE\n  | SQUIGGLY_RARROW\n  | STRING of (string)\n  | SUBKIND\n  | SUBTYPE\n  | SUB_EFFECT\n  | SYNTH\n  | THEN\n  | TILDE of (string)\n  | TOTAL\n  | TRUE\n  | TRY\n  | TVAR of (string)\n  | TYPE\n  | TYP_APP_GREATER\n  | TYP_APP_LESS\n  | UINT16 of (string)\n  | UINT32 of (string)\n  | UINT64 of (string)\n  | UINT8 of (string)\n  | UNDERSCORE\n  | UNFOLD\n  | UNFOLDABLE\n  | UNIV_HASH\n  | UNOPTEQUALITY\n  | VAL\n  | WHEN\n  | WITH\n\nopen Parsing;;\nlet _ = parse_error;;\n# 2 \"parse.mly\"\n(*\n We are expected to have only 6 shift-reduce conflicts in ML and 8 in F#.\n A lot (176) of end-of-stream conflicts are also reported and\n should be investigated...\n*)\n(* (c) Microsoft Corporation. All rights reserved *)\nopen Prims\nopen FStar_Pervasives\nopen FStar_Errors\nopen FStar_Compiler_List\nopen FStar_Compiler_Util\nopen FStar_Compiler_Range\n(* open FStar_Options *)\n(* TODO : these files should be deprecated and removed *)\n(* open FStar_Syntax_Syntax *)\nopen FStar_Parser_Const\nopen FStar_Parser_AST\nopen FStar_Parser_Util\nopen FStar_Const\nopen FStar_Ident\nopen FStar_String\n\nlet logic_qualifier_deprecation_warning =\n  \"logic qualifier is deprecated, please remove it from the source program. 
In case your program verifies with the qualifier annotated but not without it, please try to minimize the example and file a github issue\"\n\nlet mk_meta_tac m = Meta m\n\nlet old_attribute_syntax_warning =\n  \"The `[@ ...]` syntax of attributes is deprecated. \\\n   Use `[@@ a1; a2; ...; an]`, a semi-colon separated list of attributes, instead\"\n\nlet do_notation_deprecation_warning =\n  \"The lightweight do notation [x <-- y; z] or [x ;; z] is deprecated, use let operators (i.e. [let* x = y in z] or [y ;* z], [*] being any sequence of operator characters) instead.\"\n\nlet none_to_empty_list x =\n  match x with\n  | None -> []\n  | Some l -> l\n\n# 216 \"parse.ml\"\nlet yytransl_const = [|\n  257 (* AMP *);\n  258 (* AND *);\n  260 (* AS *);\n  261 (* ASSERT *);\n  262 (* ASSUME *);\n  263 (* ATTRIBUTES *);\n  264 (* BACKTICK *);\n  265 (* BACKTICK_AT *);\n  266 (* BACKTICK_HASH *);\n  267 (* BACKTICK_PERC *);\n  268 (* BANG_LBRACE *);\n  269 (* BAR *);\n  270 (* BAR_RBRACE *);\n  271 (* BAR_RBRACK *);\n  272 (* BEGIN *);\n  273 (* BY *);\n  274 (* CALC *);\n  276 (* CLASS *);\n  277 (* COLON *);\n  278 (* COLON_COLON *);\n  279 (* COLON_EQUALS *);\n  280 (* COMMA *);\n  281 (* CONJUNCTION *);\n  282 (* DECREASES *);\n  283 (* DEFAULT *);\n  284 (* DISJUNCTION *);\n  285 (* DOLLAR *);\n  286 (* DOT *);\n  287 (* DOT_LBRACK *);\n  288 (* DOT_LBRACK_BAR *);\n  289 (* DOT_LENS_PAREN_LEFT *);\n  290 (* DOT_LPAREN *);\n  291 (* EFFECT *);\n  292 (* ELIM *);\n  293 (* ELSE *);\n  294 (* END *);\n  295 (* ENSURES *);\n    0 (* EOF *);\n  296 (* EQUALS *);\n  297 (* EQUALTYPE *);\n  298 (* EXCEPTION *);\n  299 (* EXISTS *);\n  300 (* FALSE *);\n  301 (* FORALL *);\n  302 (* FRIEND *);\n  303 (* FUN *);\n  304 (* FUNCTION *);\n  305 (* HASH *);\n  307 (* IF *);\n  308 (* IFF *);\n  310 (* IMPLIES *);\n  311 (* IN *);\n  312 (* INCLUDE *);\n  313 (* INLINE *);\n  314 (* INLINE_FOR_EXTRACTION *);\n  315 (* INSTANCE *);\n  321 (* INTRO *);\n  322 (* IRREDUCIBLE *);\n  323 
(* LARROW *);\n  324 (* LAYERED_EFFECT *);\n  325 (* LBRACE *);\n  326 (* LBRACE_BAR *);\n  327 (* LBRACE_COLON_PATTERN *);\n  328 (* LBRACE_COLON_WELL_FOUNDED *);\n  329 (* LBRACK *);\n  330 (* LBRACK_AT *);\n  331 (* LBRACK_AT_AT *);\n  332 (* LBRACK_AT_AT_AT *);\n  333 (* LBRACK_BAR *);\n  334 (* LENS_PAREN_LEFT *);\n  335 (* LENS_PAREN_RIGHT *);\n  338 (* LOGIC *);\n  339 (* LONG_LEFT_ARROW *);\n  340 (* LPAREN *);\n  341 (* LPAREN_RPAREN *);\n  342 (* MATCH *);\n  344 (* MINUS *);\n  345 (* MODULE *);\n  347 (* NEW *);\n  348 (* NEW_EFFECT *);\n  349 (* NOEQUALITY *);\n  350 (* NOEXTRACT *);\n  351 (* OF *);\n  352 (* OPAQUE *);\n  353 (* OPEN *);\n  365 (* PERCENT_LBRACK *);\n  366 (* PIPE_RIGHT *);\n  367 (* POLYMONADIC_BIND *);\n  368 (* POLYMONADIC_SUBCOMP *);\n  369 (* PRAGMA_POP_OPTIONS *);\n  370 (* PRAGMA_PRINT_EFFECTS_GRAPH *);\n  371 (* PRAGMA_PUSH_OPTIONS *);\n  372 (* PRAGMA_RESET_OPTIONS *);\n  373 (* PRAGMA_RESTART_SOLVER *);\n  374 (* PRAGMA_SET_OPTIONS *);\n  375 (* PRIVATE *);\n  376 (* QMARK *);\n  377 (* QMARK_DOT *);\n  378 (* QUOTE *);\n  380 (* RANGE_OF *);\n  381 (* RARROW *);\n  382 (* RBRACE *);\n  383 (* RBRACK *);\n  385 (* REC *);\n  386 (* REFLECTABLE *);\n  387 (* REIFIABLE *);\n  388 (* REIFY *);\n  389 (* REQUIRES *);\n  390 (* RETURNS *);\n  391 (* RETURNS_EQ *);\n  392 (* RPAREN *);\n  393 (* SEMICOLON *);\n  395 (* SET_RANGE_OF *);\n  397 (* SPLICE *);\n  398 (* SQUIGGLY_RARROW *);\n  400 (* SUBKIND *);\n  401 (* SUBTYPE *);\n  402 (* SUB_EFFECT *);\n  403 (* SYNTH *);\n  404 (* THEN *);\n  406 (* TOTAL *);\n  407 (* TRUE *);\n  408 (* TRY *);\n  410 (* TYPE *);\n  411 (* TYP_APP_GREATER *);\n  412 (* TYP_APP_LESS *);\n  417 (* UNDERSCORE *);\n  418 (* UNFOLD *);\n  419 (* UNFOLDABLE *);\n  420 (* UNIV_HASH *);\n  421 (* UNOPTEQUALITY *);\n  422 (* VAL *);\n  423 (* WHEN *);\n  424 (* WITH *);\n    0|]\n\nlet yytransl_block = [|\n  259 (* AND_OP *);\n  275 (* CHAR *);\n  306 (* IDENT *);\n  309 (* IF_OP *);\n  316 (* INT 
*);\n  317 (* INT16 *);\n  318 (* INT32 *);\n  319 (* INT64 *);\n  320 (* INT8 *);\n  336 (* LET *);\n  337 (* LET_OP *);\n  343 (* MATCH_OP *);\n  346 (* NAME *);\n  354 (* OPINFIX0a *);\n  355 (* OPINFIX0b *);\n  356 (* OPINFIX0c *);\n  357 (* OPINFIX0d *);\n  358 (* OPINFIX1 *);\n  359 (* OPINFIX2 *);\n  360 (* OPINFIX3 *);\n  361 (* OPINFIX4 *);\n  362 (* OPPREFIX *);\n  363 (* OP_MIXFIX_ACCESS *);\n  364 (* OP_MIXFIX_ASSIGNMENT *);\n  379 (* RANGE *);\n  384 (* REAL *);\n  394 (* SEMICOLON_OP *);\n  396 (* SIZET *);\n  399 (* STRING *);\n  405 (* TILDE *);\n  409 (* TVAR *);\n  413 (* UINT16 *);\n  414 (* UINT32 *);\n  415 (* UINT64 *);\n  416 (* UINT8 *);\n    0|]\n\nlet yylhs = \"\\255\\255\\\n\\006\\000\\006\\000\\008\\000\\008\\000\\009\\000\\009\\000\\011\\000\\011\\000\\\n\\013\\000\\013\\000\\014\\000\\014\\000\\016\\000\\016\\000\\017\\000\\017\\000\\\n\\018\\000\\018\\000\\020\\000\\020\\000\\021\\000\\021\\000\\023\\000\\023\\000\\\n\\025\\000\\025\\000\\027\\000\\027\\000\\029\\000\\029\\000\\031\\000\\031\\000\\\n\\034\\000\\034\\000\\036\\000\\036\\000\\037\\000\\037\\000\\038\\000\\038\\000\\\n\\040\\000\\040\\000\\042\\000\\042\\000\\044\\000\\044\\000\\046\\000\\046\\000\\\n\\046\\000\\046\\000\\049\\000\\049\\000\\051\\000\\051\\000\\051\\000\\054\\000\\\n\\054\\000\\055\\000\\055\\000\\057\\000\\057\\000\\059\\000\\059\\000\\061\\000\\\n\\061\\000\\063\\000\\063\\000\\065\\000\\065\\000\\067\\000\\067\\000\\068\\000\\\n\\068\\000\\070\\000\\070\\000\\072\\000\\072\\000\\074\\000\\074\\000\\075\\000\\\n\\075\\000\\077\\000\\077\\000\\077\\000\\077\\000\\077\\000\\077\\000\\077\\000\\\n\\077\\000\\078\\000\\078\\000\\080\\000\\080\\000\\082\\000\\082\\000\\084\\000\\\n\\084\\000\\039\\000\\039\\000\\087\\000\\087\\000\\088\\000\\088\\000\\090\\000\\\n\\090\\000\\092\\000\\092\\000\\094\\000\\094\\000\\096\\000\\096\\000\\097\\000\\\n\\097\\000\\041\\000\\041\\000\\043\\000\\043\\000\\001\\000\\002\\000\\002\\000\\\n\\100\\000\\100\\000\\100\\000
\\100\\000\\100\\000\\100\\000\\101\\000\\101\\000\\\n\\066\\000\\066\\000\\064\\000\\064\\000\\064\\000\\106\\000\\106\\000\\105\\000\\\n\\105\\000\\105\\000\\105\\000\\105\\000\\105\\000\\105\\000\\105\\000\\105\\000\\\n\\105\\000\\105\\000\\105\\000\\105\\000\\105\\000\\105\\000\\105\\000\\105\\000\\\n\\105\\000\\105\\000\\105\\000\\105\\000\\105\\000\\105\\000\\105\\000\\105\\000\\\n\\083\\000\\109\\000\\109\\000\\119\\000\\118\\000\\118\\000\\118\\000\\118\\000\\\n\\118\\000\\123\\000\\026\\000\\026\\000\\026\\000\\062\\000\\062\\000\\058\\000\\\n\\058\\000\\045\\000\\081\\000\\081\\000\\081\\000\\081\\000\\081\\000\\081\\000\\\n\\081\\000\\081\\000\\112\\000\\112\\000\\126\\000\\113\\000\\114\\000\\098\\000\\\n\\115\\000\\115\\000\\115\\000\\116\\000\\117\\000\\103\\000\\103\\000\\103\\000\\\n\\103\\000\\103\\000\\103\\000\\103\\000\\103\\000\\103\\000\\103\\000\\103\\000\\\n\\103\\000\\103\\000\\103\\000\\103\\000\\103\\000\\103\\000\\125\\000\\107\\000\\\n\\107\\000\\129\\000\\129\\000\\129\\000\\122\\000\\131\\000\\085\\000\\089\\000\\\n\\089\\000\\089\\000\\073\\000\\073\\000\\073\\000\\073\\000\\073\\000\\073\\000\\\n\\073\\000\\073\\000\\073\\000\\073\\000\\073\\000\\073\\000\\073\\000\\073\\000\\\n\\073\\000\\073\\000\\073\\000\\135\\000\\135\\000\\079\\000\\079\\000\\079\\000\\\n\\079\\000\\052\\000\\052\\000\\053\\000\\053\\000\\053\\000\\120\\000\\069\\000\\\n\\069\\000\\069\\000\\069\\000\\124\\000\\124\\000\\124\\000\\124\\000\\124\\000\\\n\\124\\000\\124\\000\\124\\000\\124\\000\\124\\000\\124\\000\\124\\000\\124\\000\\\n\\124\\000\\124\\000\\124\\000\\124\\000\\124\\000\\124\\000\\124\\000\\124\\000\\\n\\124\\000\\124\\000\\124\\000\\071\\000\\071\\000\\071\\000\\071\\000\\050\\000\\\n\\108\\000\\137\\000\\137\\000\\138\\000\\138\\000\\099\\000\\099\\000\\139\\000\\\n\\139\\000\\139\\000\\139\\000\\139\\000\\139\\000\\140\\000\\140\\000\\141\\000\\\n\\141\\000\\136\\000\\136\\000\\005\\000\\104\\000\\093\\000\\015\\000\\130\\000\\\n\\019\\000\\01
0\\000\\024\\000\\022\\000\\142\\000\\003\\000\\003\\000\\003\\000\\\n\\003\\000\\030\\000\\030\\000\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\\n\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\\n\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\\n\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\\n\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\012\\000\\148\\000\\\n\\151\\000\\151\\000\\151\\000\\060\\000\\007\\000\\007\\000\\007\\000\\152\\000\\\n\\152\\000\\153\\000\\095\\000\\033\\000\\033\\000\\154\\000\\154\\000\\155\\000\\\n\\155\\000\\143\\000\\143\\000\\156\\000\\156\\000\\157\\000\\157\\000\\157\\000\\\n\\157\\000\\157\\000\\157\\000\\157\\000\\157\\000\\127\\000\\127\\000\\127\\000\\\n\\127\\000\\127\\000\\127\\000\\127\\000\\127\\000\\132\\000\\132\\000\\158\\000\\\n\\158\\000\\158\\000\\158\\000\\158\\000\\147\\000\\147\\000\\149\\000\\149\\000\\\n\\150\\000\\160\\000\\160\\000\\160\\000\\160\\000\\160\\000\\160\\000\\160\\000\\\n\\160\\000\\160\\000\\160\\000\\160\\000\\160\\000\\160\\000\\160\\000\\160\\000\\\n\\160\\000\\162\\000\\162\\000\\162\\000\\162\\000\\162\\000\\162\\000\\162\\000\\\n\\162\\000\\162\\000\\162\\000\\162\\000\\162\\000\\162\\000\\162\\000\\162\\000\\\n\\162\\000\\161\\000\\161\\000\\161\\000\\161\\000\\161\\000\\161\\000\\161\\000\\\n\\161\\000\\161\\000\\163\\000\\163\\000\\163\\000\\163\\000\\163\\000\\163\\000\\\n\\163\\000\\163\\000\\163\\000\\111\\000\\111\\000\\111\\000\\111\\000\\111\\000\\\n\\111\\000\\111\\000\\111\\000\\111\\000\\111\\000\\111\\000\\111\\000\\111\\000\\\n\\111\\000\\111\\000\\111\\000\\111\\000\\111\\000\\159\\000\\091\\000\\128\\000\\\n\\165\\000\\165\\000\\133\\000\\048\\000\\048\\000\\167\\000\\167\\000\\086\\000\\\n\\164\\000\\047\\000\\047\\000\\047\\000\\168\\000\\168\\000\\056\\000\\056\\000\\\n\\056\\000\\170\\000\\170\\000\\144\\000\\144\\000\\144\\000\\144\\000\\144\\000\\
\n\\144\\000\\144\\000\\144\\000\\144\\000\\144\\000\\144\\000\\144\\000\\172\\000\\\n\\171\\000\\173\\000\\173\\000\\173\\000\\173\\000\\173\\000\\173\\000\\173\\000\\\n\\173\\000\\173\\000\\028\\000\\174\\000\\175\\000\\032\\000\\110\\000\\110\\000\\\n\\110\\000\\110\\000\\110\\000\\110\\000\\110\\000\\110\\000\\110\\000\\110\\000\\\n\\110\\000\\110\\000\\110\\000\\110\\000\\110\\000\\110\\000\\110\\000\\110\\000\\\n\\110\\000\\169\\000\\177\\000\\177\\000\\177\\000\\076\\000\\076\\000\\076\\000\\\n\\076\\000\\004\\000\\178\\000\\178\\000\\179\\000\\179\\000\\179\\000\\180\\000\\\n\\180\\000\\035\\000\\176\\000\\181\\000\\181\\000\\181\\000\\102\\000\\102\\000\\\n\\102\\000\\182\\000\\182\\000\\182\\000\\183\\000\\183\\000\\183\\000\\134\\000\\\n\\134\\000\\121\\000\\121\\000\\166\\000\\166\\000\\146\\000\\146\\000\\146\\000\\\n\\145\\000\\145\\000\\145\\000\\000\\000\\000\\000\\000\\000\\000\\000\"\n\nlet yylen = \"\\002\\000\\\n\\000\\000\\002\\000\\000\\000\\002\\000\\000\\000\\002\\000\\000\\000\\003\\000\\\n\\000\\000\\002\\000\\000\\000\\002\\000\\000\\000\\002\\000\\000\\000\\002\\000\\\n\\000\\000\\002\\000\\000\\000\\002\\000\\000\\000\\001\\000\\000\\000\\001\\000\\\n\\000\\000\\001\\000\\000\\000\\001\\000\\000\\000\\001\\000\\000\\000\\002\\000\\\n\\000\\000\\001\\000\\000\\000\\001\\000\\000\\000\\001\\000\\000\\000\\001\\000\\\n\\000\\000\\001\\000\\000\\000\\001\\000\\000\\000\\003\\000\\000\\000\\002\\000\\\n\\004\\000\\005\\000\\000\\000\\003\\000\\000\\000\\002\\000\\002\\000\\000\\000\\\n\\002\\000\\000\\000\\002\\000\\000\\000\\002\\000\\000\\000\\002\\000\\000\\000\\\n\\002\\000\\000\\000\\002\\000\\000\\000\\002\\000\\000\\000\\002\\000\\001\\000\\\n\\002\\000\\001\\000\\002\\000\\001\\000\\002\\000\\001\\000\\002\\000\\001\\000\\\n\\002\\000\\003\\000\\003\\000\\003\\000\\003\\000\\004\\000\\004\\000\\004\\000\\\n\\004\\000\\001\\000\\002\\000\\001\\000\\003\\000\\001\\000\\003\\000\\001\\000\\\n\\003\\000\\001\\000\\003\\000\\001\\000\\003\
\000\\001\\000\\003\\000\\001\\000\\\n\\003\\000\\001\\000\\003\\000\\001\\000\\003\\000\\001\\000\\003\\000\\001\\000\\\n\\003\\000\\001\\000\\003\\000\\001\\000\\003\\000\\002\\000\\001\\000\\001\\000\\\n\\002\\000\\002\\000\\002\\000\\001\\000\\001\\000\\001\\000\\003\\000\\003\\000\\\n\\001\\000\\001\\000\\004\\000\\002\\000\\002\\000\\002\\000\\003\\000\\001\\000\\\n\\002\\000\\002\\000\\002\\000\\004\\000\\002\\000\\002\\000\\002\\000\\005\\000\\\n\\003\\000\\002\\000\\005\\000\\007\\000\\007\\000\\007\\000\\007\\000\\007\\000\\\n\\005\\000\\003\\000\\002\\000\\002\\000\\002\\000\\002\\000\\002\\000\\002\\000\\\n\\004\\000\\001\\000\\001\\000\\003\\000\\000\\000\\002\\000\\004\\000\\005\\000\\\n\\002\\000\\003\\000\\002\\000\\002\\000\\004\\000\\003\\000\\004\\000\\002\\000\\\n\\003\\000\\003\\000\\006\\000\\008\\000\\008\\000\\008\\000\\008\\000\\008\\000\\\n\\005\\000\\004\\000\\001\\000\\001\\000\\003\\000\\008\\000\\006\\000\\004\\000\\\n\\005\\000\\008\\000\\012\\000\\009\\000\\005\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\000\\000\\004\\000\\001\\000\\001\\000\\003\\000\\001\\000\\001\\000\\003\\000\\\n\\002\\000\\001\\000\\006\\000\\003\\000\\003\\000\\005\\000\\003\\000\\001\\000\\\n\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\001\\000\\002\\000\\001\\000\\\n\\002\\000\\001\\000\\001\\000\\003\\000\\001\\000\\005\\000\\003\\000\\001\\000\\\n\\007\\000\\001\\000\\001\\000\\005\\000\\003\\000\\006\\000\\001\\000\\003\\000\\\n\\002\\000\\002\\000\\001\\000\\003\\000\\005\\000\\005\\000\\005\\000\\005\\000\\\n\\005\\000\\002\\000\\004\\000\\004\\000\\004\\000\\004\\000\\004\\000\\002\\000\\\n\\004\\000\\004\\000\\004\\000\\004\\000\\004\\000\\001\\000\\003\\000\\003\\000\\\n\\003\\000\\003\\000\\003\\000\\003\\000\\002\\000\\002\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\003\\000
\\001\\000\\003\\000\\001\\000\\001\\000\\001\\000\\\n\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\003\\000\\002\\000\\001\\000\\001\\000\\003\\000\\003\\000\\\n\\005\\000\\003\\000\\003\\000\\001\\000\\004\\000\\004\\000\\006\\000\\006\\000\\\n\\006\\000\\006\\000\\002\\000\\002\\000\\002\\000\\004\\000\\002\\000\\007\\000\\\n\\005\\000\\004\\000\\005\\000\\005\\000\\006\\000\\007\\000\\005\\000\\002\\000\\\n\\002\\000\\003\\000\\003\\000\\002\\000\\007\\000\\007\\000\\009\\000\\008\\000\\\n\\007\\000\\008\\000\\007\\000\\011\\000\\006\\000\\014\\000\\010\\000\\001\\000\\\n\\001\\000\\003\\000\\001\\000\\006\\000\\001\\000\\005\\000\\005\\000\\000\\000\\\n\\003\\000\\001\\000\\001\\000\\001\\000\\004\\000\\001\\000\\001\\000\\003\\000\\\n\\005\\000\\003\\000\\001\\000\\003\\000\\001\\000\\005\\000\\006\\000\\007\\000\\\n\\003\\000\\004\\000\\004\\000\\005\\000\\001\\000\\005\\000\\006\\000\\007\\000\\\n\\003\\000\\004\\000\\004\\000\\005\\000\\001\\000\\003\\000\\001\\000\\003\\000\\\n\\001\\000\\002\\000\\002\\000\\003\\000\\003\\000\\001\\000\\003\\000\\001\\000\\\n\\001\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\\n\\003\\000\\003\\000\\003\\000\\002\\000\\002\\000\\002\\000\\002\\000\\002\\000\\\n\\001\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\\n\\003\\000\\003\\000\\003\\000\\002\\000\\002\\000\\002\\000\\002\\000\\002\\000\\\n\\001\\000\\003\\000\\003\\000\\003\\000\\005\\000\\003\\000\\003\\000\\002\\000\\\n\\002\\000\\001\\000\\003\\000\\003\\000\\003\\000\\005\\000\\003\\000\\003\\000\\\n\\002\\000\\002\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\004\\000\\00
1\\000\\001\\000\\001\\000\\003\\000\\003\\000\\001\\000\\002\\000\\\n\\002\\000\\001\\000\\002\\000\\001\\000\\002\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\004\\000\\001\\000\\001\\000\\001\\000\\001\\000\\003\\000\\\n\\003\\000\\003\\000\\003\\000\\003\\000\\005\\000\\002\\000\\003\\000\\002\\000\\\n\\002\\000\\001\\000\\001\\000\\004\\000\\003\\000\\003\\000\\003\\000\\003\\000\\\n\\003\\000\\002\\000\\003\\000\\002\\000\\002\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\002\\000\\001\\000\\003\\000\\002\\000\\001\\000\\001\\000\\001\\000\\\n\\003\\000\\002\\000\\002\\000\\003\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\000\\000\\001\\000\\003\\000\\000\\000\\001\\000\\\n\\003\\000\\000\\000\\001\\000\\003\\000\\000\\000\\001\\000\\003\\000\\001\\000\\\n\\003\\000\\001\\000\\003\\000\\001\\000\\003\\000\\000\\000\\001\\000\\003\\000\\\n\\001\\000\\002\\000\\003\\000\\002\\000\\002\\000\\002\\000\\002\\000\"\n\nlet yydefred = 
\"\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\205\\000\\199\\000\\\n\\201\\000\\203\\000\\000\\000\\000\\000\\211\\000\\210\\000\\208\\000\\204\\000\\\n\\212\\000\\207\\000\\214\\000\\213\\000\\206\\000\\202\\000\\200\\000\\209\\000\\\n\\076\\002\\000\\000\\000\\000\\000\\000\\000\\000\\129\\000\\130\\000\\119\\000\\\n\\077\\002\\120\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\017\\002\\000\\000\\220\\000\\000\\000\\\n\\000\\000\\000\\000\\020\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\048\\001\\049\\001\\016\\002\\025\\002\\027\\002\\029\\002\\023\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\015\\002\\046\\001\\047\\001\\000\\000\\053\\001\\000\\000\\000\\000\\\n\\000\\000\\032\\002\\021\\002\\031\\002\\000\\000\\033\\002\\030\\002\\018\\002\\\n\\000\\000\\000\\000\\019\\002\\000\\000\\054\\001\\024\\002\\026\\002\\028\\002\\\n\\022\\002\\000\\000\\078\\002\\034\\001\\068\\001\\000\\000\\109\\001\\000\\000\\\n\\238\\001\\226\\001\\153\\001\\000\\000\\245\\001\\000\\000\\000\\000\\000\\000\\\n\\246\\001\\000\\000\\000\\000\\000\\000\\032\\001\\033\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\152\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\203\\001\\000\\000\\240\\001\\241\\001\\247\\001\\000\\000\\002\\002\\\n\\003\\002\\047\\002\\045\\002\\046\\002\\079\\002\\000\\000\\000\\000\\000\\000\\\n\\052\\001\\000\\000\\244\\001\\000\\000\\000\\000\\239\\001\\000\\000\\000\\000\\\n\\000\\000\\118\\000\\067\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\124\\000\\126\\000\\000\\000\\000\\000\\125\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\136\\000\\132\\000\\133\\000\\198\\000\\069\\000\\000\\000\\\n\\089\\001\\000\\000\\079\\001\\000\\000\\000\\000\\
182\\001\\183\\001\\184\\001\\\n\\201\\001\\000\\000\\040\\000\\000\\000\\000\\000\\000\\000\\000\\000\\077\\001\\\n\\116\\001\\000\\000\\000\\000\\000\\000\\000\\000\\076\\001\\050\\001\\000\\000\\\n\\000\\000\\051\\001\\255\\000\\000\\000\\000\\000\\250\\000\\251\\000\\000\\000\\\n\\000\\000\\000\\000\\031\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\238\\000\\003\\001\\242\\000\\248\\000\\000\\000\\\n\\000\\000\\232\\000\\000\\000\\243\\000\\240\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\226\\000\\222\\000\\000\\000\\223\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\073\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\228\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\216\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\219\\001\\218\\001\\214\\001\\215\\001\\207\\001\\216\\001\\213\\001\\\n\\000\\000\\204\\001\\205\\001\\206\\001\\208\\001\\209\\001\\210\\001\\211\\001\\\n\\212\\001\\000\\000\\221\\001\\220\\001\\217\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\180\\001\\000\\000\\000\\002\\001\\002\\000\\000\\181\\001\\075\\001\\\n\\092\\001\\202\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\012\\002\\\n\\028\\000\\000\\000\\000\\000\\000\\000\\000\\000\\010\\002\\000\\000\\051\\002\\\n\\013\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\237\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\232\\001\\000\\000\\\n\\234\\001\\236\\001\\000\\000\\254\\001\\042\\002\\048\\002\\049\\002\\000\\000\\\n\\000\\000\\127\\000\\059\\000\\000\\000\\128\\000\\038\\001\\134\\000\\000\\000\\\n\\039\\001\\000\\000\\000\\000\\157\
\000\\000\\000\\138\\000\\139\\000\\000\\000\\\n\\000\\000\\156\\000\\000\\000\\141\\000\\000\\000\\142\\000\\000\\000\\155\\000\\\n\\188\\000\\187\\000\\137\\000\\000\\000\\159\\000\\000\\000\\160\\000\\050\\002\\\n\\123\\000\\034\\000\\122\\000\\121\\000\\000\\000\\000\\000\\158\\000\\143\\000\\\n\\000\\000\\000\\000\\000\\000\\146\\000\\000\\000\\090\\001\\079\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\008\\002\\000\\000\\255\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\194\\001\\000\\000\\\n\\000\\000\\000\\000\\054\\000\\055\\000\\000\\000\\030\\001\\000\\000\\029\\001\\\n\\000\\000\\241\\000\\239\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\044\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\091\\000\\000\\000\\002\\001\\\n\\001\\001\\000\\000\\074\\002\\000\\000\\000\\000\\000\\000\\225\\000\\000\\000\\\n\\118\\001\\119\\001\\000\\000\\000\\000\\000\\000\\056\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\200\\001\\000\\000\\000\\000\\000\\000\\000\\000\\006\\002\\\n\\221\\000\\005\\002\\000\\000\\000\\000\\038\\000\\215\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\024\\000\\251\\001\\252\\001\\248\\001\\\n\\250\\001\\014\\002\\000\\000\\000\\000\\249\\001\\000\\000\\000\\000\\000\\000\\\n\\007\\002\\000\\000\\091\\001\\055\\001\\063\\001\\064\\001\\000\\000\\000\\000\\\n\\105\\000\\000\\000\\035\\001\\037\\001\\000\\000\\009\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\030\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\129\\001\\151\\001
\\122\\001\\124\\001\\000\\000\\000\\000\\179\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\178\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\235\\001\\000\\000\\039\\002\\000\\000\\\n\\038\\002\\040\\002\\034\\002\\048\\000\\000\\000\\044\\002\\131\\000\\057\\002\\\n\\000\\000\\000\\000\\162\\000\\163\\000\\000\\000\\000\\000\\000\\000\\154\\000\\\n\\135\\000\\000\\000\\145\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\042\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\058\\001\\006\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\099\\000\\000\\000\\078\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\166\\001\\167\\001\\168\\001\\192\\001\\000\\000\\000\\000\\\n\\164\\001\\165\\001\\193\\001\\146\\001\\000\\000\\147\\001\\253\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\233\\001\\000\\000\\075\\000\\000\\000\\000\\000\\028\\001\\\n\\000\\000\\000\\000\\000\\000\\229\\000\\000\\000\\247\\000\\000\\000\\228\\000\\\n\\000\\000\\000\\000\\236\\000\\237\\000\\233\\000\\235\\000\\000\\000\\000\\000\\\n\\000\\000\\231\\000\\234\\000\\117\\001\\000\\001\\097\\000\\000\\000\\103\\000\\\n\\077\\000\\000\\000\\120\\001\\075\\002\\218\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\040\\001\\229\\001\\230\\001\\000\\000\\069\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\178\\000\\004\\002\\032\\000\\000\\000\\000\\000\\000\\000\\000\\000\\011\\002\\\n\\000\\000\\243\\001\\130\\001\\000\\000\\131\\001\\000\\000\\227\\001\\225\\001\\\n\\00
0\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\070\\001\\\n\\000\\000\\069\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\035\\002\\000\\000\\000\\000\\052\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\022\\000\\000\\000\\000\\000\\002\\000\\000\\000\\000\\000\\\n\\140\\000\\189\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\095\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\071\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\191\\001\\144\\001\\148\\001\\000\\000\\142\\001\\000\\000\\000\\000\\\n\\163\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\162\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\057\\000\\000\\000\\000\\000\\\n\\114\\001\\000\\000\\115\\001\\000\\000\\111\\001\\110\\001\\244\\000\\000\\000\\\n\\065\\002\\000\\000\\117\\000\\000\\000\\000\\000\\073\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\126\\001\\253\\001\\084\\001\\176\\000\\000\\000\\\n\\061\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\046\\000\\087\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\059\\001\\000\\000\\000\\000\\000\\000\\010\\000\\000\\000\\000\\000\\101\\000\\\n\\000\\000\\132\\001\\000\\000\\065\\001\\066\\001\\067\\001\\000\\000\\071\\002\\\n\\000\\000\\057\\001\\020\\000\\018\\000\\000\\000\\087\\000\\000\\000\\088\\000\\\n\\000\\000\\089\\000\\000\\000\\086\\000\\000\\000\\000\\000\\049\\000\\037\\002\\\n\\000\\000\\000\\000\\041\\002\\164\\000\\000\\000\\061\\001\\060\\001\\000\\000\\\n\\161\\000\\000\\000\\144\\000\\000\\000\\093\\000\\000\\000\\000\\000\\153\\000\\\n\\115\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\147\\000\\000\\000\\107\\001\\000\\000\\000\\000\\105\\001\\000\\000\\
000\\000\\\n\\000\\000\\000\\000\\101\\001\\000\\000\\252\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\113\\001\\000\\000\\246\\000\\230\\000\\000\\000\\000\\000\\121\\001\\\n\\000\\000\\000\\000\\000\\000\\104\\001\\000\\000\\000\\000\\044\\001\\045\\001\\\n\\041\\001\\043\\001\\042\\001\\063\\002\\085\\001\\177\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\186\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\012\\000\\137\\001\\000\\000\\000\\000\\000\\000\\\n\\127\\001\\000\\000\\008\\000\\000\\000\\000\\000\\072\\001\\073\\001\\074\\001\\\n\\071\\001\\050\\000\\081\\000\\036\\002\\107\\000\\000\\000\\000\\000\\166\\000\\\n\\169\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\197\\000\\193\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\093\\001\\\n\\063\\000\\000\\000\\000\\000\\099\\001\\000\\000\\000\\000\\000\\000\\254\\000\\\n\\111\\000\\109\\000\\054\\002\\000\\000\\227\\000\\000\\000\\094\\001\\097\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\185\\000\\000\\000\\000\\000\\000\\000\\138\\001\\000\\000\\139\\001\\128\\001\\\n\\086\\001\\072\\002\\080\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\065\\000\\000\\000\\191\\000\\000\\000\\\n\\000\\000\\000\\000\\151\\000\\152\\000\\148\\000\\150\\000\\149\\000\\106\\001\\\n\\036\\000\\000\\000\\000\\000\\000\\000\\000\\000\\249\\000\\000\\000\\096\\001\\\n\\098\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\179\\000\\134\\001\\\n\\000\\000\\000\\000\\140\\001\\000\\000\\000\\000\\000\\000\\174\\000\\026\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\167\\000\\000\\000\\\n\\016\\001\\000\\000\\000\\000\\000\\000\\010\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\095\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\135\\001\\\n\\171\\000\\000\\000\\022\\001\\000\\000\\172\\000\\175\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\067\\002\\170\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\004\\001\\168\\000\\000\\000\\190\\000\\000\\000\\000\\000\\194\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\103\\001\\183\\000\\184\\000\\180\\000\\\n\\182\\000\\181\\000\\136\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\020\\001\\021\\001\\017\\001\\019\\001\\018\\001\\000\\000\\014\\001\\\n\\015\\001\\011\\001\\013\\001\\012\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\113\\000\\196\\000\\000\\000\\108\\001\\100\\001\\000\\000\\\n\\026\\001\\027\\001\\023\\001\\025\\001\\024\\001\\000\\000\\173\\000\\060\\002\\\n\\008\\001\\009\\001\\005\\001\\007\\001\\006\\001\\192\\000\\000\\000\\000\\000\\\n\\004\\000\\000\\000\\000\\000\\195\\000\\102\\001\"\n\nlet yydgoto = 
\"\\005\\000\\\n\\025\\000\\033\\000\\039\\001\\141\\000\\100\\000\\096\\002\\101\\000\\199\\004\\\n\\158\\001\\118\\002\\247\\002\\102\\000\\233\\002\\121\\003\\027\\002\\045\\002\\\n\\046\\002\\002\\003\\139\\003\\000\\003\\018\\003\\019\\003\\012\\002\\013\\002\\\n\\087\\004\\088\\004\\056\\001\\057\\001\\047\\002\\048\\002\\019\\002\\020\\002\\\n\\103\\000\\145\\001\\146\\001\\066\\004\\006\\002\\194\\000\\195\\000\\105\\002\\\n\\106\\002\\210\\001\\211\\001\\010\\002\\015\\001\\103\\001\\104\\001\\001\\001\\\n\\108\\001\\104\\000\\211\\000\\212\\000\\213\\000\\163\\002\\148\\000\\105\\000\\\n\\218\\002\\219\\002\\180\\003\\181\\003\\249\\003\\250\\003\\026\\000\\027\\000\\\n\\028\\000\\029\\000\\116\\002\\183\\002\\230\\000\\193\\001\\214\\000\\231\\001\\\n\\242\\000\\187\\000\\152\\003\\011\\003\\079\\001\\232\\000\\233\\000\\099\\002\\\n\\100\\002\\152\\001\\153\\001\\243\\000\\244\\000\\106\\000\\032\\002\\245\\000\\\n\\246\\000\\107\\000\\108\\000\\015\\003\\109\\000\\073\\003\\074\\003\\075\\003\\\n\\105\\004\\106\\004\\120\\001\\179\\000\\110\\000\\152\\000\\031\\000\\111\\000\\\n\\180\\000\\181\\000\\014\\001\\112\\000\\090\\002\\113\\000\\040\\001\\136\\001\\\n\\130\\001\\124\\001\\151\\001\\141\\001\\143\\001\\161\\003\\091\\002\\204\\003\\\n\\048\\004\\114\\000\\050\\004\\051\\004\\008\\002\\138\\001\\229\\002\\230\\002\\\n\\115\\000\\239\\001\\248\\000\\185\\001\\248\\002\\206\\001\\080\\003\\153\\000\\\n\\117\\000\\118\\000\\004\\001\\119\\000\\120\\000\\159\\003\\121\\000\\189\\000\\\n\\249\\000\\135\\003\\123\\000\\205\\003\\124\\000\\125\\000\\183\\003\\167\\002\\\n\\076\\003\\236\\001\\250\\000\\126\\000\\127\\000\\187\\001\\188\\001\\189\\001\\\n\\190\\001\\128\\000\\129\\000\\191\\001\\130\\000\\005\\001\\006\\001\\131\\000\\\n\\106\\001\\132\\000\\133\\000\\134\\000\\135\\000\\136\\000\\137\\000\\065\\001\\\n\\013\\003\\142\\000\\143\\000\\112\\001\\081\\003\\138\\004\\213\\002\"\n\nlet yysindex = 
\"\\105\\003\\\n\\061\\006\\035\\004\\046\\057\\010\\001\\000\\000\\020\\255\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\078\\081\\046\\057\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\099\\000\\061\\006\\199\\008\\216\\007\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\078\\081\\078\\081\\078\\081\\182\\070\\078\\081\\078\\081\\\n\\078\\081\\078\\081\\046\\057\\078\\081\\000\\000\\202\\065\\000\\000\\030\\069\\\n\\196\\066\\027\\000\\000\\000\\027\\000\\233\\009\\206\\074\\076\\255\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\132\\069\\\n\\180\\081\\182\\070\\046\\057\\046\\057\\046\\057\\182\\070\\196\\255\\224\\076\\\n\\053\\054\\000\\000\\000\\000\\000\\000\\182\\070\\000\\000\\026\\082\\046\\057\\\n\\182\\070\\000\\000\\000\\000\\000\\000\\196\\066\\000\\000\\000\\000\\000\\000\\\n\\078\\081\\078\\081\\000\\000\\046\\057\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\140\\255\\000\\000\\000\\000\\000\\000\\122\\000\\000\\000\\008\\255\\\n\\000\\000\\000\\000\\000\\000\\160\\255\\000\\000\\112\\255\\179\\255\\034\\255\\\n\\000\\000\\182\\070\\234\\069\\005\\000\\000\\000\\000\\000\\046\\057\\046\\057\\\n\\059\\255\\154\\002\\081\\255\\007\\000\\000\\000\\028\\000\\048\\000\\225\\000\\\n\\062\\000\\000\\000\\094\\072\\000\\000\\000\\000\\000\\000\\113\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\125\\001\\045\\255\\134\\000\\\n\\000\\000\\053\\054\\000\\000\\090\\000\\078\\081\\000\\000\\117\\000\\192\\000\\\n\\027\\001\\000\\000\\000\\000\\014\\000\\017\\000\\020\\255\\020\\255\\020\\255\\\n\\193\\000\\012\\001\\193\\000\\014\\000\\032\\000\\020\\255\\016\\001\\020\\255\\\n\\000\\000\\000\\000\\246\\000\\246\\000\\000\\000\\246\\000\\036\\001\\020\\255\\\n\\014\\000\\147\\005\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\117\\001\\\n\\000\\000\\078\\081\\000\\000\\000\\000\\189\\002\\
000\\000\\000\\000\\000\\000\\\n\\000\\000\\009\\001\\000\\000\\115\\001\\105\\001\\079\\001\\046\\057\\000\\000\\\n\\000\\000\\027\\000\\027\\000\\244\\255\\142\\001\\000\\000\\000\\000\\190\\067\\\n\\053\\255\\000\\000\\000\\000\\027\\000\\027\\000\\000\\000\\000\\000\\120\\001\\\n\\089\\255\\101\\255\\000\\000\\135\\001\\078\\081\\107\\255\\014\\000\\190\\067\\\n\\224\\076\\224\\076\\002\\056\\000\\000\\000\\000\\000\\000\\000\\000\\047\\001\\\n\\233\\009\\000\\000\\143\\001\\000\\000\\000\\000\\127\\001\\042\\000\\224\\076\\\n\\002\\056\\000\\000\\000\\000\\166\\001\\000\\000\\062\\001\\224\\076\\000\\255\\\n\\168\\001\\000\\000\\254\\073\\027\\000\\027\\000\\242\\000\\164\\001\\212\\054\\\n\\074\\001\\008\\255\\024\\001\\162\\001\\000\\000\\070\\001\\235\\000\\087\\001\\\n\\103\\001\\220\\001\\213\\001\\046\\057\\000\\000\\102\\001\\242\\001\\226\\001\\\n\\112\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\056\\075\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\132\\078\\000\\000\\000\\000\\000\\000\\234\\078\\108\\001\\113\\001\\\n\\234\\069\\000\\000\\026\\082\\000\\000\\000\\000\\126\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\093\\001\\078\\081\\046\\057\\046\\057\\078\\081\\000\\000\\\n\\000\\000\\182\\070\\193\\000\\014\\000\\046\\057\\000\\000\\127\\001\\000\\000\\\n\\000\\000\\105\\255\\182\\070\\134\\255\\078\\081\\046\\057\\004\\002\\004\\002\\\n\\059\\068\\059\\068\\046\\057\\046\\057\\046\\057\\046\\057\\000\\000\\182\\070\\\n\\059\\068\\182\\070\\059\\068\\059\\068\\182\\070\\182\\070\\182\\070\\182\\070\\\n\\182\\070\\182\\070\\182\\070\\182\\070\\182\\070\\182\\070\\254\\073\\254\\073\\\n\\254\\073\\254\\073\\254\\073\\080\\079\\180\\081\\068\\255\\000\\000\\094\\072\\\n\\000\\000\\000\\000\\014\\000\\000\\000\\000\\000\\000\\000\\000\\000\\010\\001\\\n\\046\\057\\000\\000\\000\\000\\046\\057\\000\\000\\000\\000\\000\\000\\095\\255\\\n\\000\\000\\020\\255\\095\\255\\000\
\000\\178\\001\\000\\000\\000\\000\\102\\001\\\n\\020\\255\\000\\000\\102\\001\\000\\000\\011\\001\\000\\000\\234\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\020\\255\\000\\000\\131\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\014\\000\\139\\001\\000\\000\\000\\000\\\n\\029\\002\\078\\008\\231\\000\\000\\000\\196\\066\\000\\000\\000\\000\\046\\057\\\n\\046\\057\\046\\057\\046\\057\\000\\000\\078\\081\\000\\000\\046\\057\\172\\001\\\n\\008\\002\\017\\002\\182\\070\\182\\070\\182\\070\\028\\071\\078\\081\\078\\081\\\n\\078\\081\\180\\081\\028\\071\\028\\071\\028\\071\\078\\081\\028\\071\\080\\070\\\n\\018\\002\\022\\002\\180\\001\\000\\000\\215\\002\\098\\000\\000\\000\\200\\072\\\n\\030\\002\\053\\255\\000\\000\\000\\000\\237\\001\\000\\000\\089\\255\\000\\000\\\n\\237\\001\\000\\000\\000\\000\\021\\002\\033\\002\\196\\001\\186\\001\\057\\002\\\n\\052\\002\\205\\001\\000\\000\\201\\001\\083\\001\\197\\001\\209\\001\\215\\001\\\n\\216\\001\\119\\000\\046\\255\\218\\001\\046\\057\\000\\000\\020\\255\\000\\000\\\n\\000\\000\\127\\001\\000\\000\\224\\076\\224\\076\\224\\076\\000\\000\\224\\076\\\n\\000\\000\\000\\000\\182\\070\\046\\057\\224\\076\\000\\000\\228\\001\\062\\000\\\n\\070\\002\\073\\002\\182\\070\\182\\070\\182\\070\\231\\001\\162\\075\\182\\079\\\n\\028\\080\\236\\001\\000\\000\\219\\255\\046\\057\\219\\255\\255\\001\\000\\000\\\n\\000\\000\\000\\000\\182\\070\\072\\002\\000\\000\\000\\000\\169\\000\\074\\077\\\n\\224\\076\\075\\002\\050\\073\\085\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\248\\001\\065\\067\\000\\000\\182\\070\\109\\255\\195\\255\\\n\\000\\000\\206\\074\\000\\000\\000\\000\\000\\000\\000\\000\\109\\002\\235\\001\\\n\\000\\000\\102\\001\\000\\000\\000\\000\\001\\002\\000\\000\\059\\068\\148\\255\\\n\\059\\068\\067\\002\\006\\002\\127\\001\\005\\002\\016\\002\\229\\001\\000\\000\\\n\\011\\002\\136\\002\\145\\002\\049\\002\\162\\002\\101\\002\\068\\002\\007\\000\\\n\\000\\000\\000\\000
\\000\\000\\000\\000\\059\\001\\116\\001\\000\\000\\040\\003\\\n\\166\\002\\116\\001\\026\\001\\015\\001\\000\\000\\015\\001\\088\\000\\120\\000\\\n\\062\\000\\180\\255\\100\\002\\180\\081\\000\\000\\081\\002\\000\\000\\088\\255\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\113\\000\\000\\000\\000\\000\\000\\000\\\n\\059\\002\\194\\002\\000\\000\\000\\000\\027\\000\\177\\002\\196\\066\\000\\000\\\n\\000\\000\\027\\000\\000\\000\\216\\002\\020\\255\\065\\067\\201\\002\\020\\255\\\n\\105\\002\\000\\000\\098\\002\\020\\255\\014\\000\\092\\002\\106\\002\\113\\002\\\n\\115\\002\\116\\002\\231\\000\\220\\002\\000\\000\\000\\000\\118\\002\\221\\002\\\n\\174\\002\\120\\002\\000\\000\\122\\002\\000\\000\\046\\057\\046\\057\\071\\255\\\n\\025\\255\\123\\002\\000\\000\\000\\000\\000\\000\\000\\000\\149\\002\\001\\003\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\028\\071\\000\\000\\000\\000\\184\\068\\\n\\184\\068\\028\\071\\028\\071\\028\\071\\028\\071\\028\\071\\028\\071\\028\\071\\\n\\028\\071\\028\\071\\028\\071\\100\\074\\100\\074\\100\\074\\100\\074\\100\\074\\\n\\078\\081\\200\\072\\000\\000\\184\\068\\000\\000\\078\\081\\046\\057\\000\\000\\\n\\046\\057\\224\\076\\014\\000\\000\\000\\014\\000\\000\\000\\184\\068\\000\\000\\\n\\224\\076\\224\\076\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\\119\\000\\\n\\184\\068\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\004\\003\\000\\000\\\n\\000\\000\\031\\255\\000\\000\\000\\000\\000\\000\\046\\057\\046\\057\\033\\255\\\n\\035\\255\\126\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\193\\082\\\n\\000\\000\\000\\000\\000\\000\\146\\002\\000\\000\\059\\068\\205\\002\\046\\057\\\n\\102\\001\\242\\002\\169\\000\\042\\003\\145\\056\\233\\009\\035\\001\\242\\001\\\n\\046\\057\\254\\073\\053\\054\\254\\073\\025\\003\\176\\002\\152\\073\\046\\057\\\n\\000\\000\\000\\000\\000\\000\\185\\255\\179\\002\\168\\001\\078\\081\\000\\000\\\n\\169\\000\\000\\000\\000\\000\\059\\068\\000\\000\\046\\057\\000\\000\\000\\000\\\n\\04
6\\057\\000\\000\\059\\068\\059\\068\\224\\076\\046\\057\\196\\066\\000\\000\\\n\\196\\066\\000\\000\\058\\001\\208\\001\\088\\002\\141\\002\\254\\073\\180\\002\\\n\\094\\072\\000\\000\\000\\000\\068\\255\\234\\254\\000\\000\\147\\002\\032\\003\\\n\\050\\073\\009\\003\\000\\000\\153\\002\\196\\066\\000\\000\\045\\003\\102\\001\\\n\\000\\000\\000\\000\\020\\255\\030\\003\\078\\081\\014\\000\\146\\000\\000\\000\\\n\\231\\000\\231\\000\\231\\000\\231\\000\\231\\000\\000\\000\\196\\066\\189\\002\\\n\\189\\002\\189\\002\\189\\002\\077\\065\\195\\002\\169\\002\\046\\057\\046\\057\\\n\\046\\057\\000\\000\\000\\000\\000\\000\\060\\003\\000\\000\\047\\002\\253\\002\\\n\\000\\000\\087\\003\\186\\002\\253\\002\\043\\001\\088\\001\\000\\000\\088\\001\\\n\\108\\000\\189\\000\\098\\000\\192\\255\\234\\002\\000\\000\\067\\002\\203\\002\\\n\\000\\000\\058\\003\\000\\000\\217\\002\\000\\000\\000\\000\\000\\000\\207\\002\\\n\\000\\000\\077\\003\\000\\000\\018\\003\\184\\068\\000\\000\\067\\002\\046\\057\\\n\\190\\002\\191\\002\\015\\003\\027\\000\\046\\057\\230\\002\\236\\002\\237\\002\\\n\\238\\002\\241\\002\\219\\255\\000\\000\\000\\000\\000\\000\\000\\000\\046\\057\\\n\\000\\000\\102\\001\\244\\002\\246\\002\\252\\002\\254\\002\\002\\003\\226\\001\\\n\\046\\057\\096\\003\\000\\000\\000\\000\\123\\003\\234\\069\\019\\003\\078\\081\\\n\\000\\000\\050\\073\\254\\073\\021\\003\\000\\000\\022\\003\\059\\068\\000\\000\\\n\\094\\003\\000\\000\\027\\003\\000\\000\\000\\000\\000\\000\\138\\003\\000\\000\\\n\\119\\003\\000\\000\\000\\000\\000\\000\\046\\057\\000\\000\\046\\057\\000\\000\\\n\\046\\057\\000\\000\\046\\057\\000\\000\\100\\002\\094\\072\\000\\000\\000\\000\\\n\\068\\255\\088\\255\\000\\000\\000\\000\\059\\002\\000\\000\\000\\000\\071\\066\\\n\\000\\000\\254\\073\\000\\000\\050\\073\\000\\000\\026\\003\\065\\067\\000\\000\\\n\\000\\000\\065\\067\\108\\003\\142\\003\\146\\003\\147\\003\\148\\003\\150\\003\\\n\\000\\000\\014\\000\\000\\000\\047\\003\\077\\065\\000\\000\\111\\003\\
046\\057\\\n\\078\\081\\006\\003\\000\\000\\014\\003\\000\\000\\100\\074\\048\\003\\078\\081\\\n\\078\\081\\000\\000\\014\\000\\000\\000\\000\\000\\067\\002\\059\\003\\000\\000\\\n\\078\\081\\046\\057\\046\\057\\000\\000\\153\\003\\192\\003\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\233\\009\\233\\009\\\n\\233\\009\\233\\009\\233\\009\\158\\003\\000\\000\\046\\057\\076\\003\\182\\070\\\n\\066\\003\\062\\000\\050\\073\\000\\000\\000\\000\\081\\003\\050\\073\\059\\068\\\n\\000\\000\\046\\057\\000\\000\\224\\076\\046\\057\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\066\\001\\030\\078\\000\\000\\\n\\000\\000\\197\\003\\130\\071\\085\\003\\044\\003\\103\\003\\000\\000\\000\\000\\\n\\174\\003\\196\\066\\196\\066\\196\\066\\196\\066\\196\\066\\207\\003\\000\\000\\\n\\000\\000\\046\\057\\050\\003\\000\\000\\027\\000\\027\\000\\234\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\080\\003\\000\\000\\219\\003\\000\\000\\000\\000\\\n\\046\\057\\046\\057\\226\\001\\226\\001\\226\\001\\226\\001\\226\\001\\046\\057\\\n\\000\\000\\050\\073\\089\\003\\098\\003\\000\\000\\050\\073\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\022\\000\\020\\255\\115\\055\\000\\000\\101\\003\\\n\\251\\255\\095\\003\\210\\003\\177\\000\\000\\000\\030\\078\\000\\000\\127\\001\\\n\\020\\255\\065\\067\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\110\\003\\027\\000\\204\\003\\209\\003\\000\\000\\046\\057\\000\\000\\\n\\000\\000\\198\\003\\200\\003\\202\\003\\203\\003\\205\\003\\000\\000\\000\\000\\\n\\112\\003\\050\\073\\000\\000\\196\\066\\222\\000\\196\\066\\000\\000\\000\\000\\\n\\022\\000\\115\\003\\012\\076\\130\\080\\232\\080\\118\\003\\000\\000\\221\\082\\\n\\000\\000\\222\\000\\196\\066\\054\\083\\000\\000\\103\\000\\120\\003\\027\\000\\\n\\122\\003\\107\\003\\216\\003\\024\\000\\046\\057\\227\\003\\046\\057\\046\\057\\\n\\000\\000\\046\\057\\046\\057\\046\\057\\046\\057\\046\
\057\\050\\073\\000\\000\\\n\\000\\000\\082\\083\\000\\000\\134\\003\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\129\\003\\130\\003\\131\\003\\132\\003\\133\\003\\\n\\144\\003\\000\\000\\000\\000\\152\\003\\154\\003\\155\\003\\156\\003\\159\\003\\\n\\171\\083\\000\\000\\000\\000\\230\\003\\000\\000\\127\\001\\065\\067\\000\\000\\\n\\232\\003\\157\\003\\046\\057\\025\\004\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\167\\003\\168\\003\\170\\003\\172\\003\\173\\003\\\n\\012\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\222\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\176\\003\\177\\003\\178\\003\\180\\003\\\n\\181\\003\\065\\067\\000\\000\\000\\000\\014\\004\\000\\000\\000\\000\\027\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\196\\066\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\065\\067\\033\\004\\\n\\000\\000\\194\\003\\046\\057\\000\\000\\000\\000\"\n\nlet yyrindex = 
\"\\000\\000\\\n\\161\\003\\114\\082\\000\\000\\000\\000\\000\\000\\132\\012\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\196\\003\\199\\003\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\161\\003\\000\\000\\114\\082\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\195\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\038\\004\\000\\000\\038\\004\\000\\000\\000\\000\\232\\071\\153\\015\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\199\\003\\199\\003\\058\\004\\000\\000\\118\\076\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\199\\003\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\146\\017\\000\\000\\000\\000\\000\\000\\016\\047\\000\\000\\154\\013\\\n\\000\\000\\000\\000\\000\\000\\169\\036\\000\\000\\000\\000\\070\\014\\065\\016\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\124\\040\\133\\021\\148\\038\\081\\037\\000\\000\\213\\039\\059\\039\\088\\035\\\n\\107\\029\\000\\000\\120\\025\\000\\000\\000\\000\\000\\000\\240\\014\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\022\\041\\000\\000\\054\\255\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\118\\076\\000\\000\\118\\076\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\253\\051\\253\\051\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\176\\041\\\n\\000\\000\\074\\042\\000\\000\\234\\016\\059\\018\\
000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\206\\003\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\038\\004\\038\\004\\000\\000\\019\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\047\\048\\047\\048\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\001\\000\\000\\000\\000\\\n\\214\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\243\\255\\000\\000\\205\\048\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\197\\255\\000\\000\\122\\004\\061\\001\\000\\000\\\n\\173\\044\\000\\000\\000\\000\\038\\004\\038\\004\\000\\000\\019\\001\\000\\000\\\n\\000\\000\\247\\058\\000\\000\\095\\000\\000\\000\\211\\003\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\180\\077\\019\\004\\226\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\189\\003\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\118\\076\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\165\\000\\074\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\120\\025\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\077\\005\\\n\\000\\000\\000\\000\\000\\000\\070\\255\\000\\000\\000\\000\\000\\000\\099\\049\\\n\\000\\000\\000\\000\\039\\004\\000\
\000\\098\\052\\000\\000\\000\\000\\180\\077\\\n\\000\\000\\000\\000\\180\\077\\000\\000\\199\\052\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\215\\003\\000\\000\\000\\000\\000\\000\\\n\\044\\053\\000\\000\\057\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\122\\255\\188\\255\\104\\004\\000\\000\\208\\024\\\n\\000\\000\\067\\004\\000\\000\\000\\000\\203\\057\\000\\000\\000\\000\\000\\000\\\n\\203\\057\\000\\000\\000\\000\\130\\000\\000\\000\\000\\000\\220\\003\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\218\\003\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\091\\255\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\084\\004\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\166\\047\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\224\\003\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\041\\004\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\108\\255\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\201\\003\\000\\000\\\n\\000\\000\\180\\077\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\033\\026\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\228\\042\\126\\043\\000\\000\\000\\000\\000\\000\\000\\000\\250\\037\\\n\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\036\\182\\032\\000\\000\\175\\034\\\n\\007\\034\\094\\033\\013\\032\\188\\030\\000\\000\\101\\031\\195\\028\\000\\000\\\n\\020\\030\\026\\028\\201\\026\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\240\\014\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\217\\049\\000\\000\\000\\000\\187\\003\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\077\\004\\000\\000\\145\\053\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\233\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\057\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\052\\001\\\n\\000\\000\\182\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\208\\024\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\241\\003\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\091\\004\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\045\\005\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\052\\001\\\n\\000\\000\\182\\000\\123\\062\\017\\063\\167\\063\\061\\064\\211\\064\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\243\\003\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\180\\077\\000\\000\\041\\004\\000\\000\\000\\000\\130\\001\\000\\000\\019\\004\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\255\\000\\132\\011\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\071\\045\\000\\000\\000\\000\\\n\\041\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\00
0\\000\\209\\000\\000\\000\\000\\000\\024\\044\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\046\\022\\214\\022\\127\\023\\039\\024\\000\\000\\000\\000\\\n\\120\\025\\144\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\221\\003\\\n\\000\\000\\051\\051\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\180\\077\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\057\\004\\057\\004\\057\\004\\057\\004\\057\\004\\000\\000\\000\\000\\227\\018\\\n\\140\\019\\052\\020\\221\\020\\244\\003\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\080\\000\\249\\001\\\n\\000\\000\\016\\000\\118\\255\\006\\005\\185\\001\\250\\002\\000\\000\\186\\003\\\n\\091\\002\\000\\000\\251\\006\\153\\005\\212\\010\\000\\000\\235\\003\\069\\255\\\n\\000\\000\\249\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\251\\003\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\235\\003\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\038\\004\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\224\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\180\\077\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\082\\004\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\220\\045\\000\\000\\\n\\118\\046\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\114\\027\\120\\025\\000\\000\\000\\000\\\n\\050\\255\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\152\\051\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\244\\003\\000\\000\\000\\000\\
000\\000\\\n\\022\\041\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\241\\003\\000\\000\\000\\000\\235\\003\\000\\000\\000\\000\\\n\\121\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\161\\001\\210\\001\\\n\\000\\002\\013\\002\\062\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\078\\007\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\152\\051\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\004\\004\\000\\000\\000\\000\\038\\004\\038\\004\\011\\012\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\082\\004\\082\\004\\082\\004\\082\\004\\082\\004\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\083\\050\\000\\000\\000\\000\\097\\058\\000\\000\\\n\\000\\000\\006\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\038\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\083\\050\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\007\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\039\\004\\\n\\000\\000\\008\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\141\\059\\035\\060\\\n\\185\\060\\079\\061\\229\\061\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\011\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\205\\050\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\007\\004\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\038\\004\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\"\n\nlet yygindex = 
\"\\000\\000\\\n\\000\\000\\000\\000\\241\\007\\000\\000\\242\\007\\000\\000\\049\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\011\\000\\000\\000\\000\\000\\118\\253\\000\\000\\\n\\000\\000\\000\\000\\137\\002\\000\\000\\000\\000\\000\\000\\250\\252\\173\\002\\\n\\053\\001\\000\\000\\000\\000\\158\\255\\069\\004\\000\\000\\000\\000\\000\\000\\\n\\072\\254\\229\\004\\230\\004\\000\\000\\000\\000\\000\\000\\238\\003\\000\\000\\\n\\125\\002\\000\\000\\235\\002\\192\\002\\149\\003\\167\\254\\093\\254\\185\\254\\\n\\072\\003\\193\\255\\140\\001\\000\\000\\241\\254\\255\\002\\110\\255\\200\\000\\\n\\033\\254\\000\\000\\239\\001\\000\\000\\165\\001\\000\\000\\140\\005\\166\\005\\\n\\142\\005\\000\\000\\188\\253\\247\\002\\042\\255\\231\\003\\073\\255\\212\\003\\\n\\208\\255\\242\\004\\023\\002\\165\\254\\002\\002\\051\\255\\000\\000\\150\\002\\\n\\002\\255\\069\\003\\023\\005\\222\\003\\190\\255\\018\\000\\200\\002\\068\\254\\\n\\055\\255\\229\\254\\116\\005\\034\\002\\108\\006\\250\\001\\000\\000\\254\\001\\\n\\045\\001\\000\\000\\114\\254\\000\\000\\019\\000\\238\\255\\000\\000\\038\\010\\\n\\000\\000\\000\\000\\183\\255\\184\\010\\070\\004\\106\\010\\150\\255\\000\\000\\\n\\031\\005\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\223\\255\\\n\\100\\253\\208\\003\\091\\252\\000\\000\\000\\000\\000\\000\\044\\253\\009\\255\\\n\\030\\005\\000\\000\\000\\000\\056\\255\\033\\253\\000\\000\\241\\004\\003\\002\\\n\\214\\254\\240\\254\\000\\000\\000\\000\\000\\000\\000\\000\\073\\000\\253\\255\\\n\\226\\003\\000\\000\\235\\255\\041\\252\\247\\255\\209\\254\\000\\000\\017\\004\\\n\\000\\000\\028\\003\\026\\255\\128\\004\\187\\254\\000\\000\\129\\254\\137\\255\\\n\\171\\253\\046\\000\\054\\004\\144\\004\\000\\000\\228\\003\\024\\254\\239\\007\\\n\\000\\000\\178\\255\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\061\\002\\111\\004\\000\\000\\000\\000\\032\\002\\055\\001\\129\\002\"\n\nlet yytablesize = 21824\nlet yytable = 
\"\\122\\000\\\n\\045\\001\\002\\001\\115\\001\\238\\001\\231\\000\\016\\001\\107\\002\\150\\000\\\n\\122\\000\\227\\001\\083\\002\\057\\002\\218\\001\\064\\001\\084\\002\\007\\002\\\n\\216\\000\\035\\002\\220\\000\\030\\000\\030\\000\\212\\002\\151\\000\\208\\001\\\n\\213\\001\\194\\001\\204\\000\\222\\001\\162\\002\\078\\002\\033\\002\\150\\000\\\n\\150\\000\\150\\000\\058\\002\\150\\000\\150\\000\\150\\000\\205\\000\\122\\000\\\n\\150\\000\\192\\002\\254\\000\\036\\002\\007\\001\\030\\000\\038\\003\\030\\000\\\n\\008\\001\\009\\001\\010\\001\\136\\002\\080\\001\\068\\004\\255\\000\\140\\002\\\n\\142\\002\\082\\001\\080\\001\\196\\000\\158\\003\\046\\001\\080\\001\\122\\000\\\n\\122\\000\\122\\000\\185\\002\\061\\001\\056\\002\\122\\000\\065\\003\\066\\003\\\n\\067\\003\\068\\003\\069\\003\\044\\001\\122\\000\\151\\000\\151\\000\\151\\000\\\n\\154\\003\\047\\000\\003\\001\\190\\000\\055\\002\\150\\000\\150\\000\\128\\001\\\n\\122\\000\\131\\001\\151\\000\\235\\002\\066\\001\\068\\001\\200\\000\\082\\001\\\n\\110\\000\\206\\000\\154\\000\\073\\001\\132\\001\\055\\000\\207\\000\\191\\003\\\n\\110\\001\\220\\003\\135\\002\\110\\004\\080\\001\\078\\000\\009\\000\\242\\000\\\n\\242\\000\\155\\003\\242\\000\\122\\000\\122\\000\\145\\000\\201\\000\\199\\003\\\n\\220\\001\\201\\000\\042\\001\\047\\000\\233\\001\\130\\002\\047\\001\\079\\002\\\n\\068\\000\\097\\002\\072\\001\\158\\001\\080\\001\\048\\001\\220\\001\\143\\001\\\n\\080\\001\\145\\000\\207\\000\\115\\002\\158\\001\\234\\001\\122\\000\\055\\000\\\n\\207\\000\\150\\000\\168\\003\\079\\002\\251\\000\\250\\001\\207\\000\\080\\002\\\n\\080\\000\\062\\001\\063\\001\\233\\001\\052\\001\\201\\000\\212\\001\\204\\001\\\n\\219\\001\\080\\001\\009\\000\\055\\001\\208\\000\\229\\003\\235\\001\\111\\001\\\n\\169\\001\\170\\001\\068\\000\\080\\002\\234\\001\\026\\003\\219\\001\\080\\001\\\n\\068\\000\\078\\000\\209\\000\\251\\000\\056\\002\\186\\002\\150\\000\\058\\001\\\n\\231\\000\\080\\000\\158\\001\\096\\001\\137\\004\\
055\\001\\143\\001\\059\\001\\\n\\048\\003\\012\\003\\110\\000\\122\\000\\055\\002\\202\\002\\232\\001\\157\\002\\\n\\091\\003\\222\\001\\092\\003\\074\\001\\047\\003\\081\\001\\036\\002\\253\\003\\\n\\060\\001\\168\\001\\149\\000\\045\\001\\080\\001\\210\\000\\016\\001\\158\\001\\\n\\158\\001\\150\\000\\241\\001\\242\\001\\172\\003\\173\\003\\174\\003\\175\\003\\\n\\176\\003\\243\\002\\242\\000\\245\\002\\081\\002\\039\\002\\020\\004\\215\\002\\\n\\208\\004\\041\\002\\184\\000\\185\\000\\186\\000\\228\\003\\191\\000\\192\\000\\\n\\193\\000\\034\\002\\158\\001\\198\\000\\237\\002\\084\\003\\145\\001\\093\\000\\\n\\081\\002\\210\\000\\089\\002\\105\\003\\122\\000\\158\\001\\162\\002\\210\\000\\\n\\222\\001\\143\\001\\041\\002\\184\\002\\008\\003\\210\\000\\196\\002\\090\\000\\\n\\122\\000\\137\\004\\194\\001\\203\\001\\145\\000\\052\\003\\037\\004\\171\\001\\\n\\244\\002\\129\\003\\039\\004\\023\\002\\074\\004\\075\\004\\076\\004\\077\\004\\\n\\078\\004\\069\\001\\090\\000\\191\\002\\099\\001\\157\\001\\044\\001\\082\\001\\\n\\049\\001\\050\\001\\150\\000\\241\\002\\012\\001\\002\\001\\157\\001\\044\\001\\\n\\160\\002\\172\\001\\084\\004\\085\\002\\145\\000\\040\\002\\208\\002\\114\\002\\\n\\150\\000\\122\\000\\122\\000\\150\\000\\078\\000\\244\\002\\131\\002\\047\\000\\\n\\222\\001\\122\\000\\062\\001\\063\\001\\137\\002\\138\\002\\095\\001\\145\\000\\\n\\126\\003\\096\\000\\122\\000\\222\\001\\013\\001\\096\\001\\056\\002\\122\\000\\\n\\122\\000\\122\\000\\122\\000\\055\\000\\207\\000\\080\\004\\096\\004\\083\\001\\\n\\043\\002\\083\\004\\096\\000\\097\\001\\157\\001\\122\\001\\092\\002\\070\\001\\\n\\095\\001\\092\\002\\085\\004\\145\\000\\149\\000\\155\\001\\055\\001\\096\\001\\\n\\208\\000\\088\\002\\156\\002\\115\\002\\129\\001\\084\\001\\068\\000\\078\\000\\\n\\015\\004\\157\\002\\078\\000\\096\\000\\156\\002\\122\\000\\209\\000\\090\\000\\\n\\122\\000\\157\\001\\002\\001\\157\\002\\086\\004\\068\\000\\003\\001\\158\\002\\\n\\095\\001\\078\\000\\212\\002\\087\
\002\\109\\001\\120\\004\\151\\000\\007\\003\\\n\\035\\002\\186\\000\\061\\002\\062\\002\\063\\002\\064\\002\\065\\002\\066\\002\\\n\\067\\002\\068\\002\\069\\002\\070\\002\\157\\001\\097\\001\\107\\001\\107\\002\\\n\\100\\003\\050\\002\\051\\002\\047\\000\\155\\001\\152\\004\\129\\002\\157\\001\\\n\\145\\000\\103\\004\\113\\001\\059\\002\\122\\000\\122\\000\\122\\000\\122\\000\\\n\\153\\004\\128\\002\\163\\004\\122\\000\\202\\001\\098\\001\\099\\001\\055\\000\\\n\\145\\000\\045\\001\\217\\002\\150\\000\\150\\000\\150\\000\\130\\003\\151\\003\\\n\\153\\003\\124\\002\\150\\000\\093\\000\\016\\001\\064\\001\\196\\000\\232\\001\\\n\\124\\004\\170\\003\\145\\004\\210\\000\\209\\002\\156\\002\\209\\002\\098\\001\\\n\\099\\001\\038\\001\\068\\000\\003\\001\\190\\003\\223\\002\\016\\001\\053\\003\\\n\\054\\003\\159\\002\\160\\002\\038\\001\\155\\001\\117\\002\\151\\001\\013\\000\\\n\\015\\000\\151\\001\\158\\002\\159\\002\\160\\002\\194\\002\\171\\003\\155\\001\\\n\\114\\001\\122\\000\\103\\003\\071\\003\\231\\001\\029\\000\\201\\002\\098\\001\\\n\\099\\001\\184\\002\\145\\000\\038\\001\\023\\000\\201\\000\\082\\003\\231\\001\\\n\\122\\000\\200\\002\\117\\003\\151\\001\\119\\003\\050\\001\\255\\003\\124\\003\\\n\\087\\003\\000\\004\\011\\000\\012\\000\\044\\001\\150\\000\\040\\002\\085\\001\\\n\\255\\001\\122\\000\\047\\000\\028\\002\\068\\000\\116\\001\\031\\002\\245\\000\\\n\\236\\002\\011\\000\\053\\001\\054\\001\\100\\004\\116\\000\\080\\001\\211\\002\\\n\\086\\001\\023\\000\\245\\000\\012\\003\\002\\001\\243\\001\\055\\000\\145\\000\\\n\\112\\003\\115\\002\\115\\002\\115\\002\\115\\002\\115\\002\\136\\003\\040\\002\\\n\\023\\000\\220\\002\\055\\003\\056\\003\\057\\003\\058\\003\\059\\003\\060\\003\\\n\\061\\003\\062\\003\\063\\003\\064\\003\\159\\002\\160\\002\\011\\000\\244\\001\\\n\\060\\001\\068\\000\\013\\000\\015\\000\\208\\000\\116\\000\\150\\001\\069\\001\\\n\\038\\001\\122\\004\\101\\002\\219\\000\\219\\000\\011\\000\\219\\000\\011\\002\\\n\\087\\001\\233\\003
\\209\\000\\020\\003\\242\\003\\153\\003\\117\\001\\243\\000\\\n\\023\\003\\013\\001\\088\\001\\089\\001\\090\\001\\091\\001\\092\\001\\093\\001\\\n\\150\\001\\243\\000\\113\\003\\116\\000\\029\\000\\219\\000\\094\\001\\149\\001\\\n\\129\\001\\243\\000\\243\\000\\229\\001\\243\\000\\230\\001\\014\\000\\016\\000\\\n\\160\\001\\161\\001\\162\\001\\163\\001\\201\\000\\003\\001\\116\\000\\195\\001\\\n\\196\\001\\138\\000\\086\\001\\140\\001\\243\\000\\098\\003\\087\\001\\079\\003\\\n\\229\\001\\149\\001\\178\\002\\214\\003\\149\\001\\204\\001\\212\\001\\139\\000\\\n\\140\\000\\087\\001\\111\\003\\243\\000\\092\\001\\093\\001\\132\\002\\133\\002\\\n\\134\\002\\116\\000\\122\\000\\122\\000\\141\\003\\139\\002\\225\\003\\092\\001\\\n\\093\\001\\108\\004\\148\\002\\230\\003\\144\\001\\157\\001\\164\\001\\094\\001\\\n\\045\\003\\046\\003\\165\\001\\243\\000\\198\\003\\068\\000\\166\\001\\022\\003\\\n\\153\\002\\154\\002\\087\\001\\167\\001\\116\\000\\197\\001\\003\\001\\003\\001\\\n\\155\\002\\003\\001\\219\\001\\078\\000\\088\\001\\089\\001\\090\\001\\091\\001\\\n\\092\\001\\093\\001\\040\\004\\122\\000\\201\\001\\122\\000\\173\\001\\201\\000\\\n\\094\\001\\003\\001\\252\\003\\221\\001\\223\\001\\231\\000\\201\\000\\148\\002\\\n\\145\\000\\077\\003\\228\\001\\078\\003\\237\\001\\236\\000\\236\\000\\072\\003\\\n\\236\\000\\243\\000\\243\\000\\243\\000\\245\\001\\153\\002\\154\\002\\252\\001\\\n\\050\\001\\182\\003\\122\\000\\122\\000\\243\\000\\243\\000\\160\\001\\251\\001\\\n\\236\\000\\253\\001\\243\\000\\087\\001\\219\\000\\219\\000\\254\\001\\160\\001\\\n\\089\\003\\090\\003\\186\\001\\219\\000\\122\\000\\000\\002\\219\\000\\219\\000\\\n\\091\\001\\092\\001\\093\\001\\198\\001\\200\\001\\122\\000\\188\\004\\122\\000\\\n\\160\\001\\094\\001\\209\\001\\243\\000\\122\\000\\001\\002\\237\\000\\237\\000\\\n\\035\\004\\237\\000\\002\\002\\150\\000\\003\\002\\220\\002\\160\\001\\161\\001\\\n\\162\\001\\163\\001\\122\\000\\005\\002\\009\\002\\122\\000\\011\\002\\014\\002\\\n\\02
1\\002\\237\\000\\122\\000\\018\\002\\025\\002\\160\\001\\219\\000\\219\\000\\\n\\131\\003\\206\\004\\116\\000\\220\\002\\026\\002\\042\\004\\154\\001\\044\\002\\\n\\137\\003\\027\\004\\028\\004\\029\\004\\030\\004\\031\\004\\116\\000\\154\\001\\\n\\095\\002\\102\\002\\143\\003\\104\\002\\233\\000\\233\\000\\210\\004\\233\\000\\\n\\108\\002\\150\\000\\160\\001\\160\\001\\160\\001\\160\\001\\109\\002\\143\\002\\\n\\154\\001\\235\\000\\235\\000\\209\\002\\235\\000\\126\\002\\012\\004\\233\\000\\\n\\150\\000\\125\\002\\144\\002\\122\\000\\122\\000\\122\\000\\127\\002\\138\\003\\\n\\145\\002\\138\\003\\164\\002\\166\\002\\235\\000\\160\\001\\022\\004\\116\\000\\\n\\116\\000\\186\\003\\187\\003\\188\\003\\170\\002\\154\\001\\171\\002\\116\\000\\\n\\160\\001\\172\\002\\173\\002\\133\\003\\134\\003\\163\\003\\174\\002\\201\\000\\\n\\175\\002\\201\\000\\182\\003\\176\\002\\179\\002\\116\\000\\116\\000\\116\\000\\\n\\116\\000\\177\\002\\234\\000\\234\\000\\122\\000\\234\\000\\147\\002\\177\\003\\\n\\180\\002\\122\\000\\154\\001\\154\\001\\154\\001\\201\\000\\181\\002\\182\\002\\\n\\068\\001\\187\\002\\197\\002\\198\\002\\122\\000\\234\\000\\199\\002\\206\\003\\\n\\187\\001\\001\\000\\002\\000\\003\\000\\004\\000\\122\\000\\203\\002\\201\\000\\\n\\187\\001\\187\\001\\007\\004\\207\\002\\150\\000\\154\\001\\160\\001\\161\\001\\\n\\162\\001\\163\\001\\219\\000\\214\\002\\232\\002\\219\\000\\216\\002\\234\\002\\\n\\154\\001\\225\\002\\187\\001\\204\\001\\239\\002\\240\\002\\148\\002\\246\\002\\\n\\242\\002\\122\\000\\251\\002\\122\\000\\253\\002\\122\\000\\249\\002\\122\\000\\\n\\149\\002\\150\\002\\151\\002\\152\\002\\153\\002\\154\\002\\252\\002\\238\\003\\\n\\255\\002\\239\\003\\145\\003\\240\\003\\155\\002\\241\\003\\254\\002\\187\\001\\\n\\066\\000\\001\\003\\116\\000\\116\\000\\116\\000\\116\\000\\231\\000\\231\\000\\\n\\231\\000\\231\\000\\231\\000\\160\\001\\161\\001\\162\\001\\163\\001\\003\\003\\\n\\004\\003\\150\\000\\187\\001\\005\\003\\122\\000\\150\\000\\031\\002\\
002\\001\\\n\\075\\001\\076\\001\\077\\001\\078\\001\\187\\001\\187\\001\\187\\001\\187\\001\\\n\\187\\001\\187\\001\\011\\004\\094\\004\\219\\000\\150\\000\\122\\000\\122\\000\\\n\\187\\001\\168\\002\\040\\002\\006\\003\\099\\001\\086\\001\\009\\003\\147\\003\\\n\\248\\003\\072\\003\\072\\003\\093\\000\\023\\004\\024\\004\\017\\003\\187\\001\\\n\\021\\003\\024\\003\\122\\000\\160\\001\\161\\001\\162\\001\\163\\001\\116\\000\\\n\\027\\003\\147\\002\\187\\001\\033\\003\\028\\002\\066\\001\\122\\000\\029\\003\\\n\\201\\000\\122\\000\\030\\003\\041\\003\\069\\004\\146\\002\\116\\000\\201\\000\\\n\\039\\003\\034\\003\\201\\000\\179\\003\\040\\003\\136\\004\\002\\001\\043\\004\\\n\\035\\003\\144\\004\\036\\003\\037\\003\\042\\003\\087\\001\\147\\002\\043\\003\\\n\\049\\003\\217\\000\\044\\003\\217\\000\\238\\000\\238\\000\\122\\000\\161\\001\\\n\\003\\001\\090\\001\\091\\001\\092\\001\\093\\001\\045\\001\\051\\003\\168\\004\\\n\\161\\001\\148\\002\\050\\003\\094\\001\\085\\003\\122\\000\\122\\000\\238\\000\\\n\\016\\001\\229\\001\\099\\003\\101\\003\\122\\000\\151\\002\\152\\002\\153\\002\\\n\\154\\002\\161\\001\\032\\000\\072\\004\\073\\004\\093\\003\\185\\004\\155\\002\\\n\\104\\003\\120\\003\\122\\000\\106\\003\\122\\003\\156\\003\\148\\002\\127\\003\\\n\\160\\003\\150\\003\\059\\004\\060\\004\\061\\004\\062\\004\\063\\004\\157\\003\\\n\\149\\002\\150\\002\\151\\002\\152\\002\\153\\002\\154\\002\\161\\001\\028\\002\\\n\\162\\003\\164\\003\\067\\001\\122\\000\\155\\002\\167\\003\\148\\004\\003\\001\\\n\\184\\003\\189\\003\\201\\000\\201\\000\\201\\000\\201\\000\\201\\000\\086\\001\\\n\\185\\003\\113\\004\\160\\002\\192\\003\\148\\002\\193\\003\\194\\003\\195\\003\\\n\\044\\001\\150\\000\\196\\003\\161\\001\\161\\001\\161\\001\\161\\001\\219\\000\\\n\\197\\003\\152\\002\\153\\002\\154\\002\\219\\000\\201\\003\\202\\003\\161\\001\\\n\\203\\003\\122\\000\\155\\002\\122\\000\\122\\000\\207\\003\\122\\000\\122\\000\\\n\\122\\000\\122\\000\\122\\000\\208\\003\\209\\003\\210\
\003\\161\\001\\154\\004\\\n\\211\\003\\156\\004\\157\\004\\215\\003\\179\\003\\216\\003\\147\\002\\087\\001\\\n\\149\\000\\161\\001\\201\\000\\217\\003\\121\\004\\218\\003\\125\\004\\222\\003\\\n\\223\\003\\219\\003\\089\\001\\090\\001\\091\\001\\092\\001\\093\\001\\227\\003\\\n\\149\\000\\231\\003\\232\\003\\139\\004\\234\\003\\094\\001\\236\\003\\122\\000\\\n\\235\\003\\217\\000\\217\\000\\237\\003\\201\\000\\001\\004\\201\\000\\183\\001\\\n\\217\\000\\254\\003\\002\\004\\217\\000\\217\\000\\191\\004\\003\\004\\004\\004\\\n\\005\\004\\199\\001\\006\\004\\201\\000\\008\\004\\013\\004\\148\\002\\183\\001\\\n\\238\\000\\238\\000\\238\\000\\010\\004\\068\\000\\014\\004\\025\\004\\016\\004\\\n\\238\\000\\150\\002\\151\\002\\152\\002\\153\\002\\154\\002\\226\\001\\238\\000\\\n\\238\\000\\026\\004\\021\\004\\068\\000\\155\\002\\032\\004\\238\\000\\156\\001\\\n\\034\\004\\036\\004\\068\\000\\217\\000\\217\\000\\038\\004\\068\\000\\122\\000\\\n\\156\\001\\246\\003\\055\\004\\056\\004\\057\\004\\058\\004\\064\\004\\070\\004\\\n\\068\\000\\067\\004\\116\\000\\068\\000\\071\\004\\213\\004\\082\\004\\201\\000\\\n\\081\\004\\156\\001\\095\\004\\116\\000\\068\\000\\116\\000\\099\\004\\098\\004\\\n\\238\\000\\111\\004\\116\\000\\109\\004\\119\\004\\114\\004\\112\\004\\115\\004\\\n\\068\\000\\116\\004\\117\\004\\150\\004\\118\\004\\147\\004\\209\\004\\149\\004\\\n\\022\\002\\068\\000\\127\\004\\116\\000\\068\\000\\131\\004\\156\\001\\151\\004\\\n\\155\\004\\068\\000\\201\\000\\169\\004\\142\\003\\144\\003\\146\\003\\148\\003\\\n\\170\\004\\171\\004\\172\\004\\173\\004\\174\\004\\186\\004\\201\\000\\068\\000\\\n\\068\\000\\068\\000\\068\\000\\068\\000\\068\\000\\068\\000\\068\\000\\201\\000\\\n\\175\\004\\189\\004\\192\\004\\156\\001\\156\\001\\156\\001\\156\\001\\176\\004\\\n\\198\\004\\177\\004\\178\\004\\179\\004\\050\\001\\190\\004\\180\\004\\156\\001\\\n\\006\\000\\142\\003\\144\\003\\146\\003\\148\\003\\068\\000\\193\\004\\194\\004\\\n\\240\\001\\195\\004\\068\\000\\196\\004
\\197\\004\\207\\004\\156\\001\\201\\004\\\n\\202\\004\\203\\004\\068\\000\\204\\004\\205\\004\\007\\000\\211\\004\\212\\004\\\n\\039\\000\\156\\001\\058\\000\\053\\000\\031\\000\\055\\002\\068\\000\\217\\000\\\n\\055\\002\\045\\000\\217\\000\\098\\000\\043\\002\\070\\000\\053\\000\\218\\000\\\n\\068\\002\\218\\000\\239\\000\\239\\000\\043\\000\\041\\000\\076\\000\\074\\000\\\n\\116\\000\\064\\002\\116\\000\\008\\000\\009\\000\\061\\002\\219\\000\\060\\000\\\n\\076\\000\\053\\000\\053\\000\\100\\000\\010\\000\\239\\000\\041\\001\\114\\000\\\n\\076\\000\\076\\000\\116\\000\\076\\000\\011\\000\\012\\000\\052\\002\\072\\000\\\n\\062\\002\\062\\000\\007\\000\\116\\000\\013\\000\\169\\001\\108\\000\\106\\000\\\n\\053\\002\\023\\000\\058\\000\\076\\000\\102\\000\\014\\000\\169\\001\\015\\000\\\n\\016\\000\\035\\000\\017\\000\\066\\002\\058\\002\\112\\000\\102\\000\\141\\002\\\n\\059\\002\\140\\003\\076\\000\\114\\003\\049\\002\\126\\004\\102\\000\\169\\001\\\n\\147\\001\\217\\000\\123\\002\\148\\001\\071\\002\\072\\002\\073\\002\\074\\002\\\n\\075\\002\\018\\000\\169\\003\\083\\003\\014\\003\\224\\002\\053\\004\\115\\003\\\n\\070\\003\\102\\000\\076\\000\\009\\004\\019\\000\\020\\000\\155\\000\\034\\000\\\n\\165\\002\\238\\000\\183\\000\\159\\001\\169\\001\\165\\003\\086\\003\\243\\003\\\n\\102\\000\\032\\003\\119\\001\\238\\000\\238\\000\\238\\000\\128\\003\\238\\000\\\n\\021\\000\\011\\001\\018\\004\\193\\002\\238\\000\\017\\004\\245\\003\\169\\001\\\n\\094\\002\\190\\002\\187\\004\\137\\001\\022\\000\\023\\000\\238\\000\\024\\000\\\n\\102\\000\\169\\001\\169\\001\\169\\001\\169\\001\\169\\001\\169\\001\\207\\001\\\n\\076\\000\\076\\000\\076\\000\\060\\002\\042\\002\\169\\001\\244\\003\\238\\000\\\n\\238\\000\\169\\002\\228\\002\\076\\000\\076\\000\\088\\003\\086\\002\\210\\002\\\n\\116\\000\\076\\000\\019\\004\\212\\003\\169\\001\\200\\004\\000\\000\\218\\000\\\n\\218\\000\\238\\000\\000\\000\\000\\000\\116\\000\\184\\001\\218\\000\\169\\001\\\n\\000\\000\\218\\000\\21
8\\000\\000\\000\\000\\000\\000\\000\\102\\000\\102\\000\\\n\\102\\000\\000\\000\\076\\000\\238\\002\\000\\000\\184\\001\\239\\000\\239\\000\\\n\\239\\000\\102\\000\\102\\000\\000\\000\\000\\000\\000\\000\\239\\000\\102\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\116\\000\\239\\000\\239\\000\\219\\000\\\n\\219\\000\\000\\000\\000\\000\\159\\001\\239\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\218\\000\\218\\000\\000\\000\\159\\001\\000\\000\\000\\000\\000\\000\\\n\\102\\000\\000\\000\\116\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\217\\000\\159\\001\\000\\000\\224\\000\\\n\\116\\000\\217\\000\\000\\000\\000\\000\\000\\000\\000\\000\\239\\000\\000\\000\\\n\\000\\000\\224\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\240\\001\\224\\000\\000\\000\\000\\000\\224\\000\\219\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\159\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\224\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\183\\001\\\n\\183\\001\\000\\000\\000\\000\\224\\000\\000\\000\\000\\000\\000\\000\\159\\001\\\n\\159\\001\\159\\001\\219\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\183\\001\\116\\000\\116\\000\\116\\000\\116\\000\\\n\\116\\000\\238\\000\\000\\000\\224\\000\\000\\000\\000\\000\\183\\001\\000\\000\\\n\\238\\000\\238\\000\\159\\001\\000\\000\\000\\000\\000\\000\\000\\000\\238\\000\\\n\\183\\001\\000\\000\\000\\000\\000\\000\\000\\000\\159\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\218\\000\\000\\000\\000\\000\\\n\\218\\000\\188\\001\\000\\000\\000\\000\\000\\000\\215\\000\\000\\000\\215\\000\\\n\\234\\000\\234\\000\\000\\000\\000\\000\\000\\000\\045\\000\\188\\001\\000\\000\\\n\\000\\000\\224\\000\\224\\000\\224\\000\\238\\000\\238\\000\\188\\001\\188\\001\\\n\\000\\0
00\\000\\000\\000\\000\\234\\000\\224\\000\\224\\000\\123\\003\\000\\000\\\n\\000\\000\\000\\000\\224\\000\\000\\000\\000\\000\\000\\000\\051\\000\\000\\000\\\n\\188\\001\\000\\000\\219\\000\\000\\000\\145\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\238\\000\\000\\000\\059\\000\\060\\000\\\n\\061\\000\\062\\000\\063\\000\\224\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\188\\001\\000\\000\\218\\000\\\n\\228\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\154\\001\\074\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\188\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\239\\000\\\n\\000\\000\\000\\000\\188\\001\\188\\001\\188\\001\\188\\001\\188\\001\\188\\001\\\n\\188\\001\\239\\000\\239\\000\\239\\000\\000\\000\\239\\000\\188\\001\\000\\000\\\n\\000\\000\\000\\000\\239\\000\\000\\000\\000\\000\\000\\000\\082\\000\\000\\000\\\n\\000\\000\\000\\000\\083\\000\\000\\000\\239\\000\\188\\001\\084\\000\\240\\001\\\n\\000\\000\\240\\001\\000\\000\\000\\000\\240\\001\\086\\000\\087\\000\\000\\000\\\n\\188\\001\\088\\000\\000\\000\\000\\000\\183\\001\\239\\000\\239\\000\\000\\000\\\n\\231\\002\\091\\000\\000\\000\\217\\000\\000\\000\\000\\000\\000\\000\\094\\000\\\n\\095\\000\\096\\000\\097\\000\\000\\000\\000\\000\\215\\000\\215\\000\\239\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\149\\003\\000\\000\\000\\000\\215\\000\\\n\\215\\000\\000\\000\\006\\000\\000\\000\\000\\000\\224\\003\\240\\001\\000\\000\\\n\\000\\000\\228\\002\\000\\000\\000\\000\\234\\000\\234\\000\\234\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\234\\000\\000\\000\\000\\000\\007\\000\\\n\\000\\000\\000\\000\\000\\000\\234\\000\\234\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\234\\000\\000\\000\\000\\000\\000\\000\\000\\000\\215\\000\\\n\\215\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\251\
\003\\\n\\000\\000\\000\\000\\000\\000\\228\\002\\000\\000\\008\\000\\009\\000\\000\\000\\\n\\000\\000\\000\\000\\218\\000\\000\\000\\000\\000\\000\\000\\010\\000\\218\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\234\\000\\000\\000\\011\\000\\012\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\013\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\014\\000\\\n\\000\\000\\015\\000\\016\\000\\000\\000\\017\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\238\\000\\238\\000\\\n\\238\\000\\238\\000\\238\\000\\226\\003\\000\\000\\184\\001\\184\\001\\240\\001\\\n\\240\\001\\000\\000\\228\\002\\018\\000\\000\\000\\000\\000\\228\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\238\\000\\000\\000\\000\\000\\019\\000\\020\\000\\\n\\000\\000\\184\\001\\000\\000\\000\\000\\000\\000\\045\\004\\049\\004\\239\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\184\\001\\000\\000\\239\\000\\239\\000\\\n\\000\\000\\000\\000\\021\\000\\000\\000\\000\\000\\239\\000\\184\\001\\240\\001\\\n\\000\\000\\240\\001\\000\\000\\000\\000\\217\\000\\217\\000\\022\\000\\023\\000\\\n\\000\\000\\024\\000\\000\\000\\215\\000\\000\\000\\000\\000\\215\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\228\\002\\000\\000\\099\\000\\000\\000\\228\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\239\\000\\239\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\118\\003\\000\\000\\000\\000\\102\\004\\000\\000\\049\\004\\000\\000\\000\\000\\\n\\186\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\186\\001\\217\\000\\000\\000\\000\\000\\226\\003\\000\\000\\000\\000\\\n\\240\\001\\000\\000\\239\\000\\197\\000\\240\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\228\\002\\186\\001\\000\\000\\049\\004\\000\\000\\229\\000\\229\\000\\\n\\000\\000\\000\\000\\238\\000\\000\\000\\000\\000\\000\\000
\\231\\002\\000\\000\\\n\\000\\000\\049\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\217\\000\\\n\\000\\000\\229\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\186\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\228\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\051\\001\\000\\000\\000\\000\\234\\000\\\n\\234\\000\\234\\000\\186\\001\\234\\000\\000\\000\\000\\000\\000\\000\\240\\001\\\n\\234\\000\\000\\000\\000\\000\\240\\001\\186\\001\\186\\001\\186\\001\\186\\001\\\n\\186\\001\\186\\001\\234\\000\\000\\000\\185\\001\\185\\001\\185\\001\\071\\001\\\n\\186\\001\\185\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\105\\001\\184\\001\\234\\000\\234\\000\\185\\001\\000\\000\\186\\001\\\n\\000\\000\\218\\000\\000\\000\\000\\000\\000\\000\\000\\000\\049\\004\\000\\000\\\n\\000\\000\\000\\000\\186\\001\\000\\000\\000\\000\\234\\000\\000\\000\\240\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\118\\001\\000\\000\\217\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\231\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\118\\001\\155\\001\\000\\000\\185\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\240\\001\\000\\000\\000\\000\\185\\001\\\n\\185\\001\\185\\001\\185\\001\\185\\001\\185\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\185\\001\\000\\000\\000\\000\\192\\001\\000\\000\\\n\\000\\000\\231\\002\\000\\000\\000\\000\\016\\003\\000\\000\\000\\000\\000\\000\\\n\\215\\000\\000\\000\\185\\001\\000\\000\\000\\000\\215\\000\\192\\001\\000\\000\\\n\\000\\000\\000\\000\\229\\000\\229\\000\\229\\000\\224\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\229\\000\\000\\000\\000\\000\\182\\000\\000\\000\\224\\001\\\n\\225\\001\\229\\000\\229\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\229\\000\\000\\000\\000\\000\\000\\000\\00
0\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\007\\000\\000\\000\\239\\000\\239\\000\\239\\000\\239\\000\\\n\\239\\000\\000\\000\\000\\000\\000\\000\\004\\002\\000\\000\\000\\000\\000\\000\\\n\\231\\002\\000\\000\\000\\000\\000\\000\\231\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\239\\000\\229\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\008\\000\\009\\000\\000\\000\\000\\000\\052\\004\\234\\000\\000\\000\\000\\000\\\n\\000\\000\\010\\000\\000\\000\\000\\000\\234\\000\\234\\000\\000\\000\\000\\000\\\n\\000\\000\\011\\000\\012\\000\\000\\000\\000\\000\\029\\002\\030\\002\\000\\000\\\n\\000\\000\\013\\000\\218\\000\\218\\000\\000\\000\\037\\002\\000\\000\\000\\000\\\n\\038\\002\\000\\000\\014\\000\\192\\001\\015\\000\\016\\000\\000\\000\\017\\000\\\n\\000\\000\\000\\000\\000\\000\\052\\002\\053\\002\\054\\002\\055\\002\\231\\002\\\n\\000\\000\\000\\000\\000\\000\\231\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\234\\000\\234\\000\\000\\000\\000\\000\\000\\000\\000\\000\\018\\000\\000\\000\\\n\\110\\002\\000\\000\\077\\002\\052\\004\\000\\000\\000\\000\\105\\001\\082\\002\\\n\\000\\000\\019\\000\\020\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\218\\000\\000\\000\\000\\000\\018\\001\\019\\001\\000\\000\\020\\001\\000\\000\\\n\\234\\000\\021\\001\\000\\000\\000\\000\\000\\000\\021\\000\\000\\000\\231\\002\\\n\\000\\000\\000\\000\\052\\004\\000\\000\\000\\000\\022\\001\\000\\000\\000\\000\\\n\\239\\000\\022\\000\\023\\000\\000\\000\\024\\000\\000\\000\\000\\000\\052\\004\\\n\\000\\000\\023\\001\\000\\000\\024\\001\\000\\000\\218\\000\\118\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\119\\002\\120\\002\\121\\002\\122\\002\\231\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\192\\001\\000\\000\\111\\002\\000\\000\\\n\\000\\000\\192\\001\\192\\001\\192\\001\\000\\000\\192\\001\\192\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\105\\001\\026\\001\\\n\\027\\001\\028\\001\\029\\001\\030\\001\\031\\001\\032\\001\\033\\001\\112\\002\\\n\\035\\001\\036\\001\\000\\000\\037\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\215\\000\\\n\\000\\000\\000\\000\\000\\000\\229\\000\\052\\004\\188\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\189\\002\\000\\000\\229\\000\\229\\000\\229\\000\\\n\\000\\000\\229\\000\\156\\000\\000\\000\\195\\002\\218\\000\\229\\000\\000\\000\\\n\\000\\000\\000\\000\\113\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\229\\000\\157\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\158\\000\\000\\000\\000\\000\\000\\000\\159\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\222\\002\\229\\000\\000\\000\\000\\000\\000\\000\\160\\000\\000\\000\\\n\\000\\000\\161\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\016\\003\\000\\000\\162\\000\\229\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\163\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\250\\002\\000\\000\\164\\000\\\n\\000\\000\\000\\000\\165\\000\\000\\000\\000\\000\\000\\000\\000\\000\\166\\000\\\n\\000\\000\\000\\000\\000\\000\\144\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\167\\000\\168\\000\\169\\000\\\n\\170\\000\\171\\000\\172\\000\\173\\000\\174\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\010\\003\\234\\000\\234\\000\\234\\000\\234\\000\\234\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\175\\000\\000\\000\\000\\000\\000\\000\\234\\000\\\n\\176\\000\\000\\000\\235\\000\\235\\000\\000\\000\\000\\000\\118\\001\\000\\000\\\n\\177\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\
000\\000\\000\\000\\000\\000\\178\\000\\235\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\215\\000\\215\\000\\000\\000\\192\\001\\000\\000\\000\\000\\192\\001\\192\\001\\\n\\192\\001\\192\\001\\192\\001\\192\\001\\192\\001\\192\\001\\192\\001\\192\\001\\\n\\192\\001\\192\\001\\192\\001\\192\\001\\192\\001\\192\\001\\192\\001\\077\\002\\\n\\105\\001\\000\\000\\192\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\229\\000\\000\\000\\192\\001\\237\\000\\237\\000\\\n\\000\\000\\000\\000\\229\\000\\229\\000\\000\\000\\000\\000\\000\\000\\192\\001\\\n\\000\\000\\229\\000\\000\\000\\000\\000\\000\\000\\000\\000\\215\\000\\000\\000\\\n\\000\\000\\237\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\121\\001\\123\\001\\125\\001\\235\\000\\235\\000\\234\\000\\000\\000\\\n\\102\\003\\133\\001\\135\\001\\235\\000\\000\\000\\235\\000\\229\\000\\229\\000\\\n\\000\\000\\116\\003\\000\\000\\215\\000\\000\\000\\235\\000\\121\\001\\000\\000\\\n\\125\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\132\\003\\000\\000\\000\\000\\236\\000\\247\\000\\229\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\221\\000\\000\\000\\000\\000\\000\\000\\105\\001\\\n\\000\\000\\000\\000\\000\\000\\045\\000\\000\\000\\082\\002\\000\\000\\247\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\205\\001\\047\\000\\235\\000\\235\\000\\\n\\235\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\235\\000\\118\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\051\\000\\235\\000\\235\\000\\000\\000\\\n\\000\\000\\222\\000\\145\\000\\156\\001\\235\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\059\\000\\060\\000\\061\\000\\062\\000
\\\n\\063\\000\\000\\000\\000\\000\\215\\000\\000\\000\\223\\000\\224\\000\\000\\000\\\n\\000\\000\\225\\000\\000\\000\\000\\000\\068\\000\\000\\000\\226\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\227\\000\\074\\000\\235\\000\\000\\000\\\n\\000\\000\\000\\000\\078\\000\\192\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\200\\003\\000\\000\\237\\000\\237\\000\\237\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\237\\000\\000\\000\\000\\000\\000\\000\\126\\001\\127\\001\\\n\\213\\003\\237\\000\\237\\000\\134\\001\\000\\000\\139\\001\\000\\000\\142\\001\\\n\\237\\000\\221\\003\\000\\000\\000\\000\\082\\000\\000\\000\\000\\000\\150\\001\\\n\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\091\\000\\\n\\000\\000\\093\\000\\237\\000\\141\\001\\105\\001\\094\\000\\095\\000\\096\\000\\\n\\097\\000\\228\\000\\082\\002\\010\\003\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\205\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\247\\000\\247\\000\\247\\000\\000\\000\\000\\000\\000\\000\\000\\000\\093\\002\\\n\\236\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\098\\002\\247\\000\\\n\\247\\000\\000\\000\\000\\000\\000\\000\\192\\001\\000\\000\\236\\000\\000\\000\\\n\\000\\000\\235\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\121\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\229\\000\\229\\000\\229\\000\\229\\000\\229\\000\\000\\000\\033\\004\\000\\000\\\n\\247\\000\\000\\000\\000\\000\\000\\000\\190\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\041\\004\\190\\001\\000\\000\\229\\000\\000\\000\\000\\000\\\n\\000\\000\\190\\001\\024\\002\\000\\000\\000\\000\\000\\000\\00
0\\000\\000\\000\\\n\\047\\004\\190\\001\\190\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\065\\004\\190\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\235\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\235\\000\\235\\000\\235\\000\\000\\000\\235\\000\\000\\000\\000\\000\\\n\\079\\004\\000\\000\\235\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\190\\001\\000\\000\\000\\000\\000\\000\\235\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\205\\001\\097\\004\\205\\001\\000\\000\\101\\004\\000\\000\\047\\004\\\n\\000\\000\\104\\004\\000\\000\\190\\001\\000\\000\\235\\000\\235\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\190\\001\\190\\001\\190\\001\\\n\\190\\001\\190\\001\\190\\001\\190\\001\\000\\000\\000\\000\\000\\000\\235\\000\\\n\\000\\000\\190\\001\\000\\000\\103\\002\\000\\000\\000\\000\\123\\004\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\229\\000\\237\\000\\237\\000\\237\\000\\\n\\190\\001\\237\\000\\000\\000\\123\\004\\000\\000\\000\\000\\237\\000\\146\\004\\\n\\000\\000\\000\\000\\000\\000\\190\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\237\\000\\000\\000\\158\\004\\159\\004\\160\\004\\161\\004\\162\\004\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\237\\000\\237\\000\\000\\000\\000\\000\\121\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\237\\000\\000\\000\\141\\001\\141\\001\\104\\004\\\n\\000\\000\\141\\001\\235\\000\\000\\000\\000\\000\\235\\000\\000\\000\\000\\000\\\n\\000\\000\\235\\000\\121\\001\\000\\000\\141\\001\\000\\000\\000\\000\\141\\001\\\n\\000\\000\\000\\000\\000\\000\\247\\000\\247\\000\\247\\000\\141\\001\\236\\000\\\n\\123\\004\\000\\000\\000\\000\\000\\000\\247\\0
00\\000\\000\\141\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\141\\001\\000\\000\\141\\001\\247\\000\\000\\000\\\n\\000\\000\\141\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\141\\001\\141\\001\\141\\001\\141\\001\\141\\001\\247\\000\\\n\\247\\000\\000\\000\\000\\000\\000\\000\\000\\000\\141\\001\\000\\000\\141\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\141\\001\\141\\001\\235\\000\\\n\\205\\001\\247\\000\\205\\001\\141\\001\\000\\000\\141\\001\\235\\000\\235\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\141\\001\\000\\000\\141\\001\\141\\001\\\n\\141\\001\\141\\001\\000\\000\\141\\001\\141\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\141\\001\\141\\001\\141\\001\\141\\001\\141\\001\\141\\001\\\n\\141\\001\\141\\001\\141\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\235\\000\\235\\000\\000\\000\\141\\001\\141\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\189\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\141\\001\\000\\000\\189\\001\\237\\000\\000\\000\\141\\001\\000\\000\\000\\000\\\n\\189\\001\\141\\001\\237\\000\\237\\000\\025\\003\\141\\001\\000\\000\\028\\003\\\n\\189\\001\\189\\001\\235\\000\\031\\003\\000\\000\\141\\001\\141\\001\\000\\000\\\n\\141\\001\\141\\001\\000\\000\\141\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\189\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\235\\000\\000\\000\\000\\000\\121\\001\\000\\000\\000\\000\\237\\000\\237\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\189\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\247\\000\\189\\001\\
000\\000\\000\\000\\000\\000\\237\\000\\000\\000\\\n\\247\\000\\247\\000\\000\\000\\000\\000\\189\\001\\189\\001\\189\\001\\189\\001\\\n\\189\\001\\189\\001\\189\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\189\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\189\\001\\\n\\205\\001\\198\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\189\\001\\000\\000\\247\\000\\236\\000\\000\\000\\198\\000\\\n\\000\\000\\027\\000\\000\\000\\000\\000\\000\\000\\000\\000\\198\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\198\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\198\\000\\000\\000\\000\\000\\\n\\000\\000\\198\\000\\000\\000\\000\\000\\247\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\198\\000\\198\\000\\198\\000\\198\\000\\121\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\198\\000\\000\\000\\198\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\198\\000\\198\\000\\000\\000\\\n\\000\\000\\000\\000\\166\\003\\198\\000\\000\\000\\198\\000\\000\\000\\205\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\198\\000\\000\\000\\198\\000\\198\\000\\\n\\198\\000\\198\\000\\000\\000\\198\\000\\198\\000\\000\\000\\000\\000\\000\\000\\\n\\205\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\198\\000\\198\\000\\198\\000\\198\\000\\198\\000\\198\\000\\\n\\198\\000\\198\\000\\198\\000\\000\\000\\235\\000\\235\\000\\235\\000\\235\\000\\\n\\235\\000\\000\\000\\000\\000\\000\\000\\000\\000\\198\\000\\198\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\198\\000\\235\\000\\000\\000\\000\\000\\000\\000\\198\\000\\000\\000\\000\\000\\\n\\000\\000\\198\\000\\000\\000\\044\\004\\000\\000\\198\\000\\000\\000\\000\\000\\\n\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\198\\000\\198\\000\\000\\000\\\n\\198\\000\\198\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\237\\000\\237\\000\\237\\000\\237\\000\\237\\000\\036\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\089\\004\\000\\000\\000\\000\\237\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\235\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\235\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\236\\000\\236\\000\\\n\\236\\000\\236\\000\\236\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\027\\000\\027\\000\\027\\000\\027\\000\\000\\000\\027\\000\\\n\\000\\000\\027\\000\\000\\000\\247\\000\\027\\000\\027\\000\\027\\000\\027\\000\\\n\\027\\000\\027\\000\\027\\000\\000\\000\\027\\000\\027\\000\\027\\000\\027\\000\\\n\\027\\000\\027\\000\\027\\000\\000\\000\\027\\000\\027\\000\\027\\000\\027\\000\\\n\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\000\\000\\027\\000\\027\\000\\\n\\000\\000\\027\\000\\027\\000\\027\\000\\237\\000\\027\\000\\000\\000\\027\\000\\\n\\000\\000\\000\\000\\027\\000\\027\\000\\000\\000\\027\\000\\000\\000\\027\\000\\\n\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\\n\\027\\000\\027\\000\\000\\000\\027\\000\\000\\000\\027\\000\\027\\000\\027\\000\\\n
\\000\\000\\000\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\\n\\027\\000\\027\\000\\000\\000\\027\\000\\000\\000\\027\\000\\027\\000\\051\\000\\\n\\107\\004\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\\n\\000\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\\n\\027\\000\\027\\000\\027\\000\\027\\000\\000\\000\\000\\000\\027\\000\\027\\000\\\n\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\\n\\027\\000\\000\\000\\247\\000\\000\\000\\000\\000\\027\\000\\027\\000\\027\\000\\\n\\027\\000\\027\\000\\000\\000\\027\\000\\027\\000\\027\\000\\000\\000\\027\\000\\\n\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\\n\\027\\000\\027\\000\\027\\000\\027\\000\\000\\000\\027\\000\\000\\000\\027\\000\\\n\\027\\000\\000\\000\\027\\000\\027\\000\\027\\000\\000\\000\\027\\000\\027\\000\\\n\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\\n\\027\\000\\027\\000\\000\\000\\000\\000\\000\\000\\000\\000\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\000\\000\\036\\001\\000\\000\\036\\001\\000\\000\\000\\000\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\\n\\036\\001\\036\\001\\036\\001\\000\\000\\000\\000\\000\\000\\000\\000\\036\\001\\\n\\036\\001\\000\\000\\036\\001\\036\\001\\000\\000\\036\\001\\036\\001\\036\\001\\\n\\000\\000\\036\\001\\000\\000\\036\\001\\000\\000\\000\\000\\036\\001\\036\\001\\\n\\000\\000\\036\\001\\000\\000\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\036\\001\\\n\\000\\000\\036\\001\\036\\001\\036\\001\\000\\000\\000\\000\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\036\\001\\\n\\052\\001\\036\\001\\036\\001\\000\\000\\000\\000\\036\\001\\036\\0
01\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\036\\001\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\\n\\000\\000\\000\\000\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\\n\\000\\000\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\036\\001\\\n\\036\\001\\036\\001\\000\\000\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\\n\\000\\000\\036\\001\\000\\000\\036\\001\\036\\001\\000\\000\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\000\\000\\\n\\051\\000\\051\\000\\051\\000\\051\\000\\000\\000\\051\\000\\000\\000\\051\\000\\\n\\000\\000\\000\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\\n\\051\\000\\000\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\\n\\051\\000\\000\\000\\051\\000\\051\\000\\051\\000\\000\\000\\051\\000\\051\\000\\\n\\051\\000\\051\\000\\051\\000\\000\\000\\051\\000\\051\\000\\000\\000\\051\\000\\\n\\051\\000\\051\\000\\000\\000\\051\\000\\000\\000\\051\\000\\000\\000\\000\\000\\\n\\051\\000\\051\\000\\000\\000\\051\\000\\000\\000\\051\\000\\051\\000\\051\\000\\\n\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\\n\\000\\000\\051\\000\\000\\000\\051\\000\\051\\000\\051\\000\\000\\000\\000\\000\\\n\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\\n\\242\\001\\051\\000\\000\\000\\051\\000\\051\\000\\000\\000\\000\\000\\051\\000\\\n\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\000\\000\\051\\000\\\n\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\\n\\051\\000\\051\\000\\000\\000\\000\\000\\051\\000\\
051\\000\\051\\000\\051\\000\\\n\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\\n\\000\\000\\051\\000\\051\\000\\051\\000\\000\\000\\051\\000\\051\\000\\051\\000\\\n\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\\n\\051\\000\\051\\000\\000\\000\\051\\000\\000\\000\\051\\000\\051\\000\\000\\000\\\n\\051\\000\\051\\000\\051\\000\\000\\000\\051\\000\\051\\000\\051\\000\\051\\000\\\n\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\051\\000\\\n\\000\\000\\052\\001\\052\\001\\052\\001\\052\\001\\000\\000\\052\\001\\000\\000\\\n\\052\\001\\000\\000\\000\\000\\000\\000\\052\\001\\052\\001\\052\\001\\052\\001\\\n\\052\\001\\052\\001\\000\\000\\052\\001\\052\\001\\050\\001\\052\\001\\052\\001\\\n\\052\\001\\052\\001\\000\\000\\052\\001\\052\\001\\000\\000\\052\\001\\052\\001\\\n\\052\\001\\052\\001\\052\\001\\052\\001\\000\\000\\052\\001\\052\\001\\000\\000\\\n\\052\\001\\052\\001\\052\\001\\000\\000\\052\\001\\000\\000\\052\\001\\000\\000\\\n\\000\\000\\052\\001\\052\\001\\000\\000\\052\\001\\000\\000\\052\\001\\052\\001\\\n\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\\n\\052\\001\\000\\000\\052\\001\\000\\000\\052\\001\\052\\001\\000\\000\\000\\000\\\n\\000\\000\\052\\001\\052\\001\\052\\001\\000\\000\\052\\001\\052\\001\\052\\001\\\n\\052\\001\\244\\001\\052\\001\\050\\001\\052\\001\\052\\001\\000\\000\\000\\000\\\n\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\000\\000\\\n\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\\n\\052\\001\\052\\001\\052\\001\\000\\000\\000\\000\\052\\001\\052\\001\\052\\001\\\n\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\052\\001\\052\\001\\052\\001\\052\\001\\\n\\052\\001\\000\\000\\052\\001\\052\
\001\\052\\001\\000\\000\\052\\001\\052\\001\\\n\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\\n\\052\\001\\052\\001\\052\\001\\000\\000\\052\\001\\000\\000\\052\\001\\052\\001\\\n\\000\\000\\052\\001\\052\\001\\000\\000\\052\\001\\052\\001\\052\\001\\052\\001\\\n\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\052\\001\\000\\000\\\n\\052\\001\\242\\001\\242\\001\\242\\001\\242\\001\\000\\000\\242\\001\\000\\000\\\n\\242\\001\\000\\000\\000\\000\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\\n\\242\\001\\242\\001\\000\\000\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\\n\\242\\001\\242\\001\\000\\000\\242\\001\\242\\001\\242\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\242\\001\\000\\000\\242\\001\\242\\001\\000\\000\\\n\\242\\001\\242\\001\\242\\001\\000\\000\\242\\001\\000\\000\\242\\001\\000\\000\\\n\\000\\000\\242\\001\\242\\001\\000\\000\\242\\001\\000\\000\\242\\001\\242\\001\\\n\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\\n\\242\\001\\000\\000\\242\\001\\000\\000\\242\\001\\242\\001\\242\\001\\000\\000\\\n\\000\\000\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\\n\\242\\001\\244\\001\\242\\001\\000\\000\\242\\001\\242\\001\\000\\000\\000\\000\\\n\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\000\\000\\\n\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\\n\\242\\001\\242\\001\\242\\001\\000\\000\\000\\000\\242\\001\\242\\001\\242\\001\\\n\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\242\\001\\242\\001\\242\\001\\242\\001\\\n\\242\\001\\000\\000\\242\\001\\242\\001\\242\\001\\000\\000\\242\\001\\242\\001\\\n\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\\n\\242\\001\\242\\001\\242\\001\\000\\000\\242\\001\\000\\000\\242\\001\\242\\001\\\n\\000\\000\\242\\001
\\242\\001\\242\\001\\000\\000\\242\\001\\242\\001\\242\\001\\\n\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\242\\001\\\n\\242\\001\\000\\000\\244\\001\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\\n\\000\\000\\244\\001\\000\\000\\000\\000\\000\\000\\244\\001\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\244\\001\\051\\001\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\244\\001\\000\\000\\000\\000\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\244\\001\\\n\\000\\000\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\000\\000\\244\\001\\\n\\000\\000\\000\\000\\244\\001\\244\\001\\000\\000\\244\\001\\000\\000\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\000\\000\\244\\001\\000\\000\\244\\001\\244\\001\\000\\000\\\n\\000\\000\\000\\000\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\239\\001\\244\\001\\000\\000\\244\\001\\244\\001\\000\\000\\\n\\000\\000\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\\n\\000\\000\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\000\\000\\000\\000\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\\n\\244\\001\\000\\000\\000\\000\\000\\000\\000\\000\\244\\001\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\000\\000\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\000\\000\\244\\001\\\n\\244\\001\\000\\000\\244\\001\\244\\001\\000\\000\\000\\000\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\\n\\000\\000\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\\n\\00
0\\000\\244\\001\\000\\000\\000\\000\\000\\000\\244\\001\\244\\001\\000\\000\\\n\\244\\001\\244\\001\\000\\000\\000\\000\\244\\001\\244\\001\\051\\001\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\244\\001\\000\\000\\000\\000\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\244\\001\\\n\\000\\000\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\000\\000\\244\\001\\\n\\000\\000\\000\\000\\244\\001\\244\\001\\000\\000\\244\\001\\000\\000\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\000\\000\\244\\001\\000\\000\\244\\001\\244\\001\\000\\000\\\n\\000\\000\\000\\000\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\083\\000\\244\\001\\051\\001\\244\\001\\244\\001\\000\\000\\\n\\000\\000\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\\n\\000\\000\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\000\\000\\000\\000\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\\n\\244\\001\\000\\000\\000\\000\\000\\000\\000\\000\\244\\001\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\000\\000\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\000\\000\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\000\\000\\244\\001\\000\\000\\244\\001\\\n\\244\\001\\000\\000\\244\\001\\244\\001\\000\\000\\000\\000\\244\\001\\244\\001\\\n\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\244\\001\\\n\\000\\000\\244\\001\\000\\000\\239\\001\\239\\001\\239\\001\\239\\001\\000\\000\\\n\\239\\001\\000\\000\\239\\001\\000\\000\\000\\000\\000\\000\\239\\001\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\239\\001\\000\\000\\239\\001\\239\\001\\000\\000\\\n\\239\\001\\239\\001\\239\\001\\239\\001\\000\\000\\239\\001\\239\\001\\
000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\239\\001\\000\\000\\239\\001\\\n\\239\\001\\000\\000\\239\\001\\239\\001\\239\\001\\000\\000\\239\\001\\000\\000\\\n\\239\\001\\000\\000\\000\\000\\239\\001\\239\\001\\000\\000\\239\\001\\000\\000\\\n\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\000\\000\\239\\001\\000\\000\\239\\001\\239\\001\\\n\\000\\000\\000\\000\\000\\000\\239\\001\\239\\001\\239\\001\\000\\000\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\084\\000\\239\\001\\000\\000\\239\\001\\239\\001\\\n\\000\\000\\000\\000\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\\n\\239\\001\\000\\000\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\000\\000\\000\\000\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\\n\\239\\001\\239\\001\\000\\000\\000\\000\\000\\000\\000\\000\\239\\001\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\000\\000\\239\\001\\239\\001\\239\\001\\000\\000\\\n\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\000\\000\\239\\001\\000\\000\\\n\\239\\001\\239\\001\\000\\000\\239\\001\\239\\001\\000\\000\\000\\000\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\\n\\239\\001\\000\\000\\239\\001\\083\\000\\083\\000\\083\\000\\083\\000\\000\\000\\\n\\083\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\000\\000\\083\\000\\083\\000\\000\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\000\\000\\083\\000\\083\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\083\\000\\\n\\083\\000\\000\\000\\083\\000\\083\\000\\083\\000\\000\\000\\083\\000\\000\\000\\\n\\083\\000\\000\\000\\000\\000\\083\\000\\083\\000\\000\
\000\\083\\000\\000\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\000\\000\\083\\000\\000\\000\\083\\000\\083\\000\\\n\\000\\000\\000\\000\\000\\000\\083\\000\\083\\000\\083\\000\\000\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\085\\000\\083\\000\\000\\000\\083\\000\\083\\000\\\n\\000\\000\\000\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\\n\\083\\000\\000\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\000\\000\\000\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\000\\000\\000\\000\\000\\000\\000\\000\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\000\\000\\083\\000\\083\\000\\083\\000\\000\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\000\\000\\083\\000\\000\\000\\\n\\083\\000\\083\\000\\000\\000\\083\\000\\083\\000\\000\\000\\000\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\\n\\083\\000\\000\\000\\083\\000\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\000\\000\\084\\000\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\084\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\\n\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\\n\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\084\\000\\000\\000\\084\\000\\\n\\000\\000\\084\\000\\000\\000\\000\\000\\084\\000\\084\\000\\000\\000\\084\\000\\\n\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\000\\000\\084\\000\\000\\000\\084\\000\\\n\\084\\000\\000\\000\\000\\000\\000\\000
\\084\\000\\084\\000\\084\\000\\000\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\082\\000\\084\\000\\000\\000\\084\\000\\\n\\084\\000\\000\\000\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\000\\000\\000\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\084\\000\\000\\000\\000\\000\\000\\000\\000\\000\\084\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\084\\000\\\n\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\000\\000\\084\\000\\\n\\000\\000\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\000\\000\\000\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\000\\000\\084\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\000\\000\\085\\000\\000\\000\\085\\000\\000\\000\\000\\000\\000\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\000\\000\\085\\000\\085\\000\\\n\\000\\000\\085\\000\\085\\000\\085\\000\\085\\000\\000\\000\\085\\000\\085\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\085\\000\\000\\000\\\n\\085\\000\\085\\000\\000\\000\\085\\000\\085\\000\\085\\000\\000\\000\\085\\000\\\n\\000\\000\\085\\000\\000\\000\\000\\000\\085\\000\\085\\000\\000\\000\\085\\000\\\n\\000\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\000\\000\\085\\000\\000\\000\\085\\000\\\n\\085\\000\\000\\000\\000\\000\\000\\000\\085\\000\\085\\000\\085\\000\\000\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\239\\001\\085\\000\\000\\000\\085\\000\\\n\\085\\000\\000\\000\\000\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\085\\000\\085\\000\\00
0\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\000\\000\\000\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\000\\000\\000\\000\\000\\000\\000\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\000\\000\\085\\000\\085\\000\\085\\000\\\n\\000\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\000\\000\\085\\000\\\n\\000\\000\\085\\000\\085\\000\\000\\000\\085\\000\\085\\000\\000\\000\\000\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\085\\000\\085\\000\\000\\000\\085\\000\\000\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\000\\000\\000\\082\\000\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\\n\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\000\\000\\082\\000\\\n\\082\\000\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\000\\000\\082\\000\\\n\\082\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\082\\000\\\n\\000\\000\\082\\000\\082\\000\\000\\000\\082\\000\\082\\000\\082\\000\\000\\000\\\n\\082\\000\\000\\000\\082\\000\\000\\000\\000\\000\\082\\000\\082\\000\\000\\000\\\n\\082\\000\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\000\\000\\082\\000\\000\\000\\\n\\082\\000\\082\\000\\000\\000\\000\\000\\000\\000\\082\\000\\082\\000\\082\\000\\\n\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\083\\000\\082\\000\\000\\000\\\n\\082\\000\\082\\000\\000\\000\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\000\\082\\000\\082\\000\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\000\\000\\\n\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\0
00\\082\\000\\082\\000\\082\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\000\\000\\082\\000\\082\\000\\\n\\082\\000\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\000\\000\\\n\\082\\000\\000\\000\\082\\000\\082\\000\\000\\000\\082\\000\\082\\000\\000\\000\\\n\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\000\\082\\000\\082\\000\\000\\000\\082\\000\\239\\001\\239\\001\\239\\001\\\n\\239\\001\\000\\000\\239\\001\\000\\000\\239\\001\\000\\000\\000\\000\\000\\000\\\n\\239\\001\\239\\001\\000\\000\\239\\001\\239\\001\\000\\000\\000\\000\\239\\001\\\n\\239\\001\\000\\000\\239\\001\\239\\001\\239\\001\\239\\001\\000\\000\\239\\001\\\n\\239\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\239\\001\\\n\\000\\000\\239\\001\\239\\001\\000\\000\\239\\001\\239\\001\\239\\001\\000\\000\\\n\\239\\001\\000\\000\\239\\001\\000\\000\\000\\000\\239\\001\\239\\001\\000\\000\\\n\\239\\001\\000\\000\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\000\\000\\239\\001\\000\\000\\\n\\239\\001\\239\\001\\000\\000\\000\\000\\000\\000\\239\\001\\239\\001\\239\\001\\\n\\000\\000\\239\\001\\239\\001\\239\\001\\239\\001\\084\\000\\239\\001\\000\\000\\\n\\239\\001\\239\\001\\000\\000\\000\\000\\239\\001\\239\\001\\239\\001\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\000\\000\\239\\001\\239\\001\\239\\001\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\000\\000\\\n\\000\\000\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\239\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\000\\000\\239\\001\\239\\001\\\n\\239\\001\\000\\000\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\
\001\\\n\\239\\001\\239\\001\\000\\000\\239\\001\\239\\001\\239\\001\\239\\001\\000\\000\\\n\\239\\001\\000\\000\\239\\001\\239\\001\\000\\000\\239\\001\\239\\001\\000\\000\\\n\\000\\000\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\000\\000\\239\\001\\000\\000\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\000\\000\\083\\000\\000\\000\\083\\000\\000\\000\\000\\000\\\n\\000\\000\\083\\000\\083\\000\\000\\000\\083\\000\\083\\000\\000\\000\\000\\000\\\n\\083\\000\\083\\000\\000\\000\\083\\000\\083\\000\\083\\000\\083\\000\\000\\000\\\n\\083\\000\\083\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\083\\000\\000\\000\\083\\000\\083\\000\\000\\000\\083\\000\\083\\000\\083\\000\\\n\\000\\000\\083\\000\\000\\000\\083\\000\\000\\000\\000\\000\\083\\000\\083\\000\\\n\\000\\000\\083\\000\\000\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\000\\000\\083\\000\\\n\\000\\000\\083\\000\\083\\000\\000\\000\\000\\000\\000\\000\\083\\000\\083\\000\\\n\\083\\000\\000\\000\\083\\000\\083\\000\\083\\000\\083\\000\\085\\000\\083\\000\\\n\\000\\000\\083\\000\\083\\000\\000\\000\\000\\000\\083\\000\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\000\\000\\083\\000\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\\n\\000\\000\\000\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\000\\000\\083\\000\\\n\\083\\000\\083\\000\\000\\000\\083\\000\\083\\000\\083\\000\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\000\\000\\083\\000\\083\\000\\083\\000\\083\\000\\\n\\000\\000\\083\\000\\000\\000\\083\\000\\083\\000\\000\\000\\083\\000\\083\\000\\\n\\000\\000\\000\\000\\083\\000\\083\\000\\083\\000\\083\\000
\\083\\000\\083\\000\\\n\\083\\000\\083\\000\\083\\000\\083\\000\\000\\000\\083\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\000\\000\\084\\000\\000\\000\\084\\000\\000\\000\\000\\000\\\n\\000\\000\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\000\\000\\000\\000\\\n\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\000\\000\\\n\\084\\000\\084\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\084\\000\\000\\000\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\084\\000\\\n\\000\\000\\084\\000\\000\\000\\084\\000\\000\\000\\000\\000\\084\\000\\084\\000\\\n\\000\\000\\084\\000\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\000\\000\\084\\000\\\n\\000\\000\\084\\000\\084\\000\\000\\000\\000\\000\\000\\000\\084\\000\\084\\000\\\n\\084\\000\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\082\\000\\084\\000\\\n\\000\\000\\084\\000\\084\\000\\000\\000\\000\\000\\084\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\000\\000\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\000\\000\\084\\000\\\n\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\000\\000\\084\\000\\000\\000\\084\\000\\084\\000\\000\\000\\084\\000\\084\\000\\\n\\000\\000\\000\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\084\\000\\\n\\084\\000\\084\\000\\084\\000\\084\\000\\000\\000\\084\\000\\000\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\000\\000\\085\\000\\000\\000\\085\\000\\000\\000\\\n\\000\\000\\000\\000\\085\\000\\085\\000\\00
0\\000\\085\\000\\085\\000\\000\\000\\\n\\000\\000\\085\\000\\085\\000\\000\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\000\\000\\085\\000\\085\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\085\\000\\000\\000\\085\\000\\085\\000\\000\\000\\085\\000\\085\\000\\\n\\085\\000\\000\\000\\085\\000\\000\\000\\085\\000\\000\\000\\000\\000\\085\\000\\\n\\085\\000\\000\\000\\085\\000\\000\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\000\\000\\\n\\085\\000\\000\\000\\085\\000\\085\\000\\000\\000\\000\\000\\000\\000\\085\\000\\\n\\085\\000\\085\\000\\000\\000\\085\\000\\085\\000\\085\\000\\085\\000\\056\\000\\\n\\085\\000\\000\\000\\085\\000\\085\\000\\000\\000\\000\\000\\085\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\000\\000\\085\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\085\\000\\000\\000\\000\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\000\\000\\\n\\085\\000\\085\\000\\085\\000\\000\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\000\\000\\085\\000\\085\\000\\085\\000\\\n\\085\\000\\000\\000\\085\\000\\000\\000\\085\\000\\085\\000\\000\\000\\085\\000\\\n\\085\\000\\000\\000\\000\\000\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\\n\\085\\000\\085\\000\\085\\000\\085\\000\\085\\000\\000\\000\\085\\000\\082\\000\\\n\\082\\000\\082\\000\\082\\000\\000\\000\\082\\000\\000\\000\\082\\000\\000\\000\\\n\\000\\000\\000\\000\\082\\000\\082\\000\\000\\000\\082\\000\\082\\000\\000\\000\\\n\\000\\000\\082\\000\\082\\000\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\\n\\000\\000\\082\\000\\082\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\082\\000\\000\\0
00\\082\\000\\082\\000\\000\\000\\082\\000\\082\\000\\\n\\082\\000\\000\\000\\082\\000\\000\\000\\082\\000\\000\\000\\000\\000\\082\\000\\\n\\082\\000\\000\\000\\082\\000\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\000\\000\\\n\\082\\000\\000\\000\\082\\000\\082\\000\\000\\000\\000\\000\\000\\000\\082\\000\\\n\\082\\000\\082\\000\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\047\\000\\\n\\082\\000\\000\\000\\082\\000\\082\\000\\000\\000\\000\\000\\082\\000\\082\\000\\\n\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\000\\000\\082\\000\\082\\000\\\n\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\000\\000\\000\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\000\\000\\\n\\082\\000\\082\\000\\082\\000\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\000\\082\\000\\082\\000\\082\\000\\000\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\000\\000\\000\\082\\000\\000\\000\\082\\000\\082\\000\\000\\000\\082\\000\\\n\\082\\000\\000\\000\\000\\000\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\\n\\082\\000\\082\\000\\082\\000\\082\\000\\082\\000\\000\\000\\082\\000\\000\\000\\\n\\056\\000\\056\\000\\056\\000\\056\\000\\000\\000\\056\\000\\000\\000\\056\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\056\\000\\056\\000\\056\\000\\000\\000\\\n\\056\\000\\000\\000\\000\\000\\056\\000\\000\\000\\056\\000\\056\\000\\056\\000\\\n\\056\\000\\000\\000\\056\\000\\056\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\056\\000\\000\\000\\056\\000\\056\\000\\000\\000\\056\\000\\\n\\056\\000\\056\\000\\000\\000\\000\\000\\000\\000\\056\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\056\\000\\000\\000\\056\\000\\056\\000\\056\\000\\\n\\056\\000\\
056\\000\\056\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\056\\000\\000\\000\\056\\000\\056\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\056\\000\\056\\000\\000\\000\\000\\000\\000\\000\\056\\000\\056\\000\\\n\\007\\000\\056\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\056\\000\\\n\\056\\000\\000\\000\\056\\000\\056\\000\\056\\000\\056\\000\\000\\000\\056\\000\\\n\\056\\000\\056\\000\\056\\000\\056\\000\\056\\000\\056\\000\\056\\000\\056\\000\\\n\\056\\000\\000\\000\\000\\000\\000\\000\\000\\000\\056\\000\\056\\000\\056\\000\\\n\\056\\000\\056\\000\\056\\000\\056\\000\\056\\000\\056\\000\\056\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\056\\000\\056\\000\\056\\000\\000\\000\\\n\\000\\000\\056\\000\\056\\000\\000\\000\\000\\000\\056\\000\\056\\000\\056\\000\\\n\\056\\000\\056\\000\\000\\000\\000\\000\\056\\000\\056\\000\\000\\000\\056\\000\\\n\\056\\000\\056\\000\\000\\000\\056\\000\\000\\000\\056\\000\\000\\000\\000\\000\\\n\\000\\000\\056\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\056\\000\\056\\000\\000\\000\\056\\000\\056\\000\\000\\000\\056\\000\\\n\\047\\000\\047\\000\\047\\000\\047\\000\\000\\000\\047\\000\\000\\000\\047\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\047\\000\\047\\000\\047\\000\\000\\000\\\n\\047\\000\\000\\000\\000\\000\\047\\000\\000\\000\\047\\000\\047\\000\\047\\000\\\n\\047\\000\\000\\000\\047\\000\\047\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\047\\000\\000\\000\\047\\000\\047\\000\\000\\000\\047\\000\\\n\\047\\000\\047\\000\\000\\000\\000\\000\\000\\000\\047\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\047\\000\\000\\000\\047\\000\\047\\000\\047\\000\\\n\\047\\000\\047\\000\\047\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\047\\000\\000\\000\\047\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\047\\000\\047\\000\\000\\000\\000\\000\\000\\000\\047\\000\\047\\000
\\\n\\199\\001\\047\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\047\\000\\\n\\047\\000\\000\\000\\047\\000\\047\\000\\047\\000\\047\\000\\000\\000\\047\\000\\\n\\047\\000\\047\\000\\047\\000\\047\\000\\047\\000\\047\\000\\047\\000\\047\\000\\\n\\047\\000\\000\\000\\000\\000\\000\\000\\000\\000\\047\\000\\047\\000\\047\\000\\\n\\047\\000\\047\\000\\047\\000\\047\\000\\047\\000\\047\\000\\047\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\047\\000\\047\\000\\047\\000\\000\\000\\\n\\000\\000\\047\\000\\047\\000\\000\\000\\000\\000\\047\\000\\047\\000\\047\\000\\\n\\047\\000\\047\\000\\000\\000\\000\\000\\047\\000\\047\\000\\000\\000\\047\\000\\\n\\047\\000\\047\\000\\000\\000\\047\\000\\000\\000\\047\\000\\000\\000\\000\\000\\\n\\000\\000\\047\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\047\\000\\047\\000\\000\\000\\047\\000\\047\\000\\000\\000\\047\\000\\\n\\000\\000\\007\\000\\007\\000\\007\\000\\007\\000\\000\\000\\007\\000\\000\\000\\\n\\007\\000\\000\\000\\000\\000\\000\\000\\000\\000\\007\\000\\007\\000\\007\\000\\\n\\000\\000\\007\\000\\000\\000\\000\\000\\007\\000\\000\\000\\007\\000\\007\\000\\\n\\007\\000\\007\\000\\000\\000\\007\\000\\007\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\007\\000\\000\\000\\007\\000\\007\\000\\000\\000\\\n\\007\\000\\007\\000\\007\\000\\000\\000\\000\\000\\000\\000\\007\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\007\\000\\000\\000\\007\\000\\007\\000\\\n\\007\\000\\007\\000\\007\\000\\007\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\007\\000\\000\\000\\007\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\007\\000\\007\\000\\000\\000\\000\\000\\000\\000\\007\\000\\\n\\007\\000\\198\\001\\007\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\007\\000\\007\\000\\000\\000\\007\\000\\007\\000\\007\\000\\007\\000\\000\\000\\\n\\007\\000\\007\\000\\007\\000\\007\\000\\007\\000\\007\\000\\00
7\\000\\007\\000\\\n\\007\\000\\007\\000\\000\\000\\000\\000\\000\\000\\000\\000\\007\\000\\007\\000\\\n\\007\\000\\007\\000\\007\\000\\007\\000\\007\\000\\007\\000\\007\\000\\007\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\007\\000\\007\\000\\007\\000\\\n\\000\\000\\000\\000\\007\\000\\007\\000\\000\\000\\000\\000\\007\\000\\007\\000\\\n\\007\\000\\007\\000\\007\\000\\000\\000\\000\\000\\007\\000\\007\\000\\000\\000\\\n\\007\\000\\007\\000\\007\\000\\000\\000\\007\\000\\000\\000\\007\\000\\000\\000\\\n\\000\\000\\000\\000\\007\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\007\\000\\007\\000\\000\\000\\007\\000\\007\\000\\000\\000\\\n\\007\\000\\199\\001\\199\\001\\199\\001\\199\\001\\000\\000\\199\\001\\000\\000\\\n\\199\\001\\000\\000\\000\\000\\000\\000\\000\\000\\199\\001\\199\\001\\199\\001\\\n\\000\\000\\199\\001\\000\\000\\000\\000\\199\\001\\000\\000\\199\\001\\199\\001\\\n\\199\\001\\199\\001\\000\\000\\199\\001\\199\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\199\\001\\000\\000\\199\\001\\199\\001\\000\\000\\\n\\199\\001\\199\\001\\199\\001\\000\\000\\000\\000\\000\\000\\199\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\199\\001\\000\\000\\199\\001\\199\\001\\\n\\199\\001\\199\\001\\199\\001\\199\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\199\\001\\000\\000\\199\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\199\\001\\199\\001\\000\\000\\000\\000\\000\\000\\199\\001\\\n\\199\\001\\197\\001\\199\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\199\\001\\199\\001\\000\\000\\199\\001\\199\\001\\199\\001\\199\\001\\000\\000\\\n\\199\\001\\199\\001\\199\\001\\199\\001\\199\\001\\199\\001\\199\\001\\199\\001\\\n\\199\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\199\\001\\199\\001\\\n\\199\\001\\199\\001\\199\\001\\199\\001\\199\\001\\199\\001\\199\\001\\199\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\199\\001\\199\\001\\199\\001\\\n\\000\\000\\000\\000\\199\\001\\199\\001\\000\\000\\000\\000\\199\\001\\199\\001\\\n\\199\\001\\199\\001\\199\\001\\000\\000\\000\\000\\199\\001\\199\\001\\000\\000\\\n\\199\\001\\199\\001\\199\\001\\000\\000\\199\\001\\000\\000\\199\\001\\000\\000\\\n\\000\\000\\000\\000\\199\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\199\\001\\199\\001\\000\\000\\199\\001\\199\\001\\000\\000\\\n\\199\\001\\000\\000\\198\\001\\198\\001\\198\\001\\198\\001\\000\\000\\198\\001\\\n\\000\\000\\198\\001\\000\\000\\000\\000\\000\\000\\000\\000\\198\\001\\198\\001\\\n\\198\\001\\000\\000\\198\\001\\000\\000\\000\\000\\198\\001\\000\\000\\198\\001\\\n\\198\\001\\198\\001\\198\\001\\000\\000\\198\\001\\198\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\198\\001\\000\\000\\198\\001\\198\\001\\\n\\000\\000\\198\\001\\198\\001\\198\\001\\000\\000\\000\\000\\000\\000\\198\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\198\\001\\000\\000\\198\\001\\\n\\198\\001\\198\\001\\198\\001\\198\\001\\198\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\198\\001\\000\\000\\198\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\198\\001\\198\\001\\000\\000\\000\\000\\000\\000\\\n\\198\\001\\198\\001\\196\\001\\198\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\198\\001\\198\\001\\000\\000\\198\\001\\198\\001\\198\\001\\198\\001\\\n\\000\\000\\198\\001\\198\\001\\198\\001\\198\\001\\198\\001\\198\\001\\198\\001\\\n\\198\\001\\198\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\198\\001\\\n\\198\\001\\198\\001\\198\\001\\198\\001\\198\\001\\198\\001\\198\\001\\198\\001\\\n\\198\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\198\\001\\198\\001\\\n\\198\\001\\000\\000\\000\\000\\198\\001\\198\\001\\000\\000\\000\\000\\198\\001\\\n\\198\\001\\198\\001\\198\\001\\198\\001\\000\\000\\000\\000\\198\\001\\198\\001\\\n\\000\\000\\198\\001\\198\\001\\
198\\001\\000\\000\\198\\001\\000\\000\\198\\001\\\n\\000\\000\\000\\000\\000\\000\\198\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\198\\001\\198\\001\\000\\000\\198\\001\\198\\001\\\n\\000\\000\\198\\001\\197\\001\\197\\001\\197\\001\\197\\001\\000\\000\\197\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\197\\001\\197\\001\\\n\\197\\001\\000\\000\\197\\001\\000\\000\\000\\000\\197\\001\\000\\000\\197\\001\\\n\\197\\001\\197\\001\\197\\001\\000\\000\\197\\001\\197\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\197\\001\\000\\000\\197\\001\\197\\001\\\n\\000\\000\\197\\001\\197\\001\\197\\001\\000\\000\\000\\000\\000\\000\\197\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\197\\001\\000\\000\\197\\001\\\n\\197\\001\\197\\001\\197\\001\\197\\001\\197\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\197\\001\\000\\000\\197\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\197\\001\\197\\001\\000\\000\\000\\000\\000\\000\\\n\\197\\001\\197\\001\\185\\001\\197\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\197\\001\\197\\001\\000\\000\\197\\001\\197\\001\\197\\001\\197\\001\\\n\\000\\000\\197\\001\\197\\001\\197\\001\\197\\001\\197\\001\\197\\001\\197\\001\\\n\\197\\001\\197\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\197\\001\\\n\\197\\001\\197\\001\\197\\001\\197\\001\\197\\001\\197\\001\\197\\001\\197\\001\\\n\\197\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\197\\001\\197\\001\\\n\\197\\001\\000\\000\\000\\000\\197\\001\\197\\001\\000\\000\\000\\000\\197\\001\\\n\\197\\001\\197\\001\\197\\001\\197\\001\\000\\000\\000\\000\\197\\001\\197\\001\\\n\\000\\000\\197\\001\\197\\001\\197\\001\\000\\000\\197\\001\\000\\000\\197\\001\\\n\\000\\000\\000\\000\\000\\000\\197\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\197\\001\\197\\001\\000\\000\\197\\001\\197\\001\\\n\\000\\000\\197\
\001\\000\\000\\000\\000\\196\\001\\196\\001\\196\\001\\000\\000\\\n\\196\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\196\\001\\\n\\196\\001\\196\\001\\000\\000\\196\\001\\000\\000\\000\\000\\196\\001\\000\\000\\\n\\196\\001\\196\\001\\196\\001\\196\\001\\000\\000\\196\\001\\196\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\196\\001\\000\\000\\196\\001\\\n\\196\\001\\000\\000\\196\\001\\196\\001\\196\\001\\000\\000\\000\\000\\000\\000\\\n\\196\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\196\\001\\000\\000\\\n\\196\\001\\196\\001\\196\\001\\196\\001\\196\\001\\196\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\196\\001\\000\\000\\196\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\196\\001\\196\\001\\000\\000\\000\\000\\\n\\000\\000\\196\\001\\196\\001\\195\\001\\196\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\196\\001\\196\\001\\000\\000\\196\\001\\196\\001\\196\\001\\\n\\196\\001\\000\\000\\196\\001\\196\\001\\196\\001\\196\\001\\196\\001\\196\\001\\\n\\196\\001\\196\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\196\\001\\196\\001\\196\\001\\196\\001\\196\\001\\196\\001\\196\\001\\196\\001\\\n\\196\\001\\196\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\196\\001\\\n\\196\\001\\196\\001\\000\\000\\000\\000\\196\\001\\196\\001\\000\\000\\000\\000\\\n\\196\\001\\196\\001\\196\\001\\196\\001\\196\\001\\000\\000\\000\\000\\196\\001\\\n\\196\\001\\000\\000\\196\\001\\196\\001\\196\\001\\000\\000\\196\\001\\000\\000\\\n\\196\\001\\000\\000\\000\\000\\000\\000\\196\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\196\\001\\196\\001\\000\\000\\196\\001\\\n\\196\\001\\000\\000\\196\\001\\000\\000\\185\\001\\185\\001\\185\\001\\000\\000\\\n\\185\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\185\\001\\\n\\185\\001\\185\\001\\000\\000\\185\\001\\000\\000\\000\\000\\185\\001\\000\\000\\\n
\\000\\000\\185\\001\\185\\001\\185\\001\\000\\000\\185\\001\\185\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\185\\001\\000\\000\\185\\001\\\n\\185\\001\\000\\000\\185\\001\\185\\001\\185\\001\\000\\000\\000\\000\\000\\000\\\n\\185\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\185\\001\\000\\000\\\n\\185\\001\\185\\001\\185\\001\\185\\001\\185\\001\\185\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\185\\001\\000\\000\\185\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\185\\001\\185\\001\\000\\000\\000\\000\\\n\\000\\000\\185\\001\\185\\001\\177\\001\\185\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\185\\001\\185\\001\\000\\000\\185\\001\\185\\001\\185\\001\\\n\\185\\001\\000\\000\\185\\001\\185\\001\\185\\001\\185\\001\\185\\001\\185\\001\\\n\\185\\001\\185\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\185\\001\\185\\001\\185\\001\\185\\001\\185\\001\\185\\001\\185\\001\\185\\001\\\n\\185\\001\\185\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\185\\001\\\n\\185\\001\\185\\001\\000\\000\\000\\000\\185\\001\\185\\001\\000\\000\\000\\000\\\n\\185\\001\\185\\001\\185\\001\\185\\001\\185\\001\\000\\000\\000\\000\\185\\001\\\n\\185\\001\\000\\000\\185\\001\\185\\001\\185\\001\\000\\000\\185\\001\\000\\000\\\n\\185\\001\\000\\000\\000\\000\\000\\000\\185\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\185\\001\\185\\001\\000\\000\\185\\001\\\n\\185\\001\\000\\000\\185\\001\\000\\000\\000\\000\\195\\001\\195\\001\\195\\001\\\n\\000\\000\\195\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\195\\001\\195\\001\\195\\001\\000\\000\\195\\001\\000\\000\\000\\000\\195\\001\\\n\\000\\000\\000\\000\\195\\001\\195\\001\\195\\001\\000\\000\\195\\001\\195\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\195\\001\\000\\000\\\n\\195\\001\\195\\001\\000\\000\\195\\001\\195\\001\\195\\001\\000\\0
00\\000\\000\\\n\\000\\000\\195\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\195\\001\\\n\\000\\000\\195\\001\\195\\001\\195\\001\\195\\001\\195\\001\\195\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\195\\001\\000\\000\\195\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\195\\001\\195\\001\\000\\000\\\n\\000\\000\\000\\000\\195\\001\\195\\001\\172\\001\\195\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\195\\001\\195\\001\\000\\000\\195\\001\\195\\001\\\n\\195\\001\\195\\001\\000\\000\\195\\001\\195\\001\\195\\001\\195\\001\\195\\001\\\n\\195\\001\\195\\001\\195\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\195\\001\\195\\001\\195\\001\\195\\001\\195\\001\\195\\001\\195\\001\\\n\\195\\001\\195\\001\\195\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\195\\001\\195\\001\\195\\001\\000\\000\\000\\000\\195\\001\\195\\001\\000\\000\\\n\\000\\000\\195\\001\\195\\001\\195\\001\\195\\001\\195\\001\\000\\000\\000\\000\\\n\\195\\001\\195\\001\\000\\000\\195\\001\\195\\001\\195\\001\\000\\000\\195\\001\\\n\\000\\000\\195\\001\\000\\000\\000\\000\\000\\000\\195\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\195\\001\\195\\001\\000\\000\\\n\\195\\001\\195\\001\\000\\000\\195\\001\\000\\000\\177\\001\\177\\001\\177\\001\\\n\\000\\000\\177\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\177\\001\\177\\001\\177\\001\\000\\000\\177\\001\\000\\000\\000\\000\\177\\001\\\n\\000\\000\\000\\000\\177\\001\\177\\001\\177\\001\\000\\000\\177\\001\\177\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\177\\001\\000\\000\\\n\\177\\001\\177\\001\\000\\000\\177\\001\\177\\001\\177\\001\\000\\000\\000\\000\\\n\\000\\000\\177\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\177\\001\\\n\\000\\000\\177\\001\\177\\001\\177\\001\\177\\001\\177\\001\\177\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\
177\\001\\000\\000\\177\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\177\\001\\177\\001\\000\\000\\\n\\000\\000\\000\\000\\177\\001\\177\\001\\176\\001\\177\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\177\\001\\000\\000\\177\\001\\177\\001\\\n\\177\\001\\177\\001\\000\\000\\177\\001\\177\\001\\177\\001\\177\\001\\177\\001\\\n\\177\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\177\\001\\177\\001\\177\\001\\177\\001\\177\\001\\177\\001\\177\\001\\\n\\177\\001\\177\\001\\177\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\177\\001\\177\\001\\177\\001\\000\\000\\000\\000\\177\\001\\177\\001\\000\\000\\\n\\000\\000\\177\\001\\177\\001\\177\\001\\177\\001\\177\\001\\000\\000\\000\\000\\\n\\177\\001\\177\\001\\000\\000\\177\\001\\177\\001\\177\\001\\000\\000\\177\\001\\\n\\000\\000\\177\\001\\000\\000\\000\\000\\000\\000\\177\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\177\\001\\177\\001\\000\\000\\\n\\177\\001\\177\\001\\000\\000\\177\\001\\000\\000\\000\\000\\172\\001\\172\\001\\\n\\172\\001\\000\\000\\172\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\172\\001\\172\\001\\172\\001\\000\\000\\172\\001\\000\\000\\000\\000\\\n\\172\\001\\000\\000\\000\\000\\172\\001\\172\\001\\172\\001\\000\\000\\172\\001\\\n\\172\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\172\\001\\\n\\000\\000\\172\\001\\172\\001\\000\\000\\172\\001\\172\\001\\172\\001\\000\\000\\\n\\000\\000\\000\\000\\172\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\172\\001\\000\\000\\172\\001\\172\\001\\172\\001\\172\\001\\172\\001\\172\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\172\\001\\000\\000\\\n\\172\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\172\\001\\172\\001\\\n\\000\\000\\000\\000\\000\\000\\172\\001\\172\\001\\170\\001\\172\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\172\\001\\000\\000\\172\\001\\\n\\172\\001\\172\\001\\172\\001\\000\\000\\172\\001\\172\\001\\172\\001\\172\\001\\\n\\172\\001\\172\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\172\\001\\172\\001\\172\\001\\172\\001\\172\\001\\172\\001\\\n\\172\\001\\172\\001\\172\\001\\172\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\172\\001\\172\\001\\172\\001\\000\\000\\000\\000\\172\\001\\172\\001\\\n\\000\\000\\000\\000\\172\\001\\172\\001\\172\\001\\172\\001\\172\\001\\000\\000\\\n\\000\\000\\172\\001\\172\\001\\000\\000\\172\\001\\172\\001\\172\\001\\000\\000\\\n\\172\\001\\000\\000\\172\\001\\000\\000\\000\\000\\000\\000\\172\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\172\\001\\172\\001\\\n\\000\\000\\172\\001\\172\\001\\000\\000\\172\\001\\000\\000\\176\\001\\176\\001\\\n\\176\\001\\000\\000\\176\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\176\\001\\176\\001\\176\\001\\000\\000\\176\\001\\000\\000\\000\\000\\\n\\176\\001\\000\\000\\000\\000\\176\\001\\176\\001\\176\\001\\000\\000\\176\\001\\\n\\176\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\176\\001\\\n\\000\\000\\176\\001\\176\\001\\000\\000\\176\\001\\176\\001\\176\\001\\000\\000\\\n\\000\\000\\000\\000\\176\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\176\\001\\000\\000\\176\\001\\176\\001\\176\\001\\176\\001\\176\\001\\176\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\176\\001\\000\\000\\\n\\176\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\176\\001\\176\\001\\\n\\000\\000\\000\\000\\000\\000\\176\\001\\176\\001\\175\\001\\176\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\176\\001\\000\\000\\176\\001\\\n\\176\\001\\176\\001\\176\\001\\000\\000\\176\\001\\176\\001\\176\\001\\176\\001\\\n\\176\\001\\176\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000
\\000\\000\\176\\001\\176\\001\\176\\001\\176\\001\\176\\001\\\n\\176\\001\\176\\001\\176\\001\\176\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\176\\001\\176\\001\\176\\001\\000\\000\\000\\000\\176\\001\\176\\001\\\n\\000\\000\\000\\000\\176\\001\\176\\001\\176\\001\\176\\001\\176\\001\\000\\000\\\n\\000\\000\\176\\001\\176\\001\\000\\000\\176\\001\\176\\001\\176\\001\\000\\000\\\n\\176\\001\\000\\000\\176\\001\\000\\000\\000\\000\\000\\000\\176\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\176\\001\\176\\001\\\n\\000\\000\\176\\001\\176\\001\\000\\000\\176\\001\\000\\000\\000\\000\\170\\001\\\n\\170\\001\\170\\001\\000\\000\\170\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\170\\001\\170\\001\\170\\001\\000\\000\\170\\001\\000\\000\\\n\\000\\000\\170\\001\\000\\000\\000\\000\\170\\001\\170\\001\\170\\001\\000\\000\\\n\\170\\001\\170\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\170\\001\\000\\000\\170\\001\\170\\001\\000\\000\\170\\001\\170\\001\\170\\001\\\n\\000\\000\\000\\000\\000\\000\\170\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\170\\001\\000\\000\\170\\001\\170\\001\\170\\001\\170\\001\\170\\001\\\n\\170\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\170\\001\\\n\\000\\000\\170\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\170\\001\\\n\\170\\001\\000\\000\\000\\000\\000\\000\\170\\001\\170\\001\\174\\001\\170\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\170\\001\\000\\000\\\n\\170\\001\\170\\001\\170\\001\\170\\001\\000\\000\\170\\001\\170\\001\\170\\001\\\n\\170\\001\\170\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\170\\001\\170\\001\\170\\001\\170\\001\\\n\\170\\001\\170\\001\\170\\001\\170\\001\\170\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\170\\001\\170\\001\\170\\001\\000\\000\\000\\000\\170\\001\\\n\\17
0\\001\\000\\000\\000\\000\\170\\001\\170\\001\\170\\001\\170\\001\\170\\001\\\n\\000\\000\\000\\000\\170\\001\\170\\001\\000\\000\\170\\001\\170\\001\\170\\001\\\n\\000\\000\\170\\001\\000\\000\\170\\001\\000\\000\\000\\000\\000\\000\\170\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\170\\001\\\n\\170\\001\\000\\000\\170\\001\\170\\001\\000\\000\\170\\001\\000\\000\\175\\001\\\n\\175\\001\\175\\001\\000\\000\\175\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\175\\001\\175\\001\\175\\001\\000\\000\\175\\001\\000\\000\\\n\\000\\000\\175\\001\\000\\000\\000\\000\\175\\001\\175\\001\\175\\001\\000\\000\\\n\\175\\001\\175\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\175\\001\\000\\000\\175\\001\\175\\001\\000\\000\\175\\001\\175\\001\\175\\001\\\n\\000\\000\\000\\000\\000\\000\\175\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\175\\001\\000\\000\\175\\001\\175\\001\\175\\001\\175\\001\\175\\001\\\n\\175\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\175\\001\\\n\\000\\000\\175\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\175\\001\\\n\\175\\001\\000\\000\\000\\000\\000\\000\\175\\001\\175\\001\\173\\001\\175\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\175\\001\\000\\000\\\n\\175\\001\\175\\001\\175\\001\\175\\001\\000\\000\\175\\001\\175\\001\\175\\001\\\n\\175\\001\\175\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\175\\001\\175\\001\\175\\001\\175\\001\\\n\\175\\001\\175\\001\\175\\001\\175\\001\\175\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\175\\001\\175\\001\\175\\001\\000\\000\\000\\000\\175\\001\\\n\\175\\001\\000\\000\\000\\000\\175\\001\\175\\001\\175\\001\\175\\001\\175\\001\\\n\\000\\000\\000\\000\\175\\001\\175\\001\\000\\000\\175\\001\\175\\001\\175\\001\\\n\\000\\000\\175\\001\\000\\000\\175\\001\\000\\000\\000\\000\\000\\000\\
175\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\175\\001\\\n\\175\\001\\000\\000\\175\\001\\175\\001\\000\\000\\175\\001\\000\\000\\000\\000\\\n\\174\\001\\174\\001\\174\\001\\000\\000\\174\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\174\\001\\174\\001\\174\\001\\000\\000\\174\\001\\\n\\000\\000\\000\\000\\174\\001\\000\\000\\000\\000\\174\\001\\174\\001\\174\\001\\\n\\000\\000\\174\\001\\174\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\174\\001\\000\\000\\174\\001\\174\\001\\000\\000\\000\\000\\174\\001\\\n\\174\\001\\000\\000\\000\\000\\000\\000\\174\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\174\\001\\000\\000\\174\\001\\174\\001\\174\\001\\174\\001\\\n\\174\\001\\174\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\174\\001\\000\\000\\174\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\174\\001\\174\\001\\000\\000\\000\\000\\000\\000\\174\\001\\174\\001\\223\\001\\\n\\174\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\174\\001\\\n\\000\\000\\174\\001\\174\\001\\174\\001\\174\\001\\000\\000\\174\\001\\174\\001\\\n\\174\\001\\174\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\174\\001\\174\\001\\174\\001\\\n\\174\\001\\174\\001\\174\\001\\174\\001\\174\\001\\174\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\174\\001\\174\\001\\174\\001\\000\\000\\000\\000\\\n\\174\\001\\174\\001\\000\\000\\000\\000\\174\\001\\174\\001\\174\\001\\174\\001\\\n\\174\\001\\000\\000\\000\\000\\174\\001\\174\\001\\000\\000\\174\\001\\174\\001\\\n\\174\\001\\000\\000\\174\\001\\000\\000\\174\\001\\000\\000\\000\\000\\000\\000\\\n\\174\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\174\\001\\174\\001\\000\\000\\174\\001\\174\\001\\000\\000\\174\\001\\000\\000\\\n\\173\\001\\173\\001\\173\\001\\000\\000\\173\\001\\000\
\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\173\\001\\173\\001\\173\\001\\000\\000\\173\\001\\\n\\000\\000\\000\\000\\173\\001\\000\\000\\000\\000\\173\\001\\173\\001\\173\\001\\\n\\000\\000\\173\\001\\173\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\173\\001\\000\\000\\173\\001\\173\\001\\000\\000\\000\\000\\173\\001\\\n\\173\\001\\000\\000\\000\\000\\000\\000\\173\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\173\\001\\000\\000\\173\\001\\173\\001\\173\\001\\173\\001\\\n\\173\\001\\173\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\173\\001\\000\\000\\173\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\173\\001\\173\\001\\000\\000\\000\\000\\000\\000\\173\\001\\173\\001\\171\\001\\\n\\173\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\173\\001\\\n\\000\\000\\173\\001\\173\\001\\173\\001\\173\\001\\000\\000\\173\\001\\173\\001\\\n\\173\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\173\\001\\173\\001\\173\\001\\\n\\173\\001\\173\\001\\173\\001\\173\\001\\173\\001\\173\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\173\\001\\173\\001\\173\\001\\000\\000\\000\\000\\\n\\173\\001\\173\\001\\000\\000\\000\\000\\173\\001\\173\\001\\173\\001\\173\\001\\\n\\173\\001\\000\\000\\000\\000\\173\\001\\173\\001\\000\\000\\173\\001\\173\\001\\\n\\173\\001\\000\\000\\173\\001\\000\\000\\173\\001\\000\\000\\000\\000\\000\\000\\\n\\173\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\173\\001\\173\\001\\000\\000\\173\\001\\173\\001\\000\\000\\173\\001\\000\\000\\\n\\000\\000\\223\\001\\223\\001\\223\\001\\000\\000\\223\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\223\\001\\223\\001\\223\\001\\000\\000\\\n\\223\\001\\000\\000\\000\\000\\223\\001\\000\\000\\000\\000\\000\\000\\223\\001\\\n\\223\\001\\000\\000\\223\\001\\223\\001
\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\223\\001\\000\\000\\223\\001\\223\\001\\000\\000\\000\\000\\\n\\223\\001\\223\\001\\000\\000\\000\\000\\000\\000\\223\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\223\\001\\000\\000\\223\\001\\223\\001\\223\\001\\\n\\223\\001\\223\\001\\223\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\223\\001\\000\\000\\223\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\223\\001\\223\\001\\000\\000\\000\\000\\000\\000\\223\\001\\223\\001\\\n\\104\\000\\223\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\223\\001\\000\\000\\223\\001\\223\\001\\223\\001\\223\\001\\000\\000\\223\\001\\\n\\223\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\223\\001\\223\\001\\\n\\223\\001\\223\\001\\223\\001\\223\\001\\223\\001\\223\\001\\223\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\223\\001\\223\\001\\223\\001\\000\\000\\\n\\000\\000\\223\\001\\223\\001\\000\\000\\000\\000\\223\\001\\223\\001\\223\\001\\\n\\223\\001\\223\\001\\000\\000\\000\\000\\223\\001\\223\\001\\000\\000\\223\\001\\\n\\223\\001\\223\\001\\000\\000\\223\\001\\000\\000\\223\\001\\000\\000\\000\\000\\\n\\000\\000\\223\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\223\\001\\223\\001\\000\\000\\223\\001\\223\\001\\000\\000\\223\\001\\\n\\000\\000\\171\\001\\171\\001\\171\\001\\000\\000\\171\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\171\\001\\171\\001\\171\\001\\000\\000\\\n\\171\\001\\000\\000\\000\\000\\171\\001\\000\\000\\000\\000\\000\\000\\171\\001\\\n\\171\\001\\000\\000\\171\\001\\171\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\171\\001\\000\\000\\171\\001\\171\\001\\000\\000\\000\\000\\\n\\171\\001\\171\\001\\000\\000\\000\\000\\000\\000\\171\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\00
0\\000\\171\\001\\000\\000\\171\\001\\171\\001\\171\\001\\\n\\171\\001\\171\\001\\171\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\171\\001\\000\\000\\171\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\171\\001\\171\\001\\000\\000\\000\\000\\000\\000\\171\\001\\171\\001\\\n\\150\\001\\171\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\171\\001\\000\\000\\171\\001\\171\\001\\171\\001\\171\\001\\000\\000\\171\\001\\\n\\171\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\171\\001\\171\\001\\\n\\171\\001\\171\\001\\171\\001\\171\\001\\171\\001\\171\\001\\171\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\171\\001\\171\\001\\171\\001\\000\\000\\\n\\000\\000\\171\\001\\171\\001\\000\\000\\000\\000\\171\\001\\171\\001\\171\\001\\\n\\171\\001\\171\\001\\000\\000\\000\\000\\171\\001\\171\\001\\000\\000\\171\\001\\\n\\171\\001\\171\\001\\000\\000\\171\\001\\000\\000\\171\\001\\000\\000\\000\\000\\\n\\000\\000\\171\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\171\\001\\171\\001\\000\\000\\171\\001\\171\\001\\000\\000\\171\\001\\\n\\000\\000\\000\\000\\104\\000\\104\\000\\104\\000\\000\\000\\104\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\104\\000\\104\\000\\104\\000\\\n\\000\\000\\104\\000\\000\\000\\000\\000\\104\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\104\\000\\000\\000\\104\\000\\104\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\104\\000\\000\\000\\104\\000\\104\\000\\000\\000\\\n\\000\\000\\104\\000\\104\\000\\000\\000\\000\\000\\000\\000\\104\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\104\\000\\000\\000\\104\\000\\104\\000\\\n\\104\\000\\104\\000\\104\\000\\104\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\104\\000\\000\\000\\104\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\0
00\\000\\000\\104\\000\\104\\000\\000\\000\\000\\000\\000\\000\\104\\000\\\n\\104\\000\\149\\001\\104\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\104\\000\\000\\000\\104\\000\\104\\000\\104\\000\\104\\000\\000\\000\\\n\\104\\000\\104\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\104\\000\\\n\\104\\000\\104\\000\\104\\000\\104\\000\\104\\000\\104\\000\\104\\000\\104\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\104\\000\\104\\000\\104\\000\\\n\\000\\000\\000\\000\\104\\000\\104\\000\\000\\000\\000\\000\\104\\000\\104\\000\\\n\\104\\000\\104\\000\\104\\000\\000\\000\\000\\000\\104\\000\\104\\000\\000\\000\\\n\\104\\000\\104\\000\\104\\000\\000\\000\\104\\000\\000\\000\\104\\000\\000\\000\\\n\\000\\000\\000\\000\\104\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\104\\000\\104\\000\\000\\000\\104\\000\\104\\000\\000\\000\\\n\\104\\000\\000\\000\\150\\001\\150\\001\\150\\001\\000\\000\\150\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\150\\001\\150\\001\\150\\001\\\n\\000\\000\\150\\001\\000\\000\\000\\000\\150\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\150\\001\\150\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\150\\001\\000\\000\\150\\001\\150\\001\\000\\000\\\n\\000\\000\\150\\001\\150\\001\\000\\000\\000\\000\\000\\000\\150\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\150\\001\\000\\000\\150\\001\\150\\001\\\n\\150\\001\\150\\001\\150\\001\\150\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\150\\001\\133\\001\\150\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\150\\001\\150\\001\\000\\000\\000\\000\\000\\000\\150\\001\\\n\\150\\001\\000\\000\\150\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\150\\001\\000\\000\\150\\001\\150\\001\\150\\001\\150\\001\\000\
\000\\\n\\150\\001\\150\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\150\\001\\\n\\150\\001\\150\\001\\150\\001\\150\\001\\150\\001\\150\\001\\150\\001\\150\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\150\\001\\150\\001\\150\\001\\\n\\000\\000\\000\\000\\150\\001\\150\\001\\000\\000\\000\\000\\150\\001\\150\\001\\\n\\150\\001\\150\\001\\150\\001\\000\\000\\000\\000\\150\\001\\150\\001\\000\\000\\\n\\150\\001\\150\\001\\150\\001\\000\\000\\150\\001\\000\\000\\150\\001\\000\\000\\\n\\000\\000\\000\\000\\150\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\150\\001\\150\\001\\000\\000\\150\\001\\150\\001\\000\\000\\\n\\150\\001\\000\\000\\000\\000\\149\\001\\149\\001\\149\\001\\000\\000\\149\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\149\\001\\149\\001\\\n\\149\\001\\000\\000\\149\\001\\000\\000\\000\\000\\149\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\149\\001\\149\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\149\\001\\000\\000\\149\\001\\149\\001\\\n\\000\\000\\000\\000\\149\\001\\149\\001\\000\\000\\000\\000\\000\\000\\149\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\149\\001\\000\\000\\149\\001\\\n\\149\\001\\149\\001\\149\\001\\149\\001\\149\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\125\\001\\149\\001\\000\\000\\149\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\149\\001\\149\\001\\000\\000\\000\\000\\000\\000\\\n\\149\\001\\149\\001\\000\\000\\149\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\149\\001\\000\\000\\149\\001\\149\\001\\149\\001\\149\\001\\\n\\000\\000\\149\\001\\149\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\149\\001\\149\\001\\149\\001\\149\\001\\149\\001\\149\\001
\\149\\001\\149\\001\\\n\\149\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\149\\001\\149\\001\\\n\\149\\001\\000\\000\\000\\000\\149\\001\\149\\001\\000\\000\\000\\000\\149\\001\\\n\\149\\001\\149\\001\\149\\001\\149\\001\\000\\000\\000\\000\\149\\001\\149\\001\\\n\\000\\000\\149\\001\\149\\001\\149\\001\\000\\000\\149\\001\\000\\000\\149\\001\\\n\\000\\000\\000\\000\\000\\000\\149\\001\\000\\000\\133\\001\\133\\001\\133\\001\\\n\\000\\000\\133\\001\\000\\000\\149\\001\\149\\001\\000\\000\\149\\001\\149\\001\\\n\\133\\001\\149\\001\\133\\001\\000\\000\\133\\001\\000\\000\\000\\000\\133\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\133\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\133\\001\\000\\000\\\n\\133\\001\\133\\001\\000\\000\\000\\000\\133\\001\\133\\001\\000\\000\\000\\000\\\n\\000\\000\\133\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\133\\001\\\n\\000\\000\\133\\001\\133\\001\\133\\001\\133\\001\\133\\001\\133\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\123\\001\\133\\001\\000\\000\\133\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\133\\001\\133\\001\\000\\000\\\n\\000\\000\\000\\000\\133\\001\\133\\001\\000\\000\\133\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\133\\001\\000\\000\\133\\001\\133\\001\\\n\\133\\001\\133\\001\\000\\000\\133\\001\\133\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\133\\001\\133\\001\\133\\001\\133\\001\\133\\001\\133\\001\\\n\\133\\001\\133\\001\\133\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\133\\001\\133\\001\\000\\000\\000\\000\\133\\001\\133\\001\\000\\000\\\n\\000\\000\\133\\001\\133\\001\\133\\001\\133\\001\\133\\001\\000\\000\\000\\000\\\n\\133\\001\\000\\000\\000\\000\\133\\001\\133\\001\\133\\001\\000\\000\\133\\001\\\n\\000\\000\\133\\001\\000\\000\\000\\000\\00
0\\000\\133\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\133\\001\\133\\001\\000\\000\\\n\\133\\001\\133\\001\\000\\000\\133\\001\\125\\001\\125\\001\\125\\001\\000\\000\\\n\\125\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\125\\001\\\n\\000\\000\\125\\001\\000\\000\\125\\001\\000\\000\\000\\000\\125\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\125\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\125\\001\\000\\000\\125\\001\\\n\\125\\001\\000\\000\\000\\000\\125\\001\\125\\001\\000\\000\\000\\000\\000\\000\\\n\\125\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\125\\001\\000\\000\\\n\\000\\000\\125\\001\\125\\001\\125\\001\\125\\001\\125\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\116\\001\\125\\001\\000\\000\\125\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\125\\001\\125\\001\\000\\000\\000\\000\\\n\\000\\000\\125\\001\\125\\001\\000\\000\\125\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\125\\001\\000\\000\\125\\001\\125\\001\\125\\001\\\n\\125\\001\\000\\000\\125\\001\\125\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\125\\001\\125\\001\\125\\001\\125\\001\\125\\001\\125\\001\\125\\001\\\n\\125\\001\\125\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\125\\001\\125\\001\\000\\000\\000\\000\\125\\001\\125\\001\\000\\000\\000\\000\\\n\\125\\001\\125\\001\\125\\001\\125\\001\\125\\001\\000\\000\\000\\000\\125\\001\\\n\\000\\000\\000\\000\\125\\001\\125\\001\\125\\001\\000\\000\\125\\001\\000\\000\\\n\\125\\001\\000\\000\\000\\000\\000\\000\\125\\001\\000\\000\\123\\001\\123\\001\\\n\\123\\001\\000\\000\\123\\001\\000\\000\\125\\001\\125\\001\\000\\000\\125\\001\\\n\\125\\001\\123\\001\\125\\001\\123\\001\\000\\000\\123\\001\\000\\000\\000\\000\\\n\\123\\001\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\123\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\123\\001\\\n\\000\\000\\123\\001\\123\\001\\000\\000\\000\\000\\123\\001\\123\\001\\000\\000\\\n\\000\\000\\000\\000\\123\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\123\\001\\123\\001\\123\\001\\123\\001\\123\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\058\\000\\123\\001\\000\\000\\\n\\123\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\123\\001\\123\\001\\\n\\000\\000\\000\\000\\000\\000\\123\\001\\123\\001\\000\\000\\123\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\123\\001\\000\\000\\123\\001\\\n\\123\\001\\123\\001\\123\\001\\000\\000\\123\\001\\123\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\123\\001\\123\\001\\123\\001\\123\\001\\123\\001\\\n\\123\\001\\123\\001\\123\\001\\123\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\123\\001\\123\\001\\000\\000\\000\\000\\123\\001\\123\\001\\\n\\000\\000\\000\\000\\123\\001\\123\\001\\123\\001\\123\\001\\123\\001\\000\\000\\\n\\000\\000\\123\\001\\000\\000\\000\\000\\123\\001\\123\\001\\123\\001\\000\\000\\\n\\123\\001\\000\\000\\123\\001\\000\\000\\000\\000\\000\\000\\123\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\123\\001\\123\\001\\\n\\000\\000\\123\\001\\123\\001\\000\\000\\123\\001\\116\\001\\116\\001\\116\\001\\\n\\000\\000\\116\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\116\\001\\000\\000\\116\\001\\000\\000\\000\\000\\000\\000\\000\\000\\116\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\116\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\116\\001\\000\\000\\\n\\116\\001\\116\\001\\000\\000\\000\\000\\000\\000\\116\\001\\000\\000\\000\\000\\\n\\000\\000\\
116\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\005\\000\\\n\\000\\000\\000\\000\\116\\001\\116\\001\\116\\001\\116\\001\\116\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\116\\001\\000\\000\\116\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\116\\001\\116\\001\\000\\000\\\n\\000\\000\\000\\000\\116\\001\\116\\001\\000\\000\\116\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\116\\001\\000\\000\\116\\001\\116\\001\\\n\\116\\001\\116\\001\\000\\000\\116\\001\\116\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\116\\001\\116\\001\\116\\001\\116\\001\\116\\001\\116\\001\\\n\\116\\001\\116\\001\\116\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\116\\001\\116\\001\\000\\000\\000\\000\\116\\001\\116\\001\\000\\000\\\n\\000\\000\\116\\001\\116\\001\\116\\001\\116\\001\\116\\001\\000\\000\\000\\000\\\n\\116\\001\\000\\000\\000\\000\\116\\001\\000\\000\\116\\001\\000\\000\\116\\001\\\n\\000\\000\\116\\001\\000\\000\\000\\000\\000\\000\\116\\001\\000\\000\\058\\000\\\n\\058\\000\\058\\000\\000\\000\\058\\000\\000\\000\\116\\001\\116\\001\\000\\000\\\n\\116\\001\\116\\001\\058\\000\\116\\001\\058\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\058\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\058\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\058\\000\\000\\000\\058\\000\\058\\000\\000\\000\\000\\000\\000\\000\\058\\000\\\n\\000\\000\\000\\000\\000\\000\\058\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\078\\000\\000\\000\\000\\000\\058\\000\\058\\000\\058\\000\\058\\000\\\n\\058\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\058\\000\\\n\\000\\000\\058\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\058\\000\\\n\\058\\000\\000\\000\\000\\000\\000\\000\\058\\000\\058\\000\\000\\000\\058\\000
\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\058\\000\\000\\000\\\n\\058\\000\\058\\000\\058\\000\\058\\000\\000\\000\\058\\000\\058\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\058\\000\\058\\000\\058\\000\\058\\000\\\n\\058\\000\\058\\000\\058\\000\\058\\000\\058\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\058\\000\\058\\000\\000\\000\\000\\000\\058\\000\\\n\\058\\000\\000\\000\\000\\000\\058\\000\\058\\000\\058\\000\\058\\000\\058\\000\\\n\\000\\000\\000\\000\\058\\000\\000\\000\\000\\000\\058\\000\\000\\000\\058\\000\\\n\\000\\000\\058\\000\\000\\000\\058\\000\\000\\000\\000\\000\\000\\000\\058\\000\\\n\\000\\000\\005\\000\\005\\000\\005\\000\\000\\000\\005\\000\\000\\000\\058\\000\\\n\\058\\000\\000\\000\\058\\000\\058\\000\\005\\000\\058\\000\\005\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\005\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\005\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\005\\000\\000\\000\\005\\000\\005\\000\\000\\000\\000\\000\\\n\\000\\000\\005\\000\\000\\000\\000\\000\\000\\000\\005\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\019\\000\\000\\000\\000\\000\\005\\000\\005\\000\\\n\\005\\000\\005\\000\\005\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\005\\000\\000\\000\\005\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\005\\000\\005\\000\\000\\000\\000\\000\\000\\000\\005\\000\\005\\000\\\n\\000\\000\\005\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\005\\000\\000\\000\\005\\000\\005\\000\\005\\000\\005\\000\\000\\000\\005\\000\\\n\\005\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\005\\000\\005\\000\\\n\\005\\000\\005\\000\\005\\000\\005\\000\\005\\000\\005\\000\\00
5\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\005\\000\\005\\000\\000\\000\\\n\\000\\000\\005\\000\\005\\000\\000\\000\\000\\000\\005\\000\\005\\000\\005\\000\\\n\\005\\000\\005\\000\\000\\000\\000\\000\\005\\000\\000\\000\\000\\000\\005\\000\\\n\\000\\000\\005\\000\\000\\000\\005\\000\\000\\000\\005\\000\\000\\000\\000\\000\\\n\\000\\000\\005\\000\\000\\000\\078\\000\\078\\000\\078\\000\\000\\000\\078\\000\\\n\\000\\000\\005\\000\\005\\000\\000\\000\\005\\000\\005\\000\\078\\000\\005\\000\\\n\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\078\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\017\\000\\000\\000\\000\\000\\\n\\078\\000\\078\\000\\078\\000\\078\\000\\078\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\078\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\078\\000\\078\\000\\000\\000\\000\\000\\000\\000\\\n\\078\\000\\078\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\078\\000\\000\\000\\078\\000\\078\\000\\078\\000\\078\\000\\\n\\000\\000\\078\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\078\\000\\078\\000\\078\\000\\078\\000\\078\\000\\078\\000\\078\\000\\078\\000\\\n\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\\n\\078\\000\\000\\000\\000\\000\\078\\000\\078\\000\\000\\000\\000\\000\\078\\000\\\n\\078\\000\\078\\000\\078\\000\\078\\000\\000\\000\\000\\000\\078\\000\\000\\000\\\n\\000\\000\\078\\000\\000\\000\\078\\000\\000\\000\\078\\000\\000\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\078\\000\\000\\0
00\\019\\000\\019\\000\\019\\000\\\n\\000\\000\\019\\000\\000\\000\\078\\000\\078\\000\\000\\000\\078\\000\\078\\000\\\n\\019\\000\\078\\000\\019\\000\\000\\000\\000\\000\\000\\000\\000\\000\\019\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\019\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\019\\000\\000\\000\\\n\\019\\000\\019\\000\\000\\000\\000\\000\\000\\000\\019\\000\\000\\000\\000\\000\\\n\\000\\000\\019\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\070\\002\\\n\\000\\000\\000\\000\\019\\000\\019\\000\\019\\000\\019\\000\\019\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\019\\000\\000\\000\\019\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\019\\000\\019\\000\\000\\000\\\n\\000\\000\\000\\000\\019\\000\\019\\000\\000\\000\\019\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\019\\000\\000\\000\\019\\000\\019\\000\\\n\\019\\000\\019\\000\\000\\000\\019\\000\\019\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\019\\000\\019\\000\\019\\000\\019\\000\\019\\000\\019\\000\\\n\\019\\000\\019\\000\\019\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\019\\000\\019\\000\\000\\000\\000\\000\\019\\000\\019\\000\\000\\000\\\n\\000\\000\\019\\000\\019\\000\\019\\000\\019\\000\\019\\000\\000\\000\\000\\000\\\n\\019\\000\\000\\000\\000\\000\\019\\000\\000\\000\\019\\000\\000\\000\\019\\000\\\n\\000\\000\\019\\000\\000\\000\\000\\000\\000\\000\\019\\000\\000\\000\\017\\000\\\n\\017\\000\\017\\000\\000\\000\\017\\000\\000\\000\\019\\000\\019\\000\\000\\000\\\n\\019\\000\\019\\000\\017\\000\\019\\000\\017\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\017\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\017\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\017\\000\\000\\000\\017\\000\\
017\\000\\000\\000\\000\\000\\000\\000\\017\\000\\\n\\000\\000\\000\\000\\000\\000\\017\\000\\088\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\017\\000\\017\\000\\017\\000\\017\\000\\\n\\017\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\017\\000\\\n\\000\\000\\017\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\017\\000\\\n\\017\\000\\000\\000\\000\\000\\000\\000\\017\\000\\017\\000\\000\\000\\017\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\017\\000\\000\\000\\\n\\017\\000\\017\\000\\017\\000\\017\\000\\000\\000\\017\\000\\017\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\017\\000\\017\\000\\017\\000\\017\\000\\\n\\017\\000\\017\\000\\017\\000\\017\\000\\017\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\017\\000\\017\\000\\000\\000\\000\\000\\017\\000\\\n\\017\\000\\000\\000\\000\\000\\017\\000\\017\\000\\017\\000\\017\\000\\017\\000\\\n\\000\\000\\000\\000\\017\\000\\000\\000\\000\\000\\017\\000\\000\\000\\017\\000\\\n\\000\\000\\017\\000\\000\\000\\017\\000\\000\\000\\000\\000\\000\\000\\017\\000\\\n\\000\\000\\070\\002\\070\\002\\070\\002\\000\\000\\070\\002\\000\\000\\017\\000\\\n\\017\\000\\000\\000\\017\\000\\017\\000\\070\\002\\017\\000\\070\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\070\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\070\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\070\\002\\000\\000\\070\\002\\070\\002\\000\\000\\000\\000\\\n\\000\\000\\070\\002\\000\\000\\000\\000\\000\\000\\070\\002\\082\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\070\\002\\070\\002\\\n\\070\\002\\070\\002\\070\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\070\\002\\000\\000\\070\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\070\
\002\\070\\002\\000\\000\\000\\000\\000\\000\\070\\002\\070\\002\\\n\\000\\000\\070\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\070\\002\\000\\000\\070\\002\\070\\002\\070\\002\\070\\002\\000\\000\\070\\002\\\n\\070\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\070\\002\\070\\002\\\n\\070\\002\\070\\002\\070\\002\\070\\002\\070\\002\\070\\002\\070\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\070\\002\\070\\002\\000\\000\\\n\\000\\000\\070\\002\\070\\002\\000\\000\\000\\000\\070\\002\\070\\002\\070\\002\\\n\\070\\002\\070\\002\\000\\000\\000\\000\\070\\002\\000\\000\\000\\000\\070\\002\\\n\\000\\000\\070\\002\\000\\000\\070\\002\\000\\000\\070\\002\\088\\001\\088\\001\\\n\\088\\001\\070\\002\\088\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\070\\002\\070\\002\\088\\001\\070\\002\\070\\002\\000\\000\\070\\002\\\n\\088\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\088\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\088\\001\\\n\\000\\000\\088\\001\\088\\001\\000\\000\\000\\000\\000\\000\\088\\001\\000\\000\\\n\\000\\000\\000\\000\\088\\001\\083\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\088\\001\\088\\001\\088\\001\\088\\001\\088\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\088\\001\\000\\000\\\n\\088\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\088\\001\\088\\001\\\n\\000\\000\\000\\000\\000\\000\\088\\001\\088\\001\\000\\000\\088\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\088\\001\\000\\000\\088\\001\\\n\\088\\001\\088\\001\\088\\001\\000\\000\\088\\001\\088\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\088\\001\\088\\001\\088\\001\\088\\001\\088\\001\\\n
\\088\\001\\088\\001\\088\\001\\088\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\088\\001\\088\\001\\000\\000\\000\\000\\088\\001\\088\\001\\\n\\000\\000\\000\\000\\088\\001\\088\\001\\088\\001\\088\\001\\088\\001\\000\\000\\\n\\000\\000\\088\\001\\000\\000\\000\\000\\088\\001\\000\\000\\088\\001\\000\\000\\\n\\088\\001\\000\\000\\088\\001\\000\\000\\000\\000\\000\\000\\088\\001\\000\\000\\\n\\082\\001\\082\\001\\082\\001\\000\\000\\082\\001\\000\\000\\088\\001\\088\\001\\\n\\000\\000\\088\\001\\088\\001\\000\\000\\088\\001\\082\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\082\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\082\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\082\\001\\000\\000\\082\\001\\082\\001\\000\\000\\000\\000\\000\\000\\\n\\082\\001\\000\\000\\000\\000\\000\\000\\082\\001\\081\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\082\\001\\082\\001\\082\\001\\\n\\082\\001\\082\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\082\\001\\000\\000\\082\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\082\\001\\082\\001\\000\\000\\000\\000\\000\\000\\082\\001\\082\\001\\000\\000\\\n\\082\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\082\\001\\\n\\000\\000\\082\\001\\082\\001\\082\\001\\082\\001\\000\\000\\082\\001\\082\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\082\\001\\082\\001\\082\\001\\\n\\082\\001\\082\\001\\082\\001\\082\\001\\082\\001\\082\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\082\\001\\082\\001\\000\\000\\000\\000\\\n\\082\\001\\082\\001\\000\\000\\000\\000\\082\\001\\082\\001\\082\\001\\082\\001\\\n\\082\\001\\000\\000\\000\\000\\082\\001\\000\\000\\000\\000\\082\\001\\000\\000\\\n\\082\\001\\000\\000\\082\\001\\000\\000\\082\\001\\083\\001\\083\\0
01\\083\\001\\\n\\082\\001\\083\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\082\\001\\082\\001\\083\\001\\082\\001\\082\\001\\000\\000\\082\\001\\083\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\083\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\083\\001\\000\\000\\\n\\083\\001\\083\\001\\000\\000\\000\\000\\000\\000\\083\\001\\000\\000\\000\\000\\\n\\000\\000\\083\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\062\\001\\\n\\000\\000\\000\\000\\083\\001\\083\\001\\083\\001\\083\\001\\083\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\083\\001\\000\\000\\083\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\083\\001\\083\\001\\000\\000\\\n\\000\\000\\000\\000\\083\\001\\083\\001\\000\\000\\083\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\083\\001\\000\\000\\083\\001\\083\\001\\\n\\083\\001\\083\\001\\000\\000\\083\\001\\083\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\083\\001\\083\\001\\083\\001\\083\\001\\083\\001\\083\\001\\\n\\083\\001\\083\\001\\083\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\083\\001\\083\\001\\000\\000\\000\\000\\083\\001\\083\\001\\000\\000\\\n\\000\\000\\083\\001\\083\\001\\083\\001\\083\\001\\083\\001\\000\\000\\000\\000\\\n\\083\\001\\000\\000\\000\\000\\083\\001\\000\\000\\083\\001\\000\\000\\083\\001\\\n\\000\\000\\083\\001\\000\\000\\000\\000\\000\\000\\083\\001\\000\\000\\081\\001\\\n\\081\\001\\081\\001\\000\\000\\081\\001\\000\\000\\083\\001\\083\\001\\000\\000\\\n\\083\\001\\083\\001\\081\\001\\083\\001\\081\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\081\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\081\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\081\\001\\000\\000\\000\\000\\081\\001\\000\\000\\
000\\000\\000\\000\\081\\001\\\n\\000\\000\\000\\000\\000\\000\\081\\001\\000\\000\\224\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\081\\001\\081\\001\\081\\001\\081\\001\\\n\\081\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\081\\001\\\n\\000\\000\\081\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\081\\001\\\n\\081\\001\\000\\000\\000\\000\\000\\000\\081\\001\\081\\001\\000\\000\\081\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\081\\001\\000\\000\\\n\\081\\001\\081\\001\\081\\001\\081\\001\\000\\000\\081\\001\\081\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\081\\001\\081\\001\\081\\001\\081\\001\\\n\\081\\001\\081\\001\\081\\001\\081\\001\\081\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\081\\001\\081\\001\\000\\000\\000\\000\\081\\001\\\n\\081\\001\\000\\000\\000\\000\\081\\001\\081\\001\\081\\001\\081\\001\\081\\001\\\n\\000\\000\\000\\000\\081\\001\\000\\000\\000\\000\\081\\001\\000\\000\\081\\001\\\n\\000\\000\\081\\001\\000\\000\\081\\001\\000\\000\\000\\000\\000\\000\\081\\001\\\n\\000\\000\\062\\001\\062\\001\\062\\001\\000\\000\\062\\001\\000\\000\\081\\001\\\n\\081\\001\\000\\000\\081\\001\\081\\001\\062\\001\\081\\001\\062\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\062\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\062\\001\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\\n\\000\\000\\000\\000\\062\\001\\000\\000\\062\\001\\062\\001\\000\\000\\000\\000\\\n\\000\\000\\062\\001\\000\\000\\000\\000\\000\\000\\062\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\062\\001\\062\\001\\\n\\062\\001\\062\\001\\062\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\062\\001\\000\\000\\062\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\062\\001\\062\\001\\000\
\000\\000\\000\\000\\000\\062\\001\\062\\001\\\n\\000\\000\\062\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\062\\001\\000\\000\\062\\001\\062\\001\\062\\001\\062\\001\\000\\000\\062\\001\\\n\\062\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\062\\001\\062\\001\\\n\\062\\001\\062\\001\\062\\001\\062\\001\\062\\001\\062\\001\\062\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\062\\001\\062\\001\\000\\000\\\n\\000\\000\\062\\001\\062\\001\\000\\000\\000\\000\\062\\001\\062\\001\\062\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\062\\001\\000\\000\\000\\000\\062\\001\\\n\\000\\000\\062\\001\\000\\000\\062\\001\\000\\000\\062\\001\\000\\000\\224\\001\\\n\\224\\001\\062\\001\\000\\000\\224\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\062\\001\\062\\001\\224\\001\\062\\001\\062\\001\\224\\001\\062\\001\\\n\\000\\000\\224\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\224\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\224\\001\\000\\000\\000\\000\\000\\000\\036\\001\\224\\001\\000\\000\\224\\001\\\n\\000\\000\\000\\000\\000\\000\\224\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\224\\001\\224\\001\\224\\001\\224\\001\\\n\\224\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\224\\001\\\n\\000\\000\\224\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\224\\001\\\n\\224\\001\\000\\000\\000\\000\\000\\000\\000\\000\\224\\001\\000\\000\\224\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\224\\001\\000\\000\\\n\\224\\001\\224\\001\\224\\001\\224\\001\\000\\000\\224\\001\\224\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\224\\001\\224\\001\\224\\001\\224\\001\\\n\\224\\001\\224\\001
\\224\\001\\224\\001\\224\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\224\\001\\224\\001\\224\\001\\000\\000\\000\\000\\224\\001\\\n\\224\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\053\\000\\000\\000\\224\\001\\000\\000\\053\\000\\000\\000\\000\\000\\224\\001\\\n\\000\\000\\000\\000\\000\\000\\224\\001\\000\\000\\000\\000\\000\\000\\224\\001\\\n\\000\\000\\000\\000\\053\\000\\053\\000\\000\\000\\000\\000\\000\\000\\224\\001\\\n\\224\\001\\053\\000\\224\\001\\224\\001\\053\\000\\224\\001\\000\\000\\000\\000\\\n\\000\\000\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\\n\\053\\000\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\053\\000\\000\\000\\000\\000\\000\\000\\053\\000\\053\\000\\\n\\053\\000\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\053\\000\\000\\000\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\053\\000\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\\n\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\053\\000\\\n\\000\\000\\053\\000\\053\\000\\053\\000\\053\\000\\000\\000\\053\\000\\053\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\053\\000\\053\\000\\053\\000\\\n\\053\\000\\053\\000\\053\\000\\053\\000\\053\\000\\053\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\053\\000\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\053\\000\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\000\\000\\000\\000\\\n\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\036\\001\\\n\\053\\000\\053\\000\\036\\001\\053\\000\\053\\000\\000\\000\\053\\000\\036\\001\\\n\\02
1\\000\\036\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\000\\000\\036\\001\\000\\000\\000\\000\\036\\001\\\n\\000\\000\\036\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\036\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\036\\001\\000\\000\\036\\001\\000\\000\\\n\\036\\001\\000\\000\\036\\001\\000\\000\\000\\000\\036\\001\\036\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\036\\001\\000\\000\\\n\\036\\001\\036\\001\\036\\001\\000\\000\\000\\000\\036\\001\\036\\001\\036\\001\\\n\\036\\001\\000\\000\\036\\001\\036\\001\\036\\001\\000\\000\\036\\001\\000\\000\\\n\\036\\001\\036\\001\\000\\000\\000\\000\\000\\000\\036\\001\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\000\\000\\036\\001\\036\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\036\\001\\036\\001\\\n\\036\\001\\000\\000\\025\\000\\000\\000\\036\\001\\036\\001\\000\\000\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\036\\001\\036\\001\\000\\000\\\n\\000\\000\\000\\000\\036\\001\\036\\001\\053\\000\\036\\001\\036\\001\\000\\000\\\n\\053\\000\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\\n\\000\\000\\036\\001\\036\\001\\036\\001\\000\\000\\000\\000\\053\\000\\053\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\053\\000\\000\\000\\053\\000\\000\\000\\000\\000\\000\\000\\\n\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\
000\\000\\\n\\000\\000\\000\\000\\053\\000\\053\\000\\053\\000\\053\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\053\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\053\\000\\053\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\053\\000\\000\\000\\053\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\053\\000\\053\\000\\053\\000\\\n\\053\\000\\000\\000\\053\\000\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\003\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\053\\000\\053\\000\\053\\000\\053\\000\\053\\000\\053\\000\\053\\000\\\n\\053\\000\\053\\000\\021\\000\\000\\000\\000\\000\\000\\000\\021\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\053\\000\\053\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\021\\000\\000\\000\\000\\000\\053\\000\\\n\\000\\000\\000\\000\\000\\000\\021\\000\\053\\000\\000\\000\\000\\000\\000\\000\\\n\\053\\000\\000\\000\\000\\000\\021\\000\\053\\000\\000\\000\\000\\000\\000\\000\\\n\\021\\000\\000\\000\\021\\000\\000\\000\\053\\000\\053\\000\\021\\000\\053\\000\\\n\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\021\\000\\021\\000\\021\\000\\021\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\021\\000\\000\\000\\021\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\021\\000\\021\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\021\\000\\000\\000\\021\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\021\\000\\165\\000\\021\\000\\021\\000\\021\\000\\021\\000\\000\\000\\\n\\021\\000\\021\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\021\\000\\\n\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\025\\000\\000\
\000\\000\\000\\000\\000\\\n\\025\\000\\000\\000\\021\\000\\021\\000\\000\\000\\000\\000\\000\\000\\025\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\021\\000\\025\\000\\000\\000\\\n\\000\\000\\000\\000\\021\\000\\000\\000\\000\\000\\025\\000\\021\\000\\000\\000\\\n\\000\\000\\000\\000\\021\\000\\000\\000\\000\\000\\025\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\021\\000\\021\\000\\025\\000\\021\\000\\021\\000\\000\\000\\\n\\025\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\025\\000\\025\\000\\025\\000\\025\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\025\\000\\000\\000\\025\\000\\064\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\025\\000\\025\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\025\\000\\000\\000\\025\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\025\\000\\000\\000\\025\\000\\025\\000\\025\\000\\\n\\025\\000\\000\\000\\025\\000\\025\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\025\\000\\025\\000\\025\\000\\025\\000\\025\\000\\025\\000\\025\\000\\\n\\025\\000\\025\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\\000\\000\\000\\\n\\000\\000\\000\\000\\003\\000\\000\\000\\025\\000\\025\\000\\000\\000\\000\\000\\\n\\000\\000\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\025\\000\\\n\\003\\000\\000\\000\\000\\000\\000\\000\\025\\000\\000\\000\\000\\000\\003\\000\\\n\\025\\000\\000\\000\\000\\000\\000\\000\\025\\000\\000\\000\\000\\000\\003\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\025\\000\\025\\000\\003\\000\\025\\000\\\n\\025\\000\\000\\000\\003\\000\\000\\000\\033\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\003\\000\\003\\000\\003\\000\\003\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\\000\\000\\000\\\n\\003\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\003\\000\\003\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\003\\000\\000\\000\\003\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\\000\\000\\000\\003\\000\\\n\\003\\000\\003\\000\\003\\000\\000\\000\\003\\000\\003\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\165\\000\\000\\000\\000\\000\\000\\000\\\n\\165\\000\\000\\000\\000\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\\n\\003\\000\\003\\000\\003\\000\\003\\000\\000\\000\\000\\000\\165\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\165\\000\\003\\000\\003\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\165\\000\\000\\000\\000\\000\\\n\\000\\000\\003\\000\\000\\000\\000\\000\\165\\000\\000\\000\\003\\000\\000\\000\\\n\\165\\000\\001\\000\\003\\000\\000\\000\\000\\000\\000\\000\\003\\000\\000\\000\\\n\\000\\000\\000\\000\\165\\000\\165\\000\\165\\000\\165\\000\\003\\000\\003\\000\\\n\\000\\000\\003\\000\\003\\000\\000\\000\\165\\000\\000\\000\\165\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\165\\000\\165\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\165\\000\\000\\000\\165\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\165\\000\\000\\000\\165\\000\\165\\000\\165\\000\\\n\\165\\000\\000\\000\\165\\000\\165\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\064\\000\\000\\000\\000\\000\\000\\000\\064\\000\\000\\000\\000\\000\\\n\\000\\000\\165\\000\\165\\000\\165\\000\\165\\000\\165\\000\\165\\000\\165\\000\\\n\\165\\000\\165\\000\\000\\000\\064\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\064\\000\\000\\000\\165\\000\\165\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\064\\000\\000\\000\\000\\000\\000\\000\\000\\000\\165\\000\\\n\\000\\000\\064\\000\\000\\000\\000\\000\\165\\000\\064\\000\\036\\001\\000\\000\\\n\\165\\000\\000\\000\\000\\000\\000\\000\\165\\000\\000\\000\\000\\000\\064\\000\\\n\\064\\000\\064\\000\\06
4\\000\\000\\000\\165\\000\\165\\000\\000\\000\\165\\000\\\n\\165\\000\\064\\000\\000\\000\\064\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\064\\000\\064\\000\\000\\000\\000\\000\\000\\000\\000\\000\\064\\000\\\n\\000\\000\\064\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\064\\000\\000\\000\\064\\000\\064\\000\\064\\000\\064\\000\\000\\000\\064\\000\\\n\\064\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\033\\000\\000\\000\\000\\000\\000\\000\\064\\000\\064\\000\\\n\\064\\000\\064\\000\\064\\000\\064\\000\\064\\000\\064\\000\\064\\000\\000\\000\\\n\\033\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\033\\000\\\n\\000\\000\\064\\000\\064\\000\\000\\000\\000\\000\\000\\000\\000\\000\\033\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\064\\000\\000\\000\\033\\000\\000\\000\\\n\\000\\000\\064\\000\\033\\000\\094\\000\\000\\000\\064\\000\\000\\000\\000\\000\\\n\\000\\000\\064\\000\\000\\000\\000\\000\\033\\000\\033\\000\\033\\000\\033\\000\\\n\\000\\000\\064\\000\\064\\000\\000\\000\\064\\000\\064\\000\\033\\000\\000\\000\\\n\\033\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\033\\000\\033\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\033\\000\\000\\000\\033\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\033\\000\\000\\000\\033\\000\\\n\\033\\000\\033\\000\\033\\000\\000\\000\\033\\000\\033\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\\n\\000\\000\\000\\000\\000\\000\\033\\000\\033\\000\\033\\000\\033\\000\\033\\000\\\n\\033\\000\\033\\000\\033\\000\\033\\000\\000\\000\\001\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\033\\000\\033\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\033\\000\\000\\000\\001\\000\\000\\000\\000\\000\\033\\000\\001\\000\\\n\\092\\0
00\\000\\000\\033\\000\\000\\000\\000\\000\\000\\000\\033\\000\\000\\000\\\n\\000\\000\\001\\000\\001\\000\\001\\000\\001\\000\\000\\000\\033\\000\\033\\000\\\n\\000\\000\\033\\000\\033\\000\\001\\000\\000\\000\\001\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\001\\000\\001\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\001\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\001\\000\\000\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\000\\000\\001\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\036\\001\\000\\000\\000\\000\\000\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\000\\000\\036\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\036\\001\\000\\000\\001\\000\\001\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\036\\001\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\\n\\036\\001\\000\\000\\000\\000\\001\\000\\036\\001\\000\\000\\000\\000\\001\\000\\\n\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\000\\000\\001\\000\\001\\000\\000\\000\\001\\000\\001\\000\\\n\\036\\001\\000\\000\\036\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\036\\001\\036\\001\\000\\000\\000\\000\\000\\000\\000\\000\\036\\001\\000\\000\\\n\\036\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\036\\001\\\n\\000\\000\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\036\\001\\036\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\094\\000\\000\\000\\000\\000\\000\\000\\036\\001\\036\\001\\036\\001\\\n\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\036\\001\\000\\000\\094\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\094\\000\\000\\000\\\n\\036\\001\\036\\001\\000\\000\\000\\000\\000\\000\\000\\000\\094\\000\\000\
\000\\\n\\000\\000\\000\\000\\000\\000\\036\\001\\000\\000\\094\\000\\000\\000\\000\\000\\\n\\036\\001\\094\\000\\000\\000\\000\\000\\036\\001\\000\\000\\000\\000\\000\\000\\\n\\036\\001\\000\\000\\000\\000\\094\\000\\094\\000\\094\\000\\094\\000\\000\\000\\\n\\036\\001\\036\\001\\000\\000\\036\\001\\036\\001\\094\\000\\000\\000\\094\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\094\\000\\094\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\094\\000\\000\\000\\094\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\094\\000\\000\\000\\094\\000\\094\\000\\\n\\094\\000\\094\\000\\000\\000\\094\\000\\094\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\092\\000\\000\\000\\\n\\000\\000\\000\\000\\094\\000\\094\\000\\094\\000\\094\\000\\094\\000\\094\\000\\\n\\094\\000\\094\\000\\094\\000\\000\\000\\092\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\092\\000\\000\\000\\094\\000\\094\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\092\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\094\\000\\000\\000\\092\\000\\000\\000\\000\\000\\094\\000\\092\\000\\000\\000\\\n\\000\\000\\094\\000\\000\\000\\000\\000\\000\\000\\094\\000\\000\\000\\000\\000\\\n\\092\\000\\092\\000\\092\\000\\092\\000\\000\\000\\094\\000\\094\\000\\000\\000\\\n\\094\\000\\094\\000\\092\\000\\000\\000\\092\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\092\\000\\092\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\092\\000\\000\\000\\092\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\092\\000\\000\\000\\092\\000\\092\\000\\092\\000\\092\\000\\000\\000\\\n\\092\\000\\092\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\092\\000\\\n\\092\\000\\092\\000\\092\\000\\092\\000\\092\\000\\092\\000\\092\\000\\092\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\092\\000\\092\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\092\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\092\\000\\000\\000\\000\\000\\000\\000\\092\\000\\000\\000\\\n\\000\\000\\000\\000\\092\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\092\\000\\092\\000\\000\\000\\092\\000\\092\\000\\017\\001\\\n\\000\\000\\035\\000\\036\\000\\037\\000\\038\\000\\039\\000\\040\\000\\041\\000\\\n\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\044\\000\\045\\000\\\n\\000\\000\\000\\000\\018\\001\\019\\001\\000\\000\\020\\001\\046\\000\\000\\000\\\n\\021\\001\\047\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\048\\000\\000\\000\\000\\000\\049\\000\\022\\001\\000\\000\\000\\000\\050\\000\\\n\\051\\000\\052\\000\\000\\000\\053\\000\\054\\000\\055\\000\\056\\000\\057\\000\\\n\\023\\001\\058\\000\\024\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\064\\000\\000\\000\\000\\000\\\n\\000\\000\\065\\000\\066\\000\\000\\000\\000\\000\\067\\000\\011\\000\\012\\000\\\n\\068\\000\\069\\000\\070\\000\\000\\000\\071\\000\\025\\001\\000\\000\\000\\000\\\n\\073\\000\\074\\000\\075\\000\\076\\000\\077\\000\\000\\000\\078\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\026\\001\\027\\001\\\n\\028\\001\\029\\001\\030\\001\\031\\001\\032\\001\\033\\001\\034\\001\\035\\001\\\n\\036\\001\\080\\000\\037\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\081\\000\\000\\000\\\n\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\\n\\084\\000\\085\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\086\\000\\\n\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\000\\000\\089\\000\\\n\\000\\000\\038\\001\\000\\000\\091\\000\\09
2\\000\\093\\000\\000\\000\\000\\000\\\n\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\098\\000\\246\\001\\000\\000\\\n\\035\\000\\036\\000\\037\\000\\038\\000\\039\\000\\040\\000\\041\\000\\042\\000\\\n\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\044\\000\\045\\000\\000\\000\\\n\\000\\000\\018\\001\\019\\001\\000\\000\\020\\001\\046\\000\\000\\000\\021\\001\\\n\\047\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\048\\000\\\n\\000\\000\\000\\000\\049\\000\\022\\001\\000\\000\\000\\000\\050\\000\\051\\000\\\n\\052\\000\\000\\000\\053\\000\\054\\000\\055\\000\\056\\000\\057\\000\\023\\001\\\n\\058\\000\\024\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\059\\000\\\n\\060\\000\\061\\000\\062\\000\\063\\000\\064\\000\\000\\000\\000\\000\\000\\000\\\n\\065\\000\\066\\000\\000\\000\\000\\000\\067\\000\\011\\000\\012\\000\\068\\000\\\n\\069\\000\\070\\000\\000\\000\\071\\000\\247\\001\\000\\000\\000\\000\\073\\000\\\n\\074\\000\\075\\000\\076\\000\\077\\000\\000\\000\\078\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\026\\001\\027\\001\\028\\001\\\n\\029\\001\\030\\001\\031\\001\\032\\001\\033\\001\\248\\001\\035\\001\\036\\001\\\n\\080\\000\\037\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\081\\000\\000\\000\\082\\000\\\n\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\\n\\085\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\\n\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\000\\000\\089\\000\\000\\000\\\n\\249\\001\\000\\000\\091\\000\\092\\000\\093\\000\\000\\000\\000\\000\\000\\000\\\n\\094\\000\\095\\000\\096\\000\\097\\000\\098\\000\\090\\004\\000\\000\\035\\000\\\n\\036\\000\\037\\000\\038\\000\\039\\000\\040\\000\\041\\000\\042\\000\\000\\000\\\n\\000\\000\\000\\000\\043\\000\\000\\000\\044\\000\\045\\000\\000\\000\\000\\000\\\n\\018\\001\\019\\001\\000\\0
00\\020\\001\\046\\000\\000\\000\\021\\001\\047\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\048\\000\\000\\000\\\n\\000\\000\\049\\000\\022\\001\\000\\000\\000\\000\\050\\000\\051\\000\\052\\000\\\n\\000\\000\\053\\000\\054\\000\\055\\000\\056\\000\\057\\000\\023\\001\\058\\000\\\n\\024\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\059\\000\\060\\000\\\n\\061\\000\\062\\000\\063\\000\\064\\000\\000\\000\\000\\000\\000\\000\\065\\000\\\n\\066\\000\\000\\000\\000\\000\\067\\000\\011\\000\\012\\000\\068\\000\\069\\000\\\n\\070\\000\\000\\000\\071\\000\\091\\004\\000\\000\\000\\000\\073\\000\\074\\000\\\n\\075\\000\\076\\000\\077\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\026\\001\\027\\001\\028\\001\\029\\001\\\n\\030\\001\\031\\001\\032\\001\\033\\001\\092\\004\\035\\001\\036\\001\\080\\000\\\n\\037\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\081\\000\\000\\000\\082\\000\\000\\000\\\n\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\085\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\\n\\000\\000\\088\\000\\000\\000\\000\\000\\214\\001\\089\\000\\000\\000\\093\\004\\\n\\000\\000\\091\\000\\092\\000\\093\\000\\221\\000\\000\\000\\000\\000\\094\\000\\\n\\095\\000\\096\\000\\097\\000\\098\\000\\045\\000\\000\\000\\000\\000\\018\\001\\\n\\019\\001\\000\\000\\020\\001\\000\\000\\000\\000\\021\\001\\047\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\022\\001\\000\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\222\\000\\145\\000\\000\\000\\023\\001\\000\\000\\024\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\059\\000\\060\\000\\061\\000\\\n\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\223\\000\\000\\000\\\n\\000\\000\\
000\\000\\225\\000\\000\\000\\000\\000\\068\\000\\000\\000\\226\\000\\\n\\000\\000\\000\\000\\215\\001\\000\\000\\000\\000\\241\\000\\074\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\\n\\031\\001\\032\\001\\033\\001\\216\\001\\035\\001\\036\\001\\000\\000\\037\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\082\\000\\000\\000\\000\\000\\\n\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\\n\\088\\000\\000\\000\\000\\000\\107\\003\\000\\000\\000\\000\\217\\001\\000\\000\\\n\\091\\000\\000\\000\\093\\000\\221\\000\\000\\000\\000\\000\\094\\000\\095\\000\\\n\\096\\000\\097\\000\\228\\000\\045\\000\\000\\000\\000\\000\\018\\001\\019\\001\\\n\\000\\000\\020\\001\\000\\000\\000\\000\\021\\001\\047\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\022\\001\\000\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\222\\000\\145\\000\\000\\000\\023\\001\\000\\000\\024\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\059\\000\\060\\000\\061\\000\\062\\000\\\n\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\223\\000\\000\\000\\000\\000\\\n\\000\\000\\225\\000\\000\\000\\000\\000\\068\\000\\000\\000\\226\\000\\000\\000\\\n\\000\\000\\108\\003\\000\\000\\000\\000\\241\\000\\074\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\031\\001\\\n\\032\\001\\033\\001\\109\\003\\035\\001\\036\\001\\000\\000\\037\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\\n\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\110\\003\\000\\000\\091\\000\\\n\\000\\000\\093\\000\\000\\000\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\\n\\097\\000\\228\\000\\035\\000\\036\\000\\037\\000\\038\\000\\039\\000\\040\\000\\\n\\041\\000\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\044\\000\\\n\\045\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\046\\000\\\n\\000\\000\\000\\000\\047\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\048\\000\\000\\000\\000\\000\\049\\000\\000\\000\\000\\000\\000\\000\\\n\\050\\000\\051\\000\\052\\000\\000\\000\\053\\000\\054\\000\\055\\000\\056\\000\\\n\\057\\000\\000\\000\\058\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\064\\000\\000\\000\\\n\\000\\000\\000\\000\\065\\000\\066\\000\\000\\000\\000\\000\\067\\000\\011\\000\\\n\\012\\000\\068\\000\\069\\000\\070\\000\\000\\000\\071\\000\\072\\000\\000\\000\\\n\\000\\000\\073\\000\\074\\000\\075\\000\\076\\000\\077\\000\\000\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\\n\\000\\000\\000\\000\\080\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\081\\000\\\n\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\\n\\000\\000\\084\\000\\085\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\000\\000\\\n\\089\\000\\000\\000\\090\\000\\000\\000\\091\\000\\092\\000\\09
3\\000\\000\\000\\\n\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\098\\000\\112\\001\\\n\\112\\001\\112\\001\\112\\001\\112\\001\\112\\001\\112\\001\\112\\001\\000\\000\\\n\\000\\000\\000\\000\\112\\001\\000\\000\\112\\001\\112\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\112\\001\\000\\000\\000\\000\\112\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\112\\001\\000\\000\\\n\\000\\000\\112\\001\\000\\000\\000\\000\\000\\000\\112\\001\\112\\001\\112\\001\\\n\\000\\000\\112\\001\\112\\001\\112\\001\\112\\001\\112\\001\\000\\000\\112\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\112\\001\\112\\001\\\n\\112\\001\\112\\001\\112\\001\\112\\001\\000\\000\\000\\000\\000\\000\\112\\001\\\n\\112\\001\\000\\000\\000\\000\\112\\001\\112\\001\\112\\001\\112\\001\\112\\001\\\n\\112\\001\\000\\000\\112\\001\\112\\001\\000\\000\\000\\000\\112\\001\\112\\001\\\n\\112\\001\\112\\001\\112\\001\\000\\000\\112\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\112\\001\\000\\000\\000\\000\\112\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\112\\001\\000\\000\\112\\001\\000\\000\\\n\\000\\000\\000\\000\\112\\001\\000\\000\\000\\000\\000\\000\\112\\001\\112\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\112\\001\\112\\001\\000\\000\\\n\\000\\000\\112\\001\\000\\000\\000\\000\\000\\000\\112\\001\\000\\000\\112\\001\\\n\\000\\000\\112\\001\\112\\001\\112\\001\\000\\000\\000\\000\\000\\000\\112\\001\\\n\\112\\001\\112\\001\\112\\001\\112\\001\\034\\001\\000\\000\\000\\000\\000\\000\\\n\\034\\001\\000\\000\\000\\000\\034\\001\\000\\000\\022\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\034\\001\\034\\001\\\n\\034\\001\\034\\001\\034\\001\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\\n\\034\\001\\000\\000\\000\\000\\000\\000\\034\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\034\\001\\034\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\034\\001\\034\\001\\034\\001\\034\\001\\\n\\034\\001\\000\\000\\000\\000\\000\\000\\000\\000\\034\\001\\000\\000\\000\\000\\\n\\000\\000\\034\\001\\000\\000\\000\\000\\000\\000\\034\\001\\034\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\034\\001\\034\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\034\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\034\\001\\000\\000\\000\\000\\034\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\034\\001\\000\\000\\034\\001\\000\\000\\\n\\034\\001\\000\\000\\000\\000\\000\\000\\034\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\034\\001\\000\\000\\034\\001\\034\\001\\000\\000\\000\\000\\034\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\034\\001\\\n\\000\\000\\034\\001\\000\\000\\000\\000\\034\\001\\034\\001\\034\\001\\034\\001\\\n\\034\\001\\034\\001\\027\\000\\000\\000\\034\\001\\000\\000\\027\\000\\000\\000\\\n\\034\\001\\027\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\027\\000\\027\\000\\027\\000\\027\\000\\\n\\027\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\040\\001\\000\\000\\\n\\000\\000\\000\\000\\027\\000\\000\\000\\000\\000\\000\\000\\000\\000\\027\\000\\\n\\027\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\027\\000\\000\\000\\000\\000\\000\\000\\027\\000\\\n\\000\\000\\000\\000\\000\\000\\
027\\000\\027\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\027\\000\\027\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\027\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\027\\000\\000\\000\\000\\000\\027\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\027\\000\\000\\000\\040\\001\\000\\000\\027\\000\\000\\000\\\n\\000\\000\\000\\000\\027\\000\\000\\000\\000\\000\\000\\000\\000\\000\\040\\001\\\n\\000\\000\\027\\000\\027\\000\\000\\000\\000\\000\\027\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\027\\000\\000\\000\\027\\000\\\n\\000\\000\\000\\000\\000\\000\\027\\000\\027\\000\\027\\000\\027\\000\\027\\000\\\n\\251\\001\\000\\000\\027\\000\\000\\000\\251\\001\\000\\000\\027\\000\\251\\001\\\n\\000\\000\\026\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\251\\001\\251\\001\\251\\001\\251\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\044\\001\\000\\000\\000\\000\\000\\000\\\n\\251\\001\\000\\000\\000\\000\\000\\000\\000\\000\\251\\001\\251\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\251\\001\\251\\001\\251\\001\\251\\001\\251\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\251\\001\\000\\000\\000\\000\\000\\000\\251\\001\\000\\000\\000\\000\\\n\\000\\000\\251\\001\\251\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\251\\001\\251\\001\\000\\000\\000\\000\\000\\000\\000\\000\\251\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\251\\001\\000\\000\\\n\\000\\000\\251\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\251\\001\\000\\000\\044\\001\\000\\000\\251\\001\\000\\000\\000\\000\\000\\000\\\n\\251\\001\\000\\000\\000\\000\\000\\000\\000\\000\\044\\001\\000\\000\\251\\001\\\n\\251\\001\\000\\000\\000\\000\\251\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\251\\001\\000\\000\\251\\001\\000\\000\\000\\000\\\n\\000\\000\\251\\001\\251\\001\\251\\001\\251\\001\\251\\001\\252\\001\\000\\000\\\n\\251\\001\\000\\000\\252\\001\\000\\000\\251\\001\\252\\001\\000\\000\\027\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\252\\001\\252\\001\\252\\001\\252\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\045\\001\\000\\000\\000\\000\\000\\000\\252\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\252\\001\\252\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\252\\001\\252\\001\\\n\\252\\001\\252\\001\\252\\001\\000\\000\\000\\000\\000\\000\\000\\000\\252\\001\\\n\\000\\000\\000\\000\\000\\000\\252\\001\\000\\000\\000\\000\\000\\000\\252\\001\\\n\\252\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\252\\001\\252\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\252\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\252\\001\\000\\000\\000\\000\\252\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\252\\001\\000\\000\\\n\\045\\001\\000\\000\\252\\001\\000\\000\\000\\000\\000\\000\\252\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\045\\001\\000\\000\\252\\001\\252\\001\\000\\000\\\n\\000\\000\\252\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\252\\001\\000\\000\\252\\001\\000\\000\\000\\000\\000\\000\\252\\001\\\n
\\252\\001\\252\\001\\252\\001\\252\\001\\248\\001\\000\\000\\252\\001\\000\\000\\\n\\248\\001\\000\\000\\252\\001\\248\\001\\000\\000\\023\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\248\\001\\\n\\248\\001\\248\\001\\248\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\041\\001\\000\\000\\000\\000\\000\\000\\248\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\248\\001\\248\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\248\\001\\248\\001\\248\\001\\248\\001\\\n\\248\\001\\000\\000\\000\\000\\000\\000\\000\\000\\248\\001\\000\\000\\000\\000\\\n\\000\\000\\248\\001\\000\\000\\000\\000\\000\\000\\248\\001\\248\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\248\\001\\248\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\248\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\248\\001\\000\\000\\000\\000\\248\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\248\\001\\000\\000\\041\\001\\000\\000\\\n\\248\\001\\000\\000\\000\\000\\000\\000\\248\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\041\\001\\000\\000\\248\\001\\248\\001\\000\\000\\000\\000\\248\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\248\\001\\\n\\000\\000\\248\\001\\000\\000\\000\\000\\000\\000\\248\\001\\248\\001\\248\\001\\\n\\248\\001\\248\\001\\250\\001\\000\\000\\248\\001\\000\\000\\250\\001\\000\\000\\\n\\248\\001\\250\\001\\000\\000\\025\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\250\\001\\250\\001\\250\\001\\\n\\250\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\043\\001\\000\\000\\\n\\000\\000\\000\\000\\250\\001\\000\\000\\000\\000\\000\\000\\000\\0
00\\250\\001\\\n\\250\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\250\\001\\250\\001\\250\\001\\250\\001\\250\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\250\\001\\000\\000\\000\\000\\000\\000\\250\\001\\\n\\000\\000\\000\\000\\000\\000\\250\\001\\250\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\250\\001\\250\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\250\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\250\\001\\000\\000\\000\\000\\250\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\250\\001\\000\\000\\043\\001\\000\\000\\250\\001\\000\\000\\\n\\000\\000\\000\\000\\250\\001\\000\\000\\000\\000\\000\\000\\000\\000\\043\\001\\\n\\000\\000\\250\\001\\250\\001\\000\\000\\000\\000\\250\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\250\\001\\000\\000\\250\\001\\\n\\000\\000\\000\\000\\000\\000\\250\\001\\250\\001\\250\\001\\250\\001\\250\\001\\\n\\249\\001\\000\\000\\250\\001\\000\\000\\249\\001\\000\\000\\250\\001\\249\\001\\\n\\000\\000\\024\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\249\\001\\249\\001\\249\\001\\249\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\042\\001\\000\\000\\000\\000\\000\\000\\\n\\249\\001\\000\\000\\000\\000\\000\\000\\000\\000\\249\\001\\249\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\249\\001\\249\\001\\249\\001\\249\\001\\249\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\249\\001\\000\\000\\000\\000\\000\\000\\249\\001\\000\\000\\000\\000\\\n\\000\\000\\249\\001\\249\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\249\\001\\249\\001\\000\\000\\000\\000\\000\\000\\
000\\000\\249\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\249\\001\\000\\000\\\n\\000\\000\\249\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\249\\001\\000\\000\\042\\001\\000\\000\\249\\001\\000\\000\\000\\000\\000\\000\\\n\\249\\001\\000\\000\\000\\000\\000\\000\\000\\000\\042\\001\\000\\000\\249\\001\\\n\\249\\001\\000\\000\\000\\000\\249\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\249\\001\\000\\000\\249\\001\\000\\000\\000\\000\\\n\\000\\000\\249\\001\\249\\001\\249\\001\\249\\001\\249\\001\\251\\001\\000\\000\\\n\\249\\001\\000\\000\\251\\001\\000\\000\\249\\001\\251\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\251\\001\\251\\001\\251\\001\\251\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\044\\001\\000\\000\\000\\000\\000\\000\\251\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\251\\001\\251\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\251\\001\\251\\001\\\n\\251\\001\\251\\001\\251\\001\\000\\000\\000\\000\\000\\000\\000\\000\\251\\001\\\n\\000\\000\\000\\000\\000\\000\\251\\001\\000\\000\\000\\000\\000\\000\\251\\001\\\n\\251\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\251\\001\\251\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\251\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\251\\001\\000\\000\\000\\000\\251\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\251\\001\\000\\000\\\n\\044\\001\\000\\000\\251\\001\\000\
\000\\000\\000\\000\\000\\251\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\044\\001\\000\\000\\251\\001\\251\\001\\000\\000\\\n\\000\\000\\251\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\251\\001\\000\\000\\251\\001\\000\\000\\000\\000\\000\\000\\251\\001\\\n\\251\\001\\251\\001\\251\\001\\251\\001\\252\\001\\000\\000\\251\\001\\000\\000\\\n\\252\\001\\000\\000\\251\\001\\252\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\252\\001\\\n\\252\\001\\252\\001\\252\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\045\\001\\000\\000\\000\\000\\000\\000\\252\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\252\\001\\252\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\252\\001\\252\\001\\252\\001\\252\\001\\\n\\252\\001\\000\\000\\000\\000\\000\\000\\000\\000\\252\\001\\000\\000\\000\\000\\\n\\000\\000\\252\\001\\000\\000\\000\\000\\000\\000\\252\\001\\252\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\252\\001\\252\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\252\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\252\\001\\000\\000\\000\\000\\252\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\252\\001\\000\\000\\045\\001\\000\\000\\\n\\252\\001\\000\\000\\000\\000\\000\\000\\252\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\045\\001\\000\\000\\252\\001\\252\\001\\000\\000\\000\\000\\252\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\252\\001\\\n\\000\\000\\252\\001\\000\\000\\000\\000\\000\\000\\252\\001\\252\\001\\252\\001\\\n\\252\\001\\252\\001\\248\\001\\000\\000\\252\\001\\000\\000\\248\\001\\000\\000\\\n\\252\\001\\248\\001
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\248\\001\\248\\001\\248\\001\\\n\\248\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\041\\001\\000\\000\\\n\\000\\000\\000\\000\\248\\001\\000\\000\\000\\000\\000\\000\\000\\000\\248\\001\\\n\\248\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\248\\001\\248\\001\\248\\001\\248\\001\\248\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\248\\001\\000\\000\\000\\000\\000\\000\\248\\001\\\n\\000\\000\\000\\000\\000\\000\\248\\001\\248\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\248\\001\\248\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\248\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\248\\001\\000\\000\\000\\000\\248\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\248\\001\\000\\000\\041\\001\\000\\000\\248\\001\\000\\000\\\n\\000\\000\\000\\000\\248\\001\\000\\000\\000\\000\\000\\000\\000\\000\\041\\001\\\n\\000\\000\\248\\001\\248\\001\\000\\000\\000\\000\\248\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\248\\001\\000\\000\\248\\001\\\n\\000\\000\\000\\000\\000\\000\\248\\001\\248\\001\\248\\001\\248\\001\\248\\001\\\n\\250\\001\\000\\000\\248\\001\\000\\000\\250\\001\\000\\000\\248\\001\\250\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\250\\001\\250\\001\\250\\001\\250\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\043\\001\\000\\000\\000\\000\\000\\000\\\n\\250\\001\\000\\000\\000\\000\\000\\000\\000\\000\\250\\001\\250\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\25
0\\001\\250\\001\\250\\001\\250\\001\\250\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\250\\001\\000\\000\\000\\000\\000\\000\\250\\001\\000\\000\\000\\000\\\n\\000\\000\\250\\001\\250\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\250\\001\\250\\001\\000\\000\\000\\000\\000\\000\\000\\000\\250\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\250\\001\\000\\000\\\n\\000\\000\\250\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\250\\001\\000\\000\\043\\001\\000\\000\\250\\001\\000\\000\\000\\000\\000\\000\\\n\\250\\001\\000\\000\\000\\000\\000\\000\\000\\000\\043\\001\\000\\000\\250\\001\\\n\\250\\001\\000\\000\\000\\000\\250\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\250\\001\\000\\000\\250\\001\\000\\000\\000\\000\\\n\\000\\000\\250\\001\\250\\001\\250\\001\\250\\001\\250\\001\\249\\001\\000\\000\\\n\\250\\001\\000\\000\\249\\001\\000\\000\\250\\001\\249\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\249\\001\\249\\001\\249\\001\\249\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\042\\001\\000\\000\\000\\000\\000\\000\\249\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\249\\001\\249\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\249\\001\\249\\001\\\n\\249\\001\\249\\001\\249\\001\\000\\000\\000\\000\\000\\000\\000\\000\\249\\001\\\n\\000\\000\\000\\000\\000\\000\\249\\001\\000\\000\\000\\000\\000\\000\\249\\001\\\n\\249\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\249\\001\\249\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\249\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\
000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\249\\001\\000\\000\\000\\000\\249\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\249\\001\\000\\000\\\n\\042\\001\\000\\000\\249\\001\\000\\000\\178\\003\\000\\000\\249\\001\\000\\000\\\n\\042\\000\\000\\000\\000\\000\\042\\001\\043\\000\\249\\001\\249\\001\\045\\000\\\n\\000\\000\\249\\001\\018\\001\\019\\001\\000\\000\\020\\001\\000\\000\\000\\000\\\n\\021\\001\\249\\001\\000\\000\\249\\001\\000\\000\\000\\000\\000\\000\\249\\001\\\n\\249\\001\\249\\001\\249\\001\\249\\001\\022\\001\\000\\000\\249\\001\\000\\000\\\n\\051\\000\\000\\000\\249\\001\\000\\000\\000\\000\\000\\000\\145\\000\\000\\000\\\n\\023\\001\\000\\000\\024\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\\n\\000\\000\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\146\\000\\074\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\026\\001\\027\\001\\\n\\028\\001\\029\\001\\030\\001\\031\\001\\032\\001\\033\\001\\079\\000\\035\\001\\\n\\036\\001\\080\\000\\037\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\\n\\084\\000\\038\\000\\039\\000\\040\\000\\041\\000\\042\\000\\000\\000\\086\\000\\\n\\087\\000\\043\\000\\000\\000\\088\\000\\045\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\091\\000\\000\\000\\093\\000\\047\\000\\000\\000\\\n\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\147\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\050\\000\\051\
\000\\052\\000\\000\\000\\\n\\053\\000\\000\\000\\055\\000\\056\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\059\\000\\060\\000\\061\\000\\\n\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\065\\000\\066\\000\\\n\\000\\000\\199\\000\\067\\000\\000\\000\\000\\000\\068\\000\\069\\000\\070\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\073\\000\\074\\000\\000\\000\\\n\\000\\000\\077\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\081\\000\\000\\000\\082\\000\\000\\000\\000\\000\\\n\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\038\\000\\039\\000\\\n\\040\\000\\041\\000\\042\\000\\246\\003\\086\\000\\087\\000\\043\\000\\000\\000\\\n\\088\\000\\045\\000\\000\\000\\000\\000\\000\\000\\000\\000\\090\\000\\000\\000\\\n\\091\\000\\000\\000\\093\\000\\047\\000\\000\\000\\000\\000\\094\\000\\095\\000\\\n\\096\\000\\097\\000\\188\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\050\\000\\051\\000\\052\\000\\000\\000\\053\\000\\000\\000\\055\\000\\\n\\056\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\247\\003\\066\\000\\000\\000\\000\\000\\067\\000\\\n\\000\\000\\000\\000\\068\\000\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\073\\000\\074\\000\\000\\000\\000\\000\\077\\000\\000\\000\\\n\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\079\\000\\000\\000\\000\\000\\080\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\081\\000\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\\n\\000\\000\\000\\000\\084\\000\\038\\000\\039\\000\\040\\000\\041\\000\\042\\000\\\n\\000\\000\\086\\000\\087\\000\\043\\000\\000\\000\\088\\000\\045\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\090\\000\\000\\000\\091\\000\\000\\000\\093\\000\\\n\\047\\000\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\188\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\050\\000\\051\\000\\\n\\052\\000\\000\\000\\053\\000\\000\\000\\055\\000\\056\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\059\\000\\\n\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\065\\000\\066\\000\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\068\\000\\\n\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\073\\000\\\n\\074\\000\\000\\000\\000\\000\\077\\000\\000\\000\\078\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\\n\\080\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\081\\000\\000\\000\\082\\000\\\n\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\\n\\038\\000\\039\\000\\040\\000\\041\\000\\042\\000\\000\\000\\086\\000\\087\\000\\\n\\043\\000\\000\\000\\088\\000\\045\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\090\\000\\000\\000\\091\\000\\000\\000\\093\\000\\047\\000\\000\\000\\000\\000\\\n\\094\\000\\095\\000\\096\\000\\097\\000\\188\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\053\\000\\\n\\000\\000\\055\\000\\05
6\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\059\\000\\060\\000\\061\\000\\062\\000\\\n\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\065\\000\\066\\000\\000\\000\\\n\\000\\000\\067\\000\\000\\000\\000\\000\\068\\000\\069\\000\\070\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\073\\000\\074\\000\\000\\000\\000\\000\\\n\\077\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\081\\000\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\\n\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\174\\001\\175\\001\\176\\001\\\n\\177\\001\\042\\000\\000\\000\\086\\000\\087\\000\\043\\000\\000\\000\\088\\000\\\n\\045\\000\\000\\000\\000\\000\\000\\000\\000\\000\\090\\000\\000\\000\\091\\000\\\n\\000\\000\\093\\000\\047\\000\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\\n\\097\\000\\188\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\051\\000\\000\\000\\000\\000\\000\\000\\000\\000\\055\\000\\056\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\178\\001\\179\\001\\000\\000\\000\\000\\067\\000\\000\\000\\\n\\000\\000\\068\\000\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\180\\001\\000\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\\n\\000\\000\\000\\000\\080\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\181\\001\\\n\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\\n\\000\\000\\084\\000\\038\\000\\039\\000\\040\\000\\041\\000\\042\\000\\000\\000\\\n\\086\\000\\087\\000\\043\\000\\000\\000\\088\\000\\045\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\182\\001\\000\\000\\091\\000\\000\\000\\093\\000\\047\\000\\\n\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\188\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\051\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\055\\000\\056\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\059\\000\\060\\000\\\n\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\065\\000\\\n\\066\\000\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\068\\000\\069\\000\\\n\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\073\\000\\074\\000\\\n\\000\\000\\000\\000\\077\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\081\\000\\000\\000\\082\\000\\000\\000\\\n\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\174\\001\\\n\\175\\001\\176\\001\\177\\001\\042\\000\\000\\000\\086\\000\\087\\000\\043\\000\\\n\\000\\000\\088\\000\\045\\000\\000\\000\\000\\000\\000\\000\\000\\000\\090\\000\\\n\\000\\000\\091\\000\\000\\000\\093\\000\\047\\000\\000\\000\\000\\000\\094\\000\\\n\\095\\000\\096\\000\\097\\000\\188\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\055\\000\\145\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\\n\\000\\000\\000\\000\\000\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\178\\001\\179\\001\\000\\000\\000\\000\\\n\\067\\000\\000\\000\\000\\000\\068\\000\\069\\000\\070\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\180\\001\\\n\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\038\\000\\039\\000\\040\\000\\\n\\041\\000\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\000\\000\\\n\\045\\000\\181\\001\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\\n\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\\n\\202\\000\\051\\000\\203\\000\\000\\000\\182\\001\\000\\000\\091\\000\\056\\000\\\n\\093\\000\\000\\000\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\\n\\147\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\065\\000\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000\\\n\\000\\000\\000\\000\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\077\\000\\000\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\\n\\000\\000\\000\\000\\080\\000\\038\\000\\039\\000\\040\\000\\041\\000\\042\\000\\\n\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\000\\000\\045\\000\\081\\000\\\n\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\\n\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000
\\252\\000\\051\\000\\\n\\253\\000\\000\\000\\090\\000\\000\\000\\091\\000\\056\\000\\093\\000\\000\\000\\\n\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\188\\000\\059\\000\\\n\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\065\\000\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\000\\000\\\n\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\\n\\074\\000\\000\\000\\000\\000\\077\\000\\000\\000\\078\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\\n\\080\\000\\038\\000\\039\\000\\040\\000\\041\\000\\042\\000\\000\\000\\000\\000\\\n\\000\\000\\043\\000\\000\\000\\000\\000\\045\\000\\081\\000\\000\\000\\082\\000\\\n\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\\n\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\\n\\090\\000\\000\\000\\091\\000\\056\\000\\093\\000\\000\\000\\000\\000\\000\\000\\\n\\094\\000\\095\\000\\096\\000\\097\\000\\188\\000\\059\\000\\060\\000\\061\\000\\\n\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\065\\000\\000\\000\\\n\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\068\\000\\069\\000\\070\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\074\\000\\000\\000\\\n\\000\\000\\077\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\174\\001\\\n\\175\\001\\176\\001\\177\\001\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\\n\\000\\000\\000\\000\\045\\000\\081\\000\\000\\000\\082\\000\\000\\000\\000\\000\\\n\\000\\000\\083\\000\\000\\000\\000\\000\\00
0\\000\\084\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\\n\\088\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\090\\000\\000\\000\\\n\\091\\000\\145\\000\\093\\000\\000\\000\\000\\000\\000\\000\\094\\000\\095\\000\\\n\\096\\000\\097\\000\\188\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\178\\001\\000\\000\\000\\000\\000\\000\\\n\\067\\000\\000\\000\\000\\000\\068\\000\\069\\000\\070\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\180\\001\\\n\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\038\\000\\039\\000\\040\\000\\\n\\041\\000\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\000\\000\\\n\\045\\000\\181\\001\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\\n\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\\n\\000\\000\\051\\000\\000\\000\\000\\000\\182\\001\\000\\000\\091\\000\\056\\000\\\n\\093\\000\\000\\000\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\\n\\147\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\065\\000\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000\\\n\\000\\000\\000\\000\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\077\\000\\000\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\\n\\000\\000\\000\\000\\080\\000\\174\\001\\175\\001\\176\\001\\177\\001\\042\\000\\\n\\000\\000\\000\\000\\000\\0
00\\043\\000\\000\\000\\000\\000\\045\\000\\081\\000\\\n\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\\n\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\051\\000\\\n\\000\\000\\000\\000\\090\\000\\000\\000\\091\\000\\145\\000\\093\\000\\000\\000\\\n\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\188\\000\\059\\000\\\n\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\178\\001\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\000\\000\\\n\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\\n\\074\\000\\000\\000\\000\\000\\180\\001\\000\\000\\078\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\\n\\080\\000\\038\\000\\039\\000\\040\\000\\041\\000\\042\\000\\000\\000\\000\\000\\\n\\000\\000\\043\\000\\000\\000\\000\\000\\045\\000\\181\\001\\000\\000\\082\\000\\\n\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\\n\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\\n\\182\\001\\000\\000\\091\\000\\056\\000\\093\\000\\000\\000\\000\\000\\000\\000\\\n\\094\\000\\095\\000\\096\\000\\097\\000\\147\\000\\059\\000\\060\\000\\061\\000\\\n\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\054\\004\\000\\000\\\n\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\000\\000\\069\\000\\070\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\074\\000\\000\\000\\\n\\000\\000\\077\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\
000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\219\\000\\\n\\219\\000\\219\\000\\219\\000\\219\\000\\000\\000\\000\\000\\000\\000\\219\\000\\\n\\000\\000\\000\\000\\219\\000\\081\\000\\000\\000\\082\\000\\000\\000\\000\\000\\\n\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\\n\\088\\000\\000\\000\\000\\000\\219\\000\\000\\000\\000\\000\\090\\000\\000\\000\\\n\\091\\000\\219\\000\\093\\000\\000\\000\\000\\000\\000\\000\\094\\000\\095\\000\\\n\\096\\000\\097\\000\\188\\000\\219\\000\\219\\000\\219\\000\\219\\000\\219\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\219\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\219\\000\\219\\000\\219\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\219\\000\\219\\000\\000\\000\\000\\000\\219\\000\\\n\\000\\000\\219\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\219\\000\\000\\000\\000\\000\\219\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\219\\000\\000\\000\\219\\000\\000\\000\\000\\000\\000\\000\\219\\000\\\n\\000\\000\\042\\000\\000\\000\\219\\000\\000\\000\\043\\000\\000\\000\\000\\000\\\n\\045\\000\\000\\000\\219\\000\\219\\000\\000\\000\\000\\000\\219\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\219\\000\\000\\000\\219\\000\\000\\000\\\n\\219\\000\\000\\000\\000\\000\\000\\000\\219\\000\\219\\000\\219\\000\\219\\000\\\n\\219\\000\\051\\000\\000\\000\\000\\000\\000\\000\\000\\000\\100\\001\\145\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\101\\001\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000
\\\n\\000\\000\\000\\000\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\\n\\000\\000\\000\\000\\080\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\\n\\000\\000\\082\\000\\045\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\\n\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\051\\000\\091\\000\\000\\000\\093\\000\\000\\000\\\n\\161\\002\\145\\000\\094\\000\\095\\000\\096\\000\\097\\000\\147\\000\\000\\000\\\n\\000\\000\\102\\001\\000\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\067\\000\\000\\000\\000\\000\\000\\000\\069\\000\\070\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\041\\000\\042\\000\\000\\000\\000\\000\\\n\\000\\000\\043\\000\\000\\000\\082\\000\\045\\000\\000\\000\\000\\000\\083\\000\\\n\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\047\\000\\000\\000\\\n\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\051\\000\\091\\000\\000\\000\\\n\\093\\000\\000\\000\\055\\000\\056\\000\\094\\000\\095\\000\\09
6\\000\\097\\000\\\n\\147\\000\\000\\000\\000\\000\\102\\001\\000\\000\\059\\000\\060\\000\\061\\000\\\n\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\065\\000\\226\\002\\\n\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\068\\000\\069\\000\\070\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\227\\002\\074\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\000\\000\\\n\\000\\000\\000\\000\\041\\000\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\\n\\000\\000\\000\\000\\045\\000\\000\\000\\000\\000\\082\\000\\000\\000\\000\\000\\\n\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\\n\\088\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\090\\000\\000\\000\\\n\\091\\000\\056\\000\\093\\000\\000\\000\\000\\000\\000\\000\\094\\000\\095\\000\\\n\\096\\000\\097\\000\\188\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\065\\000\\000\\000\\000\\000\\000\\000\\\n\\067\\000\\000\\000\\000\\000\\068\\000\\069\\000\\070\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\000\\000\\000\\000\\000\\000\\\n\\041\\000\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\000\\000\\\n\\045\\000\\000\\000\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\\n\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\086\\000\\087\\000\\000\\0
00\\000\\000\\088\\000\\000\\000\\\n\\000\\000\\051\\000\\000\\000\\000\\000\\090\\000\\000\\000\\091\\000\\056\\000\\\n\\093\\000\\000\\000\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\\n\\188\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\065\\000\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000\\\n\\000\\000\\000\\000\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\\n\\000\\000\\000\\000\\080\\000\\000\\000\\000\\000\\000\\000\\177\\001\\042\\000\\\n\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\000\\000\\045\\000\\000\\000\\\n\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\\n\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\051\\000\\\n\\000\\000\\000\\000\\090\\000\\000\\000\\091\\000\\145\\000\\093\\000\\000\\000\\\n\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\188\\000\\059\\000\\\n\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\178\\001\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\000\\000\\\n\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\\n\\074\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\\n\\080\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\221\\000\\000\\000\\240\\000\\000\\000\\000\\000\\000\\000\\000\\000\\082\\000\\\n\\045\\000\\000\\000\\000\\000\\
083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\\n\\000\\000\\000\\000\\047\\000\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\\n\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\182\\001\\051\\000\\091\\000\\000\\000\\093\\000\\000\\000\\222\\000\\145\\000\\\n\\094\\000\\095\\000\\096\\000\\097\\000\\147\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\223\\000\\000\\000\\000\\000\\000\\000\\225\\000\\000\\000\\\n\\000\\000\\068\\000\\000\\000\\226\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\241\\000\\074\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\221\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\082\\000\\045\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\\n\\000\\000\\084\\000\\000\\000\\000\\000\\047\\000\\000\\000\\000\\000\\000\\000\\\n\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\051\\000\\091\\000\\000\\000\\093\\000\\000\\000\\\n\\222\\000\\145\\000\\094\\000\\095\\000\\096\\000\\097\\000\\228\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\223\\000\\000\\000\\000\\000\\000\\000\\\n\\225\\000\\000\\000\\000\\000\\068\\000\\000\\000\\226\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\241\\000\\074\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\221\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\082\\000\\045\\000\\000\\000\\000\\000\\083\\000\\\n\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\047\\000\\015\\002\\\n\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\051\\000\\091\\000\\000\\000\\\n\\093\\000\\000\\000\\222\\000\\145\\000\\094\\000\\095\\000\\096\\000\\097\\000\\\n\\228\\000\\000\\000\\000\\000\\000\\000\\000\\000\\059\\000\\060\\000\\061\\000\\\n\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\223\\000\\000\\000\\\n\\000\\000\\000\\000\\225\\000\\000\\000\\000\\000\\068\\000\\000\\000\\226\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\241\\000\\074\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\221\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\082\\000\\045\\000\\000\\000\\\n\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\\n\\047\\000\\204\\002\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\\n\\088\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\051\\000\\\n\\091\\000\\000\\000\\093\\000\\000\\000\\222\\000\\145\\000\\094\\000\\095\\000\\\n\\096\\000\\097\\000\\228\\000\\000\\000\\000\\000\\000\\000\\000\\000\\059\\000\\\n\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\223\\000\\000\\000\\000\\000\\000\\000\\225\\000\\000\\000\\000\\000\\068\\000\\\n\\000\\000\\226\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\241\\000\\\n
\\074\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\217\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\082\\000\\\n\\217\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\\n\\000\\000\\000\\000\\217\\000\\128\\004\\000\\000\\000\\000\\086\\000\\087\\000\\\n\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\217\\000\\091\\000\\000\\000\\093\\000\\000\\000\\217\\000\\217\\000\\\n\\094\\000\\095\\000\\096\\000\\097\\000\\228\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\217\\000\\217\\000\\217\\000\\217\\000\\217\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\217\\000\\000\\000\\000\\000\\000\\000\\217\\000\\000\\000\\\n\\000\\000\\217\\000\\000\\000\\217\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\217\\000\\217\\000\\000\\000\\000\\000\\000\\000\\000\\000\\217\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\221\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\217\\000\\045\\000\\000\\000\\000\\000\\217\\000\\000\\000\\000\\000\\\n\\000\\000\\217\\000\\000\\000\\000\\000\\047\\000\\000\\000\\000\\000\\000\\000\\\n\\217\\000\\217\\000\\000\\000\\217\\000\\217\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\051\\000\\217\\000\\000\\000\\217\\000\\000\\000\\\n\\222\\000\\145\\000\\217\\000\\217\\000\\217\\000\\217\\000\\217\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\059\\000\\060\\000\\061\\000\\062\\0
00\\063\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\223\\000\\000\\000\\000\\000\\000\\000\\\n\\225\\000\\000\\000\\000\\000\\068\\000\\000\\000\\226\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\241\\000\\074\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\221\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\082\\000\\045\\000\\000\\000\\000\\000\\083\\000\\\n\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\047\\000\\000\\000\\\n\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\051\\000\\091\\000\\000\\000\\\n\\093\\000\\000\\000\\222\\000\\145\\000\\094\\000\\095\\000\\096\\000\\097\\000\\\n\\228\\000\\000\\000\\000\\000\\000\\000\\000\\000\\059\\000\\060\\000\\061\\000\\\n\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\223\\000\\000\\000\\\n\\000\\000\\000\\000\\225\\000\\000\\000\\000\\000\\068\\000\\000\\000\\226\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\221\\002\\074\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\037\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\082\\000\\037\\000\\000\\000\\\n\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\\n\\037\\000\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\\n\\088\\000\\000\\000\\000\\000\\000\\000\\000\\000\\
000\\000\\000\\000\\037\\000\\\n\\091\\000\\000\\000\\093\\000\\000\\000\\037\\000\\037\\000\\094\\000\\095\\000\\\n\\096\\000\\097\\000\\228\\000\\000\\000\\000\\000\\000\\000\\000\\000\\037\\000\\\n\\037\\000\\037\\000\\037\\000\\037\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\037\\000\\000\\000\\000\\000\\000\\000\\037\\000\\000\\000\\000\\000\\037\\000\\\n\\000\\000\\037\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\037\\000\\\n\\037\\000\\000\\000\\000\\000\\000\\000\\000\\000\\037\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\037\\000\\\n\\045\\000\\000\\000\\000\\000\\037\\000\\000\\000\\000\\000\\000\\000\\037\\000\\\n\\000\\000\\000\\000\\047\\000\\000\\000\\000\\000\\000\\000\\037\\000\\037\\000\\\n\\000\\000\\000\\000\\037\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\051\\000\\037\\000\\000\\000\\037\\000\\000\\000\\055\\000\\145\\000\\\n\\037\\000\\037\\000\\037\\000\\037\\000\\037\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000\\\n\\000\\000\\068\\000\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\046\\004\\074\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\\n\\000\\000\\000\\000\\080\\000\\000\\000\\000\\000\\000\\000\\000\\000\\042\\000\\\n\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\000\\000\\045\\000\\000\\000\\\n\\000\\000\\082\\000\\000\\000\\000\
\000\\000\\000\\083\\000\\000\\000\\000\\000\\\n\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\051\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\091\\000\\145\\000\\093\\000\\000\\000\\\n\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\147\\000\\059\\000\\\n\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\000\\000\\\n\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\\n\\074\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\043\\001\\000\\000\\000\\000\\\n\\080\\000\\000\\000\\000\\000\\000\\000\\000\\000\\042\\000\\000\\000\\000\\000\\\n\\000\\000\\043\\000\\000\\000\\000\\000\\045\\000\\000\\000\\000\\000\\082\\000\\\n\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\\n\\000\\000\\000\\000\\000\\000\\016\\002\\000\\000\\000\\000\\086\\000\\087\\000\\\n\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\091\\000\\145\\000\\093\\000\\000\\000\\000\\000\\000\\000\\\n\\094\\000\\095\\000\\096\\000\\097\\000\\147\\000\\059\\000\\060\\000\\061\\000\\\n\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\000\\000\\069\\000\\070\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\074\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\000\\000\\\n\\000\\000\\000\\000
\\000\\000\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\\n\\000\\000\\000\\000\\045\\000\\000\\000\\000\\000\\082\\000\\000\\000\\000\\000\\\n\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\\n\\000\\000\\017\\002\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\\n\\088\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\091\\000\\145\\000\\093\\000\\000\\000\\000\\000\\000\\000\\094\\000\\095\\000\\\n\\096\\000\\097\\000\\147\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\076\\002\\000\\000\\000\\000\\000\\000\\\n\\067\\000\\000\\000\\000\\000\\000\\000\\069\\000\\070\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\000\\000\\\n\\045\\000\\000\\000\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\\n\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\\n\\000\\000\\051\\000\\000\\000\\000\\000\\000\\000\\000\\000\\091\\000\\145\\000\\\n\\093\\000\\000\\000\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\\n\\147\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000\\\n\\000\\000\\000\\000\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\00
0\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\043\\001\\\n\\000\\000\\000\\000\\080\\000\\000\\000\\000\\000\\000\\000\\000\\000\\042\\000\\\n\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\000\\000\\045\\000\\000\\000\\\n\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\\n\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\205\\002\\000\\000\\000\\000\\\n\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\051\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\091\\000\\145\\000\\093\\000\\000\\000\\\n\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\147\\000\\059\\000\\\n\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\000\\000\\\n\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\\n\\074\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\\n\\080\\000\\000\\000\\000\\000\\000\\000\\000\\000\\042\\000\\000\\000\\000\\000\\\n\\000\\000\\043\\000\\000\\000\\000\\000\\045\\000\\000\\000\\000\\000\\082\\000\\\n\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\\n\\000\\000\\000\\000\\000\\000\\206\\002\\000\\000\\000\\000\\086\\000\\087\\000\\\n\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\091\\000\\145\\000\\093\\000\\000\\000\\000\\000\\000\\000\\\n\\094\\000\\095\\000\\096\\000\\097\\000\\147\\000\\059\\000\\060\\000\\061\\000\\\n\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\000\\000\\069\\000\\070\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\074\\000\\
000\\000\\\n\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\043\\001\\000\\000\\000\\000\\080\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\\n\\000\\000\\000\\000\\045\\000\\000\\000\\000\\000\\082\\000\\000\\000\\000\\000\\\n\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\\n\\000\\000\\129\\004\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\\n\\088\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\091\\000\\145\\000\\093\\000\\000\\000\\000\\000\\000\\000\\094\\000\\095\\000\\\n\\096\\000\\097\\000\\147\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\067\\000\\000\\000\\000\\000\\000\\000\\069\\000\\070\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\079\\000\\000\\000\\000\\000\\080\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\042\\000\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\000\\000\\\n\\045\\000\\000\\000\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\\n\\000\\000\\000\\000\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\130\\004\\\n\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\\n\\000\\000\\051\\000\\000\\000\\000\\000\\000\\000\\000\\000\\091\\000\\145\\000\\\n\\093\\000\\000\\000\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\\n\\147\\000\\059\\000\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\067\\000\\000\\000\\\n\\000\\000\\000\\000\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\146\\000\\074\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\\n\\000\\000\\000\\000\\080\\000\\000\\000\\000\\000\\000\\000\\000\\000\\042\\000\\\n\\000\\000\\000\\000\\000\\000\\043\\000\\000\\000\\000\\000\\045\\000\\000\\000\\\n\\000\\000\\082\\000\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\\n\\000\\000\\084\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\086\\000\\087\\000\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\051\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\091\\000\\145\\000\\093\\000\\000\\000\\\n\\000\\000\\000\\000\\094\\000\\095\\000\\096\\000\\097\\000\\147\\000\\059\\000\\\n\\060\\000\\061\\000\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\000\\000\\\n\\069\\000\\070\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\\n\\074\\000\\000\\000\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\079\\000\\000\\000\\000\\000\\\n\\080\\000\\000\\000\\000\\000\\000\\000\\000\\000\\042\\000\\000\\000\\000\\000\\\n\\000\\000\\043\\000\\000\\000\\000\\000\\045\\000\\000\\000\\000\\000\\082\\000\\\n\\000\\000\\000\\000\\000\\000\\083\\000\\000\\000\\000\\000\\000\\000\\084\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\\n\\000\\000\\000\\000\\088\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\091\\000\\145\\000\\093\\000\\000\\000\\000\\000\\000\\000\\\n\\094\\000\\095\\000\\096\\000\\097\\000
\\147\\000\\059\\000\\060\\000\\061\\000\\\n\\062\\000\\063\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\067\\000\\000\\000\\000\\000\\000\\000\\069\\000\\070\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\074\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\078\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\043\\001\\000\\000\\068\\000\\080\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\068\\000\\082\\000\\000\\000\\000\\000\\\n\\000\\000\\083\\000\\000\\000\\068\\000\\000\\000\\084\\000\\000\\000\\068\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\086\\000\\087\\000\\000\\000\\000\\000\\\n\\088\\000\\068\\000\\000\\000\\000\\000\\068\\000\\000\\000\\000\\000\\000\\000\\\n\\091\\000\\000\\000\\093\\000\\000\\000\\000\\000\\068\\000\\094\\000\\095\\000\\\n\\096\\000\\097\\000\\147\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\068\\000\\000\\000\\094\\003\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\068\\000\\000\\000\\000\\000\\068\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\068\\000\\000\\000\\000\\000\\000\\000\\018\\001\\019\\001\\\n\\000\\000\\020\\001\\000\\000\\000\\000\\021\\001\\000\\000\\000\\000\\132\\004\\\n\\068\\000\\068\\000\\068\\000\\068\\000\\068\\000\\068\\000\\068\\000\\068\\000\\\n\\022\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\018\\001\\019\\001\\023\\001\\020\\001\\024\\001\\000\\000\\\n\\021\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\068\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\068\\000\\022\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\068\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\023\\001\\095\\003\\02
4\\001\\000\\000\\000\\000\\000\\000\\000\\000\\068\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\031\\001\\\n\\032\\001\\033\\001\\096\\003\\035\\001\\036\\001\\133\\004\\037\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\140\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\026\\001\\027\\001\\\n\\028\\001\\029\\001\\030\\001\\031\\001\\032\\001\\033\\001\\134\\004\\035\\001\\\n\\036\\001\\000\\000\\037\\001\\018\\001\\019\\001\\000\\000\\020\\001\\000\\000\\\n\\000\\000\\021\\001\\000\\000\\000\\000\\164\\004\\097\\003\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\022\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\018\\001\\\n\\019\\001\\023\\001\\020\\001\\024\\001\\000\\000\\021\\001\\000\\000\\000\\000\\\n\\000\\000\\135\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\022\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\023\\001\\141\\004\\024\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\026\\001\\\n\\027\\001\\028\\001\\029\\001\\030\\001\\031\\001\\032\\001\\033\\001\\142\\004\\\n\\035\\001\\036\\001\\165\\004\\037\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\181\\004\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\\n\\031\\001\\032\\001\\033\\001\\166\\004\\035\\001\\036\\001\\000\\000\\037\\001\\\n\\018\\001\\019\\001\\000\\000\\020\\001\\000\\000\\000\\000\\021\\001\\000\\000\\\n\\000\\000\\000\\000\\143\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\0
00\\000\\000\\022\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\023\\001\\000\\000\\\n\\024\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\167\\004\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\182\\004\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\026\\001\\027\\001\\028\\001\\029\\001\\\n\\030\\001\\031\\001\\032\\001\\033\\001\\183\\004\\035\\001\\036\\001\\000\\000\\\n\\037\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\184\\004\"\n\nlet yycheck = 
\"\\003\\000\\\n\\079\\000\\065\\000\\149\\000\\251\\000\\053\\000\\072\\000\\149\\001\\011\\000\\\n\\012\\000\\240\\000\\102\\001\\081\\001\\227\\000\\112\\000\\104\\001\\014\\001\\\n\\050\\000\\060\\001\\052\\000\\001\\000\\002\\000\\254\\001\\012\\000\\224\\000\\\n\\226\\000\\209\\000\\048\\000\\233\\000\\192\\001\\101\\001\\058\\001\\035\\000\\\n\\036\\000\\037\\000\\082\\001\\039\\000\\040\\000\\041\\000\\048\\000\\043\\000\\\n\\044\\000\\230\\001\\064\\000\\060\\001\\066\\000\\027\\000\\115\\002\\029\\000\\\n\\067\\000\\068\\000\\069\\000\\179\\001\\028\\001\\013\\004\\064\\000\\183\\001\\\n\\184\\001\\025\\001\\028\\001\\042\\000\\017\\003\\080\\000\\028\\001\\067\\000\\\n\\068\\000\\069\\000\\021\\001\\034\\001\\015\\001\\073\\000\\156\\002\\157\\002\\\n\\158\\002\\159\\002\\160\\002\\079\\000\\080\\000\\067\\000\\068\\000\\069\\000\\\n\\103\\001\\029\\001\\065\\000\\038\\000\\015\\001\\089\\000\\090\\000\\161\\000\\\n\\092\\000\\163\\000\\080\\000\\020\\002\\114\\000\\115\\000\\046\\000\\025\\001\\\n\\028\\001\\049\\000\\000\\000\\041\\001\\164\\000\\049\\001\\050\\001\\071\\003\\\n\\060\\001\\112\\003\\178\\001\\067\\004\\028\\001\\090\\001\\003\\001\\021\\001\\\n\\022\\001\\136\\001\\024\\001\\119\\000\\120\\000\\050\\001\\046\\000\\087\\003\\\n\\227\\000\\049\\000\\077\\000\\029\\001\\125\\001\\173\\001\\081\\000\\060\\001\\\n\\076\\001\\128\\001\\120\\000\\014\\001\\028\\001\\085\\000\\241\\000\\014\\001\\\n\\028\\001\\050\\001\\050\\001\\155\\001\\023\\001\\142\\001\\146\\000\\049\\001\\\n\\050\\001\\149\\000\\029\\003\\060\\001\\073\\001\\000\\001\\050\\001\\084\\001\\\n\\103\\001\\120\\001\\121\\001\\125\\001\\017\\001\\085\\000\\225\\000\\223\\000\\\n\\227\\000\\028\\001\\055\\001\\156\\001\\070\\001\\122\\003\\167\\001\\123\\001\\\n\\202\\000\\203\\000\\076\\001\\084\\001\\142\\001\\102\\002\\241\\000\\028\\001\\\n\\076\\001\\090\\001\\084\\001\\073\\001\\127\\001\\136\\001\\186\\000\\024\\001\\\n\\233\\000\\136\\001\\069\\001\\008\\001\\098\\004\\
156\\001\\069\\001\\080\\001\\\n\\168\\001\\080\\002\\126\\001\\199\\000\\127\\001\\245\\001\\247\\000\\008\\001\\\n\\168\\001\\014\\001\\168\\001\\145\\001\\134\\001\\125\\001\\223\\001\\164\\003\\\n\\030\\001\\199\\000\\011\\000\\034\\001\\028\\001\\161\\001\\025\\001\\098\\001\\\n\\099\\001\\221\\000\\252\\000\\253\\000\\033\\003\\034\\003\\035\\003\\036\\003\\\n\\037\\003\\039\\002\\136\\001\\041\\002\\161\\001\\125\\001\\198\\003\\003\\002\\\n\\192\\004\\125\\001\\035\\000\\036\\000\\037\\000\\120\\003\\039\\000\\040\\000\\\n\\041\\000\\059\\001\\125\\001\\044\\000\\136\\001\\178\\002\\125\\001\\153\\001\\\n\\161\\001\\161\\001\\156\\001\\219\\002\\000\\001\\136\\001\\162\\002\\161\\001\\\n\\069\\001\\136\\001\\125\\001\\218\\001\\076\\002\\161\\001\\237\\001\\021\\001\\\n\\012\\001\\175\\004\\194\\001\\161\\001\\050\\001\\141\\002\\227\\003\\028\\001\\\n\\125\\001\\241\\002\\231\\003\\041\\001\\027\\004\\028\\004\\029\\004\\030\\004\\\n\\031\\004\\021\\001\\040\\001\\229\\001\\105\\001\\014\\001\\034\\001\\025\\001\\\n\\089\\000\\090\\000\\038\\001\\034\\002\\097\\001\\101\\001\\023\\001\\043\\001\\\n\\105\\001\\054\\001\\021\\001\\107\\001\\050\\001\\067\\001\\084\\001\\154\\001\\\n\\052\\001\\053\\001\\054\\001\\055\\001\\090\\001\\125\\001\\174\\001\\029\\001\\\n\\125\\001\\061\\001\\120\\001\\121\\001\\180\\001\\181\\001\\001\\001\\050\\001\\\n\\136\\001\\125\\001\\070\\001\\136\\001\\129\\001\\008\\001\\080\\001\\075\\001\\\n\\076\\001\\077\\001\\078\\001\\049\\001\\050\\001\\034\\004\\084\\001\\052\\001\\\n\\070\\001\\038\\004\\142\\001\\022\\001\\069\\001\\069\\001\\120\\001\\083\\001\\\n\\001\\001\\123\\001\\069\\001\\050\\001\\149\\000\\014\\001\\156\\001\\008\\001\\\n\\070\\001\\116\\001\\001\\001\\115\\002\\069\\001\\054\\001\\076\\001\\090\\001\\\n\\190\\003\\008\\001\\090\\001\\167\\001\\001\\001\\113\\001\\084\\001\\125\\001\\\n\\116\\001\\098\\001\\178\\001\\008\\001\\095\\001\\076\\001\\101\\001\\022\\001\\\n\\001\\001\\090\\001\\099\\003\\113\
\001\\000\\000\\082\\004\\116\\001\\008\\001\\\n\\171\\002\\186\\000\\085\\001\\086\\001\\087\\001\\088\\001\\089\\001\\090\\001\\\n\\091\\001\\092\\001\\093\\001\\094\\001\\125\\001\\022\\001\\030\\001\\030\\003\\\n\\214\\002\\073\\001\\074\\001\\029\\001\\069\\001\\126\\001\\172\\001\\136\\001\\\n\\050\\001\\054\\004\\021\\001\\083\\001\\160\\001\\161\\001\\162\\001\\163\\001\\\n\\137\\001\\171\\001\\119\\004\\167\\001\\221\\000\\104\\001\\105\\001\\049\\001\\\n\\050\\001\\248\\001\\002\\001\\175\\001\\176\\001\\177\\001\\244\\002\\009\\003\\\n\\012\\003\\167\\001\\182\\001\\153\\001\\247\\001\\024\\002\\165\\001\\232\\001\\\n\\085\\004\\040\\001\\084\\001\\161\\001\\252\\001\\001\\001\\254\\001\\104\\001\\\n\\105\\001\\050\\001\\076\\001\\178\\001\\008\\001\\008\\002\\009\\002\\144\\002\\\n\\145\\002\\104\\001\\105\\001\\060\\001\\125\\001\\157\\001\\025\\001\\134\\001\\\n\\135\\001\\028\\001\\022\\001\\104\\001\\105\\001\\235\\001\\069\\001\\136\\001\\\n\\127\\001\\221\\001\\217\\002\\164\\002\\126\\001\\148\\001\\244\\001\\104\\001\\\n\\105\\001\\184\\002\\050\\001\\084\\001\\003\\001\\157\\001\\175\\002\\137\\001\\\n\\236\\001\\243\\001\\226\\002\\054\\001\\228\\002\\038\\001\\167\\003\\231\\002\\\n\\185\\002\\170\\003\\074\\001\\075\\001\\248\\001\\249\\001\\103\\001\\023\\001\\\n\\014\\001\\253\\001\\029\\001\\052\\001\\076\\001\\137\\001\\055\\001\\126\\001\\\n\\022\\002\\003\\001\\137\\001\\138\\001\\084\\001\\003\\000\\028\\001\\253\\001\\\n\\040\\001\\040\\001\\137\\001\\154\\003\\076\\002\\028\\001\\049\\001\\050\\001\\\n\\222\\002\\033\\003\\034\\003\\035\\003\\036\\003\\037\\003\\253\\002\\136\\001\\\n\\055\\001\\007\\002\\146\\002\\147\\002\\148\\002\\149\\002\\150\\002\\151\\002\\\n\\152\\002\\153\\002\\154\\002\\155\\002\\104\\001\\105\\001\\040\\001\\054\\001\\\n\\030\\001\\076\\001\\134\\001\\135\\001\\070\\001\\043\\000\\028\\001\\021\\001\\\n\\161\\001\\084\\001\\040\\001\\050\\001\\050\\000\\055\\001\\052\\000\\021\\001\\\n\\088\\001\\127\\003
\\084\\001\\093\\002\\150\\003\\153\\003\\127\\001\\003\\001\\\n\\098\\002\\129\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\\n\\054\\001\\013\\001\\040\\001\\073\\000\\168\\001\\076\\001\\110\\001\\028\\001\\\n\\069\\001\\021\\001\\022\\001\\022\\001\\024\\001\\024\\001\\134\\001\\135\\001\\\n\\031\\001\\032\\001\\033\\001\\034\\001\\020\\002\\076\\002\\092\\000\\212\\000\\\n\\213\\000\\088\\001\\040\\001\\084\\001\\040\\001\\208\\002\\088\\001\\170\\002\\\n\\022\\001\\054\\001\\024\\001\\106\\003\\073\\001\\173\\002\\177\\002\\102\\001\\\n\\103\\001\\088\\001\\221\\002\\055\\001\\102\\001\\103\\001\\175\\001\\176\\001\\\n\\177\\001\\119\\000\\126\\002\\127\\002\\067\\001\\182\\001\\118\\003\\102\\001\\\n\\103\\001\\058\\004\\088\\001\\123\\003\\143\\001\\017\\001\\126\\001\\110\\001\\\n\\126\\002\\127\\002\\024\\001\\079\\001\\085\\003\\076\\001\\038\\001\\095\\002\\\n\\102\\001\\103\\001\\088\\001\\069\\001\\146\\000\\030\\001\\021\\001\\022\\001\\\n\\110\\001\\024\\001\\221\\002\\090\\001\\098\\001\\099\\001\\100\\001\\101\\001\\\n\\102\\001\\103\\001\\232\\003\\167\\002\\030\\001\\169\\002\\025\\001\\095\\002\\\n\\110\\001\\040\\001\\162\\003\\125\\001\\030\\001\\222\\002\\102\\002\\088\\001\\\n\\050\\001\\167\\002\\013\\001\\169\\002\\013\\001\\021\\001\\022\\001\\166\\002\\\n\\024\\001\\125\\001\\126\\001\\127\\001\\025\\001\\102\\001\\103\\001\\168\\001\\\n\\249\\001\\044\\003\\198\\002\\199\\002\\136\\001\\137\\001\\014\\001\\126\\001\\\n\\040\\001\\040\\001\\142\\001\\088\\001\\202\\000\\203\\000\\137\\001\\023\\001\\\n\\198\\002\\199\\002\\208\\000\\209\\000\\216\\002\\127\\001\\212\\000\\213\\000\\\n\\101\\001\\102\\001\\103\\001\\217\\000\\218\\000\\225\\002\\151\\004\\227\\002\\\n\\040\\001\\110\\001\\224\\000\\167\\001\\232\\002\\127\\001\\021\\001\\022\\001\\\n\\224\\003\\024\\001\\015\\001\\239\\002\\024\\001\\219\\002\\031\\001\\032\\001\\\n\\033\\001\\034\\001\\246\\002\\142\\001\\003\\001\\249\\002\\021\\001\\136\\001\\\n\\13
6\\001\\040\\001\\254\\002\\144\\001\\127\\001\\069\\001\\252\\000\\253\\000\\\n\\246\\002\\186\\004\\000\\001\\241\\002\\168\\001\\236\\003\\014\\001\\004\\001\\\n\\254\\002\\215\\003\\216\\003\\217\\003\\218\\003\\219\\003\\012\\001\\023\\001\\\n\\095\\001\\040\\001\\067\\001\\145\\001\\021\\001\\022\\001\\207\\004\\024\\001\\\n\\142\\001\\029\\003\\098\\001\\099\\001\\100\\001\\101\\001\\002\\001\\014\\001\\\n\\040\\001\\021\\001\\022\\001\\099\\003\\024\\001\\030\\001\\185\\003\\040\\001\\\n\\044\\003\\126\\001\\021\\001\\047\\003\\048\\003\\049\\003\\030\\001\\255\\002\\\n\\125\\001\\001\\003\\021\\001\\071\\001\\040\\001\\125\\001\\201\\003\\053\\001\\\n\\054\\001\\047\\003\\048\\003\\049\\003\\040\\001\\069\\001\\030\\001\\061\\001\\\n\\136\\001\\126\\001\\137\\001\\251\\002\\252\\002\\021\\003\\014\\001\\255\\002\\\n\\021\\001\\001\\003\\181\\003\\127\\001\\136\\001\\075\\001\\076\\001\\077\\001\\\n\\078\\001\\137\\001\\021\\001\\022\\001\\088\\003\\024\\001\\040\\001\\039\\003\\\n\\136\\001\\093\\003\\098\\001\\099\\001\\100\\001\\021\\003\\136\\001\\136\\001\\\n\\118\\003\\136\\001\\127\\001\\030\\001\\104\\003\\040\\001\\030\\001\\093\\003\\\n\\014\\001\\001\\000\\002\\000\\003\\000\\004\\000\\113\\003\\136\\001\\039\\003\\\n\\022\\001\\023\\001\\178\\003\\136\\001\\120\\003\\125\\001\\031\\001\\032\\001\\\n\\033\\001\\034\\001\\120\\001\\125\\001\\040\\001\\123\\001\\055\\001\\136\\001\\\n\\136\\001\\055\\001\\040\\001\\195\\003\\024\\001\\155\\001\\088\\001\\069\\001\\\n\\136\\001\\141\\003\\134\\001\\143\\003\\168\\001\\145\\003\\137\\001\\147\\003\\\n\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\135\\001\\141\\003\\\n\\017\\001\\143\\003\\067\\001\\145\\003\\110\\001\\147\\003\\148\\001\\069\\001\\\n\\000\\000\\017\\001\\160\\001\\161\\001\\162\\001\\163\\001\\215\\003\\216\\003\\\n\\217\\003\\218\\003\\219\\003\\031\\001\\032\\001\\033\\001\\034\\001\\127\\001\\\n\\015\\001\\181\\003\\088\\001\\079\\001\\184\\003\\185\\003\\239\\002\\
247\\003\\\n\\031\\001\\032\\001\\033\\001\\034\\001\\098\\001\\099\\001\\100\\001\\101\\001\\\n\\102\\001\\103\\001\\184\\003\\046\\004\\194\\001\\201\\003\\202\\003\\203\\003\\\n\\110\\001\\199\\001\\224\\003\\136\\001\\105\\001\\040\\001\\126\\001\\067\\001\\\n\\160\\003\\192\\003\\193\\003\\153\\001\\202\\003\\203\\003\\021\\001\\125\\001\\\n\\040\\001\\002\\001\\222\\003\\031\\001\\032\\001\\033\\001\\034\\001\\221\\001\\\n\\024\\001\\040\\001\\136\\001\\136\\001\\029\\003\\251\\003\\234\\003\\127\\001\\\n\\160\\003\\237\\003\\137\\001\\015\\001\\014\\004\\023\\001\\236\\001\\167\\003\\\n\\021\\001\\136\\001\\170\\003\\044\\003\\127\\001\\096\\004\\054\\004\\237\\003\\\n\\136\\001\\100\\004\\136\\001\\136\\001\\079\\001\\088\\001\\040\\001\\136\\001\\\n\\134\\001\\050\\000\\137\\001\\052\\000\\053\\000\\054\\000\\010\\004\\014\\001\\\n\\247\\003\\100\\001\\101\\001\\102\\001\\103\\001\\092\\004\\014\\001\\122\\004\\\n\\023\\001\\088\\001\\126\\001\\110\\001\\021\\001\\025\\004\\026\\004\\072\\000\\\n\\091\\004\\022\\001\\137\\001\\079\\001\\032\\004\\100\\001\\101\\001\\102\\001\\\n\\103\\001\\040\\001\\000\\000\\025\\004\\026\\004\\168\\001\\145\\004\\110\\001\\\n\\055\\001\\017\\001\\046\\004\\002\\001\\125\\001\\155\\001\\088\\001\\125\\001\\\n\\040\\001\\126\\001\\002\\004\\003\\004\\004\\004\\005\\004\\006\\004\\024\\001\\\n\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\069\\001\\120\\003\\\n\\168\\001\\021\\001\\115\\000\\071\\004\\110\\001\\040\\001\\104\\004\\054\\004\\\n\\134\\001\\014\\001\\002\\004\\003\\004\\004\\004\\005\\004\\006\\004\\040\\001\\\n\\168\\001\\071\\004\\105\\001\\137\\001\\088\\001\\028\\001\\126\\001\\137\\001\\\n\\092\\004\\093\\004\\014\\001\\098\\001\\099\\001\\100\\001\\101\\001\\093\\002\\\n\\079\\001\\101\\001\\102\\001\\103\\001\\098\\002\\168\\001\\168\\001\\110\\001\\\n\\090\\001\\109\\004\\110\\001\\111\\004\\112\\004\\136\\001\\114\\004\\115\\004\\\n\\116\\004\\117\\004\\118\\004\\136\\001\\136\\001\\136\
\001\\125\\001\\109\\004\\\n\\136\\001\\111\\004\\112\\004\\136\\001\\181\\003\\136\\001\\040\\001\\088\\001\\\n\\185\\003\\136\\001\\058\\004\\136\\001\\084\\004\\136\\001\\086\\004\\040\\001\\\n\\014\\001\\136\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\125\\001\\\n\\201\\003\\125\\001\\125\\001\\099\\004\\055\\001\\110\\001\\013\\001\\155\\004\\\n\\126\\001\\202\\000\\203\\000\\037\\001\\084\\004\\050\\001\\086\\004\\208\\000\\\n\\209\\000\\136\\001\\021\\001\\212\\000\\213\\000\\155\\004\\021\\001\\021\\001\\\n\\021\\001\\218\\000\\021\\001\\099\\004\\126\\001\\168\\001\\088\\001\\224\\000\\\n\\225\\000\\226\\000\\227\\000\\069\\001\\020\\001\\168\\001\\030\\001\\136\\001\\\n\\233\\000\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\239\\000\\240\\000\\\n\\241\\000\\002\\001\\136\\001\\035\\001\\110\\001\\040\\001\\247\\000\\014\\001\\\n\\125\\001\\136\\001\\042\\001\\252\\000\\253\\000\\125\\001\\046\\001\\211\\004\\\n\\023\\001\\013\\001\\126\\001\\168\\001\\110\\001\\040\\001\\008\\001\\136\\001\\\n\\056\\001\\168\\001\\216\\002\\059\\001\\002\\001\\211\\004\\125\\001\\151\\004\\\n\\136\\001\\040\\001\\126\\001\\225\\002\\068\\001\\227\\002\\021\\001\\137\\001\\\n\\025\\001\\030\\001\\232\\002\\126\\001\\125\\001\\040\\001\\030\\001\\040\\001\\\n\\080\\001\\040\\001\\040\\001\\137\\001\\040\\001\\126\\001\\198\\004\\126\\001\\\n\\041\\001\\089\\001\\136\\001\\249\\002\\092\\001\\136\\001\\069\\001\\040\\001\\\n\\030\\001\\097\\001\\186\\004\\126\\001\\003\\003\\004\\003\\005\\003\\006\\003\\\n\\136\\001\\136\\001\\136\\001\\136\\001\\136\\001\\040\\001\\198\\004\\111\\001\\\n\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\207\\004\\\n\\137\\001\\050\\001\\002\\001\\098\\001\\099\\001\\100\\001\\101\\001\\136\\001\\\n\\021\\001\\136\\001\\136\\001\\136\\001\\093\\004\\137\\001\\136\\001\\110\\001\\\n\\006\\001\\040\\003\\041\\003\\042\\003\\043\\003\\141\\001\\136\\001\\136\\001\\\n\\251\\000\\136\\001\\146\\001\\136\\001
\\136\\001\\040\\001\\125\\001\\136\\001\\\n\\136\\001\\136\\001\\154\\001\\136\\001\\136\\001\\027\\001\\030\\001\\126\\001\\\n\\126\\001\\136\\001\\127\\001\\030\\001\\136\\001\\127\\001\\166\\001\\120\\001\\\n\\015\\001\\055\\001\\123\\001\\126\\001\\000\\000\\021\\001\\040\\001\\050\\000\\\n\\126\\001\\052\\000\\053\\000\\054\\000\\127\\001\\127\\001\\003\\001\\021\\001\\\n\\127\\001\\126\\001\\088\\003\\057\\001\\058\\001\\126\\001\\092\\003\\055\\001\\\n\\013\\001\\021\\001\\168\\001\\155\\001\\066\\001\\072\\000\\073\\000\\127\\001\\\n\\021\\001\\022\\001\\104\\003\\024\\001\\074\\001\\075\\001\\126\\001\\021\\001\\\n\\126\\001\\126\\001\\136\\001\\113\\003\\082\\001\\014\\001\\126\\001\\155\\001\\\n\\126\\001\\040\\001\\002\\001\\040\\001\\003\\001\\091\\001\\023\\001\\093\\001\\\n\\094\\001\\126\\001\\096\\001\\126\\001\\126\\001\\126\\001\\013\\001\\184\\001\\\n\\126\\001\\001\\003\\055\\001\\223\\002\\072\\001\\089\\004\\021\\001\\040\\001\\\n\\172\\000\\194\\001\\165\\001\\174\\000\\095\\001\\096\\001\\097\\001\\098\\001\\\n\\099\\001\\119\\001\\030\\003\\177\\002\\085\\002\\009\\002\\250\\003\\224\\002\\\n\\162\\002\\040\\001\\079\\001\\181\\003\\130\\001\\131\\001\\027\\000\\002\\000\\\n\\194\\001\\218\\001\\029\\000\\186\\000\\069\\001\\024\\003\\184\\002\\153\\003\\\n\\055\\001\\109\\002\\156\\000\\228\\001\\229\\001\\230\\001\\239\\002\\232\\001\\\n\\150\\001\\070\\000\\193\\003\\232\\001\\237\\001\\192\\003\\157\\003\\088\\001\\\n\\123\\001\\228\\001\\150\\004\\165\\000\\162\\001\\163\\001\\247\\001\\165\\001\\\n\\079\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\223\\000\\\n\\125\\001\\126\\001\\127\\001\\084\\001\\069\\001\\110\\001\\154\\003\\008\\002\\\n\\009\\002\\201\\001\\011\\002\\136\\001\\137\\001\\194\\002\\112\\001\\252\\001\\\n\\222\\003\\142\\001\\195\\003\\099\\003\\125\\001\\175\\004\\255\\255\\202\\000\\\n\\203\\000\\026\\002\\255\\255\\255\\255\\234\\003\\208\\000\\209\\000\\136\\001\\\n\\255\\255\\212\\000\\21
3\\000\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\\n\\127\\001\\255\\255\\167\\001\\026\\002\\255\\255\\224\\000\\225\\000\\226\\000\\\n\\227\\000\\136\\001\\137\\001\\255\\255\\255\\255\\255\\255\\233\\000\\142\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\010\\004\\240\\000\\241\\000\\013\\004\\\n\\014\\004\\255\\255\\255\\255\\014\\001\\247\\000\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\252\\000\\253\\000\\255\\255\\023\\001\\255\\255\\255\\255\\255\\255\\\n\\167\\001\\255\\255\\032\\004\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\093\\002\\040\\001\\255\\255\\003\\001\\\n\\046\\004\\098\\002\\255\\255\\255\\255\\255\\255\\255\\255\\025\\001\\255\\255\\\n\\255\\255\\013\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\011\\002\\021\\001\\255\\255\\255\\255\\024\\001\\067\\004\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\040\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\144\\002\\\n\\145\\002\\255\\255\\255\\255\\055\\001\\255\\255\\255\\255\\255\\255\\098\\001\\\n\\099\\001\\100\\001\\104\\004\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\164\\002\\114\\004\\115\\004\\116\\004\\117\\004\\\n\\118\\004\\170\\002\\255\\255\\079\\001\\255\\255\\255\\255\\175\\002\\255\\255\\\n\\177\\002\\178\\002\\125\\001\\255\\255\\255\\255\\255\\255\\255\\255\\184\\002\\\n\\185\\002\\255\\255\\255\\255\\255\\255\\255\\255\\136\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\120\\001\\255\\255\\255\\255\\\n\\123\\001\\001\\001\\255\\255\\255\\255\\255\\255\\050\\000\\255\\255\\052\\000\\\n\\053\\000\\054\\000\\255\\255\\255\\255\\255\\255\\019\\001\\014\\001\\255\\255\\\n\\255\\255\\125\\001\\126\\001\\127\\001\\221\\002\\222\\002\\022\\001\\023\\001\\\n\\255\\2
55\\255\\255\\255\\255\\072\\000\\136\\001\\137\\001\\231\\002\\255\\255\\\n\\255\\255\\255\\255\\142\\001\\255\\255\\255\\255\\255\\255\\044\\001\\255\\255\\\n\\040\\001\\255\\255\\192\\004\\255\\255\\050\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\253\\002\\255\\255\\060\\001\\061\\001\\\n\\062\\001\\063\\001\\064\\001\\167\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\194\\001\\\n\\017\\003\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\088\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\218\\001\\\n\\255\\255\\255\\255\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\\n\\104\\001\\228\\001\\229\\001\\230\\001\\255\\255\\232\\001\\110\\001\\255\\255\\\n\\255\\255\\255\\255\\237\\001\\255\\255\\255\\255\\255\\255\\124\\001\\255\\255\\\n\\255\\255\\255\\255\\128\\001\\255\\255\\247\\001\\125\\001\\132\\001\\226\\002\\\n\\255\\255\\228\\002\\255\\255\\255\\255\\231\\002\\139\\001\\140\\001\\255\\255\\\n\\136\\001\\143\\001\\255\\255\\255\\255\\085\\003\\008\\002\\009\\002\\255\\255\\\n\\011\\002\\151\\001\\255\\255\\092\\003\\255\\255\\255\\255\\255\\255\\157\\001\\\n\\158\\001\\159\\001\\160\\001\\255\\255\\255\\255\\202\\000\\203\\000\\026\\002\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\007\\003\\255\\255\\255\\255\\212\\000\\\n\\213\\000\\255\\255\\006\\001\\255\\255\\255\\255\\118\\003\\017\\003\\255\\255\\\n\\255\\255\\122\\003\\255\\255\\255\\255\\225\\000\\226\\000\\227\\000\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\233\\000\\255\\255\\255\\255\\027\\001\\\n\\255\\255\\255\\255\\255\\255\\240\\000\\241\\000\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\247\\000\\255\\255\\255\\255\\255\\255\\255\\255\\252\\000\\\n\\253\\000\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\160\
\003\\\n\\255\\255\\255\\255\\255\\255\\164\\003\\255\\255\\057\\001\\058\\001\\255\\255\\\n\\255\\255\\255\\255\\093\\002\\255\\255\\255\\255\\255\\255\\066\\001\\098\\002\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\025\\001\\255\\255\\074\\001\\075\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\082\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\\n\\255\\255\\093\\001\\094\\001\\255\\255\\096\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\215\\003\\216\\003\\\n\\217\\003\\218\\003\\219\\003\\118\\003\\255\\255\\144\\002\\145\\002\\122\\003\\\n\\123\\003\\255\\255\\227\\003\\119\\001\\255\\255\\255\\255\\231\\003\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\236\\003\\255\\255\\255\\255\\130\\001\\131\\001\\\n\\255\\255\\164\\002\\255\\255\\255\\255\\255\\255\\246\\003\\247\\003\\170\\002\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\175\\002\\255\\255\\177\\002\\178\\002\\\n\\255\\255\\255\\255\\150\\001\\255\\255\\255\\255\\184\\002\\185\\002\\162\\003\\\n\\255\\255\\164\\003\\255\\255\\255\\255\\013\\004\\014\\004\\162\\001\\163\\001\\\n\\255\\255\\165\\001\\255\\255\\120\\001\\255\\255\\255\\255\\123\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\034\\004\\255\\255\\003\\000\\255\\255\\038\\004\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\221\\002\\222\\002\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\227\\002\\255\\255\\255\\255\\052\\004\\255\\255\\054\\004\\255\\255\\255\\255\\\n\\014\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\023\\001\\067\\004\\255\\255\\255\\255\\224\\003\\255\\255\\255\\255\\\n\\227\\003\\255\\255\\253\\002\\043\\000\\231\\003\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\082\\004\\040\\001\\255\\255\\085\\004\\255\\255\\053\\000\\054\\000\\\n\\255\\255\\255\\255\\091\\004\\255\\255\\255\\255\\255\\255
\\017\\003\\255\\255\\\n\\255\\255\\098\\004\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\104\\004\\\n\\255\\255\\072\\000\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\119\\004\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\092\\000\\255\\255\\255\\255\\228\\001\\\n\\229\\001\\230\\001\\088\\001\\232\\001\\255\\255\\255\\255\\255\\255\\034\\004\\\n\\237\\001\\255\\255\\255\\255\\038\\004\\098\\001\\099\\001\\100\\001\\101\\001\\\n\\102\\001\\103\\001\\247\\001\\255\\255\\023\\001\\024\\001\\025\\001\\119\\000\\\n\\110\\001\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\131\\000\\085\\003\\008\\002\\009\\002\\040\\001\\255\\255\\125\\001\\\n\\255\\255\\092\\003\\255\\255\\255\\255\\255\\255\\255\\255\\175\\004\\255\\255\\\n\\255\\255\\255\\255\\136\\001\\255\\255\\255\\255\\026\\002\\255\\255\\082\\004\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\156\\000\\255\\255\\192\\004\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\122\\003\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\177\\000\\178\\000\\255\\255\\088\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\119\\004\\255\\255\\255\\255\\098\\001\\\n\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\110\\001\\255\\255\\255\\255\\208\\000\\255\\255\\\n\\255\\255\\164\\003\\255\\255\\255\\255\\089\\002\\255\\255\\255\\255\\255\\255\\\n\\093\\002\\255\\255\\125\\001\\255\\255\\255\\255\\098\\002\\224\\000\\255\\255\\\n\\255\\255\\255\\255\\225\\000\\226\\000\\227\\000\\136\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\233\\000\\255\\255\\255\\255\\006\\001\\255\\255\\238\\000\\\n\\239\\000\\240\\000\\241\\000\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\247\\000\\255\\255\\255\\255\\255\\255\\25
5\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\027\\001\\255\\255\\215\\003\\216\\003\\217\\003\\218\\003\\\n\\219\\003\\255\\255\\255\\255\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\\n\\227\\003\\255\\255\\255\\255\\255\\255\\231\\003\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\236\\003\\025\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\057\\001\\058\\001\\255\\255\\255\\255\\247\\003\\170\\002\\255\\255\\255\\255\\\n\\255\\255\\066\\001\\255\\255\\255\\255\\177\\002\\178\\002\\255\\255\\255\\255\\\n\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\053\\001\\054\\001\\255\\255\\\n\\255\\255\\082\\001\\013\\004\\014\\004\\255\\255\\061\\001\\255\\255\\255\\255\\\n\\063\\001\\255\\255\\091\\001\\069\\001\\093\\001\\094\\001\\255\\255\\096\\001\\\n\\255\\255\\255\\255\\255\\255\\075\\001\\076\\001\\077\\001\\078\\001\\034\\004\\\n\\255\\255\\255\\255\\255\\255\\038\\004\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\221\\002\\222\\002\\255\\255\\255\\255\\255\\255\\255\\255\\119\\001\\255\\255\\\n\\003\\001\\255\\255\\100\\001\\054\\004\\255\\255\\255\\255\\104\\001\\102\\001\\\n\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\067\\004\\255\\255\\255\\255\\022\\001\\023\\001\\255\\255\\025\\001\\255\\255\\\n\\253\\002\\028\\001\\255\\255\\255\\255\\255\\255\\150\\001\\255\\255\\082\\004\\\n\\255\\255\\255\\255\\085\\004\\255\\255\\255\\255\\040\\001\\255\\255\\255\\255\\\n\\091\\004\\162\\001\\163\\001\\255\\255\\165\\001\\255\\255\\255\\255\\098\\004\\\n\\255\\255\\052\\001\\255\\255\\054\\001\\255\\255\\104\\004\\149\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\160\\001\\161\\001\\162\\001\\163\\001\\119\\004\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\174\\001\\255\\255\\081\\001\\255\\255\\\n\\255\\255\\179\\001\\180\\001\\181\\001\\255\\255\\183\\001\\184\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\2
55\\255\\255\\255\\255\\255\\255\\192\\001\\098\\001\\\n\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\\n\\107\\001\\108\\001\\255\\255\\110\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\092\\003\\\n\\255\\255\\255\\255\\255\\255\\218\\001\\175\\004\\221\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\226\\001\\255\\255\\228\\001\\229\\001\\230\\001\\\n\\255\\255\\232\\001\\020\\001\\255\\255\\236\\001\\192\\004\\237\\001\\255\\255\\\n\\255\\255\\255\\255\\149\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\247\\001\\035\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\008\\002\\009\\002\\255\\255\\255\\255\\255\\255\\056\\001\\255\\255\\\n\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\157\\003\\255\\255\\068\\001\\026\\002\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\044\\002\\255\\255\\089\\001\\\n\\255\\255\\255\\255\\092\\001\\255\\255\\255\\255\\255\\255\\255\\255\\097\\001\\\n\\255\\255\\255\\255\\255\\255\\006\\000\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\\n\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\080\\002\\215\\003\\216\\003\\217\\003\\218\\003\\219\\003\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\141\\001\\255\\255\\255\\255\\255\\255\\236\\003\\\n\\146\\001\\255\\255\\053\\000\\054\\000\\255\\255\\255\\255\\109\\002\\255\\255\\\n\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\
255\\255\\255\\255\\255\\255\\166\\001\\072\\000\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\013\\004\\014\\004\\255\\255\\141\\002\\255\\255\\255\\255\\144\\002\\145\\002\\\n\\146\\002\\147\\002\\148\\002\\149\\002\\150\\002\\151\\002\\152\\002\\153\\002\\\n\\154\\002\\155\\002\\156\\002\\157\\002\\158\\002\\159\\002\\160\\002\\161\\002\\\n\\162\\002\\255\\255\\164\\002\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\170\\002\\255\\255\\175\\002\\053\\000\\054\\000\\\n\\255\\255\\255\\255\\177\\002\\178\\002\\255\\255\\255\\255\\255\\255\\185\\002\\\n\\255\\255\\184\\002\\255\\255\\255\\255\\255\\255\\255\\255\\067\\004\\255\\255\\\n\\255\\255\\072\\000\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\156\\000\\157\\000\\158\\000\\159\\000\\160\\000\\091\\004\\255\\255\\\n\\216\\002\\164\\000\\165\\000\\166\\000\\255\\255\\168\\000\\221\\002\\222\\002\\\n\\255\\255\\225\\002\\255\\255\\104\\004\\255\\255\\176\\000\\177\\000\\255\\255\\\n\\232\\002\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\249\\002\\255\\255\\255\\255\\053\\000\\054\\000\\253\\002\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\011\\001\\255\\255\\255\\255\\255\\255\\009\\003\\\n\\255\\255\\255\\255\\255\\255\\019\\001\\255\\255\\012\\003\\255\\255\\072\\000\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\223\\000\\029\\001\\225\\000\\226\\000\\\n\\227\\000\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\233\\000\\030\\003\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\044\\001\\240\\000\\241\\000\\255\\255\\\n\\255\\255\\049\\001\\050\\001\\178\\000\\247\\000\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001
\\\n\\064\\001\\255\\255\\255\\255\\192\\004\\255\\255\\069\\001\\070\\001\\255\\255\\\n\\255\\255\\073\\001\\255\\255\\255\\255\\076\\001\\255\\255\\078\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\025\\001\\255\\255\\\n\\255\\255\\255\\255\\090\\001\\085\\003\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\088\\003\\255\\255\\225\\000\\226\\000\\227\\000\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\233\\000\\255\\255\\255\\255\\255\\255\\159\\000\\160\\000\\\n\\104\\003\\240\\000\\241\\000\\164\\000\\255\\255\\166\\000\\255\\255\\168\\000\\\n\\247\\000\\113\\003\\255\\255\\255\\255\\124\\001\\255\\255\\255\\255\\176\\000\\\n\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\151\\001\\\n\\255\\255\\153\\001\\025\\001\\000\\000\\150\\003\\157\\001\\158\\001\\159\\001\\\n\\160\\001\\161\\001\\153\\003\\154\\003\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\107\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\225\\000\\226\\000\\227\\000\\255\\255\\255\\255\\255\\255\\255\\255\\122\\001\\\n\\233\\000\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\129\\001\\240\\000\\\n\\241\\000\\255\\255\\255\\255\\255\\255\\190\\003\\255\\255\\247\\000\\255\\255\\\n\\255\\255\\140\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\149\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\215\\003\\216\\003\\217\\003\\218\\003\\219\\003\\255\\255\\222\\003\\255\\255\\\n\\025\\001\\255\\255\\255\\255\\255\\255\\001\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\234\\003\\008\\001\\255\\255\\236\\003\\255\\255\\255\\255\\\n\\255\\255\\014\\001\\043\\001\\255\\255\\255\\255\\255\\255\\25
5\\255\\255\\255\\\n\\247\\003\\022\\001\\023\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\010\\004\\040\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\223\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\228\\001\\229\\001\\230\\001\\255\\255\\232\\001\\255\\255\\255\\255\\\n\\032\\004\\255\\255\\237\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\069\\001\\255\\255\\255\\255\\255\\255\\247\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\252\\001\\049\\004\\254\\001\\255\\255\\052\\004\\255\\255\\054\\004\\\n\\255\\255\\056\\004\\255\\255\\088\\001\\255\\255\\008\\002\\009\\002\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\098\\001\\099\\001\\100\\001\\\n\\101\\001\\102\\001\\103\\001\\104\\001\\255\\255\\255\\255\\255\\255\\026\\002\\\n\\255\\255\\110\\001\\255\\255\\140\\001\\255\\255\\255\\255\\085\\004\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\091\\004\\228\\001\\229\\001\\230\\001\\\n\\125\\001\\232\\001\\255\\255\\098\\004\\255\\255\\255\\255\\237\\001\\102\\004\\\n\\255\\255\\255\\255\\255\\255\\136\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\247\\001\\255\\255\\114\\004\\115\\004\\116\\004\\117\\004\\118\\004\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\008\\002\\009\\002\\255\\255\\255\\255\\080\\002\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\026\\002\\255\\255\\002\\001\\003\\001\\150\\004\\\n\\255\\255\\006\\001\\101\\002\\255\\255\\255\\255\\104\\002\\255\\255\\255\\255\\\n\\255\\255\\108\\002\\109\\002\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\\n\\255\\255\\255\\255\\255\\255\\228\\001\\229\\001\\230\\001\\027\\001\\232\\001\\\n\\175\\004\\255\\255\\255\\255\\255\\255\\237\\0
01\\255\\255\\035\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\040\\001\\255\\255\\042\\001\\247\\001\\255\\255\\\n\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\008\\002\\\n\\009\\002\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\170\\002\\\n\\171\\002\\026\\002\\173\\002\\080\\001\\255\\255\\082\\001\\177\\002\\178\\002\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\\n\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\\n\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\221\\002\\222\\002\\255\\255\\130\\001\\131\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\001\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\141\\001\\255\\255\\008\\001\\170\\002\\255\\255\\146\\001\\255\\255\\255\\255\\\n\\014\\001\\150\\001\\177\\002\\178\\002\\101\\002\\154\\001\\255\\255\\104\\002\\\n\\022\\001\\023\\001\\253\\002\\108\\002\\255\\255\\162\\001\\163\\001\\255\\255\\\n\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\040\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\027\\003\\255\\255\\255\\255\\030\\003\\255\\255\\255\\255\\221\\002\\222\\002\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\170\\002\\088\\001\\
255\\255\\255\\255\\255\\255\\253\\002\\255\\255\\\n\\177\\002\\178\\002\\255\\255\\255\\255\\098\\001\\099\\001\\100\\001\\101\\001\\\n\\102\\001\\103\\001\\104\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\110\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\\n\\099\\003\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\136\\001\\255\\255\\221\\002\\222\\002\\255\\255\\020\\001\\\n\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\042\\001\\255\\255\\255\\255\\\n\\255\\255\\046\\001\\255\\255\\255\\255\\253\\002\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\056\\001\\057\\001\\058\\001\\059\\001\\154\\003\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\\n\\255\\255\\255\\255\\027\\003\\080\\001\\255\\255\\082\\001\\255\\255\\178\\003\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\\n\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\\n\\195\\003\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\\n\\117\\001\\118\\001\\119\\001\\255\\255\\215\\003\\216\\003\\217\\003\\218\\003\\\n\\219\\003\\255\\255\\255\\255\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\141\\001\\236\\003\\255\\255\\255\\255\\255\\255\\146\\001\\255\\255\\255\\255\\\n\\255\\255\\150\\001\\255\\255\\246\\003\\255\\255\\154\\001\\255\\255\\255\\255\\\n\\255\\255\\255\
\255\\255\\255\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\\n\\165\\001\\166\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\215\\003\\216\\003\\217\\003\\218\\003\\219\\003\\000\\000\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\045\\004\\255\\255\\255\\255\\236\\003\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\057\\004\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\091\\004\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\215\\003\\216\\003\\\n\\217\\003\\218\\003\\219\\003\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\\n\\255\\255\\008\\001\\255\\255\\236\\003\\011\\001\\012\\001\\013\\001\\014\\001\\\n\\015\\001\\016\\001\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\022\\001\\\n\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\029\\001\\030\\001\\\n\\031\\001\\032\\001\\033\\001\\034\\001\\035\\001\\255\\255\\037\\001\\038\\001\\\n\\255\\255\\040\\001\\041\\001\\042\\001\\091\\004\\044\\001\\255\\255\\046\\001\\\n\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\052\\001\\255\\255\\054\\001\\\n\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\066\\001\\255\\255\\068\\001\\069\\001\\070\\001\\\n
\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\076\\001\\077\\001\\078\\001\\\n\\079\\001\\080\\001\\255\\255\\082\\001\\255\\255\\084\\001\\085\\001\\000\\000\\\n\\057\\004\\088\\001\\089\\001\\090\\001\\091\\001\\092\\001\\093\\001\\094\\001\\\n\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\\n\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\\n\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\\n\\119\\001\\255\\255\\091\\004\\255\\255\\255\\255\\124\\001\\125\\001\\126\\001\\\n\\127\\001\\128\\001\\255\\255\\130\\001\\131\\001\\132\\001\\255\\255\\134\\001\\\n\\135\\001\\136\\001\\137\\001\\138\\001\\139\\001\\140\\001\\141\\001\\142\\001\\\n\\143\\001\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\\n\\151\\001\\255\\255\\153\\001\\154\\001\\155\\001\\255\\255\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\162\\001\\163\\001\\164\\001\\165\\001\\166\\001\\\n\\167\\001\\168\\001\\255\\255\\255\\255\\255\\255\\255\\255\\001\\001\\002\\001\\\n\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\008\\001\\255\\255\\255\\255\\\n\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\255\\255\\\n\\019\\001\\020\\001\\021\\001\\022\\001\\023\\001\\024\\001\\025\\001\\255\\255\\\n\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\034\\001\\\n\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\\n\\255\\255\\044\\001\\255\\255\\046\\001\\255\\255\\255\\255\\049\\001\\050\\001\\\n\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\\n\\059\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\066\\001\\\n\\255\\255\\068\\001\\069\\001\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\\n\\075\\001\\076\\001\\077\\001\\078\\001\\079\\001\\080\\001\\255\\255\\082\\001\\\n\\000\\000\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\089\\0
01\\090\\001\\\n\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\\n\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\\n\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\120\\001\\121\\001\\255\\255\\\n\\255\\255\\124\\001\\125\\001\\126\\001\\127\\001\\128\\001\\255\\255\\130\\001\\\n\\131\\001\\132\\001\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\\n\\139\\001\\140\\001\\141\\001\\142\\001\\143\\001\\144\\001\\145\\001\\146\\001\\\n\\255\\255\\148\\001\\255\\255\\150\\001\\151\\001\\255\\255\\153\\001\\154\\001\\\n\\155\\001\\156\\001\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\162\\001\\\n\\163\\001\\164\\001\\165\\001\\166\\001\\167\\001\\168\\001\\255\\255\\255\\255\\\n\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\008\\001\\\n\\255\\255\\255\\255\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\016\\001\\\n\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\022\\001\\023\\001\\024\\001\\\n\\025\\001\\255\\255\\027\\001\\028\\001\\029\\001\\255\\255\\031\\001\\032\\001\\\n\\033\\001\\034\\001\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\\n\\041\\001\\042\\001\\255\\255\\044\\001\\255\\255\\046\\001\\255\\255\\255\\255\\\n\\049\\001\\050\\001\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\\n\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\\n\\255\\255\\066\\001\\255\\255\\068\\001\\069\\001\\070\\001\\255\\255\\255\\255\\\n\\073\\001\\074\\001\\075\\001\\076\\001\\077\\001\\078\\001\\079\\001\\080\\001\\\n\\000\\000\\082\\001\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\\n\\089\\001\\090\\001\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\\n\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\\n\\105\\001\\106\\001\\255\\255\\255\\255\\109\\001\\
110\\001\\111\\001\\112\\001\\\n\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\124\\001\\125\\001\\126\\001\\127\\001\\128\\001\\\n\\255\\255\\130\\001\\131\\001\\132\\001\\255\\255\\134\\001\\135\\001\\136\\001\\\n\\137\\001\\138\\001\\139\\001\\140\\001\\141\\001\\142\\001\\143\\001\\144\\001\\\n\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\151\\001\\255\\255\\\n\\153\\001\\154\\001\\155\\001\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\\n\\161\\001\\162\\001\\163\\001\\164\\001\\165\\001\\166\\001\\167\\001\\168\\001\\\n\\255\\255\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\\n\\008\\001\\255\\255\\255\\255\\255\\255\\012\\001\\013\\001\\014\\001\\015\\001\\\n\\016\\001\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\022\\001\\023\\001\\\n\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\030\\001\\031\\001\\\n\\032\\001\\033\\001\\034\\001\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\\n\\040\\001\\041\\001\\042\\001\\255\\255\\044\\001\\255\\255\\046\\001\\255\\255\\\n\\255\\255\\049\\001\\050\\001\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\\n\\056\\001\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\062\\001\\063\\001\\\n\\064\\001\\255\\255\\066\\001\\255\\255\\068\\001\\069\\001\\255\\255\\255\\255\\\n\\255\\255\\073\\001\\074\\001\\075\\001\\255\\255\\077\\001\\078\\001\\079\\001\\\n\\080\\001\\000\\000\\082\\001\\083\\001\\084\\001\\085\\001\\255\\255\\255\\255\\\n\\088\\001\\089\\001\\090\\001\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\\n\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\\n\\104\\001\\105\\001\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\111\\001\\\n\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\125\\001\\126\\001\\127\\001\\\n\\128\\001\\255\\255\\130\\001\\131\
\001\\132\\001\\255\\255\\134\\001\\135\\001\\\n\\136\\001\\137\\001\\138\\001\\139\\001\\140\\001\\141\\001\\142\\001\\143\\001\\\n\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\151\\001\\\n\\255\\255\\153\\001\\154\\001\\255\\255\\156\\001\\157\\001\\158\\001\\159\\001\\\n\\160\\001\\161\\001\\162\\001\\163\\001\\164\\001\\165\\001\\166\\001\\255\\255\\\n\\168\\001\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\\n\\008\\001\\255\\255\\255\\255\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\\n\\016\\001\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\022\\001\\023\\001\\\n\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\\n\\040\\001\\041\\001\\042\\001\\255\\255\\044\\001\\255\\255\\046\\001\\255\\255\\\n\\255\\255\\049\\001\\050\\001\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\\n\\056\\001\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\062\\001\\063\\001\\\n\\064\\001\\255\\255\\066\\001\\255\\255\\068\\001\\069\\001\\070\\001\\255\\255\\\n\\255\\255\\073\\001\\074\\001\\075\\001\\076\\001\\077\\001\\078\\001\\079\\001\\\n\\080\\001\\000\\000\\082\\001\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\\n\\088\\001\\089\\001\\090\\001\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\\n\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\\n\\104\\001\\105\\001\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\111\\001\\\n\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\125\\001\\126\\001\\127\\001\\\n\\128\\001\\255\\255\\130\\001\\131\\001\\132\\001\\255\\255\\134\\001\\135\\001\\\n\\136\\001\\137\\001\\138\\001\\139\\001\\140\\001\\141\\001\\142\\001\\143\\001\\\n\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\151\\001\\\n\\255\\255\\153\\001
\\154\\001\\155\\001\\255\\255\\157\\001\\158\\001\\159\\001\\\n\\160\\001\\161\\001\\162\\001\\163\\001\\164\\001\\165\\001\\166\\001\\167\\001\\\n\\168\\001\\255\\255\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\\n\\255\\255\\008\\001\\255\\255\\255\\255\\255\\255\\012\\001\\013\\001\\014\\001\\\n\\015\\001\\016\\001\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\022\\001\\\n\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\\n\\031\\001\\032\\001\\033\\001\\034\\001\\035\\001\\255\\255\\037\\001\\038\\001\\\n\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\044\\001\\255\\255\\046\\001\\\n\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\052\\001\\255\\255\\054\\001\\\n\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\066\\001\\255\\255\\068\\001\\069\\001\\255\\255\\\n\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\255\\255\\077\\001\\078\\001\\\n\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\084\\001\\085\\001\\255\\255\\\n\\255\\255\\088\\001\\089\\001\\090\\001\\091\\001\\092\\001\\093\\001\\094\\001\\\n\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\\n\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\\n\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\\n\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\125\\001\\126\\001\\\n\\127\\001\\128\\001\\255\\255\\130\\001\\131\\001\\132\\001\\255\\255\\134\\001\\\n\\135\\001\\136\\001\\137\\001\\138\\001\\139\\001\\140\\001\\141\\001\\142\\001\\\n\\143\\001\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\\n\\151\\001\\255\\255\\153\\001\\154\\001\\255\\255\\255\\255\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\162\\001\\163\\001\\164\\001\\165\\001\\166\\001\\\n\\255\\255\\168\\001\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\\n\\25
5\\255\\008\\001\\255\\255\\255\\255\\255\\255\\012\\001\\013\\001\\255\\255\\\n\\015\\001\\016\\001\\255\\255\\255\\255\\019\\001\\020\\001\\021\\001\\022\\001\\\n\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\\n\\031\\001\\032\\001\\033\\001\\034\\001\\035\\001\\255\\255\\037\\001\\038\\001\\\n\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\044\\001\\255\\255\\046\\001\\\n\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\052\\001\\255\\255\\054\\001\\\n\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\066\\001\\255\\255\\068\\001\\069\\001\\255\\255\\\n\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\255\\255\\077\\001\\078\\001\\\n\\079\\001\\080\\001\\000\\000\\082\\001\\083\\001\\084\\001\\085\\001\\255\\255\\\n\\255\\255\\088\\001\\089\\001\\090\\001\\091\\001\\092\\001\\093\\001\\094\\001\\\n\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\\n\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\\n\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\\n\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\125\\001\\126\\001\\\n\\127\\001\\128\\001\\255\\255\\130\\001\\131\\001\\132\\001\\255\\255\\134\\001\\\n\\135\\001\\136\\001\\137\\001\\138\\001\\139\\001\\140\\001\\141\\001\\255\\255\\\n\\143\\001\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\\n\\151\\001\\255\\255\\153\\001\\154\\001\\255\\255\\255\\255\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\162\\001\\163\\001\\164\\001\\165\\001\\166\\001\\\n\\255\\255\\168\\001\\255\\255\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\\n\\006\\001\\255\\255\\008\\001\\255\\255\\255\\255\\255\\255\\012\\001\\013\\001\\\n\\014\\001\\015\\001\\016\\001\\017\\001\\255\\255\\019\\001\\020\\001\\255\\255\\\n\\022\\001\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\
255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\\n\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\044\\001\\255\\255\\\n\\046\\001\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\052\\001\\255\\255\\\n\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\\n\\062\\001\\063\\001\\064\\001\\255\\255\\066\\001\\255\\255\\068\\001\\069\\001\\\n\\255\\255\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\255\\255\\077\\001\\\n\\078\\001\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\084\\001\\085\\001\\\n\\255\\255\\255\\255\\088\\001\\089\\001\\090\\001\\091\\001\\092\\001\\093\\001\\\n\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\\n\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\255\\255\\109\\001\\\n\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\\n\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\125\\001\\\n\\126\\001\\127\\001\\128\\001\\255\\255\\130\\001\\131\\001\\132\\001\\255\\255\\\n\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\139\\001\\140\\001\\141\\001\\\n\\142\\001\\143\\001\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\\n\\150\\001\\151\\001\\255\\255\\153\\001\\154\\001\\255\\255\\255\\255\\157\\001\\\n\\158\\001\\159\\001\\160\\001\\161\\001\\162\\001\\163\\001\\164\\001\\165\\001\\\n\\166\\001\\255\\255\\168\\001\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\\n\\006\\001\\255\\255\\008\\001\\255\\255\\255\\255\\255\\255\\012\\001\\013\\001\\\n\\014\\001\\015\\001\\016\\001\\017\\001\\255\\255\\019\\001\\020\\001\\255\\255\\\n\\022\\001\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\\n\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\044\\001\\255\\255\\\n\\046\\001\\255\\255\\255\\255\\049\\001\\050\\001\\255\
\255\\052\\001\\255\\255\\\n\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\\n\\062\\001\\063\\001\\064\\001\\255\\255\\066\\001\\255\\255\\068\\001\\069\\001\\\n\\255\\255\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\255\\255\\077\\001\\\n\\078\\001\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\084\\001\\085\\001\\\n\\255\\255\\255\\255\\088\\001\\089\\001\\090\\001\\091\\001\\092\\001\\093\\001\\\n\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\\n\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\255\\255\\109\\001\\\n\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\\n\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\125\\001\\\n\\126\\001\\127\\001\\128\\001\\255\\255\\130\\001\\131\\001\\132\\001\\255\\255\\\n\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\139\\001\\140\\001\\141\\001\\\n\\142\\001\\143\\001\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\\n\\150\\001\\151\\001\\255\\255\\153\\001\\154\\001\\255\\255\\255\\255\\157\\001\\\n\\158\\001\\159\\001\\160\\001\\161\\001\\162\\001\\163\\001\\164\\001\\165\\001\\\n\\166\\001\\255\\255\\168\\001\\255\\255\\001\\001\\002\\001\\003\\001\\004\\001\\\n\\255\\255\\006\\001\\255\\255\\008\\001\\255\\255\\255\\255\\255\\255\\012\\001\\\n\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\255\\255\\019\\001\\020\\001\\\n\\255\\255\\022\\001\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\\n\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\044\\001\\\n\\255\\255\\046\\001\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\052\\001\\\n\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\060\\001\\\n\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\066\\001\\255\\255\\068\\001\\\n\\069\\001\\255\\255\\255\\255\\255\\255
\\073\\001\\074\\001\\075\\001\\255\\255\\\n\\077\\001\\078\\001\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\255\\255\\088\\001\\089\\001\\090\\001\\091\\001\\092\\001\\\n\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\\n\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\255\\255\\\n\\109\\001\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\\n\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\\n\\125\\001\\126\\001\\127\\001\\128\\001\\255\\255\\130\\001\\131\\001\\132\\001\\\n\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\139\\001\\140\\001\\\n\\141\\001\\142\\001\\143\\001\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\\n\\255\\255\\150\\001\\151\\001\\255\\255\\153\\001\\154\\001\\255\\255\\255\\255\\\n\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\162\\001\\163\\001\\164\\001\\\n\\165\\001\\166\\001\\255\\255\\168\\001\\001\\001\\002\\001\\003\\001\\004\\001\\\n\\255\\255\\006\\001\\255\\255\\008\\001\\255\\255\\255\\255\\255\\255\\012\\001\\\n\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\255\\255\\019\\001\\020\\001\\\n\\255\\255\\022\\001\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\\n\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\044\\001\\\n\\255\\255\\046\\001\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\052\\001\\\n\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\060\\001\\\n\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\066\\001\\255\\255\\068\\001\\\n\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\255\\255\\\n\\077\\001\\078\\001\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\255\\255\\088\\001\\089\\001\\090\\001\\091\\001\\092\\001\\\n\\093\\001\\094\\001\\25
5\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\\n\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\255\\255\\\n\\109\\001\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\\n\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\\n\\125\\001\\126\\001\\127\\001\\128\\001\\255\\255\\130\\001\\131\\001\\132\\001\\\n\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\139\\001\\140\\001\\\n\\141\\001\\142\\001\\143\\001\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\\n\\255\\255\\150\\001\\151\\001\\255\\255\\153\\001\\154\\001\\255\\255\\255\\255\\\n\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\162\\001\\163\\001\\164\\001\\\n\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\001\\001\\002\\001\\003\\001\\\n\\004\\001\\255\\255\\006\\001\\255\\255\\008\\001\\255\\255\\255\\255\\255\\255\\\n\\012\\001\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\255\\255\\019\\001\\\n\\020\\001\\255\\255\\022\\001\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\\n\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\\n\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\\n\\044\\001\\255\\255\\046\\001\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\\n\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\\n\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\066\\001\\255\\255\\\n\\068\\001\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\\n\\255\\255\\077\\001\\078\\001\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\\n\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\089\\001\\090\\001\\091\\001\\\n\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\\n\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\\n\\255\\255\\109\\001\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\\n\\116\\0
01\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\124\\001\\125\\001\\126\\001\\127\\001\\128\\001\\255\\255\\130\\001\\131\\001\\\n\\132\\001\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\139\\001\\\n\\140\\001\\141\\001\\142\\001\\143\\001\\144\\001\\145\\001\\146\\001\\255\\255\\\n\\148\\001\\255\\255\\150\\001\\151\\001\\255\\255\\153\\001\\154\\001\\255\\255\\\n\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\162\\001\\163\\001\\\n\\164\\001\\165\\001\\166\\001\\255\\255\\168\\001\\001\\001\\002\\001\\003\\001\\\n\\004\\001\\255\\255\\006\\001\\255\\255\\008\\001\\255\\255\\255\\255\\255\\255\\\n\\012\\001\\013\\001\\255\\255\\015\\001\\016\\001\\255\\255\\255\\255\\019\\001\\\n\\020\\001\\255\\255\\022\\001\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\\n\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\\n\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\\n\\044\\001\\255\\255\\046\\001\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\\n\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\\n\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\066\\001\\255\\255\\\n\\068\\001\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\\n\\255\\255\\077\\001\\078\\001\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\\n\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\089\\001\\090\\001\\091\\001\\\n\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\\n\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\\n\\255\\255\\109\\001\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\\n\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\124\\001\\125\\001\\126\\001\\127\\001\\128\\001\\255\\255\\130\\001\\131\\001\\\n\\132\\001\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\139\
\001\\\n\\140\\001\\141\\001\\255\\255\\143\\001\\144\\001\\145\\001\\146\\001\\255\\255\\\n\\148\\001\\255\\255\\150\\001\\151\\001\\255\\255\\153\\001\\154\\001\\255\\255\\\n\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\162\\001\\163\\001\\\n\\164\\001\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\001\\001\\002\\001\\\n\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\008\\001\\255\\255\\255\\255\\\n\\255\\255\\012\\001\\013\\001\\255\\255\\015\\001\\016\\001\\255\\255\\255\\255\\\n\\019\\001\\020\\001\\255\\255\\022\\001\\023\\001\\024\\001\\025\\001\\255\\255\\\n\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\\n\\255\\255\\044\\001\\255\\255\\046\\001\\255\\255\\255\\255\\049\\001\\050\\001\\\n\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\\n\\059\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\066\\001\\\n\\255\\255\\068\\001\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\074\\001\\\n\\075\\001\\255\\255\\077\\001\\078\\001\\079\\001\\080\\001\\000\\000\\082\\001\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\089\\001\\090\\001\\\n\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\\n\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\\n\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\124\\001\\125\\001\\126\\001\\127\\001\\128\\001\\255\\255\\130\\001\\\n\\131\\001\\132\\001\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\\n\\139\\001\\140\\001\\141\\001\\255\\255\\143\\001\\144\\001\\145\\001\\146\\001\\\n\\255\\255\\148\\001\\255\\255\\150\\001\\151\\001\\255\\255\\153\\001\\154\\001\\\n\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001
\\161\\001\\162\\001\\\n\\163\\001\\164\\001\\165\\001\\166\\001\\255\\255\\168\\001\\001\\001\\002\\001\\\n\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\008\\001\\255\\255\\255\\255\\\n\\255\\255\\012\\001\\013\\001\\255\\255\\015\\001\\016\\001\\255\\255\\255\\255\\\n\\019\\001\\020\\001\\255\\255\\022\\001\\023\\001\\024\\001\\025\\001\\255\\255\\\n\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\\n\\255\\255\\044\\001\\255\\255\\046\\001\\255\\255\\255\\255\\049\\001\\050\\001\\\n\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\\n\\059\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\066\\001\\\n\\255\\255\\068\\001\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\074\\001\\\n\\075\\001\\255\\255\\077\\001\\078\\001\\079\\001\\080\\001\\000\\000\\082\\001\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\089\\001\\090\\001\\\n\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\\n\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\\n\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\124\\001\\125\\001\\126\\001\\127\\001\\128\\001\\255\\255\\130\\001\\\n\\131\\001\\132\\001\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\\n\\139\\001\\140\\001\\141\\001\\255\\255\\143\\001\\144\\001\\145\\001\\146\\001\\\n\\255\\255\\148\\001\\255\\255\\150\\001\\151\\001\\255\\255\\153\\001\\154\\001\\\n\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\162\\001\\\n\\163\\001\\164\\001\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\001\\001\\\n\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\008\\001\\255\\255\\\n\\255\\255\\255\\255\\012\\001\\013\\001\\25
5\\255\\015\\001\\016\\001\\255\\255\\\n\\255\\255\\019\\001\\020\\001\\255\\255\\022\\001\\023\\001\\024\\001\\025\\001\\\n\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\\n\\042\\001\\255\\255\\044\\001\\255\\255\\046\\001\\255\\255\\255\\255\\049\\001\\\n\\050\\001\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\\n\\058\\001\\059\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\\n\\066\\001\\255\\255\\068\\001\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\\n\\074\\001\\075\\001\\255\\255\\077\\001\\078\\001\\079\\001\\080\\001\\000\\000\\\n\\082\\001\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\089\\001\\\n\\090\\001\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\\n\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\\n\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\111\\001\\112\\001\\113\\001\\\n\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\124\\001\\125\\001\\126\\001\\127\\001\\128\\001\\255\\255\\\n\\130\\001\\131\\001\\132\\001\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\\n\\138\\001\\139\\001\\140\\001\\141\\001\\255\\255\\143\\001\\144\\001\\145\\001\\\n\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\151\\001\\255\\255\\153\\001\\\n\\154\\001\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\\n\\162\\001\\163\\001\\164\\001\\165\\001\\166\\001\\255\\255\\168\\001\\001\\001\\\n\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\008\\001\\255\\255\\\n\\255\\255\\255\\255\\012\\001\\013\\001\\255\\255\\015\\001\\016\\001\\255\\255\\\n\\255\\255\\019\\001\\020\\001\\255\\255\\022\\001\\023\\001\\024\\001\\025\\001\\\n\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\035\\001\\255\\2
55\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\\n\\042\\001\\255\\255\\044\\001\\255\\255\\046\\001\\255\\255\\255\\255\\049\\001\\\n\\050\\001\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\\n\\058\\001\\059\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\\n\\066\\001\\255\\255\\068\\001\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\\n\\074\\001\\075\\001\\255\\255\\077\\001\\078\\001\\079\\001\\080\\001\\000\\000\\\n\\082\\001\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\089\\001\\\n\\090\\001\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\\n\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\\n\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\111\\001\\112\\001\\113\\001\\\n\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\124\\001\\125\\001\\126\\001\\127\\001\\128\\001\\255\\255\\\n\\130\\001\\131\\001\\132\\001\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\\n\\138\\001\\139\\001\\140\\001\\141\\001\\255\\255\\143\\001\\144\\001\\145\\001\\\n\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\151\\001\\255\\255\\153\\001\\\n\\154\\001\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\\n\\162\\001\\163\\001\\164\\001\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\\n\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\008\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\255\\255\\\n\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\022\\001\\023\\001\\024\\001\\\n\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\\n\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\\n\\057\\001\\
058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\066\\001\\255\\255\\068\\001\\069\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\\n\\000\\000\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\088\\001\\\n\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\\n\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\\n\\105\\001\\255\\255\\255\\255\\255\\255\\255\\255\\110\\001\\111\\001\\112\\001\\\n\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\127\\001\\255\\255\\\n\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\\n\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\255\\255\\144\\001\\\n\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\\n\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\255\\255\\168\\001\\\n\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\008\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\255\\255\\\n\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\022\\001\\023\\001\\024\\001\\\n\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\\n\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\\n\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001
\\\n\\000\\000\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\088\\001\\\n\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\\n\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\\n\\105\\001\\255\\255\\255\\255\\255\\255\\255\\255\\110\\001\\111\\001\\112\\001\\\n\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\127\\001\\255\\255\\\n\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\\n\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\255\\255\\144\\001\\\n\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\\n\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\255\\255\\168\\001\\\n\\255\\255\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\\n\\008\\001\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\\n\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\022\\001\\023\\001\\\n\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\\n\\040\\001\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\\n\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\\n\\080\\001\\000\\000\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\088\\001\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\\n\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\10
2\\001\\103\\001\\\n\\104\\001\\105\\001\\255\\255\\255\\255\\255\\255\\255\\255\\110\\001\\111\\001\\\n\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\127\\001\\\n\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\\n\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\255\\255\\\n\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\\n\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\255\\255\\\n\\168\\001\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\\n\\008\\001\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\\n\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\022\\001\\023\\001\\\n\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\\n\\040\\001\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\\n\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\\n\\080\\001\\000\\000\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\088\\001\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\\n\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\\n\\104\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\110\\001\\111\\001\\\n\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\2
55\\125\\001\\126\\001\\127\\001\\\n\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\\n\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\255\\255\\\n\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\\n\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\255\\255\\\n\\168\\001\\255\\255\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\\n\\255\\255\\008\\001\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\\n\\015\\001\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\022\\001\\\n\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\\n\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\054\\001\\\n\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\\n\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\088\\001\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\\n\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\\n\\103\\001\\104\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\110\\001\\\n\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\\n\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\\n\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\\n\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\\n\\255\\255\\144\\001\\145\\001\\
146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\\n\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\\n\\255\\255\\168\\001\\001\\001\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\\n\\015\\001\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\022\\001\\\n\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\\n\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\054\\001\\\n\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\\n\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\088\\001\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\\n\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\\n\\103\\001\\104\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\110\\001\\\n\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\\n\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\\n\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\\n\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\\n\\255\\255\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\\n\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\\n\\255\\255\\168\
\001\\255\\255\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\\n\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\\n\\014\\001\\015\\001\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\\n\\022\\001\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\\n\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\\n\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\\n\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\\n\\255\\255\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\088\\001\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\\n\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\\n\\102\\001\\103\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\\n\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\\n\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\\n\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\\n\\142\\001\\255\\255\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\\n\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\\n\\166\\001\\255\\255\\168\\001\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\\n\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\\n\\014\\001\\015\\001\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\\n
\\255\\255\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\\n\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\\n\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\\n\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\\n\\255\\255\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\088\\001\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\\n\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\101\\001\\\n\\102\\001\\103\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\\n\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\\n\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\\n\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\\n\\142\\001\\255\\255\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\\n\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\\n\\166\\001\\255\\255\\168\\001\\255\\255\\255\\255\\002\\001\\003\\001\\004\\001\\\n\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\013\\001\\014\\001\\015\\001\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\\n\\255\\255\\255\\255\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\\n\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\255\\2
55\\255\\255\\\n\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\\n\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\\n\\255\\255\\255\\255\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\088\\001\\089\\001\\255\\255\\091\\001\\092\\001\\\n\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\\n\\101\\001\\102\\001\\103\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\\n\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\125\\001\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\\n\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\\n\\141\\001\\142\\001\\255\\255\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\\n\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\\n\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\002\\001\\003\\001\\004\\001\\\n\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\013\\001\\014\\001\\015\\001\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\\n\\255\\255\\255\\255\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\028\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\\n\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\255\\255\\\n\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\\n\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\
066\\001\\255\\255\\068\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\\n\\255\\255\\255\\255\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\\n\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\100\\001\\\n\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\\n\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\125\\001\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\\n\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\\n\\141\\001\\142\\001\\255\\255\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\\n\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\\n\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\255\\255\\002\\001\\003\\001\\\n\\004\\001\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\013\\001\\014\\001\\015\\001\\255\\255\\017\\001\\255\\255\\255\\255\\\n\\020\\001\\255\\255\\255\\255\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\\n\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\\n\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\\n\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\\n\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\\n\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\
\255\\255\\255\\089\\001\\255\\255\\091\\001\\\n\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\\n\\100\\001\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\110\\001\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\\n\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\125\\001\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\\n\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\\n\\255\\255\\141\\001\\142\\001\\255\\255\\144\\001\\145\\001\\146\\001\\255\\255\\\n\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\162\\001\\163\\001\\\n\\255\\255\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\002\\001\\003\\001\\\n\\004\\001\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\013\\001\\014\\001\\015\\001\\255\\255\\017\\001\\255\\255\\255\\255\\\n\\020\\001\\255\\255\\255\\255\\023\\001\\024\\001\\025\\001\\255\\255\\027\\001\\\n\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\\n\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\\n\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\\n\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\\n\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\000\\000\\082\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\\n\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\099\\001\\\n\\100\\001\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255
\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\\n\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\125\\001\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\\n\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\\n\\255\\255\\141\\001\\142\\001\\255\\255\\144\\001\\145\\001\\146\\001\\255\\255\\\n\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\162\\001\\163\\001\\\n\\255\\255\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\255\\255\\002\\001\\\n\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\255\\255\\017\\001\\255\\255\\\n\\255\\255\\020\\001\\255\\255\\255\\255\\023\\001\\024\\001\\025\\001\\255\\255\\\n\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\\n\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\\n\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\\n\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\\n\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\000\\000\\082\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\\n\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\\n\\099\\001\\100\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\\n\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\125\\001\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\\n\\13
1\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\\n\\255\\255\\255\\255\\141\\001\\142\\001\\255\\255\\144\\001\\145\\001\\146\\001\\\n\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\162\\001\\\n\\163\\001\\255\\255\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\002\\001\\\n\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\255\\255\\017\\001\\255\\255\\\n\\255\\255\\020\\001\\255\\255\\255\\255\\023\\001\\024\\001\\025\\001\\255\\255\\\n\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\040\\001\\041\\001\\042\\001\\\n\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\\n\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\\n\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\\n\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\000\\000\\082\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\\n\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\098\\001\\\n\\099\\001\\100\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\\n\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\125\\001\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\\n\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\\n\\255\\255\\255\\255\\141\\001\\142\\001\\255\\255\\144\\001\\145\\001\\146\\001\\\n\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\
154\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\162\\001\\\n\\163\\001\\255\\255\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\255\\255\\\n\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\255\\255\\017\\001\\\n\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\023\\001\\024\\001\\025\\001\\\n\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\255\\255\\041\\001\\\n\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\\n\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\000\\000\\\n\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\\n\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\\n\\098\\001\\099\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\\n\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\127\\001\\255\\255\\255\\255\\\n\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\\n\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\255\\255\\144\\001\\145\\001\\\n\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\\n\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\\n\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\
\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\255\\255\\017\\001\\\n\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\023\\001\\024\\001\\025\\001\\\n\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\255\\255\\041\\001\\\n\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\\n\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\000\\000\\\n\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\\n\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\\n\\098\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\\n\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\127\\001\\255\\255\\255\\255\\\n\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\\n\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\255\\255\\144\\001\\145\\001\\\n\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\\n\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\255\\255\\168\\001\\255\\255\\\n\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\255\\255\\\n\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\024\\001\\\n\\025\\001\\255\\255\\027\\001\\028\\001
\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\255\\255\\\n\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\\n\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\\n\\000\\000\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\\n\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\\n\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\127\\001\\255\\255\\\n\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\\n\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\255\\255\\144\\001\\\n\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\\n\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\255\\255\\168\\001\\\n\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\255\\255\\\n\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\024\\001\\\n\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\255\\255\\\n\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\25
5\\255\\052\\001\\255\\255\\054\\001\\055\\001\\056\\001\\\n\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\\n\\000\\000\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\\n\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\\n\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\127\\001\\255\\255\\\n\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\\n\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\255\\255\\144\\001\\\n\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\\n\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\255\\255\\168\\001\\\n\\255\\255\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\\n\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\025\\001\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\\n\\255\\255\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\\n\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\2
55\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\\n\\080\\001\\000\\000\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\\n\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\\n\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\127\\001\\\n\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\\n\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\255\\255\\\n\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\\n\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\255\\255\\\n\\168\\001\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\\n\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\\n\\255\\255\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\054\\001\\055\\001\\\n\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\066\\001\\000\\000\\068\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\\n\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\
\255\\\n\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\\n\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\127\\001\\\n\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\\n\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\255\\255\\\n\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\\n\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\255\\255\\\n\\168\\001\\255\\255\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\014\\001\\\n\\015\\001\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\028\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\\n\\255\\255\\255\\255\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\054\\001\\\n\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\000\\000\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\\n\\079\\001\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\\n\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001
\\117\\001\\118\\001\\\n\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\125\\001\\126\\001\\\n\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\\n\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\142\\001\\\n\\255\\255\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\\n\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\002\\001\\003\\001\\004\\001\\\n\\255\\255\\006\\001\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\\n\\013\\001\\168\\001\\015\\001\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\\n\\037\\001\\038\\001\\255\\255\\255\\255\\041\\001\\042\\001\\255\\255\\255\\255\\\n\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\\n\\255\\255\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\066\\001\\255\\255\\068\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\\n\\255\\255\\255\\255\\079\\001\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\\n\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\\n\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\\n\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\\n\\141\\001\\255\\255\\255\\255\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\\n\\255\\255\\150\\001\\255\\255\\255\\255\\25
5\\255\\154\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\\n\\165\\001\\166\\001\\255\\255\\168\\001\\002\\001\\003\\001\\004\\001\\255\\255\\\n\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\\n\\255\\255\\015\\001\\255\\255\\017\\001\\255\\255\\255\\255\\020\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\\n\\038\\001\\255\\255\\255\\255\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\\n\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\\n\\255\\255\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\000\\000\\066\\001\\255\\255\\068\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\\n\\255\\255\\079\\001\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\\n\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\\n\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\\n\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\\n\\255\\255\\255\\255\\144\\001\\145\\001\\146\\001\\255\\255\\148\\001\\255\\255\\\n\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\002\\001\\003\\001\\\n\\004\\001\\255\\255\\006\\001\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\\n\\166\\001\\013\\001\\168\\001\\015\\001\\255\\255\\017\\001\\255\\255\\255\\255\\\n\\020\\001\\255\\255\\255\\2
55\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\\n\\255\\255\\037\\001\\038\\001\\255\\255\\255\\255\\041\\001\\042\\001\\255\\255\\\n\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\066\\001\\255\\255\\\n\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\\n\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\255\\255\\082\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\\n\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\\n\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\\n\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\\n\\255\\255\\141\\001\\255\\255\\255\\255\\144\\001\\145\\001\\146\\001\\255\\255\\\n\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\162\\001\\163\\001\\\n\\255\\255\\165\\001\\166\\001\\255\\255\\168\\001\\002\\001\\003\\001\\004\\001\\\n\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\013\\001\\255\\255\\015\\001\\255\\255\\255\\255\\255\\255\\255\\255\\020\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\\n\\037\\001\\038\\001\\255\\255\\255\\255\\255\\255\\042\\001\\255\\255\\255\\255\\\n\\255\\255\\
046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\\n\\255\\255\\255\\255\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\\n\\255\\255\\255\\255\\079\\001\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\\n\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\\n\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\\n\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\\n\\141\\001\\255\\255\\255\\255\\144\\001\\255\\255\\146\\001\\255\\255\\148\\001\\\n\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\002\\001\\\n\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\162\\001\\163\\001\\255\\255\\\n\\165\\001\\166\\001\\013\\001\\168\\001\\015\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\255\\255\\255\\255\\042\\001\\\n\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\000\\000\\255\\255\\255\\255\\055\\001\\056\\001\\057\\001\\058\\001\\\n\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\\n\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\\n\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\255\\255\\082\\001
\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\\n\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\\n\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\\n\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\\n\\255\\255\\255\\255\\141\\001\\255\\255\\255\\255\\144\\001\\255\\255\\146\\001\\\n\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\\n\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\162\\001\\\n\\163\\001\\255\\255\\165\\001\\166\\001\\013\\001\\168\\001\\015\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\255\\255\\\n\\255\\255\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\055\\001\\056\\001\\\n\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\\n\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\\n\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\\n\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\11
9\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\126\\001\\127\\001\\255\\255\\\n\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\\n\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\255\\255\\255\\255\\144\\001\\\n\\255\\255\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\\n\\255\\255\\154\\001\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\\n\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\013\\001\\168\\001\\\n\\015\\001\\255\\255\\255\\255\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\\n\\255\\255\\255\\255\\255\\255\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\\n\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\\n\\079\\001\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\\n\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\\n\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\126\\001\\\n\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\\n\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\255\\255\\\n\\255\\255\\144\\001\\255\\255\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\\n\\255\\255\\255\\255\\255\\255\\154\\001\\255\\2
55\\002\\001\\003\\001\\004\\001\\\n\\255\\255\\006\\001\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\\n\\013\\001\\168\\001\\015\\001\\255\\255\\255\\255\\255\\255\\255\\255\\020\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\\n\\037\\001\\038\\001\\255\\255\\255\\255\\255\\255\\042\\001\\255\\255\\255\\255\\\n\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\\n\\255\\255\\255\\255\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\\n\\255\\255\\255\\255\\079\\001\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\\n\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\\n\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\\n\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\\n\\141\\001\\255\\255\\255\\255\\144\\001\\255\\255\\146\\001\\255\\255\\148\\001\\\n\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\002\\001\\\n\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\162\\001\\163\\001\\255\\255\\\n\\165\\001\\166\\001\\013\\001\\168\\001\\015\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\035\\001\\255\\255\\037\\001\\
038\\001\\255\\255\\255\\255\\255\\255\\042\\001\\\n\\255\\255\\255\\255\\255\\255\\046\\001\\000\\000\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\055\\001\\056\\001\\057\\001\\058\\001\\\n\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\\n\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\\n\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\255\\255\\082\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\\n\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\\n\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\\n\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\\n\\255\\255\\255\\255\\141\\001\\255\\255\\255\\255\\144\\001\\255\\255\\146\\001\\\n\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\\n\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\162\\001\\\n\\163\\001\\255\\255\\165\\001\\166\\001\\013\\001\\168\\001\\015\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\255\\255\\\n\\255\\255\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\000\\000\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\055\\001\\056\\001\\\n\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\074\
\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\\n\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\\n\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\\n\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\126\\001\\127\\001\\255\\255\\\n\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\\n\\137\\001\\138\\001\\255\\255\\255\\255\\141\\001\\255\\255\\255\\255\\144\\001\\\n\\255\\255\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\002\\001\\003\\001\\\n\\004\\001\\154\\001\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\162\\001\\163\\001\\015\\001\\165\\001\\166\\001\\255\\255\\168\\001\\\n\\020\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\\n\\255\\255\\037\\001\\038\\001\\255\\255\\255\\255\\255\\255\\042\\001\\255\\255\\\n\\255\\255\\255\\255\\046\\001\\000\\000\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\\n\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\\n\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\255\\255\\082\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\\n\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\\n
\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\\n\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\\n\\255\\255\\141\\001\\255\\255\\255\\255\\144\\001\\255\\255\\146\\001\\255\\255\\\n\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\\n\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\162\\001\\163\\001\\\n\\255\\255\\165\\001\\166\\001\\255\\255\\168\\001\\015\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\255\\255\\255\\255\\\n\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\000\\000\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\055\\001\\056\\001\\057\\001\\\n\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\255\\255\\\n\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\\n\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\\n\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\126\\001\\127\\001\\255\\255\\255\\255\\\n\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\\n\\138\\001\\255\\255\\255\\255\\141\\001\\255\\255\\255\\255\\144\\001\\255\\255\\\n\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\002\\001\\003\\0
01\\004\\001\\\n\\154\\001\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\162\\001\\163\\001\\015\\001\\165\\001\\166\\001\\255\\255\\168\\001\\020\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\\n\\037\\001\\038\\001\\255\\255\\255\\255\\255\\255\\042\\001\\255\\255\\255\\255\\\n\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\\n\\255\\255\\255\\255\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\\n\\255\\255\\255\\255\\079\\001\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\\n\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\\n\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\\n\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\255\\255\\255\\255\\\n\\141\\001\\255\\255\\255\\255\\144\\001\\255\\255\\146\\001\\255\\255\\148\\001\\\n\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\002\\001\\\n\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\162\\001\\163\\001\\255\\255\\\n\\165\\001\\166\\001\\013\\001\\168\\001\\015\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\035\\001\\255\\255\\255\\255\\038\\001\\255\\255\\
255\\255\\255\\255\\042\\001\\\n\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\000\\000\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\055\\001\\056\\001\\057\\001\\058\\001\\\n\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\\n\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\\n\\075\\001\\255\\255\\255\\255\\255\\255\\079\\001\\080\\001\\255\\255\\082\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\\n\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\\n\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\\n\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\137\\001\\138\\001\\\n\\255\\255\\255\\255\\141\\001\\255\\255\\255\\255\\144\\001\\255\\255\\146\\001\\\n\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\\n\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\006\\001\\255\\255\\162\\001\\\n\\163\\001\\255\\255\\165\\001\\166\\001\\013\\001\\168\\001\\015\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\027\\001\\255\\255\\255\\255\\255\\255\\000\\000\\255\\255\\\n\\255\\255\\255\\255\\035\\001\\255\\255\\037\\001\\038\\001\\255\\255\\255\\255\\\n\\255\\255\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\055\\001\\056\\001\\\n\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\074\\001\\075\\001\\255\
\255\\255\\255\\255\\255\\079\\001\\080\\001\\\n\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\\n\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\\n\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\126\\001\\127\\001\\255\\255\\\n\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\134\\001\\135\\001\\136\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\141\\001\\255\\255\\255\\255\\144\\001\\\n\\255\\255\\146\\001\\255\\255\\148\\001\\255\\255\\150\\001\\255\\255\\002\\001\\\n\\003\\001\\154\\001\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\162\\001\\163\\001\\014\\001\\165\\001\\166\\001\\017\\001\\168\\001\\\n\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\035\\001\\255\\255\\255\\255\\255\\255\\000\\000\\040\\001\\255\\255\\042\\001\\\n\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\055\\001\\056\\001\\057\\001\\058\\001\\\n\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\\n\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\\n\\075\\001\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\\n\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\\n\\115\\001\\116\\001
\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\125\\001\\126\\001\\127\\001\\255\\255\\255\\255\\130\\001\\\n\\131\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\002\\001\\255\\255\\141\\001\\255\\255\\006\\001\\255\\255\\255\\255\\146\\001\\\n\\255\\255\\255\\255\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\\n\\255\\255\\255\\255\\020\\001\\021\\001\\255\\255\\255\\255\\255\\255\\162\\001\\\n\\163\\001\\027\\001\\165\\001\\166\\001\\030\\001\\168\\001\\255\\255\\255\\255\\\n\\255\\255\\035\\001\\255\\255\\255\\255\\255\\255\\255\\255\\040\\001\\255\\255\\\n\\042\\001\\255\\255\\255\\255\\255\\255\\046\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\056\\001\\057\\001\\\n\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\\n\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\\n\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\\n\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\130\\001\\131\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\141\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\146\\001\\255\\255\\255\\255\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\\n\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\003\\001\\\n\\162\\001\\163\\001\\006\\001\\165\\001\\166\\001\\255\\255\\168\\001\\011\\001\\\n\\00
0\\000\\013\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\019\\001\\\n\\020\\001\\021\\001\\022\\001\\255\\255\\024\\001\\255\\255\\255\\255\\027\\001\\\n\\255\\255\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\040\\001\\255\\255\\042\\001\\255\\255\\\n\\044\\001\\255\\255\\046\\001\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\\n\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\066\\001\\255\\255\\\n\\068\\001\\069\\001\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\\n\\076\\001\\255\\255\\078\\001\\079\\001\\080\\001\\255\\255\\082\\001\\255\\255\\\n\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\089\\001\\090\\001\\091\\001\\\n\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\\n\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\124\\001\\125\\001\\126\\001\\127\\001\\128\\001\\255\\255\\130\\001\\131\\001\\\n\\132\\001\\255\\255\\000\\000\\255\\255\\136\\001\\137\\001\\255\\255\\139\\001\\\n\\140\\001\\141\\001\\142\\001\\143\\001\\255\\255\\145\\001\\146\\001\\255\\255\\\n\\255\\255\\255\\255\\150\\001\\151\\001\\002\\001\\153\\001\\154\\001\\255\\255\\\n\\006\\001\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\162\\001\\163\\001\\\n\\255\\255\\165\\001\\166\\001\\167\\001\\255\\255\\255\\255\\020\\001\\021\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\040\\001\\255\\255\\042\\001\\255\\255\\255\\255\\255\\255\\\n\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\
255\\255\\\n\\255\\255\\255\\255\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\\n\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\\n\\118\\001\\119\\001\\002\\001\\255\\255\\255\\255\\255\\255\\006\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\020\\001\\255\\255\\255\\255\\141\\001\\\n\\255\\255\\255\\255\\255\\255\\027\\001\\146\\001\\255\\255\\255\\255\\255\\255\\\n\\150\\001\\255\\255\\255\\255\\035\\001\\154\\001\\255\\255\\255\\255\\255\\255\\\n\\040\\001\\255\\255\\042\\001\\255\\255\\162\\001\\163\\001\\046\\001\\165\\001\\\n\\166\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\089\\001\\000\\000\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\\n\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\\n\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\002\\001\\255\
\255\\255\\255\\255\\255\\\n\\006\\001\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\255\\255\\013\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\141\\001\\020\\001\\255\\255\\\n\\255\\255\\255\\255\\146\\001\\255\\255\\255\\255\\027\\001\\150\\001\\255\\255\\\n\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\035\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\162\\001\\163\\001\\042\\001\\165\\001\\166\\001\\255\\255\\\n\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\068\\001\\000\\000\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\\n\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\\n\\118\\001\\119\\001\\255\\255\\255\\255\\255\\255\\255\\255\\002\\001\\255\\255\\\n\\255\\255\\255\\255\\006\\001\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\\n\\255\\255\\013\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\141\\001\\\n\\020\\001\\255\\255\\255\\255\\255\\255\\146\\001\\255\\255\\255\\255\\027\\001\\\n\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\035\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\162\\001\\163\\001\\042\\001\\165\\001\\\n\\166\\001\\255\\255\\046\\001\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\056\\001\\057\\001\\058\\001\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\\n\\068\\001\\255\\255\\255\\255\\255\\255
\\255\\255\\255\\255\\074\\001\\075\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\\n\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\002\\001\\255\\255\\255\\255\\255\\255\\\n\\006\\001\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\\n\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\255\\255\\020\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\130\\001\\131\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\255\\255\\\n\\255\\255\\141\\001\\255\\255\\255\\255\\042\\001\\255\\255\\146\\001\\255\\255\\\n\\046\\001\\000\\000\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\\n\\255\\255\\255\\255\\056\\001\\057\\001\\058\\001\\059\\001\\162\\001\\163\\001\\\n\\255\\255\\165\\001\\166\\001\\255\\255\\066\\001\\255\\255\\068\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\\n\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\002\\001\\255\\255\\255\\255\\255\\255\\006\\001\\255\\255\\255\\255\\\n\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\\n\\118\\001\\119\\001\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\027\\001\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\035\\001\\255\\255\\255\\255\\255\\255\\255\\255\\141\\001\\\n\\255\\255\\042\\001\\255\\255\\255\\255\\146\\001\\046\\001\\000\\000\\255\\255\\\n\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\056\\001\\\n\\057\\001\\058\\001\\05
9\\001\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\\n\\166\\001\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\\n\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\\n\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\\n\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\\n\\020\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\\n\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\141\\001\\255\\255\\042\\001\\255\\255\\\n\\255\\255\\146\\001\\046\\001\\000\\000\\255\\255\\150\\001\\255\\255\\255\\255\\\n\\255\\255\\154\\001\\255\\255\\255\\255\\056\\001\\057\\001\\058\\001\\059\\001\\\n\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\066\\001\\255\\255\\\n\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\\n\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\006\\001\\\n\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\\n\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\020\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\130\\001\\131\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\141\\001\\255\\255\\042\\001\\255\\255\\255\\255\\146\\001\\046\\001\\\n\\000\\0
00\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\\n\\255\\255\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\162\\001\\163\\001\\\n\\255\\255\\165\\001\\166\\001\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\\n\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\\n\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\\n\\119\\001\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\027\\001\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\035\\001\\255\\255\\255\\255\\255\\255\\255\\255\\141\\001\\255\\255\\\n\\042\\001\\255\\255\\255\\255\\146\\001\\046\\001\\255\\255\\255\\255\\150\\001\\\n\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\056\\001\\057\\001\\\n\\058\\001\\059\\001\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\\n\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\\n\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\\n\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\\n\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\255\\255\\020\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\\n\\130\\001\\131\\001\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\255\
\255\\\n\\255\\255\\255\\255\\255\\255\\141\\001\\255\\255\\042\\001\\255\\255\\255\\255\\\n\\146\\001\\046\\001\\255\\255\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\\n\\154\\001\\255\\255\\255\\255\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\\n\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\066\\001\\255\\255\\068\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\\n\\093\\001\\094\\001\\255\\255\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\006\\001\\255\\255\\\n\\255\\255\\255\\255\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\\n\\117\\001\\118\\001\\119\\001\\255\\255\\020\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\130\\001\\131\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\035\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\141\\001\\255\\255\\042\\001\\255\\255\\255\\255\\146\\001\\046\\001\\255\\255\\\n\\255\\255\\150\\001\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\\n\\056\\001\\057\\001\\058\\001\\059\\001\\255\\255\\162\\001\\163\\001\\255\\255\\\n\\165\\001\\166\\001\\066\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\074\\001\\075\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\093\\001\\094\\001\\255\\255\\\n\\096\\001\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\111\\001\\\n\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\119\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255
\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\130\\001\\131\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\141\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\146\\001\\255\\255\\255\\255\\255\\255\\150\\001\\255\\255\\\n\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\162\\001\\163\\001\\255\\255\\165\\001\\166\\001\\003\\001\\\n\\255\\255\\005\\001\\006\\001\\007\\001\\008\\001\\009\\001\\010\\001\\011\\001\\\n\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\018\\001\\019\\001\\\n\\255\\255\\255\\255\\022\\001\\023\\001\\255\\255\\025\\001\\026\\001\\255\\255\\\n\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\036\\001\\255\\255\\255\\255\\039\\001\\040\\001\\255\\255\\255\\255\\043\\001\\\n\\044\\001\\045\\001\\255\\255\\047\\001\\048\\001\\049\\001\\050\\001\\051\\001\\\n\\052\\001\\053\\001\\054\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\\n\\255\\255\\069\\001\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\\n\\076\\001\\077\\001\\078\\001\\255\\255\\080\\001\\081\\001\\255\\255\\255\\255\\\n\\084\\001\\085\\001\\086\\001\\087\\001\\088\\001\\255\\255\\090\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\098\\001\\099\\001\\\n\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\107\\001\\\n\\108\\001\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\122\\001\\255\\255\\\n\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\\n\\132\\001\\133\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\139\\001\\\n\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\147\\001\\\n\\255\\255\\149\\001\\255\\255\\151\\001\\15
2\\001\\153\\001\\255\\255\\255\\255\\\n\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\003\\001\\255\\255\\\n\\005\\001\\006\\001\\007\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\\n\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\018\\001\\019\\001\\255\\255\\\n\\255\\255\\022\\001\\023\\001\\255\\255\\025\\001\\026\\001\\255\\255\\028\\001\\\n\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\\n\\255\\255\\255\\255\\039\\001\\040\\001\\255\\255\\255\\255\\043\\001\\044\\001\\\n\\045\\001\\255\\255\\047\\001\\048\\001\\049\\001\\050\\001\\051\\001\\052\\001\\\n\\053\\001\\054\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\\n\\061\\001\\062\\001\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\\n\\069\\001\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\076\\001\\\n\\077\\001\\078\\001\\255\\255\\080\\001\\081\\001\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\086\\001\\087\\001\\088\\001\\255\\255\\090\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\098\\001\\099\\001\\100\\001\\\n\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\107\\001\\108\\001\\\n\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\122\\001\\255\\255\\124\\001\\\n\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\\n\\133\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\\n\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\147\\001\\255\\255\\\n\\149\\001\\255\\255\\151\\001\\152\\001\\153\\001\\255\\255\\255\\255\\255\\255\\\n\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\003\\001\\255\\255\\005\\001\\\n\\006\\001\\007\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\255\\255\\\n\\255\\255\\255\\255\\016\\001\\255\\255\\018\\001\\019\\001\\255\\255\\255\\255\\\n\\022\\001\\023\\001\\255\\2
55\\025\\001\\026\\001\\255\\255\\028\\001\\029\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\255\\255\\\n\\255\\255\\039\\001\\040\\001\\255\\255\\255\\255\\043\\001\\044\\001\\045\\001\\\n\\255\\255\\047\\001\\048\\001\\049\\001\\050\\001\\051\\001\\052\\001\\053\\001\\\n\\054\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\\n\\062\\001\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\069\\001\\\n\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\076\\001\\077\\001\\\n\\078\\001\\255\\255\\080\\001\\081\\001\\255\\255\\255\\255\\084\\001\\085\\001\\\n\\086\\001\\087\\001\\088\\001\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\098\\001\\099\\001\\100\\001\\101\\001\\\n\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\107\\001\\108\\001\\109\\001\\\n\\110\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\122\\001\\255\\255\\124\\001\\255\\255\\\n\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\133\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\\n\\255\\255\\143\\001\\255\\255\\255\\255\\003\\001\\147\\001\\255\\255\\149\\001\\\n\\255\\255\\151\\001\\152\\001\\153\\001\\011\\001\\255\\255\\255\\255\\157\\001\\\n\\158\\001\\159\\001\\160\\001\\161\\001\\019\\001\\255\\255\\255\\255\\022\\001\\\n\\023\\001\\255\\255\\025\\001\\255\\255\\255\\255\\028\\001\\029\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\040\\001\\255\\255\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\052\\001\\255\\255\\054\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\\n\\255\\255\\
255\\255\\073\\001\\255\\255\\255\\255\\076\\001\\255\\255\\078\\001\\\n\\255\\255\\255\\255\\081\\001\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\\n\\103\\001\\104\\001\\105\\001\\106\\001\\107\\001\\108\\001\\255\\255\\110\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\255\\255\\255\\255\\\n\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\\n\\143\\001\\255\\255\\255\\255\\003\\001\\255\\255\\255\\255\\149\\001\\255\\255\\\n\\151\\001\\255\\255\\153\\001\\011\\001\\255\\255\\255\\255\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\019\\001\\255\\255\\255\\255\\022\\001\\023\\001\\\n\\255\\255\\025\\001\\255\\255\\255\\255\\028\\001\\029\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\040\\001\\255\\255\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\049\\001\\050\\001\\255\\255\\052\\001\\255\\255\\054\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\\n\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\\n\\255\\255\\073\\001\\255\\255\\255\\255\\076\\001\\255\\255\\078\\001\\255\\255\\\n\\255\\255\\081\\001\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\\n\\104\\001\\105\\001\\106\\001\\107\\001\\108\\001\\255\\255\\110\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255
\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\\n\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\149\\001\\255\\255\\151\\001\\\n\\255\\255\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\\n\\160\\001\\161\\001\\005\\001\\006\\001\\007\\001\\008\\001\\009\\001\\010\\001\\\n\\011\\001\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\018\\001\\\n\\019\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\\n\\255\\255\\255\\255\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\036\\001\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\255\\255\\\n\\043\\001\\044\\001\\045\\001\\255\\255\\047\\001\\048\\001\\049\\001\\050\\001\\\n\\051\\001\\255\\255\\053\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\065\\001\\255\\255\\\n\\255\\255\\255\\255\\069\\001\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\\n\\075\\001\\076\\001\\077\\001\\078\\001\\255\\255\\080\\001\\081\\001\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\086\\001\\087\\001\\088\\001\\255\\255\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\122\\001\\\n\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\\n\\255\\255\\132\\001\\133\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\\n\\147\\001\\255\\255\\149\\001\\255\\255\\151\\001\\152\\001\\15
3\\001\\255\\255\\\n\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\005\\001\\\n\\006\\001\\007\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\255\\255\\\n\\255\\255\\255\\255\\016\\001\\255\\255\\018\\001\\019\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\255\\255\\029\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\255\\255\\\n\\255\\255\\039\\001\\255\\255\\255\\255\\255\\255\\043\\001\\044\\001\\045\\001\\\n\\255\\255\\047\\001\\048\\001\\049\\001\\050\\001\\051\\001\\255\\255\\053\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\\n\\062\\001\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\069\\001\\\n\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\075\\001\\076\\001\\077\\001\\\n\\078\\001\\255\\255\\080\\001\\081\\001\\255\\255\\255\\255\\084\\001\\085\\001\\\n\\086\\001\\087\\001\\088\\001\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\122\\001\\255\\255\\124\\001\\255\\255\\\n\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\133\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\\n\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\147\\001\\255\\255\\149\\001\\\n\\255\\255\\151\\001\\152\\001\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\\n\\158\\001\\159\\001\\160\\001\\161\\001\\012\\001\\255\\255\\255\\255\\255\\255\\\n\\016\\001\\255\\255\\255\\255\\019\\001\\255\\255\\021\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\030\\001\\031\\001\\\n\\032\\001\\033\\001\\034\\001\\255\\255\\255\\2
55\\255\\255\\255\\255\\255\\255\\\n\\040\\001\\255\\255\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\049\\001\\050\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\\n\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\\n\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\255\\255\\126\\001\\255\\255\\\n\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\137\\001\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\151\\001\\\n\\255\\255\\153\\001\\255\\255\\255\\255\\156\\001\\157\\001\\158\\001\\159\\001\\\n\\160\\001\\161\\001\\012\\001\\255\\255\\164\\001\\255\\255\\016\\001\\255\\255\\\n\\168\\001\\019\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\030\\001\\031\\001\\032\\001\\033\\001\\\n\\034\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\040\\001\\255\\255\\\n\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\\n\\050\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\\n\\255\\255\\255\\255\\255\\255\\
077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\124\\001\\255\\255\\126\\001\\255\\255\\128\\001\\255\\255\\\n\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\137\\001\\\n\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\151\\001\\255\\255\\153\\001\\\n\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\\n\\012\\001\\255\\255\\164\\001\\255\\255\\016\\001\\255\\255\\168\\001\\019\\001\\\n\\255\\255\\021\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\040\\001\\255\\255\\255\\255\\255\\255\\\n\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\\n\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\\n\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\
\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\124\\001\\255\\255\\126\\001\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\\n\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\137\\001\\255\\255\\139\\001\\\n\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\151\\001\\255\\255\\153\\001\\255\\255\\255\\255\\\n\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\012\\001\\255\\255\\\n\\164\\001\\255\\255\\016\\001\\255\\255\\168\\001\\019\\001\\255\\255\\021\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\040\\001\\255\\255\\255\\255\\255\\255\\044\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\\n\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\\n\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\\n\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\255\\255\\\n\\126\\001\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\137\\001\\255\\255\\139\\001\\140\\001\\255\\255\\\n\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\151\\001\\255\\255\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\\n
\\158\\001\\159\\001\\160\\001\\161\\001\\012\\001\\255\\255\\164\\001\\255\\255\\\n\\016\\001\\255\\255\\168\\001\\019\\001\\255\\255\\021\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\031\\001\\\n\\032\\001\\033\\001\\034\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\040\\001\\255\\255\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\049\\001\\050\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\\n\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\\n\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\255\\255\\126\\001\\255\\255\\\n\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\137\\001\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\151\\001\\\n\\255\\255\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\\n\\160\\001\\161\\001\\012\\001\\255\\255\\164\\001\\255\\255\\016\\001\\255\\255\\\n\\168\\001\\019\\001\\255\\255\\021\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\031\\001\\032\\001\\033\\001\\\n\\034\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\040\\001\\255\\255\\\n\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\255\\2
55\\049\\001\\\n\\050\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\\n\\255\\255\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\124\\001\\255\\255\\126\\001\\255\\255\\128\\001\\255\\255\\\n\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\137\\001\\\n\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\151\\001\\255\\255\\153\\001\\\n\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\\n\\012\\001\\255\\255\\164\\001\\255\\255\\016\\001\\255\\255\\168\\001\\019\\001\\\n\\255\\255\\021\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\040\\001\\255\\255\\255\\255\\255\\255\\\n\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\\n\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\
255\\255\\090\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\\n\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\124\\001\\255\\255\\126\\001\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\\n\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\137\\001\\255\\255\\139\\001\\\n\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\151\\001\\255\\255\\153\\001\\255\\255\\255\\255\\\n\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\012\\001\\255\\255\\\n\\164\\001\\255\\255\\016\\001\\255\\255\\168\\001\\019\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\040\\001\\255\\255\\255\\255\\255\\255\\044\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\\n\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\\n\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\\n\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\255\\255\\\n\\126\\001\\255\\255\\128\\001\\255\
\255\\255\\255\\255\\255\\132\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\137\\001\\255\\255\\139\\001\\140\\001\\255\\255\\\n\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\151\\001\\255\\255\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\\n\\158\\001\\159\\001\\160\\001\\161\\001\\012\\001\\255\\255\\164\\001\\255\\255\\\n\\016\\001\\255\\255\\168\\001\\019\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\031\\001\\\n\\032\\001\\033\\001\\034\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\040\\001\\255\\255\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\049\\001\\050\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\\n\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\\n\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\255\\255\\126\\001\\255\\255\\\n\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\137\\001\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\151\\001\\\n\\255\\255\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\\n\\160\\001\\161\\001\\012\\001\\255\\255\\164\\001\\255\\255\\016\\001\\255\\255\\\n\\168\\001\\019\\001
\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\031\\001\\032\\001\\033\\001\\\n\\034\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\040\\001\\255\\255\\\n\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\\n\\050\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\\n\\255\\255\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\124\\001\\255\\255\\126\\001\\255\\255\\128\\001\\255\\255\\\n\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\137\\001\\\n\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\151\\001\\255\\255\\153\\001\\\n\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\\n\\012\\001\\255\\255\\164\\001\\255\\255\\016\\001\\255\\255\\168\\001\\019\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\040\\001\\255\\255\\255\\255\\255\\255\\\n\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\06
0\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\\n\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\\n\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\124\\001\\255\\255\\126\\001\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\\n\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\137\\001\\255\\255\\139\\001\\\n\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\151\\001\\255\\255\\153\\001\\255\\255\\255\\255\\\n\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\012\\001\\255\\255\\\n\\164\\001\\255\\255\\016\\001\\255\\255\\168\\001\\019\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\040\\001\\255\\255\\255\\255\\255\\255\\044\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\\n\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\\n\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\\n\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\
255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\255\\255\\\n\\126\\001\\255\\255\\128\\001\\255\\255\\008\\001\\255\\255\\132\\001\\255\\255\\\n\\012\\001\\255\\255\\255\\255\\137\\001\\016\\001\\139\\001\\140\\001\\019\\001\\\n\\255\\255\\143\\001\\022\\001\\023\\001\\255\\255\\025\\001\\255\\255\\255\\255\\\n\\028\\001\\151\\001\\255\\255\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\\n\\158\\001\\159\\001\\160\\001\\161\\001\\040\\001\\255\\255\\164\\001\\255\\255\\\n\\044\\001\\255\\255\\168\\001\\255\\255\\255\\255\\255\\255\\050\\001\\255\\255\\\n\\052\\001\\255\\255\\054\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\\n\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\098\\001\\099\\001\\\n\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\107\\001\\\n\\108\\001\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\\n\\132\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\255\\255\\139\\001\\\n\\140\\001\\016\\001\\255\\255\\143\\001\\019\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\151\\001\\255\\255\\153\\001\\029\\001\\255\\255\\\n\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\043\\001\\044\
\001\\045\\001\\255\\255\\\n\\047\\001\\255\\255\\049\\001\\050\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\070\\001\\\n\\255\\255\\072\\001\\073\\001\\255\\255\\255\\255\\076\\001\\077\\001\\078\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\\n\\255\\255\\088\\001\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\122\\001\\255\\255\\124\\001\\255\\255\\255\\255\\\n\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\008\\001\\009\\001\\\n\\010\\001\\011\\001\\012\\001\\013\\001\\139\\001\\140\\001\\016\\001\\255\\255\\\n\\143\\001\\019\\001\\255\\255\\255\\255\\255\\255\\255\\255\\149\\001\\255\\255\\\n\\151\\001\\255\\255\\153\\001\\029\\001\\255\\255\\255\\255\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\043\\001\\044\\001\\045\\001\\255\\255\\047\\001\\255\\255\\049\\001\\\n\\050\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\069\\001\\070\\001\\255\\255\\255\\255\\073\\001\\\n\\255\\255\\255\\255\\076\\001\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\255\\255\\\n\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\106\\001\\255\\255\\255\\255\\109\\001
\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\122\\001\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\\n\\255\\255\\255\\255\\132\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\\n\\255\\255\\139\\001\\140\\001\\016\\001\\255\\255\\143\\001\\019\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\149\\001\\255\\255\\151\\001\\255\\255\\153\\001\\\n\\029\\001\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\043\\001\\044\\001\\\n\\045\\001\\255\\255\\047\\001\\255\\255\\049\\001\\050\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\\n\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\069\\001\\070\\001\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\076\\001\\\n\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\255\\255\\088\\001\\255\\255\\090\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\\n\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\122\\001\\255\\255\\124\\001\\\n\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\\n\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\255\\255\\139\\001\\140\\001\\\n\\016\\001\\255\\255\\143\\001\\019\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\149\\001\\255\\255\\151\\001\\255\\255\\153\\001\\029\\001\\255\\255\\255\\255\\\n\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\047\\001\\\n\\255\\255\\049\\001\\05
0\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\\n\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\070\\001\\255\\255\\\n\\255\\255\\073\\001\\255\\255\\255\\255\\076\\001\\077\\001\\078\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\\n\\088\\001\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\122\\001\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\\n\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\008\\001\\009\\001\\010\\001\\\n\\011\\001\\012\\001\\255\\255\\139\\001\\140\\001\\016\\001\\255\\255\\143\\001\\\n\\019\\001\\255\\255\\255\\255\\255\\255\\255\\255\\149\\001\\255\\255\\151\\001\\\n\\255\\255\\153\\001\\029\\001\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\\n\\160\\001\\161\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\050\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\069\\001\\070\\001\\255\\255\\255\\255\\073\\001\\255\\255\\\n\\255\\255\\076\\001\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\255\\255\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\2
55\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\122\\001\\\n\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\\n\\255\\255\\132\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\255\\255\\\n\\139\\001\\140\\001\\016\\001\\255\\255\\143\\001\\019\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\149\\001\\255\\255\\151\\001\\255\\255\\153\\001\\029\\001\\\n\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\044\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\049\\001\\050\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\\n\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\\n\\070\\001\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\076\\001\\077\\001\\\n\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\\n\\255\\255\\255\\255\\088\\001\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\122\\001\\255\\255\\124\\001\\255\\255\\\n\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\008\\001\\\n\\009\\001\\010\\001\\011\\001\\012\\001\\255\\255\\139\\001\\140\\001\\016\\001\\\n\\255\\255\\143\\001\\019\\001\\255\\255\\255\\255\\255\\255\\255\\255\\149\\001\\\n\\255\\255\\151\\001\\255\\255\\153\\001\\029\\001\\255\\255\\255\\255\\157\\001\\\n\\158\\001\\159\\001\\160\\001\\161\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\049\\001\\050\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\
\255\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\070\\001\\255\\255\\255\\255\\\n\\073\\001\\255\\255\\255\\255\\076\\001\\077\\001\\078\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\\n\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\008\\001\\009\\001\\010\\001\\\n\\011\\001\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\255\\255\\\n\\019\\001\\122\\001\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\\n\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\\n\\043\\001\\044\\001\\045\\001\\255\\255\\149\\001\\255\\255\\151\\001\\050\\001\\\n\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\\n\\161\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\\n\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\255\\255\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\\n\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\255\\255\\019\\001\\122\\001\\\n\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\\n\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255
\\043\\001\\044\\001\\\n\\045\\001\\255\\255\\149\\001\\255\\255\\151\\001\\050\\001\\153\\001\\255\\255\\\n\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\060\\001\\\n\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\\n\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\255\\255\\088\\001\\255\\255\\090\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\\n\\109\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\255\\255\\255\\255\\\n\\255\\255\\016\\001\\255\\255\\255\\255\\019\\001\\122\\001\\255\\255\\124\\001\\\n\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\\n\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\\n\\149\\001\\255\\255\\151\\001\\050\\001\\153\\001\\255\\255\\255\\255\\255\\255\\\n\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\\n\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\076\\001\\077\\001\\078\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\\n\\255\\255\\088\\001\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\008\\001\\\n\\009\\001\\010\\001\\011\\001\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\\n\\255\\255\\255\\255\\019\\001\\122\\001\\255\\255\\124\\001\\255\\255\\255\\255\\\n\\255\\255\\128\\001\\255\\255\\255\\255\\25
5\\255\\132\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\\n\\143\\001\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\149\\001\\255\\255\\\n\\151\\001\\050\\001\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\\n\\073\\001\\255\\255\\255\\255\\076\\001\\077\\001\\078\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\\n\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\008\\001\\009\\001\\010\\001\\\n\\011\\001\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\255\\255\\\n\\019\\001\\122\\001\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\\n\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\\n\\255\\255\\044\\001\\255\\255\\255\\255\\149\\001\\255\\255\\151\\001\\050\\001\\\n\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\\n\\161\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\\n\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\255\\255\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\\n\\255\\255\\255\\255\\255\\2
55\\016\\001\\255\\255\\255\\255\\019\\001\\122\\001\\\n\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\\n\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\044\\001\\\n\\255\\255\\255\\255\\149\\001\\255\\255\\151\\001\\050\\001\\153\\001\\255\\255\\\n\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\060\\001\\\n\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\\n\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\255\\255\\088\\001\\255\\255\\090\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\\n\\109\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\255\\255\\255\\255\\\n\\255\\255\\016\\001\\255\\255\\255\\255\\019\\001\\122\\001\\255\\255\\124\\001\\\n\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\\n\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\\n\\149\\001\\255\\255\\151\\001\\050\\001\\153\\001\\255\\255\\255\\255\\255\\255\\\n\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\\n\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\078\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\\n\\255\\255\\088\\001\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\
255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\008\\001\\\n\\009\\001\\010\\001\\011\\001\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\\n\\255\\255\\255\\255\\019\\001\\122\\001\\255\\255\\124\\001\\255\\255\\255\\255\\\n\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\\n\\143\\001\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\149\\001\\255\\255\\\n\\151\\001\\050\\001\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\076\\001\\077\\001\\078\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\088\\001\\\n\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\122\\001\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\\n\\255\\255\\012\\001\\255\\255\\132\\001\\255\\255\\016\\001\\255\\255\\255\\255\\\n\\019\\001\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\149\\001\\255\\255\\151\\001\\255\\255\\\n\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\\n\\161\\001\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\050\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255
\\\n\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\\n\\255\\255\\124\\001\\019\\001\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\\n\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\044\\001\\151\\001\\255\\255\\153\\001\\255\\255\\\n\\049\\001\\050\\001\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\255\\255\\\n\\255\\255\\164\\001\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\\n\\255\\255\\016\\001\\255\\255\\124\\001\\019\\001\\255\\255\\255\\255\\128\\001\\\n\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\029\\001\\255\\255\\\n\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\044\\001\\151\\001\\255\\255\\\n\\153\\001\\255\\255\\049\\001\\050\\001\\157\\001\\158\\001\\15
9\\001\\160\\001\\\n\\161\\001\\255\\255\\255\\255\\164\\001\\255\\255\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\070\\001\\\n\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\076\\001\\077\\001\\078\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\\n\\255\\255\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\\n\\255\\255\\255\\255\\019\\001\\255\\255\\255\\255\\124\\001\\255\\255\\255\\255\\\n\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\\n\\143\\001\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\149\\001\\255\\255\\\n\\151\\001\\050\\001\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\\n\\073\\001\\255\\255\\255\\255\\076\\001\\077\\001\\078\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\\n\\011\\001\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\255\\255\\\n\\019\\001\\255\\255\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\\n\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\139\\001\\140\\001\\255\\2
55\\255\\255\\143\\001\\255\\255\\\n\\255\\255\\044\\001\\255\\255\\255\\255\\149\\001\\255\\255\\151\\001\\050\\001\\\n\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\\n\\161\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\\n\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\011\\001\\012\\001\\\n\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\255\\255\\019\\001\\255\\255\\\n\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\\n\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\044\\001\\\n\\255\\255\\255\\255\\149\\001\\255\\255\\151\\001\\050\\001\\153\\001\\255\\255\\\n\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\060\\001\\\n\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\\n\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\\n\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\011\\001\\255\\255\\013\\001\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\\n\\019\\001\\255\\255\\255\\255\\
128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\\n\\255\\255\\255\\255\\029\\001\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\\n\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\149\\001\\044\\001\\151\\001\\255\\255\\153\\001\\255\\255\\049\\001\\050\\001\\\n\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\\n\\255\\255\\076\\001\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\011\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\124\\001\\019\\001\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\\n\\255\\255\\132\\001\\255\\255\\255\\255\\029\\001\\255\\255\\255\\255\\255\\255\\\n\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\044\\001\\151\\001\\255\\255\\153\\001\\255\\255\\\n\\049\\001\\050\\001\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\\n\\073\\001\\255\\255\\255\\255\\076\\001\\255\\255\\078\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\
\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\011\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\124\\001\\019\\001\\255\\255\\255\\255\\128\\001\\\n\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\029\\001\\136\\001\\\n\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\044\\001\\151\\001\\255\\255\\\n\\153\\001\\255\\255\\049\\001\\050\\001\\157\\001\\158\\001\\159\\001\\160\\001\\\n\\161\\001\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\\n\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\076\\001\\255\\255\\078\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\011\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\019\\001\\255\\255\\\n\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\\n\\029\\001\\136\\001\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\\n\\143\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\044\\001\\\n\\151\\001\\255\\255\\153\\001\\255\\255\\049\\001\\050\\001\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\\n\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\076\\001\\\n\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n
\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\011\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\\n\\019\\001\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\\n\\255\\255\\255\\255\\029\\001\\136\\001\\255\\255\\255\\255\\139\\001\\140\\001\\\n\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\044\\001\\151\\001\\255\\255\\153\\001\\255\\255\\049\\001\\050\\001\\\n\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\\n\\255\\255\\076\\001\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\011\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\124\\001\\019\\001\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\\n\\255\\255\\132\\001\\255\\255\\255\\255\\029\\001\\255\\255\\255\\255\\255\\255\\\n\\139\\001\\140\\001\\255\\255\\142\\001\\143\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\044\\001\\151\\001\\255\\255\\153\\001\\255\\255\\\n\\049\\001\\050\\001\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\063\\0
01\\064\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\\n\\073\\001\\255\\255\\255\\255\\076\\001\\255\\255\\078\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\011\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\124\\001\\019\\001\\255\\255\\255\\255\\128\\001\\\n\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\029\\001\\255\\255\\\n\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\044\\001\\151\\001\\255\\255\\\n\\153\\001\\255\\255\\049\\001\\050\\001\\157\\001\\158\\001\\159\\001\\160\\001\\\n\\161\\001\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\\n\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\076\\001\\255\\255\\078\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\011\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\124\\001\\019\\001\\255\\255\\\n\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\\n\\029\\001\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\\n\\143\\001\\255\\255\\255\\255\\255\\255\\255\\255\\
255\\255\\255\\255\\044\\001\\\n\\151\\001\\255\\255\\153\\001\\255\\255\\049\\001\\050\\001\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\\n\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\069\\001\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\076\\001\\\n\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\124\\001\\\n\\019\\001\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\\n\\255\\255\\255\\255\\029\\001\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\\n\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\044\\001\\151\\001\\255\\255\\153\\001\\255\\255\\049\\001\\050\\001\\\n\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\\n\\255\\255\\076\\001\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\012\\001\\\n\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\255\\255\\019\\001\\255\\255\\\n\\255\\255\\124\\001\\255\\255\\255\
\255\\255\\255\\128\\001\\255\\255\\255\\255\\\n\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\044\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\151\\001\\050\\001\\153\\001\\255\\255\\\n\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\060\\001\\\n\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\\n\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\\n\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\012\\001\\255\\255\\255\\255\\\n\\255\\255\\016\\001\\255\\255\\255\\255\\019\\001\\255\\255\\255\\255\\124\\001\\\n\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\\n\\255\\255\\255\\255\\255\\255\\136\\001\\255\\255\\255\\255\\139\\001\\140\\001\\\n\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\151\\001\\050\\001\\153\\001\\255\\255\\255\\255\\255\\255\\\n\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\078\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\\n\\255\\255\\255\\255
\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\\n\\255\\255\\255\\255\\019\\001\\255\\255\\255\\255\\124\\001\\255\\255\\255\\255\\\n\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\\n\\255\\255\\136\\001\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\\n\\143\\001\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\151\\001\\050\\001\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\255\\255\\255\\255\\\n\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\255\\255\\\n\\019\\001\\255\\255\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\\n\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\\n\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\151\\001\\050\\001\\\n\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\\n\\161\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\\n\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\25
5\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\012\\001\\\n\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\255\\255\\019\\001\\255\\255\\\n\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\\n\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\136\\001\\255\\255\\255\\255\\\n\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\044\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\151\\001\\050\\001\\153\\001\\255\\255\\\n\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\060\\001\\\n\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\\n\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\\n\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\012\\001\\255\\255\\255\\255\\\n\\255\\255\\016\\001\\255\\255\\255\\255\\019\\001\\255\\255\\255\\255\\124\\001\\\n\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\\n\\255\\255\\255\\255\\255\\255\\136\\001\\255\\255\\255\\255\\139\\001\\140\\001\\\n\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\151\\001\\050\\001\\153\\001\\255\\255\\255\\255\\255\\255\\\n\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\078\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\
255\\255\\\n\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\\n\\255\\255\\255\\255\\019\\001\\255\\255\\255\\255\\124\\001\\255\\255\\255\\255\\\n\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\\n\\255\\255\\136\\001\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\\n\\143\\001\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\151\\001\\050\\001\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\255\\255\\\n\\019\\001\\255\\255\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\\n\\255\\255\\255\\255\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\136\\001\\\n\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\\n\\255\\255\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\151\\001\\050\\001\\\n\\153\\001\\255\\255\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\\n\\161\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\
\255\\073\\001\\255\\255\\\n\\255\\255\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\012\\001\\\n\\255\\255\\255\\255\\255\\255\\016\\001\\255\\255\\255\\255\\019\\001\\255\\255\\\n\\255\\255\\124\\001\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\\n\\255\\255\\132\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\139\\001\\140\\001\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\044\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\151\\001\\050\\001\\153\\001\\255\\255\\\n\\255\\255\\255\\255\\157\\001\\158\\001\\159\\001\\160\\001\\161\\001\\060\\001\\\n\\061\\001\\062\\001\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\\n\\077\\001\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\\n\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\012\\001\\255\\255\\255\\255\\\n\\255\\255\\016\\001\\255\\255\\255\\255\\019\\001\\255\\255\\255\\255\\124\\001\\\n\\255\\255\\255\\255\\255\\255\\128\\001\\255\\255\\255\\255\\255\\255\\132\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\\n\\255\\255\\255\\255\\143\\001\\255\\255\\255\\255\\044\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\151\\001\\050\\001\\153\\001\\255\\255\\255\\255\\255\\255\\\n\\157\\001\\158\\001\\159\\001\\160\\001
\\161\\001\\060\\001\\061\\001\\062\\001\\\n\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\073\\001\\255\\255\\255\\255\\255\\255\\077\\001\\078\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\020\\001\\109\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\035\\001\\124\\001\\255\\255\\255\\255\\\n\\255\\255\\128\\001\\255\\255\\042\\001\\255\\255\\132\\001\\255\\255\\046\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\139\\001\\140\\001\\255\\255\\255\\255\\\n\\143\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\\n\\151\\001\\255\\255\\153\\001\\255\\255\\255\\255\\068\\001\\157\\001\\158\\001\\\n\\159\\001\\160\\001\\161\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\080\\001\\255\\255\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\089\\001\\255\\255\\255\\255\\092\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\097\\001\\255\\255\\255\\255\\255\\255\\022\\001\\023\\001\\\n\\255\\255\\025\\001\\255\\255\\255\\255\\028\\001\\255\\255\\255\\255\\003\\001\\\n\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\116\\001\\117\\001\\118\\001\\\n\\040\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\022\\001\\023\\001\\052\\001\\025\\001\\054\\001\\255\\255\\\n\\028\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\141\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\146\\001\\040\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\052\\001\\081\\001\\05
4\\001\\255\\255\\255\\255\\255\\255\\255\\255\\166\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\\n\\104\\001\\105\\001\\106\\001\\107\\001\\108\\001\\081\\001\\110\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\098\\001\\099\\001\\\n\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\107\\001\\\n\\108\\001\\255\\255\\110\\001\\022\\001\\023\\001\\255\\255\\025\\001\\255\\255\\\n\\255\\255\\028\\001\\255\\255\\255\\255\\003\\001\\149\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\040\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\022\\001\\\n\\023\\001\\052\\001\\025\\001\\054\\001\\255\\255\\028\\001\\255\\255\\255\\255\\\n\\255\\255\\149\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\040\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\081\\001\\054\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\098\\001\\\n\\099\\001\\100\\001\\101\\001\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\\n\\107\\001\\108\\001\\081\\001\\110\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\003\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\098\\001\\099\\001\\100\\001\\101\\001\\102\\001\\\n\\103\\001\\104\\001\\105\\001\\106\\001\\107\\001\\108\\001\\255\\255\\110\\001\\\n\\022\\001\\023\\001\\255\\255\\025\\001\\255\\255\\255\\255\\028\\001\\255\\255\\\n\\255\\255\\255\\255\\149\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\2
55\\255\\255\\040\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\052\\001\\255\\255\\\n\\054\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\149\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\081\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\098\\001\\099\\001\\100\\001\\101\\001\\\n\\102\\001\\103\\001\\104\\001\\105\\001\\106\\001\\107\\001\\108\\001\\255\\255\\\n\\110\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\149\\001\"\n\nlet yynames_const = \"\\\n  AMP\\000\\\n  AND\\000\\\n  AS\\000\\\n  ASSERT\\000\\\n  ASSUME\\000\\\n  ATTRIBUTES\\000\\\n  BACKTICK\\000\\\n  BACKTICK_AT\\000\\\n  BACKTICK_HASH\\000\\\n  BACKTICK_PERC\\000\\\n  BANG_LBRACE\\000\\\n  BAR\\000\\\n  BAR_RBRACE\\000\\\n  BAR_RBRACK\\000\\\n  BEGIN\\000\\\n  BY\\000\\\n  CALC\\000\\\n  CLASS\\000\\\n  COLON\\000\\\n  COLON_COLON\\000\\\n  COLON_EQUALS\\000\\\n  COMMA\\000\\\n  CONJUNCTION\\000\\\n  DECREASES\\000\\\n  DEFAULT\\000\\\n  DISJUNCTION\\000\\\n  DOLLAR\\000\\\n  DOT\\000\\\n  DOT_LBRACK\\000\\\n  DOT_LBRACK_BAR\\000\\\n  DOT_LENS_PAREN_LEFT\\000\\\n  DOT_LPAREN\\000\\\n  EFFECT\\000\\\n  ELIM\\000\\\n  ELSE\\000\\\n  END\\000\\\n  ENSURES\\000\\\n  EOF\\000\\\n  EQUALS\\000\\\n  EQUALTYPE\\000\\\n  EXCEPTION\\000\\\n  EXISTS\\000\\\n  FALSE\\000\\\n  
FORALL\\000\\\n  FRIEND\\000\\\n  FUN\\000\\\n  FUNCTION\\000\\\n  HASH\\000\\\n  IF\\000\\\n  IFF\\000\\\n  IMPLIES\\000\\\n  IN\\000\\\n  INCLUDE\\000\\\n  INLINE\\000\\\n  INLINE_FOR_EXTRACTION\\000\\\n  INSTANCE\\000\\\n  INTRO\\000\\\n  IRREDUCIBLE\\000\\\n  LARROW\\000\\\n  LAYERED_EFFECT\\000\\\n  LBRACE\\000\\\n  LBRACE_BAR\\000\\\n  LBRACE_COLON_PATTERN\\000\\\n  LBRACE_COLON_WELL_FOUNDED\\000\\\n  LBRACK\\000\\\n  LBRACK_AT\\000\\\n  LBRACK_AT_AT\\000\\\n  LBRACK_AT_AT_AT\\000\\\n  LBRACK_BAR\\000\\\n  LENS_PAREN_LEFT\\000\\\n  LENS_PAREN_RIGHT\\000\\\n  LOGIC\\000\\\n  LONG_LEFT_ARROW\\000\\\n  LPAREN\\000\\\n  LPAREN_RPAREN\\000\\\n  MATCH\\000\\\n  MINUS\\000\\\n  MODULE\\000\\\n  NEW\\000\\\n  NEW_EFFECT\\000\\\n  NOEQUALITY\\000\\\n  NOEXTRACT\\000\\\n  OF\\000\\\n  OPAQUE\\000\\\n  OPEN\\000\\\n  PERCENT_LBRACK\\000\\\n  PIPE_RIGHT\\000\\\n  POLYMONADIC_BIND\\000\\\n  POLYMONADIC_SUBCOMP\\000\\\n  PRAGMA_POP_OPTIONS\\000\\\n  PRAGMA_PRINT_EFFECTS_GRAPH\\000\\\n  PRAGMA_PUSH_OPTIONS\\000\\\n  PRAGMA_RESET_OPTIONS\\000\\\n  PRAGMA_RESTART_SOLVER\\000\\\n  PRAGMA_SET_OPTIONS\\000\\\n  PRIVATE\\000\\\n  QMARK\\000\\\n  QMARK_DOT\\000\\\n  QUOTE\\000\\\n  RANGE_OF\\000\\\n  RARROW\\000\\\n  RBRACE\\000\\\n  RBRACK\\000\\\n  REC\\000\\\n  REFLECTABLE\\000\\\n  REIFIABLE\\000\\\n  REIFY\\000\\\n  REQUIRES\\000\\\n  RETURNS\\000\\\n  RETURNS_EQ\\000\\\n  RPAREN\\000\\\n  SEMICOLON\\000\\\n  SET_RANGE_OF\\000\\\n  SPLICE\\000\\\n  SQUIGGLY_RARROW\\000\\\n  SUBKIND\\000\\\n  SUBTYPE\\000\\\n  SUB_EFFECT\\000\\\n  SYNTH\\000\\\n  THEN\\000\\\n  TOTAL\\000\\\n  TRUE\\000\\\n  TRY\\000\\\n  TYPE\\000\\\n  TYP_APP_GREATER\\000\\\n  TYP_APP_LESS\\000\\\n  UNDERSCORE\\000\\\n  UNFOLD\\000\\\n  UNFOLDABLE\\000\\\n  UNIV_HASH\\000\\\n  UNOPTEQUALITY\\000\\\n  VAL\\000\\\n  WHEN\\000\\\n  WITH\\000\\\n  \"\n\nlet yynames_block = \"\\\n  AND_OP\\000\\\n  CHAR\\000\\\n  IDENT\\000\\\n  IF_OP\\000\\\n  INT\\000\\\n  INT16\\000\\\n  INT32\\000\\\n  INT64\\000\\\n  
INT8\\000\\\n  LET\\000\\\n  LET_OP\\000\\\n  MATCH_OP\\000\\\n  NAME\\000\\\n  OPINFIX0a\\000\\\n  OPINFIX0b\\000\\\n  OPINFIX0c\\000\\\n  OPINFIX0d\\000\\\n  OPINFIX1\\000\\\n  OPINFIX2\\000\\\n  OPINFIX3\\000\\\n  OPINFIX4\\000\\\n  OPPREFIX\\000\\\n  OP_MIXFIX_ACCESS\\000\\\n  OP_MIXFIX_ASSIGNMENT\\000\\\n  RANGE\\000\\\n  REAL\\000\\\n  SEMICOLON_OP\\000\\\n  SIZET\\000\\\n  STRING\\000\\\n  TILDE\\000\\\n  TVAR\\000\\\n  UINT16\\000\\\n  UINT32\\000\\\n  UINT64\\000\\\n  UINT8\\000\\\n  \"\n\nlet yyact = [|\n  (fun _ -> failwith \"parser\")\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 240 \"parse.mly\"\n    (    ( None ))\n# 6707 \"parse.ml\"\n               : 'option___anonymous_0_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 242 \"parse.mly\"\n    (let (_1, t) = ((), _2) in\nlet x =                                                (t) in\n    ( Some x ))\n# 6716 \"parse.ml\"\n               : 'option___anonymous_0_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 248 \"parse.mly\"\n    (    ( None ))\n# 6722 \"parse.ml\"\n               : 'option___anonymous_1_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 250 \"parse.mly\"\n    (let (_1, t) = ((), _2) in\nlet x =                                                    (t) in\n    ( Some x ))\n# 6731 \"parse.ml\"\n               : 'option___anonymous_1_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 256 \"parse.mly\"\n    (    ( None ))\n# 6737 \"parse.ml\"\n               : 'option___anonymous_12_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'thunk2_typ_) in\n    Obj.repr(\n# 258 \"parse.mly\"\n    (let (_1, tactic) = ((), _2) in\nlet x =                                                                 (tactic) in\n    ( Some x ))\n# 6746 \"parse.ml\"\n               : 'option___anonymous_12_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 
264 \"parse.mly\"\n    (    ( None ))\n# 6752 \"parse.ml\"\n               : 'option___anonymous_13_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'noSeqTerm) in\n    Obj.repr(\n# 266 \"parse.mly\"\n    (let (_1, e, _3) = ((), _2, ()) in\nlet x =\n  let phi =                 ( {e with level=Formula} ) in\n                                               (phi)\nin\n    ( Some x ))\n# 6764 \"parse.ml\"\n               : 'option___anonymous_13_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 275 \"parse.mly\"\n    (    ( None ))\n# 6770 \"parse.ml\"\n               : 'option___anonymous_2_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 277 \"parse.mly\"\n    (let (_1, tm) = ((), _2) in\nlet x =                                                                    (tm) in\n    ( Some x ))\n# 6779 \"parse.ml\"\n               : 'option___anonymous_2_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 283 \"parse.mly\"\n    (    ( None ))\n# 6785 \"parse.ml\"\n               : 'option___anonymous_5_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'thunk_atomicTerm_) in\n    Obj.repr(\n# 285 \"parse.mly\"\n    (let (_1, tactic) = ((), _2) in\nlet x =                                                                       (tactic) in\n    ( Some x ))\n# 6794 \"parse.ml\"\n               : 'option___anonymous_5_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 291 \"parse.mly\"\n    (    ( None ))\n# 6800 \"parse.ml\"\n               : 'option___anonymous_6_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 293 \"parse.mly\"\n    (let (_1, i) = ((), _2) in\nlet x =                               (i) in\n    ( Some x ))\n# 6809 \"parse.ml\"\n               : 'option___anonymous_6_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 299 \"parse.mly\"\n    (   
 ( None ))\n# 6815 \"parse.ml\"\n               : 'option___anonymous_7_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 301 \"parse.mly\"\n    (let (_1, i) = ((), _2) in\nlet x =                               (i) in\n    ( Some x ))\n# 6824 \"parse.ml\"\n               : 'option___anonymous_7_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 307 \"parse.mly\"\n    (    ( None ))\n# 6830 \"parse.ml\"\n               : 'option___anonymous_8_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'thunk_typ_) in\n    Obj.repr(\n# 309 \"parse.mly\"\n    (let (_1, tactic) = ((), _2) in\nlet x =                                                                    (tactic) in\n    ( Some x ))\n# 6839 \"parse.ml\"\n               : 'option___anonymous_8_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 315 \"parse.mly\"\n    (    ( None ))\n# 6845 \"parse.ml\"\n               : 'option___anonymous_9_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'thunk_typ_) in\n    Obj.repr(\n# 317 \"parse.mly\"\n    (let (_1, tactic) = ((), _2) in\nlet x =                                                                      (tactic) in\n    ( Some x ))\n# 6854 \"parse.ml\"\n               : 'option___anonymous_9_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 323 \"parse.mly\"\n    (    ( None ))\n# 6860 \"parse.ml\"\n               : 'option_ascribeKind_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'ascribeKind) in\n    Obj.repr(\n# 325 \"parse.mly\"\n    (let x = _1 in\n    ( Some x ))\n# 6868 \"parse.ml\"\n               : 'option_ascribeKind_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 330 \"parse.mly\"\n    (    ( None ))\n# 6874 \"parse.ml\"\n               : 'option_ascribeTyp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'ascribeTyp) in\n    
Obj.repr(\n# 332 \"parse.mly\"\n    (let x = _1 in\n    ( Some x ))\n# 6882 \"parse.ml\"\n               : 'option_ascribeTyp_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 337 \"parse.mly\"\n    (    ( None ))\n# 6888 \"parse.ml\"\n               : 'option_constructorPayload_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constructorPayload) in\n    Obj.repr(\n# 339 \"parse.mly\"\n    (let x = _1 in\n    ( Some x ))\n# 6896 \"parse.ml\"\n               : 'option_constructorPayload_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 344 \"parse.mly\"\n    (    ( None ))\n# 6902 \"parse.ml\"\n               : 'option_fsTypeArgs_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fsTypeArgs) in\n    Obj.repr(\n# 346 \"parse.mly\"\n    (let x = _1 in\n    ( Some x ))\n# 6910 \"parse.ml\"\n               : 'option_fsTypeArgs_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 351 \"parse.mly\"\n    (    ( None ))\n# 6916 \"parse.ml\"\n               : 'option_match_returning_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'match_returning) in\n    Obj.repr(\n# 353 \"parse.mly\"\n    (let x = _1 in\n    ( Some x ))\n# 6924 \"parse.ml\"\n               : 'option_match_returning_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 358 \"parse.mly\"\n    (    ( None ))\n# 6930 \"parse.ml\"\n               : 'option_pair_hasSort_simpleTerm__))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'hasSort) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in\n    Obj.repr(\n# 360 \"parse.mly\"\n    (let (x, y) = (_1, _2) in\nlet x =     ( (x, y) ) in\n    ( Some x ))\n# 6940 \"parse.ml\"\n               : 'option_pair_hasSort_simpleTerm__))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 366 \"parse.mly\"\n    (    ( None ))\n# 6946 \"parse.ml\"\n               : 'option_string_))\n; (fun __caml_parser_env ->\n    let _1 
= (Parsing.peek_val __caml_parser_env 0 : 'string) in\n    Obj.repr(\n# 368 \"parse.mly\"\n    (let x = _1 in\n    ( Some x ))\n# 6954 \"parse.ml\"\n               : 'option_string_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 373 \"parse.mly\"\n    (    ( None ))\n# 6960 \"parse.ml\"\n               : 'option_term_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 375 \"parse.mly\"\n    (let x = _1 in\n    ( Some x ))\n# 6968 \"parse.ml\"\n               : 'option_term_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 380 \"parse.mly\"\n    (    ( false ))\n# 6974 \"parse.ml\"\n               : 'boption_SQUIGGLY_RARROW_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 382 \"parse.mly\"\n    (let _1 = () in\n    ( true ))\n# 6981 \"parse.ml\"\n               : 'boption_SQUIGGLY_RARROW_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 387 \"parse.mly\"\n    (    ( [] ))\n# 6987 \"parse.ml\"\n               : 'loption_separated_nonempty_list_COMMA_appTerm__))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_appTerm_) in\n    Obj.repr(\n# 389 \"parse.mly\"\n    (let x = _1 in\n    ( x ))\n# 6995 \"parse.ml\"\n               : 'loption_separated_nonempty_list_COMMA_appTerm__))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 394 \"parse.mly\"\n    (    ( [] ))\n# 7001 \"parse.ml\"\n               : 'loption_separated_nonempty_list_SEMICOLON_ident__))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_ident_) in\n    Obj.repr(\n# 396 \"parse.mly\"\n    (let x = _1 in\n    ( x ))\n# 7009 \"parse.ml\"\n               : 'loption_separated_nonempty_list_SEMICOLON_ident__))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 401 \"parse.mly\"\n    (    ( [] ))\n# 7015 \"parse.ml\"\n               : 'loption_separated_nonempty_list_SEMICOLON_tuplePattern__))\n; (fun 
__caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_tuplePattern_) in\n    Obj.repr(\n# 403 \"parse.mly\"\n    (let x = _1 in\n    ( x ))\n# 7023 \"parse.ml\"\n               : 'loption_separated_nonempty_list_SEMICOLON_tuplePattern__))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 408 \"parse.mly\"\n    (    ( [] ))\n# 7029 \"parse.ml\"\n               : 'list___anonymous_11_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'letoperatorbinding) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_11_) in\n    Obj.repr(\n# 410 \"parse.mly\"\n    (let (op, b, xs) = (_1, _2, _3) in\nlet x =\n  let op =               ( mk_ident (\"and\" ^ op, rhs parseState 1) ) in\n                                                                             ((op, b))\nin\n    ( x :: xs ))\n# 7043 \"parse.ml\"\n               : 'list___anonymous_11_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 419 \"parse.mly\"\n    (    ( [] ))\n# 7049 \"parse.ml\"\n               : 'list___anonymous_14_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'argTerm) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_14_) in\n    Obj.repr(\n# 421 \"parse.mly\"\n    (let (t, xs) = (_1, _2) in\nlet x =                               (t) in\n    ( x :: xs ))\n# 7059 \"parse.ml\"\n               : 'list___anonymous_14_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'recordExp) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_14_) in\n    Obj.repr(\n# 425 \"parse.mly\"\n    (let (_2, t, _4, xs) = ((), _2, (), _4) in\nlet x =\n  let h =          ( Nothing ) in\n                                                                            (h, t)\nin\n    ( x :: xs ))\n# 7072 \"parse.ml\"\n               : 
'list___anonymous_14_))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'recordExp) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_14_) in\n    Obj.repr(\n# 432 \"parse.mly\"\n    (let (_1, _2, t, _4, xs) = ((), (), _3, (), _5) in\nlet x =\n  let h =          ( Hash ) in\n                                                                            (h, t)\nin\n    ( x :: xs ))\n# 7085 \"parse.ml\"\n               : 'list___anonymous_14_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 441 \"parse.mly\"\n    (    ( [] ))\n# 7091 \"parse.ml\"\n               : 'list___anonymous_15_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'qlident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_15_) in\n    Obj.repr(\n# 443 \"parse.mly\"\n    (let (_1, id, xs) = ((), _2, _3) in\nlet x =                                                     (id) in\n    ( x :: xs ))\n# 7101 \"parse.ml\"\n               : 'list___anonymous_15_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 449 \"parse.mly\"\n    (    ( [] ))\n# 7107 \"parse.ml\"\n               : 'list___anonymous_4_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'binder) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_4_) in\n    Obj.repr(\n# 451 \"parse.mly\"\n    (let (b, xs) = (_1, _2) in\nlet x =                            ([b]) in\n    ( x :: xs ))\n# 7117 \"parse.ml\"\n               : 'list___anonymous_4_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'multiBinder) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_4_) in\n    Obj.repr(\n# 455 \"parse.mly\"\n    (let (bs, xs) = (_1, _2) in\nlet x =                                                   (bs) in\n    ( x :: xs ))\n# 7127 \"parse.ml\"\n               : 'list___anonymous_4_))\n; (fun __caml_parser_env ->\n    
Obj.repr(\n# 461 \"parse.mly\"\n    (    ( [] ))\n# 7133 \"parse.ml\"\n               : 'list_argTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'argTerm) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_argTerm_) in\n    Obj.repr(\n# 463 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7142 \"parse.ml\"\n               : 'list_argTerm_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 468 \"parse.mly\"\n    (    ( [] ))\n# 7148 \"parse.ml\"\n               : 'list_atomicTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'atomicTerm) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_atomicTerm_) in\n    Obj.repr(\n# 470 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7157 \"parse.ml\"\n               : 'list_atomicTerm_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 475 \"parse.mly\"\n    (    ( [] ))\n# 7163 \"parse.ml\"\n               : 'list_attr_letbinding_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'attr_letbinding) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_attr_letbinding_) in\n    Obj.repr(\n# 477 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7172 \"parse.ml\"\n               : 'list_attr_letbinding_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 482 \"parse.mly\"\n    (    ( [] ))\n# 7178 \"parse.ml\"\n               : 'list_calcStep_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'calcStep) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_calcStep_) in\n    Obj.repr(\n# 484 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7187 \"parse.ml\"\n               : 'list_calcStep_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 489 \"parse.mly\"\n    (    ( [] ))\n# 7193 \"parse.ml\"\n               : 'list_constructorDecl_))\n; (fun __caml_parser_env ->\n    let 
_1 = (Parsing.peek_val __caml_parser_env 1 : 'constructorDecl) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_constructorDecl_) in\n    Obj.repr(\n# 491 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7202 \"parse.ml\"\n               : 'list_constructorDecl_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 496 \"parse.mly\"\n    (    ( [] ))\n# 7208 \"parse.ml\"\n               : 'list_decl_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'decl) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_decl_) in\n    Obj.repr(\n# 498 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7217 \"parse.ml\"\n               : 'list_decl_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 503 \"parse.mly\"\n    (    ( [] ))\n# 7223 \"parse.ml\"\n               : 'list_decoration_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'decoration) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_decoration_) in\n    Obj.repr(\n# 505 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7232 \"parse.ml\"\n               : 'list_decoration_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 510 \"parse.mly\"\n    (    ( [] ))\n# 7238 \"parse.ml\"\n               : 'list_multiBinder_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'multiBinder) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_multiBinder_) in\n    Obj.repr(\n# 512 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7247 \"parse.ml\"\n               : 'list_multiBinder_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'aqualifiedWithAttrs_lident_) in\n    Obj.repr(\n# 517 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7255 \"parse.ml\"\n               : 'nonempty_list_aqualifiedWithAttrs_lident__))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val 
__caml_parser_env 1 : 'aqualifiedWithAttrs_lident_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_aqualifiedWithAttrs_lident__) in\n    Obj.repr(\n# 520 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7264 \"parse.ml\"\n               : 'nonempty_list_aqualifiedWithAttrs_lident__))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'aqualifiedWithAttrs_lidentOrUnderscore_) in\n    Obj.repr(\n# 525 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7272 \"parse.ml\"\n               : 'nonempty_list_aqualifiedWithAttrs_lidentOrUnderscore__))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'aqualifiedWithAttrs_lidentOrUnderscore_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_aqualifiedWithAttrs_lidentOrUnderscore__) in\n    Obj.repr(\n# 528 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7281 \"parse.ml\"\n               : 'nonempty_list_aqualifiedWithAttrs_lidentOrUnderscore__))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicPattern) in\n    Obj.repr(\n# 533 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7289 \"parse.ml\"\n               : 'nonempty_list_atomicPattern_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'atomicPattern) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_atomicPattern_) in\n    Obj.repr(\n# 536 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7298 \"parse.ml\"\n               : 'nonempty_list_atomicPattern_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 541 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7306 \"parse.ml\"\n               : 'nonempty_list_atomicTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'atomicTerm) in\n    let _2 = 
(Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_atomicTerm_) in\n    Obj.repr(\n# 544 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7315 \"parse.ml\"\n               : 'nonempty_list_atomicTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicUniverse) in\n    Obj.repr(\n# 549 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7323 \"parse.ml\"\n               : 'nonempty_list_atomicUniverse_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'atomicUniverse) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_atomicUniverse_) in\n    Obj.repr(\n# 552 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7332 \"parse.ml\"\n               : 'nonempty_list_atomicUniverse_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 557 \"parse.mly\"\n    (let (_1, e, _3) = ((), _2, ()) in\nlet x =                              ( mk_ident (\".()\", rhs parseState 1), e, rhs2 parseState 1 3 ) in\n    ( [ x ] ))\n# 7341 \"parse.ml\"\n               : 'nonempty_list_dotOperator_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 561 \"parse.mly\"\n    (let (_1, e, _3) = ((), _2, ()) in\nlet x =                              ( mk_ident (\".[]\", rhs parseState 1), e, rhs2 parseState 1 3 ) in\n    ( [ x ] ))\n# 7350 \"parse.ml\"\n               : 'nonempty_list_dotOperator_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 565 \"parse.mly\"\n    (let (_1, e, _3) = ((), _2, ()) in\nlet x =                                      ( mk_ident (\".[||]\", rhs parseState 1), e, rhs2 parseState 1 3 ) in\n    ( [ x ] ))\n# 7359 \"parse.ml\"\n               : 'nonempty_list_dotOperator_))\n; (fun __caml_parser_env ->\n    let 
_2 = (Parsing.peek_val __caml_parser_env 1 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 569 \"parse.mly\"\n    (let (_1, e, _3) = ((), _2, ()) in\nlet x =                                                 ( mk_ident (\".(||)\", rhs parseState 1), e, rhs2 parseState 1 3 ) in\n    ( [ x ] ))\n# 7368 \"parse.ml\"\n               : 'nonempty_list_dotOperator_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_dotOperator_) in\n    Obj.repr(\n# 573 \"parse.mly\"\n    (let (_1, e, _3, xs) = ((), _2, (), _4) in\nlet x =                              ( mk_ident (\".()\", rhs parseState 1), e, rhs2 parseState 1 3 ) in\n    ( x :: xs ))\n# 7378 \"parse.ml\"\n               : 'nonempty_list_dotOperator_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_dotOperator_) in\n    Obj.repr(\n# 577 \"parse.mly\"\n    (let (_1, e, _3, xs) = ((), _2, (), _4) in\nlet x =                              ( mk_ident (\".[]\", rhs parseState 1), e, rhs2 parseState 1 3 ) in\n    ( x :: xs ))\n# 7388 \"parse.ml\"\n               : 'nonempty_list_dotOperator_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_dotOperator_) in\n    Obj.repr(\n# 581 \"parse.mly\"\n    (let (_1, e, _3, xs) = ((), _2, (), _4) in\nlet x =                                      ( mk_ident (\".[||]\", rhs parseState 1), e, rhs2 parseState 1 3 ) in\n    ( x :: xs ))\n# 7398 \"parse.ml\"\n               : 'nonempty_list_dotOperator_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_dotOperator_) in\n    Obj.repr(\n# 
585 \"parse.mly\"\n    (let (_1, e, _3, xs) = ((), _2, (), _4) in\nlet x =                                                 ( mk_ident (\".(||)\", rhs parseState 1), e, rhs2 parseState 1 3 ) in\n    ( x :: xs ))\n# 7408 \"parse.ml\"\n               : 'nonempty_list_dotOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'patternOrMultibinder) in\n    Obj.repr(\n# 591 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7416 \"parse.ml\"\n               : 'nonempty_list_patternOrMultibinder_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'patternOrMultibinder) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_patternOrMultibinder_) in\n    Obj.repr(\n# 594 \"parse.mly\"\n    (let (x, xs) = (_1, _2) in\n    ( x :: xs ))\n# 7425 \"parse.ml\"\n               : 'nonempty_list_patternOrMultibinder_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'letbinding) in\n    Obj.repr(\n# 599 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7433 \"parse.ml\"\n               : 'separated_nonempty_list_AND_letbinding_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'letbinding) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_AND_letbinding_) in\n    Obj.repr(\n# 602 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7442 \"parse.ml\"\n               : 'separated_nonempty_list_AND_letbinding_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'typeDecl) in\n    Obj.repr(\n# 607 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7450 \"parse.ml\"\n               : 'separated_nonempty_list_AND_typeDecl_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typeDecl) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_AND_typeDecl_) in\n    Obj.repr(\n# 610 
\"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7459 \"parse.ml\"\n               : 'separated_nonempty_list_AND_typeDecl_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tuplePattern) in\n    Obj.repr(\n# 615 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7467 \"parse.ml\"\n               : 'separated_nonempty_list_BAR_tuplePattern_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tuplePattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_BAR_tuplePattern_) in\n    Obj.repr(\n# 618 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7476 \"parse.ml\"\n               : 'separated_nonempty_list_BAR_tuplePattern_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'appTerm) in\n    Obj.repr(\n# 623 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7484 \"parse.ml\"\n               : 'separated_nonempty_list_COMMA_appTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'appTerm) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_appTerm_) in\n    Obj.repr(\n# 626 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7493 \"parse.ml\"\n               : 'separated_nonempty_list_COMMA_appTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 631 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7501 \"parse.ml\"\n               : 'separated_nonempty_list_COMMA_atomicTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'atomicTerm) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_atomicTerm_) in\n    Obj.repr(\n# 634 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7510 \"parse.ml\"\n               : 
'separated_nonempty_list_COMMA_atomicTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constructorPattern) in\n    Obj.repr(\n# 639 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7518 \"parse.ml\"\n               : 'separated_nonempty_list_COMMA_constructorPattern_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constructorPattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_constructorPattern_) in\n    Obj.repr(\n# 642 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7527 \"parse.ml\"\n               : 'separated_nonempty_list_COMMA_constructorPattern_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmEq) in\n    Obj.repr(\n# 647 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7535 \"parse.ml\"\n               : 'separated_nonempty_list_COMMA_tmEq_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEq) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_tmEq_) in\n    Obj.repr(\n# 650 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7544 \"parse.ml\"\n               : 'separated_nonempty_list_COMMA_tmEq_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tvar) in\n    Obj.repr(\n# 655 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7552 \"parse.ml\"\n               : 'separated_nonempty_list_COMMA_tvar_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tvar) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_tvar_) in\n    Obj.repr(\n# 658 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7561 \"parse.ml\"\n               : 'separated_nonempty_list_COMMA_tvar_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val 
__caml_parser_env 0 : 'conjunctivePat) in\n    Obj.repr(\n# 663 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7569 \"parse.ml\"\n               : 'separated_nonempty_list_DISJUNCTION_conjunctivePat_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'conjunctivePat) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_DISJUNCTION_conjunctivePat_) in\n    Obj.repr(\n# 666 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7578 \"parse.ml\"\n               : 'separated_nonempty_list_DISJUNCTION_conjunctivePat_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'appTerm) in\n    Obj.repr(\n# 671 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7586 \"parse.ml\"\n               : 'separated_nonempty_list_SEMICOLON_appTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'appTerm) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_appTerm_) in\n    Obj.repr(\n# 674 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7595 \"parse.ml\"\n               : 'separated_nonempty_list_SEMICOLON_appTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'effectDecl) in\n    Obj.repr(\n# 679 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7603 \"parse.ml\"\n               : 'separated_nonempty_list_SEMICOLON_effectDecl_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'effectDecl) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_effectDecl_) in\n    Obj.repr(\n# 682 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7612 \"parse.ml\"\n               : 'separated_nonempty_list_SEMICOLON_effectDecl_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in\n    Obj.repr(\n# 
687 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7620 \"parse.ml\"\n               : 'separated_nonempty_list_SEMICOLON_ident_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'ident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_ident_) in\n    Obj.repr(\n# 690 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7629 \"parse.ml\"\n               : 'separated_nonempty_list_SEMICOLON_ident_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tuplePattern) in\n    Obj.repr(\n# 695 \"parse.mly\"\n    (let x = _1 in\n    ( [ x ] ))\n# 7637 \"parse.ml\"\n               : 'separated_nonempty_list_SEMICOLON_tuplePattern_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tuplePattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_tuplePattern_) in\n    Obj.repr(\n# 698 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n    ( x :: xs ))\n# 7646 \"parse.ml\"\n               : 'separated_nonempty_list_SEMICOLON_tuplePattern_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'list_decl_) in\n    Obj.repr(\n# 703 \"parse.mly\"\n    (let (decls, _2) = (_1, ()) in\n      (\n        as_frag decls\n      ))\n# 7656 \"parse.ml\"\n               : FStar_Parser_AST.inputFragment))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 710 \"parse.mly\"\n    (let _1 = () in\n        ( None ))\n# 7663 \"parse.ml\"\n               : FStar_Parser_AST.decl option))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'decl) in\n    Obj.repr(\n# 713 \"parse.mly\"\n    (let d = _1 in\n           ( Some d ))\n# 7671 \"parse.ml\"\n               : FStar_Parser_AST.decl option))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'string) in\n    Obj.repr(\n# 718 \"parse.mly\"\n  
  (let (_1, s) = ((), _2) in\n      ( SetOptions s ))\n# 7679 \"parse.ml\"\n               : 'pragma))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'option_string_) in\n    Obj.repr(\n# 721 \"parse.mly\"\n    (let (_1, s_opt) = ((), _2) in\n      ( ResetOptions s_opt ))\n# 7687 \"parse.ml\"\n               : 'pragma))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'option_string_) in\n    Obj.repr(\n# 724 \"parse.mly\"\n    (let (_1, s_opt) = ((), _2) in\n      ( PushOptions s_opt ))\n# 7695 \"parse.ml\"\n               : 'pragma))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 727 \"parse.mly\"\n    (let _1 = () in\n      ( PopOptions ))\n# 7702 \"parse.ml\"\n               : 'pragma))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 730 \"parse.mly\"\n    (let _1 = () in\n      ( RestartSolver ))\n# 7709 \"parse.ml\"\n               : 'pragma))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 733 \"parse.mly\"\n    (let _1 = () in\n      ( PrintEffectsGraph ))\n# 7716 \"parse.ml\"\n               : 'pragma))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'list_atomicTerm_) in\n    Obj.repr(\n# 738 \"parse.mly\"\n    (let (_1, x, _3) = ((), _2, ()) in\n      (\n        let _ =\n            match x with\n            | _::_::_ ->\n                  log_issue (lhs parseState) (Warning_DeprecatedAttributeSyntax,\n                                              old_attribute_syntax_warning)\n            | _ -> () in\n         x\n      ))\n# 7732 \"parse.ml\"\n               : 'attribute))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_list_SEMICOLON_noSeqTerm_) in\n    Obj.repr(\n# 749 \"parse.mly\"\n    (let (_1, l, _3) = ((), _2, ()) in\nlet x =                                                 ( l ) in\n      ( x ))\n# 7741 \"parse.ml\"\n               : 'attribute))\n; (fun __caml_parser_env ->\n    let _1 = 
(Parsing.peek_val __caml_parser_env 0 : 'attribute) in\n    Obj.repr(\n# 755 \"parse.mly\"\n    (let x = _1 in\n      ( DeclAttributes x ))\n# 7749 \"parse.ml\"\n               : 'decoration))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'qualifier) in\n    Obj.repr(\n# 758 \"parse.mly\"\n    (let x = _1 in\n      ( Qualifier x ))\n# 7757 \"parse.ml\"\n               : 'decoration))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'uident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 763 \"parse.mly\"\n    (let (_1, lid, _3, e) = ((), _2, (), _4) in\nlet phi =                 ( {e with level=Formula} ) in\n      ( mk_decl (Assume(lid, phi)) (rhs2 parseState 1 4) [ Qualifier Assumption ] ))\n# 7767 \"parse.ml\"\n               : 'decl))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'list_decoration_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'rawDecl) in\n    Obj.repr(\n# 767 \"parse.mly\"\n    (let (ds, decl) = (_1, _2) in\n      ( mk_decl decl (rhs parseState 2) ds ))\n# 7776 \"parse.ml\"\n               : 'decl))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'list_decoration_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typeclassDecl) in\n    Obj.repr(\n# 770 \"parse.mly\"\n    (let (ds, decl) = (_1, _2) in\n      ( let (decl, extra_attrs) = decl in\n        let d = mk_decl decl (rhs parseState 2) ds in\n        { d with attrs = extra_attrs @ d.attrs }\n      ))\n# 7788 \"parse.ml\"\n               : 'decl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typeDecl) in\n    Obj.repr(\n# 778 \"parse.mly\"\n    (let (_1, tcdef) = ((), _2) in\n      (\n        (* Only a single type decl allowed, but construct it the same as for multiple ones.\n         * Only difference is the `true` below marking that this a class 
so desugaring\n         * adds the needed %splice. *)\n        let d = Tycon (false, true, [tcdef]) in\n\n        (* No attrs yet, but perhaps we want a `class` attribute *)\n        (d, [])\n      ))\n# 7804 \"parse.ml\"\n               : 'typeclassDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'letqualifier) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'letbinding) in\n    Obj.repr(\n# 789 \"parse.mly\"\n    (let (_1, q, lb) = ((), _2, _3) in\n      (\n        (* Making a single letbinding *)\n        let r = rhs2 parseState 1 3 in\n        let lbs = focusLetBindings [lb] r in (* lbs is a singleton really *)\n        let d = TopLevelLet(q, lbs) in\n\n        (* Slapping a `tcinstance` attribute to it *)\n        let at = mk_term (Var tcinstance_lid) r Type_level in\n\n        (d, [at])\n      ))\n# 7823 \"parse.ml\"\n               : 'typeclassDecl))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pragma) in\n    Obj.repr(\n# 804 \"parse.mly\"\n    (let p = _1 in\n      ( Pragma p ))\n# 7831 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in\n    Obj.repr(\n# 807 \"parse.mly\"\n    (let (_1, uid) = ((), _2) in\n      ( Open uid ))\n# 7839 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in\n    Obj.repr(\n# 810 \"parse.mly\"\n    (let (_1, uid) = ((), _2) in\n      ( Friend uid ))\n# 7847 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in\n    Obj.repr(\n# 813 \"parse.mly\"\n    (let (_1, uid) = ((), _2) in\n      ( Include uid ))\n# 7855 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'uident) in\n    let _4 = (Parsing.peek_val 
__caml_parser_env 0 : 'quident) in\n    Obj.repr(\n# 816 \"parse.mly\"\n    (let (_1, uid1, _3, uid2) = ((), _2, (), _4) in\n      ( ModuleAbbrev(uid1, uid2) ))\n# 7864 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'qlident) in\n    Obj.repr(\n# 819 \"parse.mly\"\n    (let (_1, _2) = ((), _2) in\n      ( raise_error (Fatal_SyntaxError, \"Syntax error: expected a module name\") (rhs parseState 2) ))\n# 7872 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in\n    Obj.repr(\n# 822 \"parse.mly\"\n    (let (_1, uid) = ((), _2) in\n      (  TopLevelModule uid ))\n# 7880 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_AND_typeDecl_) in\n    Obj.repr(\n# 825 \"parse.mly\"\n    (let (_1, tcdefs) = ((), _2) in\n      ( Tycon (false, false, tcdefs) ))\n# 7888 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'uident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'typars) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 828 \"parse.mly\"\n    (let (_1, uid, tparams, _4, t) = ((), _2, _3, (), _5) in\n      ( Tycon(true, false, [(TyconAbbrev(uid, tparams, None, t))]) ))\n# 7898 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : bool) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'letqualifier) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_AND_letbinding_) in\n    Obj.repr(\n# 831 \"parse.mly\"\n    (let (_1, q, lbs) = (_1, _2, _3) in\n      (\n        let r = rhs2 parseState 1 3 in\n        let lbs = focusLetBindings lbs r in\n        if q <> Rec && List.length lbs <> 1\n        
then raise_error (Fatal_MultipleLetBinding, \"Unexpected multiple let-binding (Did you forget some rec qualifier ?)\") r;\n        TopLevelLet(q, lbs)\n      ))\n# 7914 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'constant) in\n    Obj.repr(\n# 840 \"parse.mly\"\n    (let (_1, c) = ((), _2) in\n      (\n        (* This is just to provide a better error than \"syntax error\" *)\n        raise_error (Fatal_SyntaxError, \"Syntax error: constants are not allowed in val declarations\") (rhs2 parseState 1 2)\n      ))\n# 7925 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : FStar_Ident.ident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'list_multiBinder_) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 846 \"parse.mly\"\n    (let (_1, id, bss, _4, t) = ((), _2, _3, (), _5) in\nlet lid =               ( id ) in\n      (\n        let t = match flatten bss with\n          | [] -> t\n          | bs -> mk_term (Product(bs, t)) (rhs2 parseState 3 5) Type_level\n        in Val(lid, t)\n      ))\n# 7941 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'list_multiBinder_) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 855 \"parse.mly\"\n    (let (_1, _1_inlined1, op, _3, bss, _4, t) = ((), (), _3, (), _5, (), _7) in\nlet lid =\n  let id =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n      (\n        let t = match flatten bss with\n          | [] -> t\n          | bs -> mk_term (Product(bs, t)) (rhs2 parseState 3 5) Type_level\n        in Val(lid, t)\n      ))\n# 7960 \"parse.ml\"\n               : 'rawDecl))\n; (fun 
__caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'binop_name) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'list_multiBinder_) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 867 \"parse.mly\"\n    (let (_1, _1_inlined1, op, _3, bss, _4, t) = ((), (), _3, (), _5, (), _7) in\nlet lid =\n  let id =     ( op ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n      (\n        let t = match flatten bss with\n          | [] -> t\n          | bs -> mk_term (Product(bs, t)) (rhs2 parseState 3 5) Type_level\n        in Val(lid, t)\n      ))\n# 7979 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'list_multiBinder_) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 879 \"parse.mly\"\n    (let (_1, _1_inlined1, op, _3, bss, _4, t) = ((), (), _3, (), _5, (), _7) in\nlet lid =\n  let id =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n      (\n        let t = match flatten bss with\n          | [] -> t\n          | bs -> mk_term (Product(bs, t)) (rhs2 parseState 3 5) Type_level\n        in Val(lid, t)\n      ))\n# 7998 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'list_multiBinder_) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 891 \"parse.mly\"\n    (let (_1, _1_inlined1, op, _3, bss, _4, t) = ((), (), _3, (), _5, (), _7) in\nlet lid =\n  let id =\n    let op =               ( mk_ident (\"and\" ^ op, rhs parseState 1) ) in\n                        (op)\n  in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), 
range_of_id id) )\nin\n      (\n        let t = match flatten bss with\n          | [] -> t\n          | bs -> mk_term (Product(bs, t)) (rhs2 parseState 3 5) Type_level\n        in Val(lid, t)\n      ))\n# 8020 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'list_multiBinder_) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 906 \"parse.mly\"\n    (let (_1, _1_inlined1, op, _3, bss, _4, t) = ((), (), _3, (), _5, (), _7) in\nlet lid =\n  let id =\n    let op =               ( mk_ident (\"let\" ^ op, rhs parseState 1) ) in\n                        (op)\n  in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n      (\n        let t = match flatten bss with\n          | [] -> t\n          | bs -> mk_term (Product(bs, t)) (rhs2 parseState 3 5) Type_level\n        in Val(lid, t)\n      ))\n# 8042 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'loption_separated_nonempty_list_SEMICOLON_ident__) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'thunk_atomicTerm_) in\n    Obj.repr(\n# 921 \"parse.mly\"\n    (let (_1, _2, xs, _4, t) = ((), (), _3, (), _5) in\nlet ids =     ( xs ) in\n      ( Splice (ids, t) ))\n# 8052 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'uident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_0_) in\n    Obj.repr(\n# 925 \"parse.mly\"\n    (let (_1, lid, t_opt) = ((), _2, _3) in\n      ( Exception(lid, t_opt) ))\n# 8061 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'newEffect) in\n    Obj.repr(\n# 928 \"parse.mly\"\n    (let (_1, ne) = ((), _2) in\n      ( NewEffect 
ne ))\n# 8069 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'effectDefinition) in\n    Obj.repr(\n# 931 \"parse.mly\"\n    (let (_1, ne) = ((), _2) in\n      ( LayeredEffect ne ))\n# 8077 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'layeredEffectDefinition) in\n    Obj.repr(\n# 934 \"parse.mly\"\n    (let (_1, ne) = ((), _2) in\n      ( LayeredEffect ne ))\n# 8085 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'subEffect) in\n    Obj.repr(\n# 937 \"parse.mly\"\n    (let (_1, se) = ((), _2) in\n      ( SubEffect se ))\n# 8093 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'polymonadic_bind) in\n    Obj.repr(\n# 940 \"parse.mly\"\n    (let (_1, b) = ((), _2) in\n      ( Polymonadic_bind b ))\n# 8101 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'polymonadic_subcomp) in\n    Obj.repr(\n# 943 \"parse.mly\"\n    (let (_1, c) = ((), _2) in\n      ( Polymonadic_subcomp c ))\n# 8109 \"parse.ml\"\n               : 'rawDecl))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'ident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'typars) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'option_ascribeKind_) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'typeDefinition) in\n    Obj.repr(\n# 948 \"parse.mly\"\n    (let (lid, tparams, ascr_opt, tcdef) = (_1, _2, _3, _4) in\n      ( tcdef lid tparams ascr_opt ))\n# 8120 \"parse.ml\"\n               : 'typeDecl))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tvarinsts) in\n    Obj.repr(\n# 953 \"parse.mly\"\n    (let x = _1 in\n                 
            ( x ))\n# 8128 \"parse.ml\"\n               : 'typars))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'binders) in\n    Obj.repr(\n# 956 \"parse.mly\"\n    (let x = _1 in\n                             ( x ))\n# 8136 \"parse.ml\"\n               : 'typars))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'separated_nonempty_list_COMMA_tvar_) in\n    Obj.repr(\n# 961 \"parse.mly\"\n    (let (_1, tvs, _3) = ((), _2, ()) in\n      ( map (fun tv -> mk_binder (TVariable(tv)) (range_of_id tv) Kind None) tvs ))\n# 8144 \"parse.ml\"\n               : 'tvarinsts))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 966 \"parse.mly\"\n    (      ( (fun id binders kopt -> check_id id; TyconAbstract(id, binders, kopt)) ))\n# 8150 \"parse.ml\"\n               : 'typeDefinition))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 968 \"parse.mly\"\n    (let (_1, t) = ((), _2) in\n      ( (fun id binders kopt ->  check_id id; TyconAbbrev(id, binders, kopt, t)) ))\n# 8158 \"parse.ml\"\n               : 'typeDefinition))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_nonempty_list_SEMICOLON_recordFieldDecl_) in\n    Obj.repr(\n# 971 \"parse.mly\"\n    (let (_1, _1_inlined1, record_field_decls, _3) = ((), (), _3, ()) in\nlet record_field_decls =     ( record_field_decls ) in\nlet attrs_opt =     ( None ) in\n      ( (fun id binders kopt -> check_id id; TyconRecord(id, binders, kopt, none_to_empty_list attrs_opt, record_field_decls)) ))\n# 8168 \"parse.ml\"\n               : 'typeDefinition))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_nonempty_list_SEMICOLON_recordFieldDecl_) in\n    Obj.repr(\n# 976 \"parse.mly\"\n    (let (_1, x, _1_inlined1, record_field_decls, _3) 
= ((), _2, (), _4, ()) in\nlet record_field_decls =     ( record_field_decls ) in\nlet attrs_opt =     ( Some x ) in\n      ( (fun id binders kopt -> check_id id; TyconRecord(id, binders, kopt, none_to_empty_list attrs_opt, record_field_decls)) ))\n# 8179 \"parse.ml\"\n               : 'typeDefinition))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_constructorDecl_) in\n    Obj.repr(\n# 981 \"parse.mly\"\n    (let (_1, ct_decls) = ((), _2) in\n      ( (fun id binders kopt -> check_id id; TyconVariant(id, binders, kopt, ct_decls)) ))\n# 8187 \"parse.ml\"\n               : 'typeDefinition))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'aqualifiedWithAttrs_lidentOrOperator_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 986 \"parse.mly\"\n    (let (qualified_lid, _2, t) = (_1, (), _3) in\n      (\n        let (qual, attrs), lid = qualified_lid in\n        (lid, qual, attrs, t)\n      ))\n# 8199 \"parse.ml\"\n               : 'recordFieldDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 994 \"parse.mly\"\n    (let (_1, t) = ((), _2) in\n                                                        (VpArbitrary  t))\n# 8207 \"parse.ml\"\n               : 'constructorPayload))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 997 \"parse.mly\"\n    (let (_1, t) = ((), _2) in\n                                                        (VpOfNotation t))\n# 8215 \"parse.ml\"\n               : 'constructorPayload))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'right_flexible_nonempty_list_SEMICOLON_recordFieldDecl_) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_1_) in\n    Obj.repr(\n# 1000 \"parse.mly\"\n    (let (_1, record_field_decls, _3, opt) = ((), _2, (), _4) in\nlet 
fields =     ( record_field_decls ) in\n                                                        (VpRecord(fields, opt)))\n# 8225 \"parse.ml\"\n               : 'constructorPayload))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'uident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'option_constructorPayload_) in\n    Obj.repr(\n# 1006 \"parse.mly\"\n    (let (_1, uid, payload) = ((), _2, _3) in\nlet attrs_opt =     ( None ) in\n    ( uid, payload, none_to_empty_list attrs_opt ))\n# 8235 \"parse.ml\"\n               : 'constructorDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'binderAttributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'uident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'option_constructorPayload_) in\n    Obj.repr(\n# 1010 \"parse.mly\"\n    (let (_1, x, uid, payload) = ((), _2, _3, _4) in\nlet attrs_opt =     ( Some x ) in\n    ( uid, payload, none_to_empty_list attrs_opt ))\n# 8246 \"parse.ml\"\n               : 'constructorDecl))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'letbinding) in\n    Obj.repr(\n# 1016 \"parse.mly\"\n    (let (_2, lb) = ((), _2) in\nlet attr =     ( None ) in\n    ( attr, lb ))\n# 8255 \"parse.ml\"\n               : 'attr_letbinding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'attribute) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'letbinding) in\n    Obj.repr(\n# 1020 \"parse.mly\"\n    (let (x, _2, lb) = (_1, (), _3) in\nlet attr =     ( Some x ) in\n    ( attr, lb ))\n# 8265 \"parse.ml\"\n               : 'attr_letbinding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tuplePattern) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'option_ascribeTyp_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_2_) in\n    Obj.repr(\n# 1026 
\"parse.mly\"\n    (let (pat, ascr_opt, tm) = (_1, _2, _3) in\n    (\n        let h tm\n\t  = ( ( match ascr_opt with\n              | None   -> pat\n              | Some t -> mk_pattern (PatAscribed(pat, t)) (rhs2 parseState 1 2) )\n\t    , tm)\n\tin\n\tmatch pat.pat, tm with\n        | _               , Some tm -> h tm\n        | PatVar (v, _, _), None    ->\n          let v = lid_of_ns_and_id [] v in\n          h (mk_term (Var v) (rhs parseState 1) Expr)\n        | _ -> raise_error (Fatal_SyntaxError, \"Syntax error: let-punning expects a name, not a pattern\") (rhs parseState 2)\n    ))\n# 8288 \"parse.ml\"\n               : 'letoperatorbinding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'maybeFocus) in\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : FStar_Ident.ident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'nonempty_list_patternOrMultibinder_) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'option_ascribeTyp_) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 1044 \"parse.mly\"\n    (let (focus_opt, id, lbp, ascr_opt, _5, tm) = (_1, _2, _3, _4, (), _6) in\nlet lid =               ( id ) in\n      (\n        let pat = mk_pattern (PatVar(lid, None, [])) (rhs parseState 2) in\n        let pat = mk_pattern (PatApp (pat, flatten lbp)) (rhs2 parseState 1 3) in\n        let pos = rhs2 parseState 1 6 in\n        match ascr_opt with\n        | None -> (focus_opt, (pat, tm))\n        | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm))\n      ))\n# 8308 \"parse.ml\"\n               : 'letbinding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'maybeFocus) in\n    let _3 = (Parsing.peek_val __caml_parser_env 5 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'nonempty_list_patternOrMultibinder_) in\n    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'option_ascribeTyp_) in\n   
 let _8 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 1055 \"parse.mly\"\n    (let (focus_opt, _1, op, _3, lbp, ascr_opt, _5, tm) = (_1, (), _3, (), _5, _6, (), _8) in\nlet lid =\n  let id =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n      (\n        let pat = mk_pattern (PatVar(lid, None, [])) (rhs parseState 2) in\n        let pat = mk_pattern (PatApp (pat, flatten lbp)) (rhs2 parseState 1 3) in\n        let pos = rhs2 parseState 1 6 in\n        match ascr_opt with\n        | None -> (focus_opt, (pat, tm))\n        | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm))\n      ))\n# 8331 \"parse.ml\"\n               : 'letbinding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'maybeFocus) in\n    let _3 = (Parsing.peek_val __caml_parser_env 5 : 'binop_name) in\n    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'nonempty_list_patternOrMultibinder_) in\n    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'option_ascribeTyp_) in\n    let _8 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 1069 \"parse.mly\"\n    (let (focus_opt, _1, op, _3, lbp, ascr_opt, _5, tm) = (_1, (), _3, (), _5, _6, (), _8) in\nlet lid =\n  let id =     ( op ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n      (\n        let pat = mk_pattern (PatVar(lid, None, [])) (rhs parseState 2) in\n        let pat = mk_pattern (PatApp (pat, flatten lbp)) (rhs2 parseState 1 3) in\n        let pos = rhs2 parseState 1 6 in\n        match ascr_opt with\n        | None -> (focus_opt, (pat, tm))\n        | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm))\n      ))\n# 8354 \"parse.ml\"\n               : 'letbinding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'maybeFocus) in\n    let _3 = 
(Parsing.peek_val __caml_parser_env 5 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'nonempty_list_patternOrMultibinder_) in\n    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'option_ascribeTyp_) in\n    let _8 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 1083 \"parse.mly\"\n    (let (focus_opt, _1, op, _3, lbp, ascr_opt, _5, tm) = (_1, (), _3, (), _5, _6, (), _8) in\nlet lid =\n  let id =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n      (\n        let pat = mk_pattern (PatVar(lid, None, [])) (rhs parseState 2) in\n        let pat = mk_pattern (PatApp (pat, flatten lbp)) (rhs2 parseState 1 3) in\n        let pos = rhs2 parseState 1 6 in\n        match ascr_opt with\n        | None -> (focus_opt, (pat, tm))\n        | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm))\n      ))\n# 8377 \"parse.ml\"\n               : 'letbinding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'maybeFocus) in\n    let _3 = (Parsing.peek_val __caml_parser_env 5 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'nonempty_list_patternOrMultibinder_) in\n    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'option_ascribeTyp_) in\n    let _8 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 1097 \"parse.mly\"\n    (let (focus_opt, _1, op, _3, lbp, ascr_opt, _5, tm) = (_1, (), _3, (), _5, _6, (), _8) in\nlet lid =\n  let id =\n    let op =               ( mk_ident (\"and\" ^ op, rhs parseState 1) ) in\n                        (op)\n  in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n      (\n        let pat = mk_pattern (PatVar(lid, None, [])) (rhs parseState 2) in\n        let pat = mk_pattern (PatApp (pat, flatten lbp)) (rhs2 parseState 1 3) in\n        let pos = rhs2 parseState 1 6 in\n   
     match ascr_opt with\n        | None -> (focus_opt, (pat, tm))\n        | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm))\n      ))\n# 8403 \"parse.ml\"\n               : 'letbinding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'maybeFocus) in\n    let _3 = (Parsing.peek_val __caml_parser_env 5 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'nonempty_list_patternOrMultibinder_) in\n    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'option_ascribeTyp_) in\n    let _8 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 1114 \"parse.mly\"\n    (let (focus_opt, _1, op, _3, lbp, ascr_opt, _5, tm) = (_1, (), _3, (), _5, _6, (), _8) in\nlet lid =\n  let id =\n    let op =               ( mk_ident (\"let\" ^ op, rhs parseState 1) ) in\n                        (op)\n  in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n      (\n        let pat = mk_pattern (PatVar(lid, None, [])) (rhs parseState 2) in\n        let pat = mk_pattern (PatApp (pat, flatten lbp)) (rhs2 parseState 1 3) in\n        let pos = rhs2 parseState 1 6 in\n        match ascr_opt with\n        | None -> (focus_opt, (pat, tm))\n        | Some t -> (focus_opt, (mk_pattern (PatAscribed(pat, t)) pos, tm))\n      ))\n# 8429 \"parse.ml\"\n               : 'letbinding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'maybeFocus) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'tuplePattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ascribeTyp) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 1131 \"parse.mly\"\n    (let (focus_opt, pat, ascr, _4, tm) = (_1, _2, _3, (), _5) in\n      ( focus_opt, (mk_pattern (PatAscribed(pat, ascr)) (rhs2 parseState 1 4), tm) ))\n# 8440 \"parse.ml\"\n               : 'letbinding))\n; (fun __caml_parser_env ->\n   
 let _1 = (Parsing.peek_val __caml_parser_env 3 : 'maybeFocus) in\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tuplePattern) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 1134 \"parse.mly\"\n    (let (focus_opt, pat, _3, tm) = (_1, _2, (), _4) in\n      ( focus_opt, (pat, tm) ))\n# 8450 \"parse.ml\"\n               : 'letbinding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'effectRedefinition) in\n    Obj.repr(\n# 1139 \"parse.mly\"\n    (let ed = _1 in\n    ( ed ))\n# 8458 \"parse.ml\"\n               : 'newEffect))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'effectDefinition) in\n    Obj.repr(\n# 1142 \"parse.mly\"\n    (let ed = _1 in\n    ( ed ))\n# 8466 \"parse.ml\"\n               : 'newEffect))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'uident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in\n    Obj.repr(\n# 1147 \"parse.mly\"\n    (let (lid, _2, t) = (_1, (), _3) in\n    ( RedefineEffect(lid, [], t) ))\n# 8475 \"parse.ml\"\n               : 'effectRedefinition))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 6 : 'uident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 5 : 'binders) in\n    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'tmArrow_tmNoEq_) in\n    let _7 = (Parsing.peek_val __caml_parser_env 1 : 'separated_nonempty_list_SEMICOLON_effectDecl_) in\n    Obj.repr(\n# 1152 \"parse.mly\"\n    (let (_1, lid, bs, _4, typ, _6, eds, _8) = ((), _2, _3, (), _5, (), _7, ()) in\n    ( DefineEffect(lid, bs, typ, eds) ))\n# 8486 \"parse.ml\"\n               : 'effectDefinition))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'uident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'binders) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'tmNoEq) in\n    
Obj.repr(\n# 1157 \"parse.mly\"\n    (let (_1, lid, bs, _4, r, _6) = ((), _2, _3, (), _5, ()) in\n    (\n      let typ =  (* bs -> Effect *)\n        let first_b, last_b =\n          match bs with\n          | [] ->\n             raise_error (Fatal_SyntaxError,\n                          \"Syntax error: unexpected empty binders list in the layered effect definition\")\n                         (range_of_id lid)\n          | _ -> hd bs, last bs in\n        let r = union_ranges first_b.brange last_b.brange in\n        mk_term (Product (bs, mk_term (Name (lid_of_str \"Effect\")) r Type_level)) r Type_level in\n      let rec decls (r:term) =\n        match r.tm with\n        | Paren r -> decls r\n        | Record (None, flds) ->\n           flds |> List.map (fun (lid, t) ->\n                              mk_decl (Tycon (false,\n                                              false,\n                                              [TyconAbbrev (ident_of_lid lid, [], None, t)]))\n                                      t.range [])\n        | _ ->\n           raise_error (Fatal_SyntaxError,\n                        \"Syntax error: layered effect combinators should be declared as a record\")\n                       r.range in\n      DefineEffect (lid, [], typ, decls r) ))\n# 8520 \"parse.ml\"\n               : 'layeredEffectDefinition))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : FStar_Ident.ident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'binders) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in\n    Obj.repr(\n# 1186 \"parse.mly\"\n    (let (lid, action_params, _3, t) = (_1, _2, (), _4) in\n    ( mk_decl (Tycon (false, false, [TyconAbbrev(lid, action_params, None, t)])) (rhs2 parseState 1 3) [] ))\n# 8530 \"parse.ml\"\n               : 'effectDecl))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'quident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 
'quident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in\n    Obj.repr(\n# 1191 \"parse.mly\"\n    (let (src_eff, _2, tgt_eff, _4, lift) = (_1, (), _3, (), _5) in\n      ( { msource = src_eff; mdest = tgt_eff; lift_op = NonReifiableLift lift; braced=false } ))\n# 8540 \"parse.ml\"\n               : 'subEffect))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'quident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 5 : 'quident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _7 = (Parsing.peek_val __caml_parser_env 1 : 'simpleTerm) in\n    Obj.repr(\n# 1194 \"parse.mly\"\n    (let (src_eff, _2, tgt_eff, _4, x, _2_inlined1, y, _7) = (_1, (), _3, (), _5, (), _7, ()) in\nlet lift2_opt =     ( None ) in\nlet lift1 =     ( (x, y) ) in\n     (\n       match lift2_opt with\n       | None ->\n          begin match lift1 with\n          | (\"lift\", lift) ->\n             { msource = src_eff; mdest = tgt_eff; lift_op = LiftForFree lift; braced=true }\n          | (\"lift_wp\", lift_wp) ->\n             { msource = src_eff; mdest = tgt_eff; lift_op = NonReifiableLift lift_wp; braced=true }\n          | _ ->\n             raise_error (Fatal_UnexpectedIdentifier, \"Unexpected identifier; expected {'lift', and possibly 'lift_wp'}\") (lhs parseState)\n          end\n       | Some (id2, tm2) ->\n          let (id1, tm1) = lift1 in\n          let lift, lift_wp = match (id1, id2) with\n                  | \"lift_wp\", \"lift\" -> tm1, tm2\n                  | \"lift\", \"lift_wp\" -> tm2, tm1\n                  | _ -> raise_error (Fatal_UnexpectedIdentifier, \"Unexpected identifier; expected {'lift', 'lift_wp'}\") (lhs parseState)\n          in\n          { msource = src_eff; mdest = tgt_eff; lift_op = ReifiableLift (lift, lift_wp); braced=true }\n     ))\n# 8572 \"parse.ml\"\n               : 'subEffect))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 11 
: 'quident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 9 : 'quident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 7 : string) in\n    let _7 = (Parsing.peek_val __caml_parser_env 5 : 'simpleTerm) in\n    let _9 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _11 = (Parsing.peek_val __caml_parser_env 1 : 'simpleTerm) in\n    Obj.repr(\n# 1218 \"parse.mly\"\n    (let (src_eff, _2, tgt_eff, _4, x, _2_inlined1, y, _1, id, _2_inlined2, y_inlined1, _7) = (_1, (), _3, (), _5, (), _7, (), _9, (), _11, ()) in\nlet lift2_opt =\n  let y = y_inlined1 in\n  let x =\n    let x =                                                           (id) in\n        ( (x, y) )\n  in\n      ( Some x )\nin\nlet lift1 =     ( (x, y) ) in\n     (\n       match lift2_opt with\n       | None ->\n          begin match lift1 with\n          | (\"lift\", lift) ->\n             { msource = src_eff; mdest = tgt_eff; lift_op = LiftForFree lift; braced=true }\n          | (\"lift_wp\", lift_wp) ->\n             { msource = src_eff; mdest = tgt_eff; lift_op = NonReifiableLift lift_wp; braced=true }\n          | _ ->\n             raise_error (Fatal_UnexpectedIdentifier, \"Unexpected identifier; expected {'lift', and possibly 'lift_wp'}\") (lhs parseState)\n          end\n       | Some (id2, tm2) ->\n          let (id1, tm1) = lift1 in\n          let lift, lift_wp = match (id1, id2) with\n                  | \"lift_wp\", \"lift\" -> tm1, tm2\n                  | \"lift\", \"lift_wp\" -> tm2, tm1\n                  | _ -> raise_error (Fatal_UnexpectedIdentifier, \"Unexpected identifier; expected {'lift', 'lift_wp'}\") (lhs parseState)\n          in\n          { msource = src_eff; mdest = tgt_eff; lift_op = ReifiableLift (lift, lift_wp); braced=true }\n     ))\n# 8613 \"parse.ml\"\n               : 'subEffect))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 7 : 'quident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 5 : 'quident) in\n    
let _7 = (Parsing.peek_val __caml_parser_env 2 : 'quident) in\n    let _9 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in\n    Obj.repr(\n# 1251 \"parse.mly\"\n    (let (_1, m_eff, _3, n_eff, _5, _6, p_eff, _8, bind) = ((), _2, (), _4, (), (), _7, (), _9) in\n      ( (m_eff, n_eff, p_eff, bind) ))\n# 8624 \"parse.ml\"\n               : 'polymonadic_bind))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'quident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'quident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in\n    Obj.repr(\n# 1256 \"parse.mly\"\n    (let (m_eff, _2, n_eff, _4, subcomp) = (_1, (), _3, (), _5) in\n    ( (m_eff, n_eff, subcomp) ))\n# 8634 \"parse.ml\"\n               : 'polymonadic_subcomp))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1261 \"parse.mly\"\n    (let _1 = () in\n                  ( Assumption ))\n# 8641 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1264 \"parse.mly\"\n    (let _1 = () in\n                  (\n    raise_error (Fatal_InlineRenamedAsUnfold, \"The 'inline' qualifier has been renamed to 'unfold'\") (lhs parseState)\n   ))\n# 8650 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1269 \"parse.mly\"\n    (let _1 = () in\n                  (\n              raise_error (Fatal_UnfoldableDeprecated, \"The 'unfoldable' qualifier is no longer denotable; it is the default qualifier so just omit it\") (lhs parseState)\n   ))\n# 8659 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1274 \"parse.mly\"\n    (let _1 = () in\n                          (\n     Inline_for_extraction\n  ))\n# 8668 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1279 \"parse.mly\"\n    (let _1 = () in\n           (\n     Unfold_for_unification_and_vcgen\n  ))\n# 8677 \"parse.ml\"\n               : 
'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1284 \"parse.mly\"\n    (let _1 = () in\n                  ( Irreducible ))\n# 8684 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1287 \"parse.mly\"\n    (let _1 = () in\n                  ( NoExtract ))\n# 8691 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1290 \"parse.mly\"\n    (let _1 = () in\n                  ( DefaultEffect ))\n# 8698 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1293 \"parse.mly\"\n    (let _1 = () in\n                  ( TotalEffect ))\n# 8705 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1296 \"parse.mly\"\n    (let _1 = () in\n                  ( Private ))\n# 8712 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1299 \"parse.mly\"\n    (let _1 = () in\n                  ( Noeq ))\n# 8719 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1302 \"parse.mly\"\n    (let _1 = () in\n                  ( Unopteq ))\n# 8726 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1305 \"parse.mly\"\n    (let _1 = () in\n                  ( New ))\n# 8733 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1308 \"parse.mly\"\n    (let _1 = () in\n                  ( log_issue (lhs parseState) (Warning_logicqualifier,\n                                                logic_qualifier_deprecation_warning);\n                    Logic ))\n# 8742 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1313 \"parse.mly\"\n    (let _1 = () in\n                  ( Opaque ))\n# 8749 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1316 \"parse.mly\"\n    (let _1 = () in\n                  ( 
Reifiable ))\n# 8756 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1319 \"parse.mly\"\n    (let _1 = () in\n                  ( Reflectable ))\n# 8763 \"parse.ml\"\n               : 'qualifier))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'boption_SQUIGGLY_RARROW_) in\n    Obj.repr(\n# 1324 \"parse.mly\"\n    (let b = _1 in\n                               ( b ))\n# 8771 \"parse.ml\"\n               : 'maybeFocus))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1329 \"parse.mly\"\n    (let _1 = () in\n                ( Rec ))\n# 8778 \"parse.ml\"\n               : 'letqualifier))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1332 \"parse.mly\"\n    (                ( NoLetQualifier ))\n# 8784 \"parse.ml\"\n               : 'letqualifier))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'thunk_tmNoEq_) in\n    Obj.repr(\n# 1336 \"parse.mly\"\n    (let (_1, _2, t, _4) = ((), (), _3, ()) in\n                                       ( mk_meta_tac t ))\n# 8792 \"parse.ml\"\n               : 'aqual))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1339 \"parse.mly\"\n    (let _1 = () in\n              ( Implicit ))\n# 8799 \"parse.ml\"\n               : 'aqual))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1342 \"parse.mly\"\n    (let _1 = () in\n              ( Equality ))\n# 8806 \"parse.ml\"\n               : 'aqual))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_list_SEMICOLON_noSeqTerm_) in\n    Obj.repr(\n# 1347 \"parse.mly\"\n    (let (_1, l, _3) = ((), _2, ()) in\nlet t =                                                 ( l ) in\n                                               ( t ))\n# 8815 \"parse.ml\"\n               : 'binderAttributes))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_BAR_tuplePattern_) in\n    Obj.repr(\n# 1353 
\"parse.mly\"\n    (let pats = _1 in\n                                                    ( pats ))\n# 8823 \"parse.ml\"\n               : 'disjunctivePattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_constructorPattern_) in\n    Obj.repr(\n# 1358 \"parse.mly\"\n    (let pats = _1 in\n      ( match pats with | [x] -> x | l -> mk_pattern (PatTuple (l, false)) (rhs parseState 1) ))\n# 8831 \"parse.ml\"\n               : 'tuplePattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constructorPattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'constructorPattern) in\n    Obj.repr(\n# 1363 \"parse.mly\"\n    (let (pat, _2, pats) = (_1, (), _3) in\n      ( mk_pattern (consPat (rhs parseState 3) pat pats) (rhs2 parseState 1 3) ))\n# 8840 \"parse.ml\"\n               : 'constructorPattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'quident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_atomicPattern_) in\n    Obj.repr(\n# 1366 \"parse.mly\"\n    (let (uid, args) = (_1, _2) in\n      (\n        let head_pat = mk_pattern (PatName uid) (rhs parseState 1) in\n        mk_pattern (PatApp (head_pat, args)) (rhs2 parseState 1 2)\n      ))\n# 8852 \"parse.ml\"\n               : 'constructorPattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicPattern) in\n    Obj.repr(\n# 1372 \"parse.mly\"\n    (let pat = _1 in\n      ( pat ))\n# 8860 \"parse.ml\"\n               : 'constructorPattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'tuplePattern) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'simpleArrow) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'refineOpt) in\n    Obj.repr(\n# 1377 \"parse.mly\"\n    (let (_1, pat, _3, t, phi_opt, _6) = ((), _2, (), _4, _5, ()) in\n      (\n      
  let pos_t = rhs2 parseState 2 4 in\n        let pos = rhs2 parseState 1 6 in\n        mkRefinedPattern pat t true phi_opt pos_t pos\n      ))\n# 8874 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'loption_separated_nonempty_list_SEMICOLON_tuplePattern__) in\n    Obj.repr(\n# 1384 \"parse.mly\"\n    (let (_1, xs, _3) = ((), _2, ()) in\nlet pats =     ( xs ) in\n      ( mk_pattern (PatList pats) (rhs2 parseState 1 3) ))\n# 8883 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_nonempty_list_SEMICOLON_fieldPattern_) in\n    Obj.repr(\n# 1388 \"parse.mly\"\n    (let (_1, record_pat, _3) = ((), _2, ()) in\n      ( mk_pattern (PatRecord record_pat) (rhs2 parseState 1 3) ))\n# 8891 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'constructorPattern) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'separated_nonempty_list_COMMA_constructorPattern_) in\n    Obj.repr(\n# 1391 \"parse.mly\"\n    (let (_1, pat0, _3, pats, _5) = ((), _2, (), _4, ()) in\n      ( mk_pattern (PatTuple(pat0::pats, true)) (rhs2 parseState 1 5) ))\n# 8900 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tuplePattern) in\n    Obj.repr(\n# 1394 \"parse.mly\"\n    (let (_1, pat, _3) = ((), _2, ()) in\n                                     ( pat ))\n# 8908 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tvar) in\n    Obj.repr(\n# 1397 \"parse.mly\"\n    (let tv = _1 in\n                              ( mk_pattern (PatTvar (tv, None, [])) (rhs parseState 1) ))\n# 8916 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _2 = 
(Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1400 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet op =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_pattern (PatOp op) (rhs2 parseState 1 3) ))\n# 8925 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in\n    Obj.repr(\n# 1404 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet op =     ( op ) in\n      ( mk_pattern (PatOp op) (rhs2 parseState 1 3) ))\n# 8934 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1408 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet op =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_pattern (PatOp op) (rhs2 parseState 1 3) ))\n# 8943 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1412 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet op =\n  let op =               ( mk_ident (\"and\" ^ op, rhs parseState 1) ) in\n                      (op)\nin\n      ( mk_pattern (PatOp op) (rhs2 parseState 1 3) ))\n# 8955 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1419 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet op =\n  let op =               ( mk_ident (\"let\" ^ op, rhs parseState 1) ) in\n                      (op)\nin\n      ( mk_pattern (PatOp op) (rhs2 parseState 1 3) ))\n# 8967 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1426 \"parse.mly\"\n    (let _1 = () in\n      ( mk_pattern (PatWild (None, [])) (rhs parseState 1) ))\n# 8974 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 
1429 \"parse.mly\"\n    (let (_1, _2) = ((), ()) in\n      ( mk_pattern (PatWild (Some Implicit, [])) (rhs parseState 1) ))\n# 8981 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constant) in\n    Obj.repr(\n# 1432 \"parse.mly\"\n    (let c = _1 in\n      ( mk_pattern (PatConst c) (rhs parseState 1) ))\n# 8989 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 1435 \"parse.mly\"\n    (let (_1, q) = ((), _2) in\n      ( mk_pattern (PatVQuote q) (rhs2 parseState 1 2) ))\n# 8997 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'aqualifiedWithAttrs_lident_) in\n    Obj.repr(\n# 1438 \"parse.mly\"\n    (let qual_id = _1 in\n    (\n      let (aqual, attrs), lid = qual_id in\n      mk_pattern (PatVar (lid, aqual, attrs)) (rhs parseState 1) ))\n# 9007 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in\n    Obj.repr(\n# 1443 \"parse.mly\"\n    (let uid = _1 in\n      ( mk_pattern (PatName uid) (rhs parseState 1) ))\n# 9015 \"parse.ml\"\n               : 'atomicPattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'qlident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tuplePattern) in\n    Obj.repr(\n# 1448 \"parse.mly\"\n    (let (x, _2, y) = (_1, (), _3) in\nlet p =     ( (x, y) ) in\n      ( p ))\n# 9025 \"parse.ml\"\n               : 'fieldPattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'qlident) in\n    Obj.repr(\n# 1452 \"parse.mly\"\n    (let lid = _1 in\n      ( lid, mk_pattern (PatVar (ident_of_lid lid, None, [])) (rhs parseState 1) ))\n# 9033 \"parse.ml\"\n               : 'fieldPattern))\n; (fun 
__caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'lidentOrUnderscore) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'simpleArrow) in\n    Obj.repr(\n# 1457 \"parse.mly\"\n    (let (_1, id, _3, t, _5) = ((), _2, (), _4, ()) in\n      ( let r = rhs2 parseState 1 5 in\n        let w = mk_pattern (PatVar (id, Some TypeClassArg, [])) r in\n        let asc = (t, None) in\n        [mk_pattern (PatAscribed(w, asc)) r]\n      ))\n# 9046 \"parse.ml\"\n               : 'patternOrMultibinder))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'simpleArrow) in\n    Obj.repr(\n# 1464 \"parse.mly\"\n    (let (_1, t, _3) = ((), _2, ()) in\n      ( let r = rhs2 parseState 1 3 in\n        let id = gen r in\n        let w = mk_pattern (PatVar (id, Some TypeClassArg, [])) r in\n        let asc = (t, None) in\n        [mk_pattern (PatAscribed(w, asc)) r]\n      ))\n# 9059 \"parse.ml\"\n               : 'patternOrMultibinder))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicPattern) in\n    Obj.repr(\n# 1472 \"parse.mly\"\n    (let pat = _1 in\n                      ( [pat] ))\n# 9067 \"parse.ml\"\n               : 'patternOrMultibinder))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'aqualifiedWithAttrs_lident_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'nonempty_list_aqualifiedWithAttrs_lident__) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'simpleArrow) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'refineOpt) in\n    Obj.repr(\n# 1475 \"parse.mly\"\n    (let (_1, qual_id0, qual_ids, _4, t, r, _7) = ((), _2, _3, (), _5, _6, ()) in\n      (\n        let pos = rhs2 parseState 1 7 in\n        let t_pos = rhs parseState 5 in\n        let qual_ids = qual_id0 :: qual_ids in\n        List.map (fun ((aq, attrs), x) -> mkRefinedPattern (mk_pattern (PatVar (x, aq, attrs)) pos) t false r t_pos pos) qual_ids\n   
   ))\n# 9083 \"parse.ml\"\n               : 'patternOrMultibinder))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'aqualifiedWithAttrs_lidentOrUnderscore_) in\n    Obj.repr(\n# 1485 \"parse.mly\"\n    (let aqualifiedWithAttrs_lid = _1 in\n     (\n       let (q, attrs), lid = aqualifiedWithAttrs_lid in\n       mk_binder_with_attrs (Variable lid) (rhs parseState 1) Type_level q attrs\n     ))\n# 9094 \"parse.ml\"\n               : 'binder))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tvar) in\n    Obj.repr(\n# 1491 \"parse.mly\"\n    (let tv = _1 in\n             ( mk_binder (TVariable tv) (rhs parseState 1) Kind None  ))\n# 9102 \"parse.ml\"\n               : 'binder))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'lidentOrUnderscore) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'simpleArrow) in\n    Obj.repr(\n# 1496 \"parse.mly\"\n    (let (_1, id, _3, t, _5) = ((), _2, (), _4, ()) in\n      ( let r = rhs2 parseState 1 5 in\n        [mk_binder (Annotated (id, t)) r Type_level (Some TypeClassArg)]\n      ))\n# 9113 \"parse.ml\"\n               : 'multiBinder))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'simpleArrow) in\n    Obj.repr(\n# 1501 \"parse.mly\"\n    (let (_1, t, _3) = ((), _2, ()) in\n      ( let r = rhs2 parseState 1 3 in\n        let id = gen r in\n        [mk_binder (Annotated (id, t)) r Type_level (Some TypeClassArg)]\n      ))\n# 9124 \"parse.ml\"\n               : 'multiBinder))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nonempty_list_aqualifiedWithAttrs_lidentOrUnderscore__) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'simpleArrow) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'refineOpt) in\n    Obj.repr(\n# 1507 \"parse.mly\"\n    (let (_1, qual_ids, _3, t, r, _6) = ((), _2, (), _4, _5, ()) in\n     (\n       let 
should_bind_var = match qual_ids with | [ _ ] -> true | _ -> false in\n       List.map (fun ((q, attrs), x) ->\n         mkRefinedBinder x t should_bind_var r (rhs2 parseState 1 6) q attrs) qual_ids\n     ))\n# 9138 \"parse.ml\"\n               : 'multiBinder))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_4_) in\n    Obj.repr(\n# 1516 \"parse.mly\"\n    (let bss = _1 in\n                                                        ( flatten bss ))\n# 9146 \"parse.ml\"\n               : 'binders))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 1521 \"parse.mly\"\n    (let (aq, attrs, x) = (_1, _2, _3) in\n                                        ( (Some aq, attrs), x ))\n# 9156 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lident_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 1524 \"parse.mly\"\n    (let (aq, x) = (_1, _2) in\n                 ( (Some aq, []), x ))\n# 9165 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lident_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 1527 \"parse.mly\"\n    (let (attrs, x) = (_1, _2) in\n                               ( (None, attrs), x ))\n# 9174 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lident_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 1530 \"parse.mly\"\n    (let x = _1 in\n        ( (None, []), x ))\n# 9182 \"parse.ml\"\n               : 
'aqualifiedWithAttrs_lident_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 1535 \"parse.mly\"\n    (let (aq, attrs, id) = (_1, _2, _3) in\nlet x =               ( id ) in\n                                        ( (Some aq, attrs), x ))\n# 9193 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1539 \"parse.mly\"\n    (let (aq, attrs, _1, op, _3) = (_1, _2, (), _4, ()) in\nlet x =\n  let id =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                                        ( (Some aq, attrs), x ))\n# 9207 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in\n    Obj.repr(\n# 1546 \"parse.mly\"\n    (let (aq, attrs, _1, op, _3) = (_1, _2, (), _4, ()) in\nlet x =\n  let id =     ( op ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                                        ( (Some aq, attrs), x ))\n# 9221 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 
string) in\n    Obj.repr(\n# 1553 \"parse.mly\"\n    (let (aq, attrs, _1, op, _3) = (_1, _2, (), _4, ()) in\nlet x =\n  let id =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                                        ( (Some aq, attrs), x ))\n# 9235 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1560 \"parse.mly\"\n    (let (aq, attrs, _1, op, _3) = (_1, _2, (), _4, ()) in\nlet x =\n  let id =\n    let op =               ( mk_ident (\"and\" ^ op, rhs parseState 1) ) in\n                        (op)\n  in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                                        ( (Some aq, attrs), x ))\n# 9252 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1570 \"parse.mly\"\n    (let (aq, attrs, _1, op, _3) = (_1, _2, (), _4, ()) in\nlet x =\n  let id =\n    let op =               ( mk_ident (\"let\" ^ op, rhs parseState 1) ) in\n                        (op)\n  in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                                        ( (Some aq, attrs), x ))\n# 9269 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 
1580 \"parse.mly\"\n    (let (aq, id) = (_1, _2) in\nlet x =               ( id ) in\n                 ( (Some aq, []), x ))\n# 9279 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1584 \"parse.mly\"\n    (let (aq, _1, op, _3) = (_1, (), _3, ()) in\nlet x =\n  let id =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                 ( (Some aq, []), x ))\n# 9292 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in\n    Obj.repr(\n# 1591 \"parse.mly\"\n    (let (aq, _1, op, _3) = (_1, (), _3, ()) in\nlet x =\n  let id =     ( op ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                 ( (Some aq, []), x ))\n# 9305 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1598 \"parse.mly\"\n    (let (aq, _1, op, _3) = (_1, (), _3, ()) in\nlet x =\n  let id =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                 ( (Some aq, []), x ))\n# 9318 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1605 \"parse.mly\"\n    (let (aq, _1, op, _3) = (_1, (), _3, ()) in\nlet x =\n  let id 
=\n    let op =               ( mk_ident (\"and\" ^ op, rhs parseState 1) ) in\n                        (op)\n  in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                 ( (Some aq, []), x ))\n# 9334 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1615 \"parse.mly\"\n    (let (aq, _1, op, _3) = (_1, (), _3, ()) in\nlet x =\n  let id =\n    let op =               ( mk_ident (\"let\" ^ op, rhs parseState 1) ) in\n                        (op)\n  in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                 ( (Some aq, []), x ))\n# 9350 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 1625 \"parse.mly\"\n    (let (attrs, id) = (_1, _2) in\nlet x =               ( id ) in\n                               ( (None, attrs), x ))\n# 9360 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1629 \"parse.mly\"\n    (let (attrs, _1, op, _3) = (_1, (), _3, ()) in\nlet x =\n  let id =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                               ( (None, attrs), x ))\n# 9373 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _3 = 
(Parsing.peek_val __caml_parser_env 1 : 'binop_name) in\n    Obj.repr(\n# 1636 \"parse.mly\"\n    (let (attrs, _1, op, _3) = (_1, (), _3, ()) in\nlet x =\n  let id =     ( op ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                               ( (None, attrs), x ))\n# 9386 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1643 \"parse.mly\"\n    (let (attrs, _1, op, _3) = (_1, (), _3, ()) in\nlet x =\n  let id =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                               ( (None, attrs), x ))\n# 9399 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1650 \"parse.mly\"\n    (let (attrs, _1, op, _3) = (_1, (), _3, ()) in\nlet x =\n  let id =\n    let op =               ( mk_ident (\"and\" ^ op, rhs parseState 1) ) in\n                        (op)\n  in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n                               ( (None, attrs), x ))\n# 9415 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1660 \"parse.mly\"\n    (let (attrs, _1, op, _3) = (_1, (), _3, ()) in\nlet x =\n  let id =\n    let op =               ( mk_ident (\"let\" ^ op, rhs parseState 1) ) in\n                        (op)\n  in\n      ( mk_ident (compile_op' 
(string_of_id id) (range_of_id id), range_of_id id) )\nin\n                               ( (None, attrs), x ))\n# 9431 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 1670 \"parse.mly\"\n    (let id = _1 in\nlet x =               ( id ) in\n        ( (None, []), x ))\n# 9440 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1674 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet x =\n  let id =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n        ( (None, []), x ))\n# 9452 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in\n    Obj.repr(\n# 1681 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet x =\n  let id =     ( op ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n        ( (None, []), x ))\n# 9464 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1688 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet x =\n  let id =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n        ( (None, []), x ))\n# 9476 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1695 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet x =\n  let id =\n    let op =               ( 
mk_ident (\"and\" ^ op, rhs parseState 1) ) in\n                        (op)\n  in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n        ( (None, []), x ))\n# 9491 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1705 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet x =\n  let id =\n    let op =               ( mk_ident (\"let\" ^ op, rhs parseState 1) ) in\n                        (op)\n  in\n      ( mk_ident (compile_op' (string_of_id id) (range_of_id id), range_of_id id) )\nin\n        ( (None, []), x ))\n# 9506 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrOperator_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'lidentOrUnderscore) in\n    Obj.repr(\n# 1717 \"parse.mly\"\n    (let (aq, attrs, x) = (_1, _2, _3) in\n                                        ( (Some aq, attrs), x ))\n# 9516 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrUnderscore_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'lidentOrUnderscore) in\n    Obj.repr(\n# 1720 \"parse.mly\"\n    (let (aq, x) = (_1, _2) in\n                 ( (Some aq, []), x ))\n# 9525 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrUnderscore_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'lidentOrUnderscore) in\n    Obj.repr(\n# 1723 \"parse.mly\"\n    (let (attrs, x) = (_1, _2) in\n                               ( (None, attrs), x ))\n# 9534 \"parse.ml\"\n               : 
'aqualifiedWithAttrs_lidentOrUnderscore_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'lidentOrUnderscore) in\n    Obj.repr(\n# 1726 \"parse.mly\"\n    (let x = _1 in\n        ( (None, []), x ))\n# 9542 \"parse.ml\"\n               : 'aqualifiedWithAttrs_lidentOrUnderscore_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'path_lident_) in\n    Obj.repr(\n# 1731 \"parse.mly\"\n    (let ids = _1 in\n                     ( lid_of_ids ids ))\n# 9550 \"parse.ml\"\n               : 'qlident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'path_uident_) in\n    Obj.repr(\n# 1736 \"parse.mly\"\n    (let ids = _1 in\n                     ( lid_of_ids ids ))\n# 9558 \"parse.ml\"\n               : 'quident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 1741 \"parse.mly\"\n    (let id = _1 in\n          ( [id] ))\n# 9566 \"parse.ml\"\n               : 'path_lident_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'uident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'path_lident_) in\n    Obj.repr(\n# 1744 \"parse.mly\"\n    (let (uid, _2, p) = (_1, (), _3) in\n                              ( uid::p ))\n# 9575 \"parse.ml\"\n               : 'path_lident_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'uident) in\n    Obj.repr(\n# 1749 \"parse.mly\"\n    (let id = _1 in\n          ( [id] ))\n# 9583 \"parse.ml\"\n               : 'path_uident_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'uident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'path_uident_) in\n    Obj.repr(\n# 1752 \"parse.mly\"\n    (let (uid, _2, p) = (_1, (), _3) in\n                              ( uid::p ))\n# 9592 \"parse.ml\"\n               : 'path_uident_))\n; (fun 
__caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 1757 \"parse.mly\"\n    (let x = _1 in\n             ( x ))\n# 9600 \"parse.ml\"\n               : 'ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'uident) in\n    Obj.repr(\n# 1760 \"parse.mly\"\n    (let x = _1 in\n              ( x ))\n# 9608 \"parse.ml\"\n               : 'ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'qlident) in\n    Obj.repr(\n# 1765 \"parse.mly\"\n    (let qid = _1 in\n                ( qid ))\n# 9616 \"parse.ml\"\n               : 'qlidentOrOperator))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1768 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet id =     ( mk_ident (op, rhs parseState 1) ) in\n    ( lid_of_ns_and_id [] (id_of_text (compile_op' (string_of_id id) (range_of_id id))) ))\n# 9625 \"parse.ml\"\n               : 'qlidentOrOperator))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in\n    Obj.repr(\n# 1772 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet id =     ( op ) in\n    ( lid_of_ns_and_id [] (id_of_text (compile_op' (string_of_id id) (range_of_id id))) ))\n# 9634 \"parse.ml\"\n               : 'qlidentOrOperator))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1776 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet id =     ( mk_ident (op, rhs parseState 1) ) in\n    ( lid_of_ns_and_id [] (id_of_text (compile_op' (string_of_id id) (range_of_id id))) ))\n# 9643 \"parse.ml\"\n               : 'qlidentOrOperator))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1780 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet id =\n  let op =              
 ( mk_ident (\"and\" ^ op, rhs parseState 1) ) in\n                      (op)\nin\n    ( lid_of_ns_and_id [] (id_of_text (compile_op' (string_of_id id) (range_of_id id))) ))\n# 9655 \"parse.ml\"\n               : 'qlidentOrOperator))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1787 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet id =\n  let op =               ( mk_ident (\"let\" ^ op, rhs parseState 1) ) in\n                      (op)\nin\n    ( lid_of_ns_and_id [] (id_of_text (compile_op' (string_of_id id) (range_of_id id))) ))\n# 9667 \"parse.ml\"\n               : 'qlidentOrOperator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1796 \"parse.mly\"\n    (let _1 = () in\n          (None))\n# 9674 \"parse.ml\"\n               : 'matchMaybeOp))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 1799 \"parse.mly\"\n    (let op = _1 in\n                ( Some (mk_ident (\"let\" ^ op, rhs parseState 1)) ))\n# 9682 \"parse.ml\"\n               : 'matchMaybeOp))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1804 \"parse.mly\"\n    (let _1 = () in\n       (None))\n# 9689 \"parse.ml\"\n               : 'ifMaybeOp))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 1807 \"parse.mly\"\n    (let op = _1 in\n             ( Some (mk_ident (\"let\" ^ op, rhs parseState 1)) ))\n# 9697 \"parse.ml\"\n               : 'ifMaybeOp))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 1812 \"parse.mly\"\n    (let id = _1 in\n             ( mk_ident(id, rhs parseState 1)))\n# 9705 \"parse.ml\"\n               : 'lidentOrUnderscore))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1815 \"parse.mly\"\n    (let _1 = () in\n               ( gen (rhs parseState 1) ))\n# 9712 \"parse.ml\"\n               : 'lidentOrUnderscore))\n; 
(fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 1820 \"parse.mly\"\n    (let id = _1 in\n             ( mk_ident(id, rhs parseState 1)))\n# 9720 \"parse.ml\"\n               : FStar_Ident.ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 1825 \"parse.mly\"\n    (let id = _1 in\n            ( mk_ident(id, rhs parseState 1) ))\n# 9728 \"parse.ml\"\n               : 'uident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 1830 \"parse.mly\"\n    (let tv = _1 in\n            ( mk_ident(tv, rhs parseState 1) ))\n# 9736 \"parse.ml\"\n               : 'tvar))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 1835 \"parse.mly\"\n    (let t = _1 in\n                ( mk_term (Abs ([mk_pattern (PatWild (None, [])) (rhs parseState 3)], t)) (rhs parseState 3) Expr ))\n# 9744 \"parse.ml\"\n               : 'thunk_atomicTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEq) in\n    Obj.repr(\n# 1840 \"parse.mly\"\n    (let t = _1 in\n                ( mk_term (Abs ([mk_pattern (PatWild (None, [])) (rhs parseState 3)], t)) (rhs parseState 3) Expr ))\n# 9752 \"parse.ml\"\n               : 'thunk_tmNoEq_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 1845 \"parse.mly\"\n    (let t = _1 in\n                ( mk_term (Abs ([mk_pattern (PatWild (None, [])) (rhs parseState 3)], t)) (rhs parseState 3) Expr ))\n# 9760 \"parse.ml\"\n               : 'thunk_typ_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 1850 \"parse.mly\"\n    (let t = _1 in\n     ( let u = mk_term (Const Const_unit) (rhs parseState 3) Expr in\n       let t = mk_term (Seq (u, t)) (rhs 
parseState 3) Expr in\n       mk_term (Abs ([mk_pattern (PatWild (None, [])) (rhs parseState 3)], t)) (rhs parseState 3) Expr ))\n# 9770 \"parse.ml\"\n               : 'thunk2_typ_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tmArrow_tmNoEq_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_5_) in\n    Obj.repr(\n# 1857 \"parse.mly\"\n    (let (_1, t, tacopt) = ((), _2, _3) in\n                                                                                ( t, tacopt ))\n# 9779 \"parse.ml\"\n               : 'ascribeTyp))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'kind) in\n    Obj.repr(\n# 1862 \"parse.mly\"\n    (let (_1, k) = ((), _2) in\n                  ( k ))\n# 9787 \"parse.ml\"\n               : 'ascribeKind))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in\n    Obj.repr(\n# 1867 \"parse.mly\"\n    (let t = _1 in\n                      ( {t with level=Kind} ))\n# 9795 \"parse.ml\"\n               : 'kind))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 1872 \"parse.mly\"\n    (let e = _1 in\n      ( e ))\n# 9803 \"parse.ml\"\n               : FStar_Parser_AST.term))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 1875 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Seq(e1, e2)) (rhs2 parseState 1 3) Expr ))\n# 9812 \"parse.ml\"\n               : FStar_Parser_AST.term))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string option) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 1878 \"parse.mly\"\n    
(let (e1, op, e2) = (_1, _2, _3) in\n      ( let t = match op with\n\t  | Some op ->\n\t     let op = mk_ident (\"let\" ^ op, rhs parseState 2) in\n\t     let pat = mk_pattern (PatWild(None, [])) (rhs parseState 2) in\n\t     LetOperator ([(op, pat, e1)], e2)\n\t  | None   ->\n             log_issue (lhs parseState) (Warning_DeprecatedLightDoNotation, do_notation_deprecation_warning);\n\t     Bind(gen (rhs parseState 2), e1, e2)\n        in mk_term t (rhs2 parseState 1 3) Expr\n      ))\n# 9831 \"parse.ml\"\n               : FStar_Parser_AST.term))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'lidentOrUnderscore) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 1890 \"parse.mly\"\n    (let (x, _2, e1, _4, e2) = (_1, (), _3, (), _5) in\n    ( log_issue (lhs parseState) (Warning_DeprecatedLightDoNotation, do_notation_deprecation_warning);\n      mk_term (Bind(x, e1, e2)) (rhs2 parseState 1 5) Expr ))\n# 9842 \"parse.ml\"\n               : FStar_Parser_AST.term))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'option___anonymous_6_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmIff) in\n    Obj.repr(\n# 1896 \"parse.mly\"\n    (let (as_opt, _2, t) = (_1, (), _3) in\n                                                   (as_opt,t,false))\n# 9851 \"parse.ml\"\n               : 'match_returning))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'option___anonymous_7_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmIff) in\n    Obj.repr(\n# 1899 \"parse.mly\"\n    (let (as_opt, _2, t) = (_1, (), _3) in\n                                                      (as_opt,t,true))\n# 9860 \"parse.ml\"\n               : 'match_returning))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n   
 Obj.repr(\n# 1904 \"parse.mly\"\n    (let t = _1 in\n           ( t ))\n# 9868 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'tmIff) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'tmIff) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_8_) in\n    Obj.repr(\n# 1907 \"parse.mly\"\n    (let (e, _2, t, tactic_opt) = (_1, (), _3, _4) in\n      ( mk_term (Ascribed(e,{t with level=Expr},tactic_opt,false)) (rhs2 parseState 1 4) Expr ))\n# 9878 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'tmIff) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'tmIff) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_9_) in\n    Obj.repr(\n# 1910 \"parse.mly\"\n    (let (e, _2, t, tactic_opt) = (_1, (), _3, _4) in\n      (\n        log_issue (lhs parseState)\n\t          (Warning_BleedingEdge_Feature,\n\t\t   \"Equality type ascriptions is an experimental feature subject to redesign in the future\");\n        mk_term (Ascribed(e,{t with level=Expr},tactic_opt,true)) (rhs2 parseState 1 4) Expr\n      ))\n# 9893 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'atomicTermNotQUident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : FStar_Parser_AST.term) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 1918 \"parse.mly\"\n    (let (e1, _1, e, _3_inlined1, _3, e3) = (_1, (), _3, (), (), _6) in\nlet op_expr =                              ( mk_ident (\".()\", rhs parseState 1), e, rhs2 parseState 1 3 ) in\n      (\n        let (op, e2, _) = op_expr in\n        let opid = mk_ident (string_of_id op ^ \"<-\", range_of_id op) in\n        mk_term (Op(opid, [ e1; e2; e3 ])) (rhs2 parseState 1 4) Expr\n      ))\n# 9908 \"parse.ml\"\n           
    : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'atomicTermNotQUident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : FStar_Parser_AST.term) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 1926 \"parse.mly\"\n    (let (e1, _1, e, _3_inlined1, _3, e3) = (_1, (), _3, (), (), _6) in\nlet op_expr =                              ( mk_ident (\".[]\", rhs parseState 1), e, rhs2 parseState 1 3 ) in\n      (\n        let (op, e2, _) = op_expr in\n        let opid = mk_ident (string_of_id op ^ \"<-\", range_of_id op) in\n        mk_term (Op(opid, [ e1; e2; e3 ])) (rhs2 parseState 1 4) Expr\n      ))\n# 9923 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'atomicTermNotQUident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : FStar_Parser_AST.term) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 1934 \"parse.mly\"\n    (let (e1, _1, e, _3_inlined1, _3, e3) = (_1, (), _3, (), (), _6) in\nlet op_expr =                                      ( mk_ident (\".[||]\", rhs parseState 1), e, rhs2 parseState 1 3 ) in\n      (\n        let (op, e2, _) = op_expr in\n        let opid = mk_ident (string_of_id op ^ \"<-\", range_of_id op) in\n        mk_term (Op(opid, [ e1; e2; e3 ])) (rhs2 parseState 1 4) Expr\n      ))\n# 9938 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'atomicTermNotQUident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : FStar_Parser_AST.term) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 1942 \"parse.mly\"\n    (let (e1, _1, e, _3_inlined1, _3, e3) = (_1, (), _3, (), (), _6) in\nlet op_expr =                                                 ( mk_ident (\".(||)\", rhs parseState 1), e, rhs2 parseState 1 3 ) 
in\n      (\n        let (op, e2, _) = op_expr in\n        let opid = mk_ident (string_of_id op ^ \"<-\", range_of_id op) in\n        mk_term (Op(opid, [ e1; e2; e3 ])) (rhs2 parseState 1 4) Expr\n      ))\n# 9953 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 1950 \"parse.mly\"\n    (let (_1, t) = ((), _2) in\n      ( mk_term (Requires(t, None)) (rhs2 parseState 1 2) Type_level ))\n# 9961 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 1953 \"parse.mly\"\n    (let (_1, t) = ((), _2) in\n      ( mk_term (Ensures(t, None)) (rhs2 parseState 1 2) Type_level ))\n# 9969 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'typ) in\n    Obj.repr(\n# 1956 \"parse.mly\"\n    (let (_1, t) = ((), _2) in\n      ( mk_term (Decreases (t, None)) (rhs2 parseState 1 2) Type_level ))\n# 9977 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'noSeqTerm) in\n    Obj.repr(\n# 1959 \"parse.mly\"\n    (let (_1, _2, t, _4) = ((), (), _3, ()) in\n      ( match t.tm with\n        | App (t1, t2, _) ->\n\t  let ot = mk_term (WFOrder (t1, t2)) (rhs2 parseState 3 3) Type_level in\n\t  mk_term (Decreases (ot, None)) (rhs2 parseState 1 4) Type_level\n\t| _ ->\n\t  raise_error (Fatal_SyntaxError,\n\t    \"Syntax error: To use well-founded relations, write e1 e2\") (rhs parseState 3) ))\n# 9991 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_atomicTerm_) in\n    Obj.repr(\n# 1968 \"parse.mly\"\n    (let (_1, es) = ((), _2) in\n      ( mk_term (Attributes es) (rhs2 parseState 1 2) Type_level ))\n# 9999 \"parse.ml\"\n               : 'noSeqTerm))\n; 
(fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'ifMaybeOp) in\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'noSeqTerm) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'option_match_returning_) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 1971 \"parse.mly\"\n    (let (op, e1, ret_opt, _4, e2, _6, e3) = (_1, _2, _3, (), _5, (), _7) in\n      ( mk_term (If(e1, op, ret_opt, e2, e3)) (rhs2 parseState 1 7) Expr ))\n# 10011 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'ifMaybeOp) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'noSeqTerm) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'option_match_returning_) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 1974 \"parse.mly\"\n    (let (op, e1, ret_opt, _4, e2) = (_1, _2, _3, (), _5) in\n      (\n        let e3 = mk_term (Const Const_unit) (rhs2 parseState 1 5) Expr in\n        mk_term (If(e1, op, ret_opt, e2, e3)) (rhs2 parseState 1 5) Expr\n      ))\n# 10025 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'reverse_left_flexible_nonempty_list_BAR_patternBranch_) in\n    Obj.repr(\n# 1980 \"parse.mly\"\n    (let (_1, e1, _3, xs) = ((), _2, (), _4) in\nlet pbs =    ( List.rev xs ) in\n      (\n         let branches = focusBranches (pbs) (rhs2 parseState 1 4) in\n         mk_term (TryWith(e1, branches)) (rhs2 parseState 1 4) Expr\n      ))\n# 10038 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'matchMaybeOp) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : FStar_Parser_AST.term) 
in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'option_match_returning_) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'reverse_left_flexible_list_BAR___anonymous_10_) in\n    Obj.repr(\n# 1987 \"parse.mly\"\n    (let (op, e, ret_opt, _4, xs) = (_1, _2, _3, (), _5) in\nlet pbs =    ( List.rev xs ) in\n      (\n        let branches = focusBranches pbs (rhs2 parseState 1 5) in\n        mk_term (Match(e, op, ret_opt, branches)) (rhs2 parseState 1 5) Expr\n      ))\n# 10053 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : bool) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 1994 \"parse.mly\"\n    (let (_1, _2, t, _4, e) = (_1, (), _3, (), _5) in\n      (\n            match t.tm with\n            | Ascribed(r, rty, None, _) ->\n              mk_term (LetOpenRecord(r, rty, e)) (rhs2 parseState 1 5) Expr\n\n            | Name uid ->\n              mk_term (LetOpen(uid, e)) (rhs2 parseState 1 5) Expr\n\n            | _ ->\n              raise_error (Fatal_SyntaxError, \"Syntax error: local opens expects either opening\\n\\\n                                               a module or namespace using `let open T in e`\\n\\\n                                               or, a record type with `let open e <: t in e'`\")\n                          (rhs parseState 3)\n      ))\n# 10076 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : bool) in\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'letqualifier) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'letbinding) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'list_attr_letbinding_) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 2010 
\"parse.mly\"\n    (let (_2, q, lb, lbs, _6, e) = (_1, _2, _3, _4, (), _6) in\nlet attrs =     ( None ) in\n      (\n        let lbs = (attrs, lb)::lbs in\n        let lbs = focusAttrLetBindings lbs (rhs2 parseState 2 3) in\n        mk_term (Let(q, lbs, e)) (rhs2 parseState 1 6) Expr\n      ))\n# 10093 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'attribute) in\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : bool) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'letqualifier) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'letbinding) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'list_attr_letbinding_) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 2018 \"parse.mly\"\n    (let (x, _2, q, lb, lbs, _6, e) = (_1, _2, _3, _4, _5, (), _7) in\nlet attrs =     ( Some x ) in\n      (\n        let lbs = (attrs, lb)::lbs in\n        let lbs = focusAttrLetBindings lbs (rhs2 parseState 2 3) in\n        mk_term (Let(q, lbs, e)) (rhs2 parseState 1 6) Expr\n      ))\n# 10111 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'letoperatorbinding) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'list___anonymous_11_) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 2026 \"parse.mly\"\n    (let (op, b, lbs, _4, e) = (_1, _2, _3, (), _5) in\nlet op =               ( mk_ident (\"let\" ^ op, rhs parseState 1) ) in\n    ( let lbs = (op, b)::lbs in\n      mk_term (LetOperator ( List.map (fun (op, (pat, tm)) -> (op, pat, tm)) lbs\n\t\t\t   , e)) (rhs2 parseState 1 5) Expr\n    ))\n# 10126 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 
'reverse_left_flexible_nonempty_list_BAR_patternBranch_) in\n    Obj.repr(\n# 2033 \"parse.mly\"\n    (let (_1, xs) = ((), _2) in\nlet pbs =    ( List.rev xs ) in\n      (\n        let branches = focusBranches pbs (rhs2 parseState 1 2) in\n        mk_function branches (lhs parseState) (rhs2 parseState 1 2)\n      ))\n# 10138 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 2040 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( let a = set_lid_range assume_lid (rhs parseState 1) in\n        mkExplicitApp (mk_term (Var a) (rhs parseState 1) Expr) [e] (rhs2 parseState 1 2)\n      ))\n# 10148 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'atomicTerm) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_12_) in\n    Obj.repr(\n# 2045 \"parse.mly\"\n    (let (_1, e, tactic_opt) = ((), _2, _3) in\n      (\n        match tactic_opt with\n        | None ->\n          let a = set_lid_range assert_lid (rhs parseState 1) in\n          mkExplicitApp (mk_term (Var a) (rhs parseState 1) Expr) [e] (rhs2 parseState 1 2)\n        | Some tac ->\n          let a = set_lid_range assert_by_tactic_lid (rhs parseState 1) in\n          mkExplicitApp (mk_term (Var a) (rhs parseState 1) Expr) [e; tac] (rhs2 parseState 1 4)\n      ))\n# 10165 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'thunk_atomicTerm_) in\n    Obj.repr(\n# 2056 \"parse.mly\"\n    (let (_1, _2, tactic) = ((), (), _3) in\n     (\n         let a = set_lid_range synth_lid (rhs parseState 1) in\n         mkExplicitApp (mk_term (Var a) (rhs parseState 1) Expr) [tactic] (rhs2 parseState 1 2)\n     ))\n# 10176 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 
0 : 'atomicTerm) in\n    Obj.repr(\n# 2062 \"parse.mly\"\n    (let (_1, tactic) = ((), _2) in\n     (\n         let a = set_lid_range synth_lid (rhs parseState 1) in\n         mkExplicitApp (mk_term (Var a) (rhs parseState 1) Expr) [tactic] (rhs2 parseState 1 2)\n     ))\n# 10187 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'atomicTerm) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'noSeqTerm) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'list_calcStep_) in\n    Obj.repr(\n# 2068 \"parse.mly\"\n    (let (_1, rel, _3, init, _5, steps, _7) = ((), _2, (), _4, (), _6, ()) in\n     (\n         mk_term (CalcProof (rel, init, steps)) (rhs2 parseState 1 7) Expr\n     ))\n# 10199 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'binders) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 2073 \"parse.mly\"\n    (let (_1, _2, bs, _4, p, _6, e) = ((), (), _3, (), _5, (), _7) in\n     (\n        mk_term (IntroForall(bs, p, e)) (rhs2 parseState 1 7) Expr\n     ))\n# 10211 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 6 : 'binders) in\n    let _5 = (Parsing.peek_val __caml_parser_env 4 : 'noSeqTerm) in\n    let _7 = (Parsing.peek_val __caml_parser_env 2 : 'list_atomicTerm_) in\n    let _9 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 2078 \"parse.mly\"\n    (let (_1, _2, bs, _4, p, _6, vs, _8, e) = ((), (), _3, (), _5, (), _7, (), _9) in\n     (\n        if List.length bs <> List.length vs\n        then raise_error (Fatal_SyntaxError, \"Syntax error: expected instantiations for all binders\") (rhs parseState 7)\n        else mk_term (IntroExists(bs, p, vs, e)) (rhs2 parseState 1 9) Expr\n  
   ))\n# 10226 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 6 : 'tmFormula) in\n    let _4 = (Parsing.peek_val __caml_parser_env 4 : 'tmFormula) in\n    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'singleBinder) in\n    let _8 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 2085 \"parse.mly\"\n    (let (_1, p, _3, q, _5, y, _7, e) = ((), _2, (), _4, (), _6, (), _8) in\n     (\n        mk_term (IntroImplies(p, q, y, e)) (rhs2 parseState 1 8) Expr\n     ))\n# 10239 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'tmFormula) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'tmConjunction) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 2090 \"parse.mly\"\n    (let (_1, p, _3, q, _5, lr, e) = ((), _2, (), _4, (), _6, _7) in\n     (\n        let b =\n            if lr = \"Left\" then true\n            else if lr = \"Right\" then false\n            else raise_error (Fatal_SyntaxError, \"Syntax error: _intro_ \\\\/ expects either 'Left' or 'Right'\") (rhs parseState 6)\n        in\n        mk_term (IntroOr(b, p, q, e))  (rhs2 parseState 1 7) Expr\n     ))\n# 10257 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 6 : 'tmConjunction) in\n    let _4 = (Parsing.peek_val __caml_parser_env 4 : 'tmTuple) in\n    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in\n    let _8 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 2100 \"parse.mly\"\n    (let (_1, p, _3, q, _5, e1, _7, e2) = ((), _2, (), _4, (), _6, (), _8) in\n     (\n        mk_term (IntroAnd(p, q, e1, e2))  (rhs2 parseState 1 8) Expr\n     ))\n# 10270 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun 
__caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'binders) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'list_atomicTerm_) in\n    Obj.repr(\n# 2105 \"parse.mly\"\n    (let (_1, _2, xs, _4, p, _6, vs) = ((), (), _3, (), _5, (), _7) in\n     (\n        mk_term (ElimForall(xs, p, vs)) (rhs2 parseState 1 7) Expr\n     ))\n# 10282 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 8 : 'binders) in\n    let _5 = (Parsing.peek_val __caml_parser_env 6 : 'noSeqTerm) in\n    let _7 = (Parsing.peek_val __caml_parser_env 4 : 'noSeqTerm) in\n    let _9 = (Parsing.peek_val __caml_parser_env 2 : 'singleBinder) in\n    let _11 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 2110 \"parse.mly\"\n    (let (_1, _2, bs, _4, p, _6, q, _8, y, _10, e) = ((), (), _3, (), _5, (), _7, (), _9, (), _11) in\n     (\n        mk_term (ElimExists(bs, p, q, y, e)) (rhs2 parseState 1 11) Expr\n     ))\n# 10296 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'tmFormula) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 2115 \"parse.mly\"\n    (let (_1, p, _3, q, _5, e) = ((), _2, (), _4, (), _6) in\n     (\n        mk_term (ElimImplies(p, q, e)) (rhs2 parseState 1 6) Expr\n     ))\n# 10308 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 12 : 'tmFormula) in\n    let _4 = (Parsing.peek_val __caml_parser_env 10 : 'tmConjunction) in\n    let _6 = (Parsing.peek_val __caml_parser_env 8 : 'noSeqTerm) in\n    let _8 = (Parsing.peek_val __caml_parser_env 6 : 'singleBinder) in\n    let _10 = (Parsing.peek_val __caml_parser_env 4 : 'noSeqTerm) in\n   
 let _12 = (Parsing.peek_val __caml_parser_env 2 : 'singleBinder) in\n    let _14 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 2120 \"parse.mly\"\n    (let (_1, p, _3, q, _5, r, _7, x, _9, e1, _11, y, _13, e2) = ((), _2, (), _4, (), _6, (), _8, (), _10, (), _12, (), _14) in\n     (\n        mk_term (ElimOr(p, q, r, x, e1, y, e2)) (rhs2 parseState 1 14) Expr\n     ))\n# 10324 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 8 : 'tmConjunction) in\n    let _4 = (Parsing.peek_val __caml_parser_env 6 : 'tmTuple) in\n    let _6 = (Parsing.peek_val __caml_parser_env 4 : 'noSeqTerm) in\n    let _8 = (Parsing.peek_val __caml_parser_env 2 : 'binders) in\n    let _10 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 2125 \"parse.mly\"\n    (let (_1, p, _3, q, _5, r, _7, xs, _9, e) = ((), _2, (), _4, (), _6, (), _8, (), _10) in\n     (\n        match xs with\n        | [x;y] -> mk_term (ElimAnd(p, q, r, x, y, e)) (rhs2 parseState 1 10) Expr\n     ))\n# 10339 \"parse.ml\"\n               : 'noSeqTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'binders) in\n    Obj.repr(\n# 2133 \"parse.mly\"\n    (let bs = _1 in\n    (\n       match bs with\n       | [b] -> b\n       | _ -> raise_error (Fatal_SyntaxError, \"Syntax error: expected a single binder\") (rhs parseState 1)\n    ))\n# 10351 \"parse.ml\"\n               : 'singleBinder))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'binop_name) in\n    Obj.repr(\n# 2142 \"parse.mly\"\n    (let i = _1 in\n                 ( mk_term (Op (i, [])) (rhs parseState 1) Expr ))\n# 10359 \"parse.ml\"\n               : 'calcRel))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'qlident) in\n    Obj.repr(\n# 2145 \"parse.mly\"\n    (let (_1, id, _3) = ((), _2, ()) in\n                                 ( 
mk_term (Var id) (rhs2 parseState 2 4) Un ))\n# 10367 \"parse.ml\"\n               : 'calcRel))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 2148 \"parse.mly\"\n    (let t = _1 in\n                 ( t ))\n# 10375 \"parse.ml\"\n               : 'calcRel))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'calcRel) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'option_term_) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'noSeqTerm) in\n    Obj.repr(\n# 2153 \"parse.mly\"\n    (let (rel, _2, justif, _4, next, _6) = (_1, (), _3, (), _5, ()) in\n     (\n         let justif =\n             match justif with\n             | Some t -> t\n             | None -> mk_term (Const Const_unit) (rhs2 parseState 2 4) Expr\n         in\n         CalcStep (rel, justif, next)\n     ))\n# 10392 \"parse.ml\"\n               : 'calcStep))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simpleTerm) in\n    Obj.repr(\n# 2165 \"parse.mly\"\n    (let t = _1 in\n                 ( t ))\n# 10400 \"parse.ml\"\n               : 'typ))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binders) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'trigger) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 2168 \"parse.mly\"\n    (let (_1, bs, _3, trigger, e) = ((), _2, (), _4, _5) in\nlet q =            ( fun x -> QForall x ) in\n      (\n        match bs with\n        | [] ->\n          raise_error (Fatal_MissingQuantifierBinder, \"Missing binders for a quantifier\") (rhs2 parseState 1 3)\n        | _ ->\n          let idents = idents_of_binders bs (rhs2 parseState 1 3) in\n          mk_term (q (bs, (idents, trigger), e)) (rhs2 parseState 1 5) Formula\n      ))\n# 10418 \"parse.ml\"\n               : 'typ))\n; (fun __caml_parser_env ->\n    let _2 = 
(Parsing.peek_val __caml_parser_env 3 : 'binders) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'trigger) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 2179 \"parse.mly\"\n    (let (_1, bs, _3, trigger, e) = ((), _2, (), _4, _5) in\nlet q =            ( fun x -> QExists x) in\n      (\n        match bs with\n        | [] ->\n          raise_error (Fatal_MissingQuantifierBinder, \"Missing binders for a quantifier\") (rhs2 parseState 1 3)\n        | _ ->\n          let idents = idents_of_binders bs (rhs2 parseState 1 3) in\n          mk_term (q (bs, (idents, trigger), e)) (rhs2 parseState 1 5) Formula\n      ))\n# 10436 \"parse.ml\"\n               : 'typ))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2192 \"parse.mly\"\n    (      ( [] ))\n# 10442 \"parse.ml\"\n               : 'trigger))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'disjunctivePats) in\n    Obj.repr(\n# 2194 \"parse.mly\"\n    (let (_1, pats, _3) = ((), _2, ()) in\n                                                     ( pats ))\n# 10450 \"parse.ml\"\n               : 'trigger))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_DISJUNCTION_conjunctivePat_) in\n    Obj.repr(\n# 2199 \"parse.mly\"\n    (let pats = _1 in\n                                                              ( pats ))\n# 10458 \"parse.ml\"\n               : 'disjunctivePats))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_SEMICOLON_appTerm_) in\n    Obj.repr(\n# 2204 \"parse.mly\"\n    (let pats = _1 in\n                                                              ( pats ))\n# 10466 \"parse.ml\"\n               : 'conjunctivePat))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmIff) in\n    Obj.repr(\n# 2209 \"parse.mly\"\n    (let e = _1 in\n            ( e ))\n# 10474 
\"parse.ml\"\n               : 'simpleTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'nonempty_list_patternOrMultibinder_) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 2212 \"parse.mly\"\n    (let (_1, pats, _3, e) = ((), _2, (), _4) in\n      ( mk_term (Abs(flatten pats, e)) (rhs2 parseState 1 4) Un ))\n# 10483 \"parse.ml\"\n               : 'simpleTerm))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2217 \"parse.mly\"\n    (let _1 = () in\n                    ( false ))\n# 10490 \"parse.ml\"\n               : 'maybeFocusArrow))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2220 \"parse.mly\"\n    (let _1 = () in\n                    ( true ))\n# 10497 \"parse.ml\"\n               : 'maybeFocusArrow))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'disjunctivePattern) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'maybeFocusArrow) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 2225 \"parse.mly\"\n    (let (pat, focus, e) = (_1, _2, _3) in\nlet when_opt =                          ( None ) in\n      (\n        let pat = match pat with\n          | [p] -> p\n          | ps -> mk_pattern (PatOr ps) (rhs2 parseState 1 1)\n        in\n        (focus, (pat, when_opt, e))\n      ))\n# 10514 \"parse.ml\"\n               : 'patternBranch))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'disjunctivePattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'maybeFocusArrow) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 2235 \"parse.mly\"\n    (let (pat, _1, e_inlined1, focus, e) = (_1, (), _3, _4, _5) in\nlet when_opt =\n  let e = e_inlined1 in\n                           ( Some e )\nin\n      (\n        let pat 
= match pat with\n          | [p] -> p\n          | ps -> mk_pattern (PatOr ps) (rhs2 parseState 1 1)\n        in\n        (focus, (pat, when_opt, e))\n      ))\n# 10535 \"parse.ml\"\n               : 'patternBranch))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmImplies) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmIff) in\n    Obj.repr(\n# 2250 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Op(mk_ident(\"<==>\", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Formula ))\n# 10544 \"parse.ml\"\n               : 'tmIff))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmImplies) in\n    Obj.repr(\n# 2253 \"parse.mly\"\n    (let e = _1 in\n                ( e ))\n# 10552 \"parse.ml\"\n               : 'tmIff))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmArrow_tmFormula_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmImplies) in\n    Obj.repr(\n# 2258 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Op(mk_ident(\"==>\", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Formula ))\n# 10561 \"parse.ml\"\n               : 'tmImplies))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in\n    Obj.repr(\n# 2261 \"parse.mly\"\n    (let e = _1 in\n      ( e ))\n# 10569 \"parse.ml\"\n               : 'tmImplies))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'tmFormula) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in\n    Obj.repr(\n# 2266 \"parse.mly\"\n    (let (_1, t, _3, _2, tgt) = ((), _2, (), (), _5) in\nlet dom =                                ( ((Some TypeClassArg, []), t) ) in\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) 
(rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10586 \"parse.ml\"\n               : 'tmArrow_tmFormula_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'tmFormula) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in\n    Obj.repr(\n# 2277 \"parse.mly\"\n    (let (_1, q, dom_tm, _5, _2, tgt) = ((), _2, _3, (), (), _6) in\nlet dom =\n  let attrs_opt =     ( None ) in\n                                                                          ( (Some q, none_to_empty_list attrs_opt), dom_tm )\nin\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10607 \"parse.ml\"\n               : 'tmArrow_tmFormula_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'aqual) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'binderAttributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'tmFormula) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in\n    Obj.repr(\n# 2291 \"parse.mly\"\n    (let (_1, q, x, dom_tm, _5, _2, tgt) = ((), _2, _3, _4, (), (), _7) in\nlet dom =\n  let attrs_opt =     ( Some x ) in\n                                                                          ( (Some q, none_to_empty_list attrs_opt), dom_tm )\nin\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs 
(NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10629 \"parse.ml\"\n               : 'tmArrow_tmFormula_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in\n    Obj.repr(\n# 2305 \"parse.mly\"\n    (let (dom_tm, _2, tgt) = (_1, (), _3) in\nlet dom =\n  let attrs_opt =     ( None ) in\n  let aq_opt =     ( None ) in\n                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )\nin\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10650 \"parse.ml\"\n               : 'tmArrow_tmFormula_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in\n    Obj.repr(\n# 2320 \"parse.mly\"\n    (let (x, dom_tm, _2, tgt) = (_1, _2, (), _4) in\nlet dom =\n  let attrs_opt =     ( Some x ) in\n  let aq_opt =     ( None ) in\n                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )\nin\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder 
x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10672 \"parse.ml\"\n               : 'tmArrow_tmFormula_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in\n    Obj.repr(\n# 2335 \"parse.mly\"\n    (let (x, dom_tm, _2, tgt) = (_1, _2, (), _4) in\nlet dom =\n  let attrs_opt =     ( None ) in\n  let aq_opt =     ( Some x ) in\n                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )\nin\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10694 \"parse.ml\"\n               : 'tmArrow_tmFormula_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmFormula_) in\n    Obj.repr(\n# 2350 \"parse.mly\"\n    (let (x, x_inlined1, dom_tm, _2, tgt) = (_1, _2, _3, (), _5) in\nlet dom =\n  let attrs_opt =\n    let x = x_inlined1 in\n        ( Some x )\n  in\n  let aq_opt =     ( Some x ) in\n                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )\nin\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs 
(NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10720 \"parse.ml\"\n               : 'tmArrow_tmFormula_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmFormula) in\n    Obj.repr(\n# 2368 \"parse.mly\"\n    (let e = _1 in\n         ( e ))\n# 10728 \"parse.ml\"\n               : 'tmArrow_tmFormula_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'tmNoEq) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in\n    Obj.repr(\n# 2373 \"parse.mly\"\n    (let (_1, t, _3, _2, tgt) = ((), _2, (), (), _5) in\nlet dom =                                ( ((Some TypeClassArg, []), t) ) in\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10745 \"parse.ml\"\n               : 'tmArrow_tmNoEq_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'tmNoEq) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in\n    Obj.repr(\n# 2384 \"parse.mly\"\n    (let (_1, q, dom_tm, _5, _2, tgt) = ((), _2, _3, (), (), _6) in\nlet dom =\n  let attrs_opt =     ( None ) in\n                                                                          ( (Some q, none_to_empty_list attrs_opt), dom_tm )\nin\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs 
parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10766 \"parse.ml\"\n               : 'tmArrow_tmNoEq_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'aqual) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'binderAttributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'tmNoEq) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in\n    Obj.repr(\n# 2398 \"parse.mly\"\n    (let (_1, q, x, dom_tm, _5, _2, tgt) = ((), _2, _3, _4, (), (), _7) in\nlet dom =\n  let attrs_opt =     ( Some x ) in\n                                                                          ( (Some q, none_to_empty_list attrs_opt), dom_tm )\nin\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10788 \"parse.ml\"\n               : 'tmArrow_tmNoEq_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEq) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in\n    Obj.repr(\n# 2412 \"parse.mly\"\n    (let (dom_tm, _2, tgt) = (_1, (), _3) in\nlet dom =\n  let attrs_opt =     ( None ) in\n  let aq_opt =     ( None ) in\n                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )\nin\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) 
-> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10809 \"parse.ml\"\n               : 'tmArrow_tmNoEq_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEq) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in\n    Obj.repr(\n# 2427 \"parse.mly\"\n    (let (x, dom_tm, _2, tgt) = (_1, _2, (), _4) in\nlet dom =\n  let attrs_opt =     ( Some x ) in\n  let aq_opt =     ( None ) in\n                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )\nin\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10831 \"parse.ml\"\n               : 'tmArrow_tmNoEq_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEq) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in\n    Obj.repr(\n# 2442 \"parse.mly\"\n    (let (x, dom_tm, _2, tgt) = (_1, _2, (), _4) in\nlet dom =\n  let attrs_opt =     ( None ) in\n  let aq_opt =     ( Some x ) in\n                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )\nin\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 
parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10853 \"parse.ml\"\n               : 'tmArrow_tmNoEq_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'binderAttributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEq) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tmArrow_tmNoEq_) in\n    Obj.repr(\n# 2457 \"parse.mly\"\n    (let (x, x_inlined1, dom_tm, _2, tgt) = (_1, _2, _3, (), _5) in\nlet dom =\n  let attrs_opt =\n    let x = x_inlined1 in\n        ( Some x )\n  in\n  let aq_opt =     ( Some x ) in\n                                                                          ( (aq_opt, none_to_empty_list attrs_opt), dom_tm )\nin\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10879 \"parse.ml\"\n               : 'tmArrow_tmNoEq_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEq) in\n    Obj.repr(\n# 2475 \"parse.mly\"\n    (let e = _1 in\n         ( e ))\n# 10887 \"parse.ml\"\n               : 'tmArrow_tmNoEq_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simpleArrowDomain) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simpleArrow) in\n    Obj.repr(\n# 2480 \"parse.mly\"\n    (let (dom, _2, tgt) = (_1, (), _3) in\n     (\n       let ((aq_opt, attrs), dom_tm) = dom in\n       let b = match extract_named_refinement dom_tm with\n         | None -> mk_binder_with_attrs (NoName dom_tm) (rhs parseState 1) Un aq_opt attrs\n         | Some (x, t, f) -> 
mkRefinedBinder x t true f (rhs2 parseState 1 1) aq_opt attrs\n       in\n       mk_term (Product([b], tgt)) (rhs2 parseState 1 3)  Un\n     ))\n# 10903 \"parse.ml\"\n               : 'simpleArrow))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqNoRefinement) in\n    Obj.repr(\n# 2490 \"parse.mly\"\n    (let e = _1 in\n                       ( e ))\n# 10911 \"parse.ml\"\n               : 'simpleArrow))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tmEqNoRefinement) in\n    Obj.repr(\n# 2495 \"parse.mly\"\n    (let (_1, t, _3) = ((), _2, ()) in\n                                             ( ((Some TypeClassArg, []), t) ))\n# 10919 \"parse.ml\"\n               : 'simpleArrowDomain))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqNoRefinement) in\n    Obj.repr(\n# 2498 \"parse.mly\"\n    (let dom_tm = _1 in\nlet attrs_opt =     ( None ) in\nlet aq_opt =     ( None ) in\n                                                                                      ( (aq_opt, none_to_empty_list attrs_opt), dom_tm ))\n# 10929 \"parse.ml\"\n               : 'simpleArrowDomain))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqNoRefinement) in\n    Obj.repr(\n# 2503 \"parse.mly\"\n    (let (x, dom_tm) = (_1, _2) in\nlet attrs_opt =     ( Some x ) in\nlet aq_opt =     ( None ) in\n                                                                                      ( (aq_opt, none_to_empty_list attrs_opt), dom_tm ))\n# 10940 \"parse.ml\"\n               : 'simpleArrowDomain))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqNoRefinement) in\n    Obj.repr(\n# 2508 \"parse.mly\"\n    (let (x, dom_tm) = (_1, _2) in\nlet attrs_opt =     ( None ) in\nlet 
aq_opt =     ( Some x ) in\n                                                                                      ( (aq_opt, none_to_empty_list attrs_opt), dom_tm ))\n# 10951 \"parse.ml\"\n               : 'simpleArrowDomain))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'aqual) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binderAttributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqNoRefinement) in\n    Obj.repr(\n# 2513 \"parse.mly\"\n    (let (x, x_inlined1, dom_tm) = (_1, _2, _3) in\nlet attrs_opt =\n  let x = x_inlined1 in\n      ( Some x )\nin\nlet aq_opt =     ( Some x ) in\n                                                                                      ( (aq_opt, none_to_empty_list attrs_opt), dom_tm ))\n# 10966 \"parse.ml\"\n               : 'simpleArrowDomain))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmFormula) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmConjunction) in\n    Obj.repr(\n# 2523 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Op(mk_ident(\"\\\\/\", rhs parseState 2), [e1;e2])) (rhs2 parseState 1 3) Formula ))\n# 10975 \"parse.ml\"\n               : 'tmFormula))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmConjunction) in\n    Obj.repr(\n# 2526 \"parse.mly\"\n    (let e = _1 in\n                    ( e ))\n# 10983 \"parse.ml\"\n               : 'tmFormula))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmConjunction) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmTuple) in\n    Obj.repr(\n# 2531 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Op(mk_ident(\"/\\\\\", rhs parseState 2), [e1;e2])) (rhs2 parseState 1 3) Formula ))\n# 10992 \"parse.ml\"\n               : 'tmConjunction))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 
'tmTuple) in\n    Obj.repr(\n# 2534 \"parse.mly\"\n    (let e = _1 in\n              ( e ))\n# 11000 \"parse.ml\"\n               : 'tmConjunction))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'separated_nonempty_list_COMMA_tmEq_) in\n    Obj.repr(\n# 2539 \"parse.mly\"\n    (let el = _1 in\n      (\n        match el with\n          | [x] -> x\n          | components -> mkTuple components (rhs2 parseState 1 1)\n      ))\n# 11012 \"parse.ml\"\n               : 'tmTuple))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2548 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Op(mk_ident(\"=\", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11021 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2551 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Op(mk_ident(\":=\", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11030 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2554 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Op(mk_ident(\"|>\", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11039 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in\n    let _2 = 
(Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2557 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\nlet op =      ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11050 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2561 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\nlet op =      ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11061 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2565 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\nlet op =      ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11072 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2569 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\nlet op =      ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11083 \"parse.ml\"\n               : 
'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2573 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\nlet op =      ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11094 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2577 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\nlet op =      ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11105 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_appTermNoRecordExp_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2581 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Op(mk_ident(\"-\", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11114 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2584 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( mk_uminus e (rhs parseState 1) (rhs2 parseState 1 2) Expr ))\n# 11122 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) 
in\n    Obj.repr(\n# 2587 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( mk_term (Quote (e, Dynamic)) (rhs2 parseState 1 3) Un ))\n# 11130 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2590 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( mk_term (Quote (e, Static)) (rhs2 parseState 1 3) Un ))\n# 11138 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 2593 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( let q = mk_term (Quote (e, Dynamic)) (rhs2 parseState 1 3) Un in\n        mk_term (Antiquote q) (rhs2 parseState 1 3) Un ))\n# 11147 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 2597 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( mk_term (Antiquote e) (rhs2 parseState 1 3) Un ))\n# 11155 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2600 \"parse.mly\"\n    (let e = _1 in\n      ( e ))\n# 11163 \"parse.ml\"\n               : 'tmEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2605 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Op(mk_ident(\"=\", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11172 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 
'tmEqWith_tmRefinement_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2608 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Op(mk_ident(\":=\", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11181 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2611 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Op(mk_ident(\"|>\", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11190 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2614 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\nlet op =      ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11201 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2618 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\nlet op =      ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11212 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2622 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\nlet op =      ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11223 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2626 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\nlet op =      ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11234 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2630 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\nlet op =      ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11245 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmEqWith_tmRefinement_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2634 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\nlet op =      ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11256 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 
'tmEqWith_tmRefinement_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2638 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( mk_term (Op(mk_ident(\"-\", rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11265 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2641 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( mk_uminus e (rhs parseState 1) (rhs2 parseState 1 2) Expr ))\n# 11273 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2644 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( mk_term (Quote (e, Dynamic)) (rhs2 parseState 1 3) Un ))\n# 11281 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2647 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( mk_term (Quote (e, Static)) (rhs2 parseState 1 3) Un ))\n# 11289 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 2650 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( let q = mk_term (Quote (e, Dynamic)) (rhs2 parseState 1 3) Un in\n        mk_term (Antiquote q) (rhs2 parseState 1 3) Un ))\n# 11298 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 2654 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( mk_term (Antiquote e) (rhs2 parseState 1 3) Un ))\n# 11306 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = 
(Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in\n    Obj.repr(\n# 2657 \"parse.mly\"\n    (let e = _1 in\n      ( e ))\n# 11314 \"parse.ml\"\n               : 'tmEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_appTermNoRecordExp_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2662 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( consTerm (rhs parseState 2) e1 e2 ))\n# 11323 \"parse.ml\"\n               : 'tmNoEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_appTermNoRecordExp_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2665 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      (\n            let dom =\n               match extract_named_refinement e1 with\n               | Some (x, t, f) ->\n                 let dom = mkRefinedBinder x t true f (rhs parseState 1) None [] in\n                 Inl dom\n               | _ ->\n                 Inr e1\n            in\n            let tail = e2 in\n            let dom, res =\n                match tail.tm with\n                | Sum(dom', res) -> dom::dom', res\n                | _ -> [dom], tail\n            in\n            mk_term (Sum(dom, res)) (rhs2 parseState 1 3) Type_level\n      ))\n# 11348 \"parse.ml\"\n               : 'tmNoEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_appTermNoRecordExp_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2684 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\n      ( mk_term (Op(mk_ident(op, rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))\n# 
11358 \"parse.ml\"\n               : 'tmNoEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'tmNoEqWith_appTermNoRecordExp_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_appTermNoRecordExp_) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2687 \"parse.mly\"\n    (let (e1, _2, op, _4, e2) = (_1, (), _3, (), _5) in\n      ( mkApp op [ e1, Infix; e2, Nothing ] (rhs2 parseState 1 5) ))\n# 11368 \"parse.ml\"\n               : 'tmNoEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_appTermNoRecordExp_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2690 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\n      ( mk_term (Op(mk_ident(op, rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11378 \"parse.ml\"\n               : 'tmNoEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'recordExp) in\n    Obj.repr(\n# 2693 \"parse.mly\"\n    (let (_1, e, _3) = ((), _2, ()) in\n                              ( e ))\n# 11386 \"parse.ml\"\n               : 'tmNoEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 2696 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( mk_term (VQuote e) (rhs2 parseState 1 3) Un ))\n# 11394 \"parse.ml\"\n               : 'tmNoEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 2699 \"parse.mly\"\n    (let (op, e) = (_1, _2) in\n      ( mk_term (Op(mk_ident (op, rhs parseState 1), 
[e])) (rhs2 parseState 1 2) Formula ))\n# 11403 \"parse.ml\"\n               : 'tmNoEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'appTermNoRecordExp) in\n    Obj.repr(\n# 2702 \"parse.mly\"\n    (let e = _1 in\n        ( e ))\n# 11411 \"parse.ml\"\n               : 'tmNoEqWith_appTermNoRecordExp_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_tmRefinement_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in\n    Obj.repr(\n# 2707 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      ( consTerm (rhs parseState 2) e1 e2 ))\n# 11420 \"parse.ml\"\n               : 'tmNoEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_tmRefinement_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in\n    Obj.repr(\n# 2710 \"parse.mly\"\n    (let (e1, _2, e2) = (_1, (), _3) in\n      (\n            let dom =\n               match extract_named_refinement e1 with\n               | Some (x, t, f) ->\n                 let dom = mkRefinedBinder x t true f (rhs parseState 1) None [] in\n                 Inl dom\n               | _ ->\n                 Inr e1\n            in\n            let tail = e2 in\n            let dom, res =\n                match tail.tm with\n                | Sum(dom', res) -> dom::dom', res\n                | _ -> [dom], tail\n            in\n            mk_term (Sum(dom, res)) (rhs2 parseState 1 3) Type_level\n      ))\n# 11445 \"parse.ml\"\n               : 'tmNoEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_tmRefinement_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in\n    Obj.repr(\n# 2729 \"parse.mly\"\n    (let (e1, op, e2) = 
(_1, _2, _3) in\n      ( mk_term (Op(mk_ident(op, rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11455 \"parse.ml\"\n               : 'tmNoEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'tmNoEqWith_tmRefinement_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_tmRefinement_) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in\n    Obj.repr(\n# 2732 \"parse.mly\"\n    (let (e1, _2, op, _4, e2) = (_1, (), _3, (), _5) in\n      ( mkApp op [ e1, Infix; e2, Nothing ] (rhs2 parseState 1 5) ))\n# 11465 \"parse.ml\"\n               : 'tmNoEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tmNoEqWith_tmRefinement_) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in\n    Obj.repr(\n# 2735 \"parse.mly\"\n    (let (e1, op, e2) = (_1, _2, _3) in\n      ( mk_term (Op(mk_ident(op, rhs parseState 2), [e1; e2])) (rhs2 parseState 1 3) Un))\n# 11475 \"parse.ml\"\n               : 'tmNoEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'recordExp) in\n    Obj.repr(\n# 2738 \"parse.mly\"\n    (let (_1, e, _3) = ((), _2, ()) in\n                              ( e ))\n# 11483 \"parse.ml\"\n               : 'tmNoEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 2741 \"parse.mly\"\n    (let (_1, e) = ((), _2) in\n      ( mk_term (VQuote e) (rhs2 parseState 1 3) Un ))\n# 11491 \"parse.ml\"\n               : 'tmNoEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 2744 \"parse.mly\"\n    (let (op, e) = (_1, _2) in\n      
( mk_term (Op(mk_ident (op, rhs parseState 1), [e])) (rhs2 parseState 1 2) Formula ))\n# 11500 \"parse.ml\"\n               : 'tmNoEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmRefinement) in\n    Obj.repr(\n# 2747 \"parse.mly\"\n    (let e = _1 in\n        ( e ))\n# 11508 \"parse.ml\"\n               : 'tmNoEqWith_tmRefinement_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2752 \"parse.mly\"\n    (let o = _1 in\n                             ( mk_ident (o, rhs parseState 1) ))\n# 11516 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2755 \"parse.mly\"\n    (let o = _1 in\n                             ( mk_ident (o, rhs parseState 1) ))\n# 11524 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2758 \"parse.mly\"\n    (let o = _1 in\n                             ( mk_ident (o, rhs parseState 1) ))\n# 11532 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2761 \"parse.mly\"\n    (let o = () in\n                             ( mk_ident (\"=\", rhs parseState 1) ))\n# 11539 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2764 \"parse.mly\"\n    (let o = _1 in\n                             ( mk_ident (o, rhs parseState 1) ))\n# 11547 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2767 \"parse.mly\"\n    (let o = _1 in\n                             ( mk_ident (o, rhs parseState 1) ))\n# 11555 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    let 
_1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2770 \"parse.mly\"\n    (let o = _1 in\n                             ( mk_ident (o, rhs parseState 1) ))\n# 11563 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2773 \"parse.mly\"\n    (let o = _1 in\n                             ( mk_ident (o, rhs parseState 1) ))\n# 11571 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2776 \"parse.mly\"\n    (let o = _1 in\n                             ( mk_ident (o, rhs parseState 1) ))\n# 11579 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2779 \"parse.mly\"\n    (let o = () in\n                             ( mk_ident (\"==>\", rhs parseState 1) ))\n# 11586 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2782 \"parse.mly\"\n    (let o = () in\n                             ( mk_ident (\"/\\\\\", rhs parseState 1) ))\n# 11593 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2785 \"parse.mly\"\n    (let o = () in\n                             ( mk_ident (\"\\\\/\", rhs parseState 1) ))\n# 11600 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2788 \"parse.mly\"\n    (let o = () in\n                             ( mk_ident (\"<==>\", rhs parseState 1) ))\n# 11607 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2791 \"parse.mly\"\n    (let o = () in\n                             ( mk_ident (\"|>\", rhs parseState 1) ))\n# 11614 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2794 \"parse.mly\"\n    (let o = () in\n                             ( mk_ident (\":=\", rhs parseState 1) ))\n# 
11621 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2797 \"parse.mly\"\n    (let o = () in\n                             ( mk_ident (\"::\", rhs parseState 1) ))\n# 11628 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2800 \"parse.mly\"\n    (let o = _1 in\n                             ( mk_ident (o, rhs parseState 1) ))\n# 11636 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2803 \"parse.mly\"\n    (let o = _1 in\n                             ( mk_ident (o, rhs parseState 1) ))\n# 11644 \"parse.ml\"\n               : 'binop_name))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_appTermNoRecordExp_) in\n    Obj.repr(\n# 2808 \"parse.mly\"\n    (let e = _1 in\n                                   ( e ))\n# 11652 \"parse.ml\"\n               : 'tmEqNoRefinement))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmEqWith_tmRefinement_) in\n    Obj.repr(\n# 2813 \"parse.mly\"\n    (let e = _1 in\n                              ( e ))\n# 11660 \"parse.ml\"\n               : 'tmEq))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tmNoEqWith_tmRefinement_) in\n    Obj.repr(\n# 2818 \"parse.mly\"\n    (let e = _1 in\n                               ( e ))\n# 11668 \"parse.ml\"\n               : 'tmNoEq))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'lidentOrUnderscore) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'appTermNoRecordExp) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'refineOpt) in\n    Obj.repr(\n# 2823 \"parse.mly\"\n    (let (id, _2, e, phi_opt) = (_1, (), _3, _4) in\n      (\n        let t = match phi_opt with\n          | 
None -> NamedTyp(id, e)\n          | Some phi -> Refine(mk_binder (Annotated(id, e)) (rhs2 parseState 1 3) Type_level None, phi)\n        in mk_term t (rhs2 parseState 1 4) Type_level\n      ))\n# 11683 \"parse.ml\"\n               : 'tmRefinement))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'appTerm) in\n    Obj.repr(\n# 2831 \"parse.mly\"\n    (let e = _1 in\n               ( e ))\n# 11691 \"parse.ml\"\n               : 'tmRefinement))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'option___anonymous_13_) in\n    Obj.repr(\n# 2836 \"parse.mly\"\n    (let phi_opt = _1 in\n                                                    (phi_opt))\n# 11699 \"parse.ml\"\n               : 'refineOpt))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_nonempty_list_SEMICOLON_simpleDef_) in\n    Obj.repr(\n# 2841 \"parse.mly\"\n    (let record_fields = _1 in\n      ( mk_term (Record (None, record_fields)) (rhs parseState 1) Expr ))\n# 11707 \"parse.ml\"\n               : 'recordExp))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'appTerm) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_nonempty_list_SEMICOLON_simpleDef_) in\n    Obj.repr(\n# 2844 \"parse.mly\"\n    (let (e, _2, record_fields) = (_1, (), _3) in\n      ( mk_term (Record (Some e, record_fields)) (rhs2 parseState 1 3) Expr ))\n# 11716 \"parse.ml\"\n               : 'recordExp))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'qlidentOrOperator) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 2849 \"parse.mly\"\n    (let (x, _2, y) = (_1, (), _3) in\nlet e =     ( (x, y) ) in\n                                                           ( e ))\n# 11726 \"parse.ml\"\n               : 'simpleDef))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val 
__caml_parser_env 0 : 'qlidentOrOperator) in\n    Obj.repr(\n# 2853 \"parse.mly\"\n    (let lid = _1 in\n                          ( lid, mk_term (Name (lid_of_ids [ ident_of_lid lid ])) (rhs parseState 1) Un ))\n# 11734 \"parse.ml\"\n               : 'simpleDef))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'indexingTerm) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_14_) in\n    Obj.repr(\n# 2858 \"parse.mly\"\n    (let (head, args) = (_1, _2) in\nlet t =       ( mkApp head (map (fun (x,y) -> (y,x)) args) (rhs2 parseState 1 2) ) in\n                                                                                  (t))\n# 11744 \"parse.ml\"\n               : 'appTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'indexingTerm) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list_argTerm_) in\n    Obj.repr(\n# 2864 \"parse.mly\"\n    (let (head, args) = (_1, _2) in\nlet t =       ( mkApp head (map (fun (x,y) -> (y,x)) args) (rhs2 parseState 1 2) ) in\n                             (t))\n# 11754 \"parse.ml\"\n               : 'appTermNoRecordExp))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'indexingTerm) in\n    Obj.repr(\n# 2870 \"parse.mly\"\n    (let y = _1 in\nlet x =\n  let x =          ( Nothing ) in\n      ( (x, y) )\nin\n                                    ( x ))\n# 11766 \"parse.ml\"\n               : 'argTerm))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'indexingTerm) in\n    Obj.repr(\n# 2877 \"parse.mly\"\n    (let (_1, y) = ((), _2) in\nlet x =\n  let x =          ( Hash ) in\n      ( (x, y) )\nin\n                                    ( x ))\n# 11778 \"parse.ml\"\n               : 'argTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'universe) in\n    Obj.repr(\n# 2884 \"parse.mly\"\n    (let u = _1 in\n               ( u 
))\n# 11786 \"parse.ml\"\n               : 'argTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'atomicTermNotQUident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_dotOperator_) in\n    Obj.repr(\n# 2889 \"parse.mly\"\n    (let (e1, op_exprs) = (_1, _2) in\n      (\n        List.fold_left (fun e1 (op, e2, r) ->\n            mk_term (Op(op, [ e1; e2 ])) (union_ranges e1.range r) Expr)\n            e1 op_exprs\n      ))\n# 11799 \"parse.ml\"\n               : 'indexingTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTerm) in\n    Obj.repr(\n# 2896 \"parse.mly\"\n    (let e = _1 in\n    ( e ))\n# 11807 \"parse.ml\"\n               : 'indexingTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTermNotQUident) in\n    Obj.repr(\n# 2901 \"parse.mly\"\n    (let x = _1 in\n    ( x ))\n# 11815 \"parse.ml\"\n               : 'atomicTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTermQUident) in\n    Obj.repr(\n# 2904 \"parse.mly\"\n    (let x = _1 in\n    ( x ))\n# 11823 \"parse.ml\"\n               : 'atomicTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'opPrefixTerm_atomicTermQUident_) in\n    Obj.repr(\n# 2907 \"parse.mly\"\n    (let x = _1 in\n    ( x ))\n# 11831 \"parse.ml\"\n               : 'atomicTerm))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'quident) in\n    Obj.repr(\n# 2912 \"parse.mly\"\n    (let id = _1 in\n    (\n        let t = Name id in\n        let e = mk_term t (rhs parseState 1) Un in\n              e\n    ))\n# 11843 \"parse.ml\"\n               : 'atomicTermQUident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'quident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : FStar_Parser_AST.term) in\n    Obj.repr(\n# 2919 
\"parse.mly\"\n    (let (id, _2, t, _4) = (_1, (), _3, ()) in\n    (\n      mk_term (LetOpen (id, t)) (rhs2 parseState 1 4) Expr\n    ))\n# 11854 \"parse.ml\"\n               : 'atomicTermQUident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2926 \"parse.mly\"\n    (let _1 = () in\n               ( mk_term Wild (rhs parseState 1) Un ))\n# 11861 \"parse.ml\"\n               : 'atomicTermNotQUident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tvar) in\n    Obj.repr(\n# 2929 \"parse.mly\"\n    (let tv = _1 in\n                ( mk_term (Tvar tv) (rhs parseState 1) Type_level ))\n# 11869 \"parse.ml\"\n               : 'atomicTermNotQUident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constant) in\n    Obj.repr(\n# 2932 \"parse.mly\"\n    (let c = _1 in\n               ( mk_term (Const c) (rhs parseState 1) Expr ))\n# 11877 \"parse.ml\"\n               : 'atomicTermNotQUident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'opPrefixTerm_atomicTermNotQUident_) in\n    Obj.repr(\n# 2935 \"parse.mly\"\n    (let x = _1 in\n    ( x ))\n# 11885 \"parse.ml\"\n               : 'atomicTermNotQUident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 2938 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet op =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [])) (rhs2 parseState 1 3) Un ))\n# 11894 \"parse.ml\"\n               : 'atomicTermNotQUident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'binop_name) in\n    Obj.repr(\n# 2942 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet op =     ( op ) in\n      ( mk_term (Op(op, [])) (rhs2 parseState 1 3) Un ))\n# 11903 \"parse.ml\"\n               : 'atomicTermNotQUident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    
Obj.repr(\n# 2946 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet op =     ( mk_ident (op, rhs parseState 1) ) in\n      ( mk_term (Op(op, [])) (rhs2 parseState 1 3) Un ))\n# 11912 \"parse.ml\"\n               : 'atomicTermNotQUident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 2950 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet op =\n  let op =               ( mk_ident (\"and\" ^ op, rhs parseState 1) ) in\n                      (op)\nin\n      ( mk_term (Op(op, [])) (rhs2 parseState 1 3) Un ))\n# 11924 \"parse.ml\"\n               : 'atomicTermNotQUident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 2957 \"parse.mly\"\n    (let (_1, op, _3) = ((), _2, ()) in\nlet op =\n  let op =               ( mk_ident (\"let\" ^ op, rhs parseState 1) ) in\n                      (op)\nin\n      ( mk_term (Op(op, [])) (rhs2 parseState 1 3) Un ))\n# 11936 \"parse.ml\"\n               : 'atomicTermNotQUident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'tmEq) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'separated_nonempty_list_COMMA_tmEq_) in\n    Obj.repr(\n# 2964 \"parse.mly\"\n    (let (_1, e0, _3, el, _5) = ((), _2, (), _4, ()) in\n      ( mkDTuple (e0::el) (rhs2 parseState 1 5) ))\n# 11945 \"parse.ml\"\n               : 'atomicTermNotQUident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'projectionLHS) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'list___anonymous_15_) in\n    Obj.repr(\n# 2967 \"parse.mly\"\n    (let (e, field_projs) = (_1, _2) in\n      ( fold_left (fun e lid -> mk_term (Project(e, lid)) (rhs2 parseState 1 2) Expr ) e field_projs ))\n# 11954 \"parse.ml\"\n               : 'atomicTermNotQUident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 
FStar_Parser_AST.term) in\n    Obj.repr(\n# 2970 \"parse.mly\"\n    (let (_1, e, _3) = ((), _2, ()) in\n      ( e ))\n# 11962 \"parse.ml\"\n               : 'atomicTermNotQUident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTermNotQUident) in\n    Obj.repr(\n# 2975 \"parse.mly\"\n    (let (op, e) = (_1, _2) in\n      ( mk_term (Op(mk_ident(op, rhs parseState 1), [e])) (rhs2 parseState 1 2) Expr ))\n# 11971 \"parse.ml\"\n               : 'opPrefixTerm_atomicTermNotQUident_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicTermQUident) in\n    Obj.repr(\n# 2980 \"parse.mly\"\n    (let (op, e) = (_1, _2) in\n      ( mk_term (Op(mk_ident(op, rhs parseState 1), [e])) (rhs2 parseState 1 2) Expr ))\n# 11980 \"parse.ml\"\n               : 'opPrefixTerm_atomicTermQUident_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'qidentWithTypeArgs_qlident_option_fsTypeArgs__) in\n    Obj.repr(\n# 2985 \"parse.mly\"\n    (let e = _1 in\n      ( e ))\n# 11988 \"parse.ml\"\n               : 'projectionLHS))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'qidentWithTypeArgs_quident_some_fsTypeArgs__) in\n    Obj.repr(\n# 2988 \"parse.mly\"\n    (let e = _1 in\n      ( e ))\n# 11996 \"parse.ml\"\n               : 'projectionLHS))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : FStar_Parser_AST.term) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'option_pair_hasSort_simpleTerm__) in\n    Obj.repr(\n# 2991 \"parse.mly\"\n    (let (_1, e, sort_opt, _4) = ((), _2, _3, ()) in\n      (\n        (* Note: we have to keep the parentheses here. Consider t * u * v. This\n         * is parsed as Op2( *, Op2( *, t, u), v). 
The desugaring phase then looks\n         * up * and figures out that it hasn't been overridden, meaning that\n         * it's a tuple type, and proceeds to flatten out the whole tuple. Now\n         * consider (t * u) * v. We keep the Paren node, which prevents the\n         * flattening from happening, hence ensuring the proper type is\n         * generated. *)\n        let e1 = match sort_opt with\n          | None -> e\n          | Some (level, t) -> mk_term (Ascribed(e,{t with level=level},None,false)) (rhs2 parseState 1 4) level\n        in mk_term (Paren e1) (rhs2 parseState 1 4) (e.level)\n      ))\n# 12017 \"parse.ml\"\n               : 'projectionLHS))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_list_SEMICOLON_noSeqTerm_) in\n    Obj.repr(\n# 3006 \"parse.mly\"\n    (let (_1, l, _3) = ((), _2, ()) in\nlet es =                                                 ( l ) in\n      (\n        let l = mkConsList (rhs2 parseState 1 3) es in\n        let pos = (rhs2 parseState 1 3) in\n        mkExplicitApp (mk_term (Var (array_of_list_lid)) pos Expr) [l] pos\n      ))\n# 12030 \"parse.ml\"\n               : 'projectionLHS))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_list_SEMICOLON_noSeqTerm_) in\n    Obj.repr(\n# 3014 \"parse.mly\"\n    (let (_1, l, _3) = ((), _2, ()) in\nlet es =                                                 ( l ) in\n      ( mkConsList (rhs2 parseState 1 3) es ))\n# 12039 \"parse.ml\"\n               : 'projectionLHS))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'right_flexible_list_SEMICOLON_noSeqTerm_) in\n    Obj.repr(\n# 3018 \"parse.mly\"\n    (let (_1, l, _3) = ((), _2, ()) in\nlet es =                                                 ( l ) in\n      ( mk_term (LexList es) (rhs2 parseState 1 3) Type_level ))\n# 12048 \"parse.ml\"\n               : 'projectionLHS))\n; (fun __caml_parser_env 
->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'loption_separated_nonempty_list_COMMA_appTerm__) in\n    Obj.repr(\n# 3022 \"parse.mly\"\n    (let (_1, xs, _3) = ((), _2, ()) in\nlet es =     ( xs ) in\n      ( mkRefSet (rhs2 parseState 1 3) es ))\n# 12057 \"parse.ml\"\n               : 'projectionLHS))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'quident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 3026 \"parse.mly\"\n    (let (ns, _2, id) = (_1, (), _3) in\n      ( mk_term (Projector (ns, id)) (rhs2 parseState 1 3) Expr ))\n# 12066 \"parse.ml\"\n               : 'projectionLHS))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'quident) in\n    Obj.repr(\n# 3029 \"parse.mly\"\n    (let (lid, _2) = (_1, ()) in\n      ( mk_term (Discrim lid) (rhs2 parseState 1 2) Un ))\n# 12074 \"parse.ml\"\n               : 'projectionLHS))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'separated_nonempty_list_COMMA_atomicTerm_) in\n    Obj.repr(\n# 3034 \"parse.mly\"\n    (let (_1, targs, _3) = ((), _2, ()) in\n    (targs))\n# 12082 \"parse.ml\"\n               : 'fsTypeArgs))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'qlident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'option_fsTypeArgs_) in\n    Obj.repr(\n# 3039 \"parse.mly\"\n    (let (id, targs_opt) = (_1, _2) in\n      (\n        let t = if is_name id then Name id else Var id in\n        let e = mk_term t (rhs parseState 1) Un in\n        match targs_opt with\n        | None -> e\n        | Some targs -> mkFsTypApp e targs (rhs2 parseState 1 2)\n      ))\n# 12097 \"parse.ml\"\n               : 'qidentWithTypeArgs_qlident_option_fsTypeArgs__))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'quident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 
'some_fsTypeArgs_) in\n    Obj.repr(\n# 3050 \"parse.mly\"\n    (let (id, targs_opt) = (_1, _2) in\n      (\n        let t = if is_name id then Name id else Var id in\n        let e = mk_term t (rhs parseState 1) Un in\n        match targs_opt with\n        | None -> e\n        | Some targs -> mkFsTypApp e targs (rhs2 parseState 1 2)\n      ))\n# 12112 \"parse.ml\"\n               : 'qidentWithTypeArgs_quident_some_fsTypeArgs__))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3061 \"parse.mly\"\n    (let _1 = () in\n            ( Type_level ))\n# 12119 \"parse.ml\"\n               : 'hasSort))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3066 \"parse.mly\"\n    (let _1 = () in\n                  ( Const_unit ))\n# 12126 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in\n    Obj.repr(\n# 3069 \"parse.mly\"\n    (let n = _1 in\n     (\n        if snd n then\n          log_issue (lhs parseState) (Error_OutOfRange, \"This number is outside the allowable range for representable integer constants\");\n        Const_int (fst n, None)\n     ))\n# 12138 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : char) in\n    Obj.repr(\n# 3076 \"parse.mly\"\n    (let c = _1 in\n           ( Const_char c ))\n# 12146 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 3079 \"parse.mly\"\n    (let s = _1 in\n             ( Const_string (s,lhs(parseState)) ))\n# 12154 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3082 \"parse.mly\"\n    (let _1 = () in\n         ( Const_bool true ))\n# 12161 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3085 \"parse.mly\"\n    (let _1 = () in\n          ( Const_bool false ))\n# 12168 \"parse.ml\"\n 
              : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 3088 \"parse.mly\"\n    (let r = _1 in\n           ( Const_real r ))\n# 12176 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 3091 \"parse.mly\"\n    (let n = _1 in\n            ( Const_int (n, Some (Unsigned, Int8)) ))\n# 12184 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in\n    Obj.repr(\n# 3094 \"parse.mly\"\n    (let n = _1 in\n      (\n        if snd n then\n          log_issue (lhs(parseState)) (Error_OutOfRange, \"This number is outside the allowable range for 8-bit signed integers\");\n        Const_int (fst n, Some (Signed, Int8))\n      ))\n# 12196 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 3101 \"parse.mly\"\n    (let n = _1 in\n             ( Const_int (n, Some (Unsigned, Int16)) ))\n# 12204 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in\n    Obj.repr(\n# 3104 \"parse.mly\"\n    (let n = _1 in\n      (\n        if snd n then\n          log_issue (lhs(parseState)) (Error_OutOfRange, \"This number is outside the allowable range for 16-bit signed integers\");\n        Const_int (fst n, Some (Signed, Int16))\n      ))\n# 12216 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 3111 \"parse.mly\"\n    (let n = _1 in\n             ( Const_int (n, Some (Unsigned, Int32)) ))\n# 12224 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * 
bool) in\n    Obj.repr(\n# 3114 \"parse.mly\"\n    (let n = _1 in\n      (\n        if snd n then\n          log_issue (lhs(parseState)) (Error_OutOfRange, \"This number is outside the allowable range for 32-bit signed integers\");\n        Const_int (fst n, Some (Signed, Int32))\n      ))\n# 12236 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 3121 \"parse.mly\"\n    (let n = _1 in\n             ( Const_int (n, Some (Unsigned, Int64)) ))\n# 12244 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in\n    Obj.repr(\n# 3124 \"parse.mly\"\n    (let n = _1 in\n      (\n        if snd n then\n          log_issue (lhs(parseState)) (Error_OutOfRange, \"This number is outside the allowable range for 64-bit signed integers\");\n        Const_int (fst n, Some (Signed, Int64))\n      ))\n# 12256 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 3131 \"parse.mly\"\n    (let n = _1 in\n            ( Const_int (n, Some (Unsigned, Sizet)) ))\n# 12264 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3134 \"parse.mly\"\n    (let _1 = () in\n            ( Const_reify None ))\n# 12271 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3137 \"parse.mly\"\n    (let _1 = () in\n                 ( Const_range_of ))\n# 12278 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3140 \"parse.mly\"\n    (let _1 = () in\n                 ( Const_set_range_of ))\n# 12285 \"parse.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'atomicUniverse) in\n    Obj.repr(\n# 3145 \"parse.mly\"\n    (let (_1, ua) = ((), _2) 
in\n                                ( (UnivApp, ua) ))\n# 12293 \"parse.ml\"\n               : 'universe))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'atomicUniverse) in\n    Obj.repr(\n# 3150 \"parse.mly\"\n    (let ua = _1 in\n                      ( ua ))\n# 12301 \"parse.ml\"\n               : 'universeFrom))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'universeFrom) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'universeFrom) in\n    Obj.repr(\n# 3153 \"parse.mly\"\n    (let (u1, op_plus, u2) = (_1, _2, _3) in\n       (\n         if op_plus <> \"+\"\n         then log_issue (rhs parseState 2) (Error_OpPlusInUniverse, (\"The operator \" ^ op_plus ^ \" was found in universe context.\"\n                           ^ \"The only allowed operator in that context is +.\"));\n         mk_term (Op(mk_ident (op_plus, rhs parseState 2), [u1 ; u2])) (rhs2 parseState 1 3) Expr\n       ))\n# 12316 \"parse.ml\"\n               : 'universeFrom))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'ident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'nonempty_list_atomicUniverse_) in\n    Obj.repr(\n# 3161 \"parse.mly\"\n    (let (max, us) = (_1, _2) in\n      (\n        if string_of_id max <> string_of_lid max_lid\n        then log_issue (rhs parseState 1) (Error_InvalidUniverseVar, \"A lower case ident \" ^ string_of_id max ^\n                          \" was found in a universe context. 
\" ^\n                          \"It should be either max or a universe variable 'usomething.\");\n        let max = mk_term (Var (lid_of_ids [max])) (rhs parseState 1) Expr in\n        mkApp max (map (fun u -> u, Nothing) us) (rhs2 parseState 1 2)\n      ))\n# 12332 \"parse.ml\"\n               : 'universeFrom))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3173 \"parse.mly\"\n    (let _1 = () in\n      ( mk_term Wild (rhs parseState 1) Expr ))\n# 12339 \"parse.ml\"\n               : 'atomicUniverse))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in\n    Obj.repr(\n# 3176 \"parse.mly\"\n    (let n = _1 in\n      (\n        if snd n then\n          log_issue (lhs(parseState)) (Error_OutOfRange, \"This number is outside the allowable range for representable integer constants\");\n        mk_term (Const (Const_int (fst n, None))) (rhs parseState 1) Expr\n      ))\n# 12351 \"parse.ml\"\n               : 'atomicUniverse))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : FStar_Ident.ident) in\n    Obj.repr(\n# 3183 \"parse.mly\"\n    (let u = _1 in\n             ( mk_term (Uvar u) (range_of_id u) Expr ))\n# 12359 \"parse.ml\"\n               : 'atomicUniverse))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'universeFrom) in\n    Obj.repr(\n# 3186 \"parse.mly\"\n    (let (_1, u, _3) = ((), _2, ()) in\n    ( u (*mk_term (Paren u) (rhs2 parseState 1 3) Expr*) ))\n# 12367 \"parse.ml\"\n               : 'atomicUniverse))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'warn_error) in\n    Obj.repr(\n# 3191 \"parse.mly\"\n    (let (e, _2) = (_1, ()) in\n                     ( e ))\n# 12375 \"parse.ml\"\n               : (FStar_Errors_Codes.error_flag * string) list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'range) 
in\n    Obj.repr(\n# 3196 \"parse.mly\"\n    (let (f, r) = (_1, _2) in\n    ( [(f, r)] ))\n# 12384 \"parse.ml\"\n               : 'warn_error))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'range) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'warn_error) in\n    Obj.repr(\n# 3199 \"parse.mly\"\n    (let (f, r, e) = (_1, _2, _3) in\n    ( (f, r) :: e ))\n# 12394 \"parse.ml\"\n               : 'warn_error))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 3204 \"parse.mly\"\n    (let op = _1 in\n    ( if op = \"@\" then CAlwaysError else failwith (format1 \"unexpected token %s in warn-error list\" op)))\n# 12402 \"parse.ml\"\n               : 'flag))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 3207 \"parse.mly\"\n    (let op = _1 in\n    ( if op = \"+\" then CWarning else failwith (format1 \"unexpected token %s in warn-error list\" op)))\n# 12410 \"parse.ml\"\n               : 'flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3210 \"parse.mly\"\n    (let _1 = () in\n          ( CSilent ))\n# 12417 \"parse.ml\"\n               : 'flag))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * bool) in\n    Obj.repr(\n# 3215 \"parse.mly\"\n    (let i = _1 in\n    ( format2 \"%s..%s\" (fst i) (fst i) ))\n# 12425 \"parse.ml\"\n               : 'range))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 3218 \"parse.mly\"\n    (let r = _1 in\n    ( r ))\n# 12433 \"parse.ml\"\n               : 'range))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 3223 \"parse.mly\"\n    (let s = _1 in\n             ( s ))\n# 12441 \"parse.ml\"\n               : 'string))\n; (fun 
__caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fsTypeArgs) in\n    Obj.repr(\n# 3228 \"parse.mly\"\n    (let x = _1 in\n        ( Some x ))\n# 12449 \"parse.ml\"\n               : 'some_fsTypeArgs_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3233 \"parse.mly\"\n    (        ( [] ))\n# 12455 \"parse.ml\"\n               : 'right_flexible_list_SEMICOLON_fieldPattern_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fieldPattern) in\n    Obj.repr(\n# 3235 \"parse.mly\"\n    (let x = _1 in\n        ( [x] ))\n# 12463 \"parse.ml\"\n               : 'right_flexible_list_SEMICOLON_fieldPattern_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'fieldPattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_fieldPattern_) in\n    Obj.repr(\n# 3238 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n                                           ( x :: xs ))\n# 12472 \"parse.ml\"\n               : 'right_flexible_list_SEMICOLON_fieldPattern_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3243 \"parse.mly\"\n    (        ( [] ))\n# 12478 \"parse.ml\"\n               : 'right_flexible_list_SEMICOLON_noSeqTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'noSeqTerm) in\n    Obj.repr(\n# 3245 \"parse.mly\"\n    (let x = _1 in\n        ( [x] ))\n# 12486 \"parse.ml\"\n               : 'right_flexible_list_SEMICOLON_noSeqTerm_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'noSeqTerm) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_noSeqTerm_) in\n    Obj.repr(\n# 3248 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n                                           ( x :: xs ))\n# 12495 \"parse.ml\"\n               : 'right_flexible_list_SEMICOLON_noSeqTerm_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3253 
\"parse.mly\"\n    (        ( [] ))\n# 12501 \"parse.ml\"\n               : 'right_flexible_list_SEMICOLON_recordFieldDecl_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'recordFieldDecl) in\n    Obj.repr(\n# 3255 \"parse.mly\"\n    (let x = _1 in\n        ( [x] ))\n# 12509 \"parse.ml\"\n               : 'right_flexible_list_SEMICOLON_recordFieldDecl_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'recordFieldDecl) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_recordFieldDecl_) in\n    Obj.repr(\n# 3258 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n                                           ( x :: xs ))\n# 12518 \"parse.ml\"\n               : 'right_flexible_list_SEMICOLON_recordFieldDecl_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3263 \"parse.mly\"\n    (        ( [] ))\n# 12524 \"parse.ml\"\n               : 'right_flexible_list_SEMICOLON_simpleDef_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simpleDef) in\n    Obj.repr(\n# 3265 \"parse.mly\"\n    (let x = _1 in\n        ( [x] ))\n# 12532 \"parse.ml\"\n               : 'right_flexible_list_SEMICOLON_simpleDef_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simpleDef) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_simpleDef_) in\n    Obj.repr(\n# 3268 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n                                           ( x :: xs ))\n# 12541 \"parse.ml\"\n               : 'right_flexible_list_SEMICOLON_simpleDef_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fieldPattern) in\n    Obj.repr(\n# 3273 \"parse.mly\"\n    (let x = _1 in\n        ( [x] ))\n# 12549 \"parse.ml\"\n               : 'right_flexible_nonempty_list_SEMICOLON_fieldPattern_))\n; (fun __caml_parser_env ->\n    let _1 = 
(Parsing.peek_val __caml_parser_env 2 : 'fieldPattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_fieldPattern_) in\n    Obj.repr(\n# 3276 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n                                           ( x :: xs ))\n# 12558 \"parse.ml\"\n               : 'right_flexible_nonempty_list_SEMICOLON_fieldPattern_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'recordFieldDecl) in\n    Obj.repr(\n# 3281 \"parse.mly\"\n    (let x = _1 in\n        ( [x] ))\n# 12566 \"parse.ml\"\n               : 'right_flexible_nonempty_list_SEMICOLON_recordFieldDecl_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'recordFieldDecl) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_recordFieldDecl_) in\n    Obj.repr(\n# 3284 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n                                           ( x :: xs ))\n# 12575 \"parse.ml\"\n               : 'right_flexible_nonempty_list_SEMICOLON_recordFieldDecl_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simpleDef) in\n    Obj.repr(\n# 3289 \"parse.mly\"\n    (let x = _1 in\n        ( [x] ))\n# 12583 \"parse.ml\"\n               : 'right_flexible_nonempty_list_SEMICOLON_simpleDef_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simpleDef) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'right_flexible_list_SEMICOLON_simpleDef_) in\n    Obj.repr(\n# 3292 \"parse.mly\"\n    (let (x, _2, xs) = (_1, (), _3) in\n                                           ( x :: xs ))\n# 12592 \"parse.ml\"\n               : 'right_flexible_nonempty_list_SEMICOLON_simpleDef_))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 3297 \"parse.mly\"\n    (   ( [] ))\n# 12598 \"parse.ml\"\n               : 'reverse_left_flexible_list_BAR___anonymous_10_))\n; (fun 
__caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'patternBranch) in\n    Obj.repr(\n# 3299 \"parse.mly\"\n    (let pb = _1 in\nlet x =                                                                                                              (pb) in\n   ( [x] ))\n# 12607 \"parse.ml\"\n               : 'reverse_left_flexible_list_BAR___anonymous_10_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'reverse_left_flexible_list_BAR___anonymous_10_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'patternBranch) in\n    Obj.repr(\n# 3303 \"parse.mly\"\n    (let (xs, _2, pb) = (_1, (), _3) in\nlet x =                                                                                                              (pb) in\n   ( x :: xs ))\n# 12617 \"parse.ml\"\n               : 'reverse_left_flexible_list_BAR___anonymous_10_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'patternBranch) in\n    Obj.repr(\n# 3309 \"parse.mly\"\n    (let x = _1 in\nlet _1 =     ( None ) in\n   ( [x] ))\n# 12626 \"parse.ml\"\n               : 'reverse_left_flexible_nonempty_list_BAR_patternBranch_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'patternBranch) in\n    Obj.repr(\n# 3313 \"parse.mly\"\n    (let (x_inlined1, x) = ((), _2) in\nlet _1 =\n  let x = x_inlined1 in\n      ( Some x )\nin\n   ( [x] ))\n# 12638 \"parse.ml\"\n               : 'reverse_left_flexible_nonempty_list_BAR_patternBranch_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'reverse_left_flexible_nonempty_list_BAR_patternBranch_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'patternBranch) in\n    Obj.repr(\n# 3320 \"parse.mly\"\n    (let (xs, _2, x) = (_1, (), _3) in\n   ( x :: xs ))\n# 12647 \"parse.ml\"\n               : 'reverse_left_flexible_nonempty_list_BAR_patternBranch_))\n(* Entry inputFragment *)\n; (fun 
__caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))\n(* Entry oneDeclOrEOF *)\n; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))\n(* Entry term *)\n; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))\n(* Entry warn_error_list *)\n; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))\n|]\nlet yytables =\n  { Parsing.actions=yyact;\n    Parsing.transl_const=yytransl_const;\n    Parsing.transl_block=yytransl_block;\n    Parsing.lhs=yylhs;\n    Parsing.len=yylen;\n    Parsing.defred=yydefred;\n    Parsing.dgoto=yydgoto;\n    Parsing.sindex=yysindex;\n    Parsing.rindex=yyrindex;\n    Parsing.gindex=yygindex;\n    Parsing.tablesize=yytablesize;\n    Parsing.table=yytable;\n    Parsing.check=yycheck;\n    Parsing.error_function=parse_error;\n    Parsing.names_const=yynames_const;\n    Parsing.names_block=yynames_block }\nlet inputFragment (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =\n   (Parsing.yyparse yytables 1 lexfun lexbuf : FStar_Parser_AST.inputFragment)\nlet oneDeclOrEOF (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =\n   (Parsing.yyparse yytables 2 lexfun lexbuf : FStar_Parser_AST.decl option)\nlet term (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =\n   (Parsing.yyparse yytables 3 lexfun lexbuf : FStar_Parser_AST.term)\nlet warn_error_list (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =\n   (Parsing.yyparse yytables 4 lexfun lexbuf : (FStar_Errors_Codes.error_flag * string) list)\n;;\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Parser_ParseIt.ml",
    "content": "module U = FStar_Compiler_Util\nopen FStar_Errors\n(* open FStar_Syntax_Syntax *)\nopen Lexing\nopen FStar_Sedlexing\nopen FStar_Errors_Codes\nmodule Codes = FStar_Errors_Codes\n\ntype filename = string\n\ntype input_frag = {\n    frag_fname:filename;\n    frag_text:string;\n    frag_line:Prims.int;\n    frag_col:Prims.int\n}\n\nlet resetLexbufPos filename lexbuf =\n  lexbuf.cur_p <- {\n    pos_fname= filename;\n    pos_cnum = 0;\n    pos_bol = 0;\n    pos_lnum = 1 }\n\nlet setLexbufPos filename lexbuf line col =\n  lexbuf.cur_p <- {\n    pos_fname= filename;\n    pos_cnum = col;\n    pos_bol  = 0;\n    pos_lnum = line }\n\nmodule Path = BatPathGen.OfString\n\nlet find_file filename =\n      raise_err (Fatal_ModuleOrFileNotFound, U.format1 \"Unable to find file: %s\\n\" filename)\n\n(* let vfs_entries : (U.time * string) U.smap = U.smap_create (Z.of_int 1) *)\n\n(* let read_vfs_entry fname = *)\n(*   U.smap_try_find vfs_entries (U.normalize_file_path fname) *)\n\n(* let add_vfs_entry fname contents = *)\n(*   U.smap_add vfs_entries (U.normalize_file_path fname) (U.now (), contents) *)\n\n(* let get_file_last_modification_time filename = *)\n(*   match read_vfs_entry filename with *)\n(*   | Some (mtime, _contents) -> mtime *)\n(*   | None -> U.get_file_last_modification_time filename *)\n\nlet read_physical_file (filename: string) =\n  (* BatFile.with_file_in uses Unix.openfile (which isn't available in\n     js_of_ocaml) instead of Pervasives.open_in, so we don't use it here. 
*)\n  try\n    let channel = open_in_bin filename in\n    BatPervasives.finally\n      (fun () -> close_in channel)\n      (fun channel -> really_input_string channel (in_channel_length channel))\n      channel\n  with e ->\n    raise_err (Fatal_UnableToReadFile, U.format1 \"Unable to read file %s\\n\" filename)\n\nlet read_file (filename:string) =\n  let debug = false in\n  let filename = find_file filename in\n  if debug then U.print1 \"Opening file %s\\n\" filename;\n  filename, read_physical_file filename\n\nlet fs_extensions = [\".fs\"; \".fsi\"]\nlet fst_extensions = [\".fst\"; \".fsti\"]\nlet interface_extensions = [\".fsti\"; \".fsi\"]\n\nlet valid_extensions () =\n  fst_extensions @ if false then fs_extensions else []\n\nlet has_extension file extensions =\n  FStar_List.existsb (U.ends_with file) extensions\n\nlet check_extension fn =\n  if (not (has_extension fn (valid_extensions ()))) then\n    let message = U.format1 \"Unrecognized extension '%s'\" fn in\n    raise_err (Fatal_UnrecognizedExtension, if has_extension fn fs_extensions then\n                  message ^ \" (pass --MLish to process .fs and .fsi files)\"\n                else message)\n\ntype parse_frag =\n    | Filename of filename\n    | Toplevel of input_frag\n    | Incremental of input_frag\n    | Fragment of input_frag\n\ntype parse_error = (Codes.raw_error * string * FStar_Compiler_Range.range)\n\n\ntype code_fragment = {\n   range: FStar_Compiler_Range.range;\n   code: string;\n}\n\ntype parse_result =\n    | ASTFragment of (FStar_Parser_AST.inputFragment * (string * FStar_Compiler_Range.range) list)\n    | IncrementalFragment of ((FStar_Parser_AST.decl * code_fragment) list * (string * FStar_Compiler_Range.range) list * parse_error option)\n    | Term of FStar_Parser_AST.term\n    | ParseError of parse_error\n\nmodule BU = FStar_Compiler_Util\nmodule Range = FStar_Compiler_Range\n\nlet parse fn =\n  FStar_Parser_Util.warningHandler := (function\n    | e -> Printf.printf \"There was 
some warning (TODO)\\n\");\n\n  let lexbuf, filename, contents = match fn with\n    | Filename f ->\n        check_extension f;\n        let f', contents = read_file f in\n        (try create contents f' 1 0, f', contents\n         with _ -> raise_err (Fatal_InvalidUTF8Encoding, U.format1 \"File %s has invalid UTF-8 encoding.\\n\" f'))\n    | Incremental s\n    | Toplevel s\n    | Fragment s ->\n      create s.frag_text s.frag_fname (Z.to_int s.frag_line) (Z.to_int s.frag_col), \"<input>\", s.frag_text\n  in\n\n  let lexer () =\n    let tok = FStar_Parser_LexFStar.token lexbuf in\n    (tok, lexbuf.start_p, lexbuf.cur_p)\n  in\n  let range_of_positions start fin = \n    let start_pos = FStar_Parser_Util.pos_of_lexpos start in\n    let end_pos = FStar_Parser_Util.pos_of_lexpos fin in\n    FStar_Compiler_Range.mk_range filename start_pos end_pos\n  in\n  let err_of_parse_error () =\n      let pos = lexbuf.cur_p in\n      Fatal_SyntaxError,\n      \"Syntax error\",\n      range_of_positions pos pos\n  in\n  let parse_incremental_decls () =\n      let parse_one_decl = MenhirLib.Convert.Simplified.traditional2revised FStar_Parser_Parse.oneDeclOrEOF in\n      let contents_at =\n        let lines = U.splitlines contents in\n        let split_line_at_col line col =\n            if col > 0\n            then (\n                (* Don't index directly into the string, since this is a UTF-8 string.\n                   Convert first to a list of charaters, index into that, and then convert\n                   back to a string *)\n                let chars = FStar_String.list_of_string line in\n                if col <= List.length chars\n                then (\n                  let prefix, suffix = FStar_Compiler_Util.first_N (Z.of_int col) chars in\n                  Some (FStar_String.string_of_list prefix, \n                        FStar_String.string_of_list suffix)\n                )\n                else (\n                  None\n                )\n            )\n        
    else None\n        in\n        let line_from_col line pos =\n          match split_line_at_col line pos with\n          | None -> None\n          | Some (_, p) -> Some p\n        in\n        let line_to_col line pos =\n          match split_line_at_col line pos with\n          | None -> None\n          | Some (p, _) -> Some p\n        in\n        (* Find the raw content of the input from the line of the start_pos to the end_pos.\n           This is used by Interactive.Incremental to record exactly the raw content of the\n           fragment that was checked *) \n        fun (range:Range.range) ->\n          (* discard all lines until the start line *)\n          let start_pos = Range.start_of_range range in\n          let end_pos = Range.end_of_range range in\n          let start_line = Z.to_int (Range.line_of_pos start_pos) in\n          let start_col = Z.to_int (Range.col_of_pos start_pos) in\n          let end_line = Z.to_int (Range.line_of_pos end_pos) in\n          let end_col = Z.to_int (Range.col_of_pos end_pos) in          \n          let suffix = \n            FStar_Compiler_Util.nth_tail \n              (Z.of_int (if start_line > 0 then start_line - 1 else 0))\n              lines\n          in\n          (* Take all the lines between the start and end lines *)\n          let text, rest =\n            FStar_Compiler_Util.first_N\n              (Z.of_int (end_line - start_line))\n              suffix\n          in\n          let text =\n            match text with\n            | first_line::rest -> (\n              match line_from_col first_line start_col with\n              | Some s -> s :: rest\n              | _ -> text\n            )\n            | _ -> text\n          in\n          let text = \n          (* For the last line itself, take the prefix of it up to the character of the end_pos *)\n            match rest with\n            | last::_ -> (\n              match line_to_col last end_col with\n              | None -> text\n              | 
Some last ->\n                (* The last line is also the first line *)\n                match text with\n                | [] -> (\n                  match line_from_col last start_col with\n                  | None -> [last]\n                  | Some l -> [l]\n                )\n                | _ -> text @ [last]\n            )\n            | _ -> text\n          in\n          { range;\n            code = FStar_String.concat \"\\n\" text }\n      in\n      let open FStar_Pervasives in\n      let rec parse decls =\n        let start_pos = current_pos lexbuf in\n        let d =\n          try\n            (* Reset the gensym between decls, to ensure determinism, \n               otherwise, every _ is parsed as different name *)\n            FStar_Ident.reset_gensym();\n            Inl (parse_one_decl lexer)\n          with \n          | FStar_Errors.Error(e, msg, r, _ctx) ->\n            Inr (e, msg, r)\n\n          | Parsing.Parse_error as _e -> \n            Inr (err_of_parse_error ())\n        in\n        match d with\n        | Inl None -> List.rev decls, None\n        | Inl (Some d) -> \n          (* The parser may advance the lexer beyond the decls last token.\n             E.g., in `let f x = 0 let g = 1`, we will have parsed the decl for `f`\n                   but the lexer will have advanced to `let ^ g ...` since the\n                   parser will have looked ahead.\n                   Rollback the lexer one token for declarations whose syntax\n                   requires such lookahead to complete a production.\n          *)\n          let end_pos =\n            if not (FStar_Parser_AST.decl_syntax_is_delimited d)\n            then (\n              rollback lexbuf;\n              current_pos lexbuf\n            )\n            else (\n              current_pos lexbuf\n            )\n          in\n          let raw_contents = contents_at d.drange in\n          (*\n          if FStar_Options.debug_any()\n          then (\n            
FStar_Compiler_Util.print4 \"Parsed decl@%s=%s\\nRaw contents@%s=%s\\n\"\n              (FStar_Compiler_Range.string_of_def_range d.drange)\n              (FStar_Parser_AST.decl_to_string d)\n              (FStar_Compiler_Range.string_of_def_range raw_contents.range)\n              raw_contents.code\n          );\n          *)\n          parse ((d, raw_contents)::decls)\n        | Inr err -> List.rev decls, Some err\n      in\n      parse []\n  in\n  let parse_incremental_fragment () =\n      let decls, err_opt = parse_incremental_decls () in\n      match err_opt with\n      | None ->\n        FStar_Parser_AST.as_frag (List.map fst decls)\n      | Some (e, msg, r) ->\n        raise (FStar_Errors.Error(e, msg, r, []))\n  in\n\n  try\n    match fn with\n    | Filename _\n    | Toplevel _ -> begin\n      let fileOrFragment =\n          MenhirLib.Convert.Simplified.traditional2revised FStar_Parser_Parse.inputFragment lexer\n      in\n      let frags = match fileOrFragment with\n          | FStar_Pervasives.Inl modul ->\n             if has_extension filename interface_extensions\n             then match modul with\n                  | FStar_Parser_AST.Module(l,d) ->\n                    FStar_Pervasives.Inl (FStar_Parser_AST.Interface(l, d, true))\n                  | _ -> failwith \"Impossible\"\n             else FStar_Pervasives.Inl modul\n          | _ -> fileOrFragment\n      in ASTFragment (frags, FStar_Parser_Util.flush_comments ())\n      end\n      \n    | Incremental _ ->\n      let decls, err_opt = parse_incremental_decls () in\n      IncrementalFragment(decls, FStar_Parser_Util.flush_comments(), err_opt)\n    \n    | Fragment _ ->\n      Term (MenhirLib.Convert.Simplified.traditional2revised FStar_Parser_Parse.term lexer)\n  with\n    | FStar_Errors.Empty_frag ->\n      ASTFragment (FStar_Pervasives.Inr [], [])\n\n    | FStar_Errors.Error(e, msg, r, _ctx) ->\n      ParseError (e, msg, r)\n\n    | Parsing.Parse_error as _e ->\n      ParseError 
(err_of_parse_error())\n\n(** Parsing of command-line error/warning/silent flags. *)\nlet parse_warn_error s =\n  let user_flags =\n    if s = \"\"\n    then []\n    else\n      let lexbuf = FStar_Sedlexing.create s \"\" 0 (String.length s) in\n      let lexer() = let tok = FStar_Parser_LexFStar.token lexbuf in\n        (tok, lexbuf.start_p, lexbuf.cur_p)\n      in\n      try\n        MenhirLib.Convert.Simplified.traditional2revised FStar_Parser_Parse.warn_error_list lexer\n      with e ->\n        failwith (U.format1 \"Malformed warn-error list: %s\" s)\n  in\n  FStar_Errors.update_flags user_flags\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Parser_ToDocument.ml",
    "content": "open Prims\nlet (maybe_unthunk : FStar_Parser_AST.term -> FStar_Parser_AST.term) =\n  fun t ->\n    match t.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Abs (uu___::[], body) -> body\n    | uu___ -> t\nlet (min : Prims.int -> Prims.int -> Prims.int) =\n  fun x -> fun y -> if x > y then y else x\nlet (max : Prims.int -> Prims.int -> Prims.int) =\n  fun x -> fun y -> if x > y then x else y\nlet map_rev : 'a 'b . ('a -> 'b) -> 'a Prims.list -> 'b Prims.list =\n  fun f ->\n    fun l ->\n      let rec aux l1 acc =\n        match l1 with\n        | [] -> acc\n        | x::xs ->\n            let uu___ = let uu___1 = f x in uu___1 :: acc in aux xs uu___ in\n      aux l []\nlet map_if_all :\n  'a 'b .\n    ('a -> 'b FStar_Pervasives_Native.option) ->\n      'a Prims.list -> 'b Prims.list FStar_Pervasives_Native.option\n  =\n  fun f ->\n    fun l ->\n      let rec aux l1 acc =\n        match l1 with\n        | [] -> acc\n        | x::xs ->\n            let uu___ = f x in\n            (match uu___ with\n             | FStar_Pervasives_Native.Some r -> aux xs (r :: acc)\n             | FStar_Pervasives_Native.None -> []) in\n      let r = aux l [] in\n      if (FStar_Compiler_List.length l) = (FStar_Compiler_List.length r)\n      then FStar_Pervasives_Native.Some r\n      else FStar_Pervasives_Native.None\nlet rec all : 'a . 
('a -> Prims.bool) -> 'a Prims.list -> Prims.bool =\n  fun f ->\n    fun l ->\n      match l with\n      | [] -> true\n      | x::xs -> let uu___ = f x in if uu___ then all f xs else false\nlet (all1_explicit :\n  (FStar_Parser_AST.term * FStar_Parser_AST.imp) Prims.list -> Prims.bool) =\n  fun args ->\n    (Prims.op_Negation (FStar_Compiler_List.isEmpty args)) &&\n      (FStar_Compiler_Util.for_all\n         (fun uu___ ->\n            match uu___ with\n            | (uu___1, FStar_Parser_AST.Nothing) -> true\n            | uu___1 -> false) args)\nlet (unfold_tuples : Prims.bool FStar_Compiler_Effect.ref) =\n  FStar_Compiler_Util.mk_ref true\nlet (str : Prims.string -> FStar_Pprint.document) =\n  fun s -> FStar_Pprint.doc_of_string s\nlet default_or_map :\n  'uuuuu 'uuuuu1 .\n    'uuuuu ->\n      ('uuuuu1 -> 'uuuuu) -> 'uuuuu1 FStar_Pervasives_Native.option -> 'uuuuu\n  =\n  fun n ->\n    fun f ->\n      fun x ->\n        match x with\n        | FStar_Pervasives_Native.None -> n\n        | FStar_Pervasives_Native.Some x' -> f x'\nlet (prefix2 :\n  FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document) =\n  fun prefix_ ->\n    fun body ->\n      FStar_Pprint.prefix (Prims.of_int (2)) Prims.int_one prefix_ body\nlet (prefix2_nonempty :\n  FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document) =\n  fun prefix_ ->\n    fun body ->\n      if body = FStar_Pprint.empty then prefix_ else prefix2 prefix_ body\nlet (op_Hat_Slash_Plus_Hat :\n  FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document) =\n  fun prefix_ -> fun body -> prefix2 prefix_ body\nlet (jump2 : FStar_Pprint.document -> FStar_Pprint.document) =\n  fun body -> FStar_Pprint.jump (Prims.of_int (2)) Prims.int_one body\nlet (infix2 :\n  FStar_Pprint.document ->\n    FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document)\n  = FStar_Pprint.infix (Prims.of_int (2)) Prims.int_one\nlet (infix0 :\n  FStar_Pprint.document ->\n    
FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document)\n  = FStar_Pprint.infix Prims.int_zero Prims.int_one\nlet (break1 : FStar_Pprint.document) = FStar_Pprint.break_ Prims.int_one\nlet separate_break_map :\n  'uuuuu .\n    FStar_Pprint.document ->\n      ('uuuuu -> FStar_Pprint.document) ->\n        'uuuuu Prims.list -> FStar_Pprint.document\n  =\n  fun sep ->\n    fun f ->\n      fun l ->\n        let uu___ =\n          let uu___1 =\n            let uu___2 = FStar_Pprint.op_Hat_Hat sep break1 in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n          FStar_Pprint.separate_map uu___1 f l in\n        FStar_Pprint.group uu___\nlet precede_break_separate_map :\n  'uuuuu .\n    FStar_Pprint.document ->\n      FStar_Pprint.document ->\n        ('uuuuu -> FStar_Pprint.document) ->\n          'uuuuu Prims.list -> FStar_Pprint.document\n  =\n  fun prec ->\n    fun sep ->\n      fun f ->\n        fun l ->\n          let uu___ =\n            let uu___1 = FStar_Pprint.op_Hat_Hat prec FStar_Pprint.space in\n            let uu___2 =\n              let uu___3 = FStar_Compiler_List.hd l in\n              FStar_Compiler_Effect.op_Bar_Greater uu___3 f in\n            FStar_Pprint.precede uu___1 uu___2 in\n          let uu___1 =\n            let uu___2 = FStar_Compiler_List.tl l in\n            FStar_Pprint.concat_map\n              (fun x ->\n                 let uu___3 =\n                   let uu___4 =\n                     let uu___5 = f x in\n                     FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___5 in\n                   FStar_Pprint.op_Hat_Hat sep uu___4 in\n                 FStar_Pprint.op_Hat_Hat break1 uu___3) uu___2 in\n          FStar_Pprint.op_Hat_Hat uu___ uu___1\nlet concat_break_map :\n  'uuuuu .\n    ('uuuuu -> FStar_Pprint.document) ->\n      'uuuuu Prims.list -> FStar_Pprint.document\n  =\n  fun f ->\n    fun l ->\n      let uu___ =\n        FStar_Pprint.concat_map\n          (fun x -> let uu___1 = f x in 
FStar_Pprint.op_Hat_Hat uu___1 break1)\n          l in\n      FStar_Pprint.group uu___\nlet (parens_with_nesting : FStar_Pprint.document -> FStar_Pprint.document) =\n  fun contents ->\n    FStar_Pprint.surround (Prims.of_int (2)) Prims.int_zero\n      FStar_Pprint.lparen contents FStar_Pprint.rparen\nlet (soft_parens_with_nesting :\n  FStar_Pprint.document -> FStar_Pprint.document) =\n  fun contents ->\n    FStar_Pprint.soft_surround (Prims.of_int (2)) Prims.int_zero\n      FStar_Pprint.lparen contents FStar_Pprint.rparen\nlet (braces_with_nesting : FStar_Pprint.document -> FStar_Pprint.document) =\n  fun contents ->\n    FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one\n      FStar_Pprint.lbrace contents FStar_Pprint.rbrace\nlet (soft_braces_with_nesting :\n  FStar_Pprint.document -> FStar_Pprint.document) =\n  fun contents ->\n    FStar_Pprint.soft_surround (Prims.of_int (2)) Prims.int_one\n      FStar_Pprint.lbrace contents FStar_Pprint.rbrace\nlet (soft_braces_with_nesting_tight :\n  FStar_Pprint.document -> FStar_Pprint.document) =\n  fun contents ->\n    FStar_Pprint.soft_surround (Prims.of_int (2)) Prims.int_zero\n      FStar_Pprint.lbrace contents FStar_Pprint.rbrace\nlet (brackets_with_nesting : FStar_Pprint.document -> FStar_Pprint.document)\n  =\n  fun contents ->\n    FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one\n      FStar_Pprint.lbracket contents FStar_Pprint.rbracket\nlet (soft_brackets_with_nesting :\n  FStar_Pprint.document -> FStar_Pprint.document) =\n  fun contents ->\n    FStar_Pprint.soft_surround (Prims.of_int (2)) Prims.int_one\n      FStar_Pprint.lbracket contents FStar_Pprint.rbracket\nlet (soft_begin_end_with_nesting :\n  FStar_Pprint.document -> FStar_Pprint.document) =\n  fun contents ->\n    let uu___ = str \"begin\" in\n    let uu___1 = str \"end\" in\n    FStar_Pprint.soft_surround (Prims.of_int (2)) Prims.int_one uu___\n      contents uu___1\nlet (tc_arg : FStar_Pprint.document -> FStar_Pprint.document) =\n  fun 
contents ->\n    let uu___ = str \"{|\" in\n    let uu___1 = str \"|}\" in\n    FStar_Pprint.soft_surround (Prims.of_int (2)) Prims.int_one uu___\n      contents uu___1\nlet (is_tc_binder : FStar_Parser_AST.binder -> Prims.bool) =\n  fun b ->\n    match b.FStar_Parser_AST.aqual with\n    | FStar_Pervasives_Native.Some (FStar_Parser_AST.TypeClassArg) -> true\n    | uu___ -> false\nlet (is_meta_qualifier :\n  FStar_Parser_AST.arg_qualifier FStar_Pervasives_Native.option -> Prims.bool)\n  =\n  fun aq ->\n    match aq with\n    | FStar_Pervasives_Native.Some (FStar_Parser_AST.Meta uu___) -> true\n    | uu___ -> false\nlet (is_joinable_binder : FStar_Parser_AST.binder -> Prims.bool) =\n  fun b ->\n    (let uu___ = is_tc_binder b in Prims.op_Negation uu___) &&\n      (Prims.op_Negation (is_meta_qualifier b.FStar_Parser_AST.aqual))\nlet separate_map_last :\n  'uuuuu .\n    FStar_Pprint.document ->\n      (Prims.bool -> 'uuuuu -> FStar_Pprint.document) ->\n        'uuuuu Prims.list -> FStar_Pprint.document\n  =\n  fun sep ->\n    fun f ->\n      fun es ->\n        let l = FStar_Compiler_List.length es in\n        let es1 =\n          FStar_Compiler_List.mapi\n            (fun i -> fun e -> f (i <> (l - Prims.int_one)) e) es in\n        FStar_Pprint.separate sep es1\nlet separate_break_map_last :\n  'uuuuu .\n    FStar_Pprint.document ->\n      (Prims.bool -> 'uuuuu -> FStar_Pprint.document) ->\n        'uuuuu Prims.list -> FStar_Pprint.document\n  =\n  fun sep ->\n    fun f ->\n      fun l ->\n        let uu___ =\n          let uu___1 =\n            let uu___2 = FStar_Pprint.op_Hat_Hat sep break1 in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n          separate_map_last uu___1 f l in\n        FStar_Pprint.group uu___\nlet separate_map_or_flow :\n  'uuuuu .\n    FStar_Pprint.document ->\n      ('uuuuu -> FStar_Pprint.document) ->\n        'uuuuu Prims.list -> FStar_Pprint.document\n  =\n  fun sep ->\n    fun f ->\n      fun l ->\n        if 
(FStar_Compiler_List.length l) < (Prims.of_int (10))\n        then FStar_Pprint.separate_map sep f l\n        else FStar_Pprint.flow_map sep f l\nlet flow_map_last :\n  'uuuuu .\n    FStar_Pprint.document ->\n      (Prims.bool -> 'uuuuu -> FStar_Pprint.document) ->\n        'uuuuu Prims.list -> FStar_Pprint.document\n  =\n  fun sep ->\n    fun f ->\n      fun es ->\n        let l = FStar_Compiler_List.length es in\n        let es1 =\n          FStar_Compiler_List.mapi\n            (fun i -> fun e -> f (i <> (l - Prims.int_one)) e) es in\n        FStar_Pprint.flow sep es1\nlet separate_map_or_flow_last :\n  'uuuuu .\n    FStar_Pprint.document ->\n      (Prims.bool -> 'uuuuu -> FStar_Pprint.document) ->\n        'uuuuu Prims.list -> FStar_Pprint.document\n  =\n  fun sep ->\n    fun f ->\n      fun l ->\n        if (FStar_Compiler_List.length l) < (Prims.of_int (10))\n        then separate_map_last sep f l\n        else flow_map_last sep f l\nlet (separate_or_flow :\n  FStar_Pprint.document ->\n    FStar_Pprint.document Prims.list -> FStar_Pprint.document)\n  = fun sep -> fun l -> separate_map_or_flow sep FStar_Pervasives.id l\nlet (surround_maybe_empty :\n  Prims.int ->\n    Prims.int ->\n      FStar_Pprint.document ->\n        FStar_Pprint.document ->\n          FStar_Pprint.document -> FStar_Pprint.document)\n  =\n  fun n ->\n    fun b ->\n      fun doc1 ->\n        fun doc2 ->\n          fun doc3 ->\n            if doc2 = FStar_Pprint.empty\n            then\n              let uu___ = FStar_Pprint.op_Hat_Slash_Hat doc1 doc3 in\n              FStar_Pprint.group uu___\n            else FStar_Pprint.surround n b doc1 doc2 doc3\nlet soft_surround_separate_map :\n  'uuuuu .\n    Prims.int ->\n      Prims.int ->\n        FStar_Pprint.document ->\n          FStar_Pprint.document ->\n            FStar_Pprint.document ->\n              FStar_Pprint.document ->\n                ('uuuuu -> FStar_Pprint.document) ->\n                  'uuuuu Prims.list -> 
FStar_Pprint.document\n  =\n  fun n ->\n    fun b ->\n      fun void_ ->\n        fun opening ->\n          fun sep ->\n            fun closing ->\n              fun f ->\n                fun xs ->\n                  if xs = []\n                  then void_\n                  else\n                    (let uu___1 = FStar_Pprint.separate_map sep f xs in\n                     FStar_Pprint.soft_surround n b opening uu___1 closing)\nlet soft_surround_map_or_flow :\n  'uuuuu .\n    Prims.int ->\n      Prims.int ->\n        FStar_Pprint.document ->\n          FStar_Pprint.document ->\n            FStar_Pprint.document ->\n              FStar_Pprint.document ->\n                ('uuuuu -> FStar_Pprint.document) ->\n                  'uuuuu Prims.list -> FStar_Pprint.document\n  =\n  fun n ->\n    fun b ->\n      fun void_ ->\n        fun opening ->\n          fun sep ->\n            fun closing ->\n              fun f ->\n                fun xs ->\n                  if xs = []\n                  then void_\n                  else\n                    (let uu___1 = separate_map_or_flow sep f xs in\n                     FStar_Pprint.soft_surround n b opening uu___1 closing)\nlet (is_unit : FStar_Parser_AST.term -> Prims.bool) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Const (FStar_Const.Const_unit) -> true\n    | uu___ -> false\nlet (matches_var : FStar_Parser_AST.term -> FStar_Ident.ident -> Prims.bool)\n  =\n  fun t ->\n    fun x ->\n      match t.FStar_Parser_AST.tm with\n      | FStar_Parser_AST.Var y ->\n          let uu___ = FStar_Ident.string_of_id x in\n          let uu___1 = FStar_Ident.string_of_lid y in uu___ = uu___1\n      | uu___ -> false\nlet (is_tuple_constructor : FStar_Ident.lident -> Prims.bool) =\n  FStar_Parser_Const.is_tuple_data_lid'\nlet (is_dtuple_constructor : FStar_Ident.lident -> Prims.bool) =\n  FStar_Parser_Const.is_dtuple_data_lid'\nlet (is_list_structure :\n  FStar_Ident.lident ->\n    FStar_Ident.lident -> 
FStar_Parser_AST.term -> Prims.bool)\n  =\n  fun cons_lid ->\n    fun nil_lid ->\n      let rec aux e =\n        match e.FStar_Parser_AST.tm with\n        | FStar_Parser_AST.Construct (lid, []) ->\n            FStar_Ident.lid_equals lid nil_lid\n        | FStar_Parser_AST.Construct (lid, uu___::(e2, uu___1)::[]) ->\n            (FStar_Ident.lid_equals lid cons_lid) && (aux e2)\n        | uu___ -> false in\n      aux\nlet (is_list : FStar_Parser_AST.term -> Prims.bool) =\n  is_list_structure FStar_Parser_Const.cons_lid FStar_Parser_Const.nil_lid\nlet rec (extract_from_list :\n  FStar_Parser_AST.term -> FStar_Parser_AST.term Prims.list) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Construct (uu___, []) -> []\n    | FStar_Parser_AST.Construct\n        (uu___,\n         (e1, FStar_Parser_AST.Nothing)::(e2, FStar_Parser_AST.Nothing)::[])\n        -> let uu___1 = extract_from_list e2 in e1 :: uu___1\n    | uu___ ->\n        let uu___1 =\n          let uu___2 = FStar_Parser_AST.term_to_string e in\n          FStar_Compiler_Util.format1 \"Not a list %s\" uu___2 in\n        failwith uu___1\nlet (is_array : FStar_Parser_AST.term -> Prims.bool) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.App\n        ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var lid;\n           FStar_Parser_AST.range = uu___; FStar_Parser_AST.level = uu___1;_},\n         l, FStar_Parser_AST.Nothing)\n        ->\n        (FStar_Ident.lid_equals lid FStar_Parser_Const.array_of_list_lid) &&\n          (is_list l)\n    | uu___ -> false\nlet rec (is_ref_set : FStar_Parser_AST.term -> Prims.bool) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Var maybe_empty_lid ->\n        FStar_Ident.lid_equals maybe_empty_lid FStar_Parser_Const.set_empty\n    | FStar_Parser_AST.App\n        ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var maybe_singleton_lid;\n           FStar_Parser_AST.range = uu___; FStar_Parser_AST.level = uu___1;_},\n  
       {\n           FStar_Parser_AST.tm = FStar_Parser_AST.App\n             ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var maybe_addr_of_lid;\n                FStar_Parser_AST.range = uu___2;\n                FStar_Parser_AST.level = uu___3;_},\n              e1, FStar_Parser_AST.Nothing);\n           FStar_Parser_AST.range = uu___4;\n           FStar_Parser_AST.level = uu___5;_},\n         FStar_Parser_AST.Nothing)\n        ->\n        (FStar_Ident.lid_equals maybe_singleton_lid\n           FStar_Parser_Const.set_singleton)\n          &&\n          (FStar_Ident.lid_equals maybe_addr_of_lid\n             FStar_Parser_Const.heap_addr_of_lid)\n    | FStar_Parser_AST.App\n        ({\n           FStar_Parser_AST.tm = FStar_Parser_AST.App\n             ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var maybe_union_lid;\n                FStar_Parser_AST.range = uu___;\n                FStar_Parser_AST.level = uu___1;_},\n              e1, FStar_Parser_AST.Nothing);\n           FStar_Parser_AST.range = uu___2;\n           FStar_Parser_AST.level = uu___3;_},\n         e2, FStar_Parser_AST.Nothing)\n        ->\n        ((FStar_Ident.lid_equals maybe_union_lid FStar_Parser_Const.set_union)\n           && (is_ref_set e1))\n          && (is_ref_set e2)\n    | uu___ -> false\nlet rec (extract_from_ref_set :\n  FStar_Parser_AST.term -> FStar_Parser_AST.term Prims.list) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Var uu___ -> []\n    | FStar_Parser_AST.App\n        ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var uu___;\n           FStar_Parser_AST.range = uu___1;\n           FStar_Parser_AST.level = uu___2;_},\n         {\n           FStar_Parser_AST.tm = FStar_Parser_AST.App\n             ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var uu___3;\n                FStar_Parser_AST.range = uu___4;\n                FStar_Parser_AST.level = uu___5;_},\n              e1, FStar_Parser_AST.Nothing);\n           FStar_Parser_AST.range = uu___6;\n           
FStar_Parser_AST.level = uu___7;_},\n         FStar_Parser_AST.Nothing)\n        -> [e1]\n    | FStar_Parser_AST.App\n        ({\n           FStar_Parser_AST.tm = FStar_Parser_AST.App\n             ({ FStar_Parser_AST.tm = FStar_Parser_AST.Var uu___;\n                FStar_Parser_AST.range = uu___1;\n                FStar_Parser_AST.level = uu___2;_},\n              e1, FStar_Parser_AST.Nothing);\n           FStar_Parser_AST.range = uu___3;\n           FStar_Parser_AST.level = uu___4;_},\n         e2, FStar_Parser_AST.Nothing)\n        ->\n        let uu___5 = extract_from_ref_set e1 in\n        let uu___6 = extract_from_ref_set e2 in\n        FStar_Compiler_List.op_At uu___5 uu___6\n    | uu___ ->\n        let uu___1 =\n          let uu___2 = FStar_Parser_AST.term_to_string e in\n          FStar_Compiler_Util.format1 \"Not a ref set %s\" uu___2 in\n        failwith uu___1\nlet (is_general_application : FStar_Parser_AST.term -> Prims.bool) =\n  fun e ->\n    let uu___ = (is_array e) || (is_ref_set e) in Prims.op_Negation uu___\nlet (is_general_construction : FStar_Parser_AST.term -> Prims.bool) =\n  fun e -> let uu___ = is_list e in Prims.op_Negation uu___\nlet (is_general_prefix_op : FStar_Ident.ident -> Prims.bool) =\n  fun op ->\n    let op_starting_char =\n      let uu___ = FStar_Ident.string_of_id op in\n      FStar_Compiler_Util.char_at uu___ Prims.int_zero in\n    ((op_starting_char = 33) || (op_starting_char = 63)) ||\n      ((op_starting_char = 126) &&\n         (let uu___ = FStar_Ident.string_of_id op in uu___ <> \"~\"))\nlet (head_and_args :\n  FStar_Parser_AST.term ->\n    (FStar_Parser_AST.term * (FStar_Parser_AST.term * FStar_Parser_AST.imp)\n      Prims.list))\n  =\n  fun e ->\n    let rec aux e1 acc =\n      match e1.FStar_Parser_AST.tm with\n      | FStar_Parser_AST.App (head, arg, imp) -> aux head ((arg, imp) :: acc)\n      | uu___ -> (e1, acc) in\n    aux e []\ntype associativity =\n  | Left \n  | Right \n  | NonAssoc \nlet (uu___is_Left : 
associativity -> Prims.bool) =\n  fun projectee -> match projectee with | Left -> true | uu___ -> false\nlet (uu___is_Right : associativity -> Prims.bool) =\n  fun projectee -> match projectee with | Right -> true | uu___ -> false\nlet (uu___is_NonAssoc : associativity -> Prims.bool) =\n  fun projectee -> match projectee with | NonAssoc -> true | uu___ -> false\ntype token =\n  | StartsWith of FStar_Char.char \n  | Exact of Prims.string \n  | UnicodeOperator \nlet (uu___is_StartsWith : token -> Prims.bool) =\n  fun projectee ->\n    match projectee with | StartsWith _0 -> true | uu___ -> false\nlet (__proj__StartsWith__item___0 : token -> FStar_Char.char) =\n  fun projectee -> match projectee with | StartsWith _0 -> _0\nlet (uu___is_Exact : token -> Prims.bool) =\n  fun projectee -> match projectee with | Exact _0 -> true | uu___ -> false\nlet (__proj__Exact__item___0 : token -> Prims.string) =\n  fun projectee -> match projectee with | Exact _0 -> _0\nlet (uu___is_UnicodeOperator : token -> Prims.bool) =\n  fun projectee ->\n    match projectee with | UnicodeOperator -> true | uu___ -> false\ntype associativity_level = (associativity * token Prims.list)\nlet (token_to_string : token -> Prims.string) =\n  fun uu___ ->\n    match uu___ with\n    | StartsWith c ->\n        Prims.op_Hat (FStar_Compiler_Util.string_of_char c) \".*\"\n    | Exact s -> s\n    | UnicodeOperator -> \"<unicode-op>\"\nlet (is_non_latin_char : FStar_Char.char -> Prims.bool) =\n  fun s -> (FStar_Compiler_Util.int_of_char s) > (Prims.of_int (0x024f))\nlet (matches_token : Prims.string -> token -> Prims.bool) =\n  fun s ->\n    fun uu___ ->\n      match uu___ with\n      | StartsWith c ->\n          let uu___1 = FStar_String.get s Prims.int_zero in uu___1 = c\n      | Exact s' -> s = s'\n      | UnicodeOperator ->\n          let uu___1 = FStar_String.get s Prims.int_zero in\n          is_non_latin_char uu___1\nlet matches_level :\n  'uuuuu . 
Prims.string -> ('uuuuu * token Prims.list) -> Prims.bool =\n  fun s ->\n    fun uu___ ->\n      match uu___ with\n      | (assoc_levels, tokens) ->\n          let uu___1 = FStar_Compiler_List.tryFind (matches_token s) tokens in\n          uu___1 <> FStar_Pervasives_Native.None\nlet (opinfix4 : associativity_level) = (Right, [Exact \"**\"; UnicodeOperator])\nlet (opinfix3 : associativity_level) =\n  (Left, [StartsWith 42; StartsWith 47; StartsWith 37])\nlet (opinfix2 : associativity_level) = (Left, [StartsWith 43; StartsWith 45])\nlet (minus_lvl : associativity_level) = (Left, [Exact \"-\"])\nlet (opinfix1 : associativity_level) =\n  (Right, [StartsWith 64; StartsWith 94])\nlet (pipe_right : associativity_level) = (Left, [Exact \"|>\"])\nlet (opinfix0d : associativity_level) = (Left, [StartsWith 36])\nlet (opinfix0c : associativity_level) =\n  (Left, [StartsWith 61; StartsWith 60; StartsWith 62])\nlet (equal : associativity_level) = (Left, [Exact \"=\"])\nlet (opinfix0b : associativity_level) = (Left, [StartsWith 38])\nlet (opinfix0a : associativity_level) = (Left, [StartsWith 124])\nlet (colon_equals : associativity_level) = (NonAssoc, [Exact \":=\"])\nlet (amp : associativity_level) = (Right, [Exact \"&\"])\nlet (colon_colon : associativity_level) = (Right, [Exact \"::\"])\nlet (level_associativity_spec : associativity_level Prims.list) =\n  [opinfix4;\n  opinfix3;\n  opinfix2;\n  opinfix1;\n  pipe_right;\n  opinfix0d;\n  opinfix0c;\n  opinfix0b;\n  opinfix0a;\n  colon_equals;\n  amp;\n  colon_colon]\nlet (level_table :\n  ((Prims.int * Prims.int * Prims.int) * token Prims.list) Prims.list) =\n  let levels_from_associativity l uu___ =\n    match uu___ with\n    | Left -> (l, l, (l - Prims.int_one))\n    | Right -> ((l - Prims.int_one), l, l)\n    | NonAssoc -> ((l - Prims.int_one), l, (l - Prims.int_one)) in\n  FStar_Compiler_List.mapi\n    (fun i ->\n       fun uu___ ->\n         match uu___ with\n         | (assoc, tokens) -> ((levels_from_associativity i 
assoc), tokens))\n    level_associativity_spec\nlet (assign_levels :\n  associativity_level Prims.list ->\n    Prims.string -> (Prims.int * Prims.int * Prims.int))\n  =\n  fun token_associativity_spec ->\n    fun s ->\n      let uu___ = FStar_Compiler_List.tryFind (matches_level s) level_table in\n      match uu___ with\n      | FStar_Pervasives_Native.Some (assoc_levels, uu___1) -> assoc_levels\n      | uu___1 -> failwith (Prims.op_Hat \"Unrecognized operator \" s)\nlet max_level : 'uuuuu . ('uuuuu * token Prims.list) Prims.list -> Prims.int\n  =\n  fun l ->\n    let find_level_and_max n level =\n      let uu___ =\n        FStar_Compiler_List.tryFind\n          (fun uu___1 ->\n             match uu___1 with\n             | (uu___2, tokens) ->\n                 tokens = (FStar_Pervasives_Native.snd level)) level_table in\n      match uu___ with\n      | FStar_Pervasives_Native.Some ((uu___1, l1, uu___2), uu___3) ->\n          max n l1\n      | FStar_Pervasives_Native.None ->\n          let uu___1 =\n            let uu___2 =\n              let uu___3 =\n                FStar_Compiler_List.map token_to_string\n                  (FStar_Pervasives_Native.snd level) in\n              FStar_String.concat \",\" uu___3 in\n            FStar_Compiler_Util.format1 \"Undefined associativity level %s\"\n              uu___2 in\n          failwith uu___1 in\n    FStar_Compiler_List.fold_left find_level_and_max Prims.int_zero l\nlet (levels : Prims.string -> (Prims.int * Prims.int * Prims.int)) =\n  fun op ->\n    let uu___ = assign_levels level_associativity_spec op in\n    match uu___ with\n    | (left, mine, right) ->\n        if op = \"*\"\n        then ((left - Prims.int_one), mine, right)\n        else (left, mine, right)\nlet (operatorInfix0ad12 : associativity_level Prims.list) =\n  [opinfix0a; opinfix0b; opinfix0c; opinfix0d; opinfix1; opinfix2]\nlet (is_operatorInfix0ad12 : FStar_Ident.ident -> Prims.bool) =\n  fun op ->\n    let uu___ =\n      let uu___1 =\n        
let uu___2 = FStar_Ident.string_of_id op in\n        FStar_Compiler_Effect.op_Less_Bar matches_level uu___2 in\n      FStar_Compiler_List.tryFind uu___1 operatorInfix0ad12 in\n    uu___ <> FStar_Pervasives_Native.None\nlet (is_operatorInfix34 : FStar_Ident.ident -> Prims.bool) =\n  let opinfix34 = [opinfix3; opinfix4] in\n  fun op ->\n    let uu___ =\n      let uu___1 =\n        let uu___2 = FStar_Ident.string_of_id op in\n        FStar_Compiler_Effect.op_Less_Bar matches_level uu___2 in\n      FStar_Compiler_List.tryFind uu___1 opinfix34 in\n    uu___ <> FStar_Pervasives_Native.None\nlet (handleable_args_length : FStar_Ident.ident -> Prims.int) =\n  fun op ->\n    let op_s = FStar_Ident.string_of_id op in\n    let uu___ =\n      (is_general_prefix_op op) || (FStar_Compiler_List.mem op_s [\"-\"; \"~\"]) in\n    if uu___\n    then Prims.int_one\n    else\n      (let uu___2 =\n         ((is_operatorInfix0ad12 op) || (is_operatorInfix34 op)) ||\n           (FStar_Compiler_List.mem op_s\n              [\"<==>\"; \"==>\"; \"\\\\/\"; \"/\\\\\"; \"=\"; \"|>\"; \":=\"; \".()\"; \".[]\"]) in\n       if uu___2\n       then (Prims.of_int (2))\n       else\n         if FStar_Compiler_List.mem op_s [\".()<-\"; \".[]<-\"]\n         then (Prims.of_int (3))\n         else Prims.int_zero)\nlet handleable_op :\n  'uuuuu . 
FStar_Ident.ident -> 'uuuuu Prims.list -> Prims.bool =\n  fun op ->\n    fun args ->\n      match FStar_Compiler_List.length args with\n      | uu___ when uu___ = Prims.int_zero -> true\n      | uu___ when uu___ = Prims.int_one ->\n          (is_general_prefix_op op) ||\n            (let uu___1 = FStar_Ident.string_of_id op in\n             FStar_Compiler_List.mem uu___1 [\"-\"; \"~\"])\n      | uu___ when uu___ = (Prims.of_int (2)) ->\n          ((is_operatorInfix0ad12 op) || (is_operatorInfix34 op)) ||\n            (let uu___1 = FStar_Ident.string_of_id op in\n             FStar_Compiler_List.mem uu___1\n               [\"<==>\"; \"==>\"; \"\\\\/\"; \"/\\\\\"; \"=\"; \"|>\"; \":=\"; \".()\"; \".[]\"])\n      | uu___ when uu___ = (Prims.of_int (3)) ->\n          let uu___1 = FStar_Ident.string_of_id op in\n          FStar_Compiler_List.mem uu___1 [\".()<-\"; \".[]<-\"]\n      | uu___ -> false\ntype annotation_style =\n  | Binders of (Prims.int * Prims.int * Prims.bool) \n  | Arrows of (Prims.int * Prims.int) \nlet (uu___is_Binders : annotation_style -> Prims.bool) =\n  fun projectee -> match projectee with | Binders _0 -> true | uu___ -> false\nlet (__proj__Binders__item___0 :\n  annotation_style -> (Prims.int * Prims.int * Prims.bool)) =\n  fun projectee -> match projectee with | Binders _0 -> _0\nlet (uu___is_Arrows : annotation_style -> Prims.bool) =\n  fun projectee -> match projectee with | Arrows _0 -> true | uu___ -> false\nlet (__proj__Arrows__item___0 : annotation_style -> (Prims.int * Prims.int))\n  = fun projectee -> match projectee with | Arrows _0 -> _0\nlet (all_binders_annot : FStar_Parser_AST.term -> Prims.bool) =\n  fun e ->\n    let is_binder_annot b =\n      match b.FStar_Parser_AST.b with\n      | FStar_Parser_AST.Annotated uu___ -> true\n      | uu___ -> false in\n    let rec all_binders e1 l =\n      match e1.FStar_Parser_AST.tm with\n      | FStar_Parser_AST.Product (bs, tgt) ->\n          let uu___ = FStar_Compiler_List.for_all 
is_binder_annot bs in\n          if uu___\n          then all_binders tgt (l + (FStar_Compiler_List.length bs))\n          else (false, Prims.int_zero)\n      | uu___ -> (true, (l + Prims.int_one)) in\n    let uu___ = all_binders e Prims.int_zero in\n    match uu___ with\n    | (b, l) -> if b && (l > Prims.int_one) then true else false\ntype catf =\n  FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document\nlet (cat_with_colon :\n  FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document) =\n  fun x ->\n    fun y ->\n      let uu___ = FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon y in\n      FStar_Pprint.op_Hat_Hat x uu___\nlet (comment_stack :\n  (Prims.string * FStar_Compiler_Range.range) Prims.list\n    FStar_Compiler_Effect.ref)\n  = FStar_Compiler_Util.mk_ref []\ntype decl_meta =\n  {\n  r: FStar_Compiler_Range.range ;\n  has_qs: Prims.bool ;\n  has_attrs: Prims.bool }\nlet (__proj__Mkdecl_meta__item__r : decl_meta -> FStar_Compiler_Range.range)\n  = fun projectee -> match projectee with | { r; has_qs; has_attrs;_} -> r\nlet (__proj__Mkdecl_meta__item__has_qs : decl_meta -> Prims.bool) =\n  fun projectee -> match projectee with | { r; has_qs; has_attrs;_} -> has_qs\nlet (__proj__Mkdecl_meta__item__has_attrs : decl_meta -> Prims.bool) =\n  fun projectee ->\n    match projectee with | { r; has_qs; has_attrs;_} -> has_attrs\nlet (dummy_meta : decl_meta) =\n  { r = FStar_Compiler_Range.dummyRange; has_qs = false; has_attrs = false }\nlet with_comment :\n  'uuuuu .\n    ('uuuuu -> FStar_Pprint.document) ->\n      'uuuuu -> FStar_Compiler_Range.range -> FStar_Pprint.document\n  =\n  fun printer ->\n    fun tm ->\n      fun tmrange ->\n        let rec comments_before_pos acc print_pos lookahead_pos =\n          let uu___ = FStar_Compiler_Effect.op_Bang comment_stack in\n          match uu___ with\n          | [] -> (acc, false)\n          | (c, crange)::cs ->\n              let comment =\n                let uu___1 = str c in\n       
         FStar_Pprint.op_Hat_Hat uu___1 FStar_Pprint.hardline in\n              let uu___1 =\n                FStar_Compiler_Range.range_before_pos crange print_pos in\n              if uu___1\n              then\n                (FStar_Compiler_Effect.op_Colon_Equals comment_stack cs;\n                 (let uu___3 = FStar_Pprint.op_Hat_Hat acc comment in\n                  comments_before_pos uu___3 print_pos lookahead_pos))\n              else\n                (let uu___3 =\n                   FStar_Compiler_Range.range_before_pos crange lookahead_pos in\n                 (acc, uu___3)) in\n        let uu___ =\n          let uu___1 =\n            let uu___2 = FStar_Compiler_Range.start_of_range tmrange in\n            FStar_Compiler_Range.end_of_line uu___2 in\n          let uu___2 = FStar_Compiler_Range.end_of_range tmrange in\n          comments_before_pos FStar_Pprint.empty uu___1 uu___2 in\n        match uu___ with\n        | (comments, has_lookahead) ->\n            let printed_e = printer tm in\n            let comments1 =\n              if has_lookahead\n              then\n                let pos = FStar_Compiler_Range.end_of_range tmrange in\n                let uu___1 = comments_before_pos comments pos pos in\n                FStar_Pervasives_Native.fst uu___1\n              else comments in\n            if comments1 = FStar_Pprint.empty\n            then printed_e\n            else\n              (let uu___2 = FStar_Pprint.op_Hat_Hat comments1 printed_e in\n               FStar_Pprint.group uu___2)\nlet with_comment_sep :\n  'uuuuu 'uuuuu1 .\n    ('uuuuu -> 'uuuuu1) ->\n      'uuuuu ->\n        FStar_Compiler_Range.range -> (FStar_Pprint.document * 'uuuuu1)\n  =\n  fun printer ->\n    fun tm ->\n      fun tmrange ->\n        let rec comments_before_pos acc print_pos lookahead_pos =\n          let uu___ = FStar_Compiler_Effect.op_Bang comment_stack in\n          match uu___ with\n          | [] -> (acc, false)\n          | (c, crange)::cs ->\n           
   let comment = str c in\n              let uu___1 =\n                FStar_Compiler_Range.range_before_pos crange print_pos in\n              if uu___1\n              then\n                (FStar_Compiler_Effect.op_Colon_Equals comment_stack cs;\n                 (let uu___3 =\n                    if acc = FStar_Pprint.empty\n                    then comment\n                    else\n                      (let uu___5 =\n                         FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline\n                           comment in\n                       FStar_Pprint.op_Hat_Hat acc uu___5) in\n                  comments_before_pos uu___3 print_pos lookahead_pos))\n              else\n                (let uu___3 =\n                   FStar_Compiler_Range.range_before_pos crange lookahead_pos in\n                 (acc, uu___3)) in\n        let uu___ =\n          let uu___1 =\n            let uu___2 = FStar_Compiler_Range.start_of_range tmrange in\n            FStar_Compiler_Range.end_of_line uu___2 in\n          let uu___2 = FStar_Compiler_Range.end_of_range tmrange in\n          comments_before_pos FStar_Pprint.empty uu___1 uu___2 in\n        match uu___ with\n        | (comments, has_lookahead) ->\n            let printed_e = printer tm in\n            let comments1 =\n              if has_lookahead\n              then\n                let pos = FStar_Compiler_Range.end_of_range tmrange in\n                let uu___1 = comments_before_pos comments pos pos in\n                FStar_Pervasives_Native.fst uu___1\n              else comments in\n            (comments1, printed_e)\nlet rec (place_comments_until_pos :\n  Prims.int ->\n    Prims.int ->\n      FStar_Compiler_Range.pos ->\n        decl_meta ->\n          FStar_Pprint.document ->\n            Prims.bool -> Prims.bool -> FStar_Pprint.document)\n  =\n  fun k ->\n    fun lbegin ->\n      fun pos ->\n        fun meta_decl ->\n          fun doc ->\n            fun r ->\n              fun init ->\n                
let uu___ = FStar_Compiler_Effect.op_Bang comment_stack in\n                match uu___ with\n                | (comment, crange)::cs when\n                    FStar_Compiler_Range.range_before_pos crange pos ->\n                    (FStar_Compiler_Effect.op_Colon_Equals comment_stack cs;\n                     (let lnum =\n                        let uu___2 =\n                          let uu___3 =\n                            let uu___4 =\n                              FStar_Compiler_Range.start_of_range crange in\n                            FStar_Compiler_Range.line_of_pos uu___4 in\n                          uu___3 - lbegin in\n                        max k uu___2 in\n                      let lnum1 = min (Prims.of_int (2)) lnum in\n                      let doc1 =\n                        let uu___2 =\n                          let uu___3 =\n                            FStar_Pprint.repeat lnum1 FStar_Pprint.hardline in\n                          let uu___4 = str comment in\n                          FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n                        FStar_Pprint.op_Hat_Hat doc uu___2 in\n                      let uu___2 =\n                        let uu___3 = FStar_Compiler_Range.end_of_range crange in\n                        FStar_Compiler_Range.line_of_pos uu___3 in\n                      place_comments_until_pos Prims.int_one uu___2 pos\n                        meta_decl doc1 true init))\n                | uu___1 ->\n                    if doc = FStar_Pprint.empty\n                    then FStar_Pprint.empty\n                    else\n                      (let lnum =\n                         let uu___3 = FStar_Compiler_Range.line_of_pos pos in\n                         uu___3 - lbegin in\n                       let lnum1 = min (Prims.of_int (3)) lnum in\n                       let lnum2 =\n                         if meta_decl.has_qs || meta_decl.has_attrs\n                         then lnum1 - Prims.int_one\n                         else 
lnum1 in\n                       let lnum3 = max k lnum2 in\n                       let lnum4 =\n                         if meta_decl.has_qs && meta_decl.has_attrs\n                         then (Prims.of_int (2))\n                         else lnum3 in\n                       let lnum5 = if init then (Prims.of_int (2)) else lnum4 in\n                       let uu___3 =\n                         FStar_Pprint.repeat lnum5 FStar_Pprint.hardline in\n                       FStar_Pprint.op_Hat_Hat doc uu___3)\nlet separate_map_with_comments :\n  'uuuuu .\n    FStar_Pprint.document ->\n      FStar_Pprint.document ->\n        ('uuuuu -> FStar_Pprint.document) ->\n          'uuuuu Prims.list -> ('uuuuu -> decl_meta) -> FStar_Pprint.document\n  =\n  fun prefix ->\n    fun sep ->\n      fun f ->\n        fun xs ->\n          fun extract_meta ->\n            let fold_fun uu___ x =\n              match uu___ with\n              | (last_line, doc) ->\n                  let meta_decl = extract_meta x in\n                  let r = meta_decl.r in\n                  let doc1 =\n                    let uu___1 = FStar_Compiler_Range.start_of_range r in\n                    place_comments_until_pos Prims.int_one last_line uu___1\n                      meta_decl doc false false in\n                  let uu___1 =\n                    let uu___2 = FStar_Compiler_Range.end_of_range r in\n                    FStar_Compiler_Range.line_of_pos uu___2 in\n                  let uu___2 =\n                    let uu___3 =\n                      let uu___4 = f x in FStar_Pprint.op_Hat_Hat sep uu___4 in\n                    FStar_Pprint.op_Hat_Hat doc1 uu___3 in\n                  (uu___1, uu___2) in\n            let uu___ =\n              let uu___1 = FStar_Compiler_List.hd xs in\n              let uu___2 = FStar_Compiler_List.tl xs in (uu___1, uu___2) in\n            match uu___ with\n            | (x, xs1) ->\n                let init =\n                  let meta_decl = extract_meta x in\n     
             let uu___1 =\n                    let uu___2 =\n                      FStar_Compiler_Range.end_of_range meta_decl.r in\n                    FStar_Compiler_Range.line_of_pos uu___2 in\n                  let uu___2 =\n                    let uu___3 = f x in FStar_Pprint.op_Hat_Hat prefix uu___3 in\n                  (uu___1, uu___2) in\n                let uu___1 = FStar_Compiler_List.fold_left fold_fun init xs1 in\n                FStar_Pervasives_Native.snd uu___1\nlet separate_map_with_comments_kw :\n  'uuuuu 'uuuuu1 .\n    'uuuuu ->\n      'uuuuu ->\n        ('uuuuu -> 'uuuuu1 -> FStar_Pprint.document) ->\n          'uuuuu1 Prims.list ->\n            ('uuuuu1 -> decl_meta) -> FStar_Pprint.document\n  =\n  fun prefix ->\n    fun sep ->\n      fun f ->\n        fun xs ->\n          fun extract_meta ->\n            let fold_fun uu___ x =\n              match uu___ with\n              | (last_line, doc) ->\n                  let meta_decl = extract_meta x in\n                  let r = meta_decl.r in\n                  let doc1 =\n                    let uu___1 = FStar_Compiler_Range.start_of_range r in\n                    place_comments_until_pos Prims.int_one last_line uu___1\n                      meta_decl doc false false in\n                  let uu___1 =\n                    let uu___2 = FStar_Compiler_Range.end_of_range r in\n                    FStar_Compiler_Range.line_of_pos uu___2 in\n                  let uu___2 =\n                    let uu___3 = f sep x in\n                    FStar_Pprint.op_Hat_Hat doc1 uu___3 in\n                  (uu___1, uu___2) in\n            let uu___ =\n              let uu___1 = FStar_Compiler_List.hd xs in\n              let uu___2 = FStar_Compiler_List.tl xs in (uu___1, uu___2) in\n            match uu___ with\n            | (x, xs1) ->\n                let init =\n                  let meta_decl = extract_meta x in\n                  let uu___1 =\n                    let uu___2 =\n                      
FStar_Compiler_Range.end_of_range meta_decl.r in\n                    FStar_Compiler_Range.line_of_pos uu___2 in\n                  let uu___2 = f prefix x in (uu___1, uu___2) in\n                let uu___1 = FStar_Compiler_List.fold_left fold_fun init xs1 in\n                FStar_Pervasives_Native.snd uu___1\nlet (p_char_literal' :\n  FStar_Char.char -> FStar_BaseTypes.char -> FStar_Pprint.document) =\n  fun quote_char ->\n    fun c ->\n      str\n        (match c with\n         | 8 -> \"\\\\b\"\n         | 12 -> \"\\\\f\"\n         | 10 -> \"\\\\n\"\n         | 9 -> \"\\\\t\"\n         | 13 -> \"\\\\r\"\n         | 11 -> \"\\\\v\"\n         | 0 -> \"\\\\0\"\n         | c1 ->\n             let s = FStar_Compiler_Util.string_of_char c1 in\n             if quote_char = c1 then \"\\\\\" ^ s else s)\nlet (p_char_literal : FStar_BaseTypes.char -> FStar_Pprint.document) =\n  fun c -> let uu___ = p_char_literal' 39 c in FStar_Pprint.squotes uu___\nlet (p_string_literal : Prims.string -> FStar_Pprint.document) =\n  fun s ->\n    let quotation_mark = 34 in\n    let uu___ =\n      FStar_Pprint.concat_map (p_char_literal' quotation_mark)\n        (FStar_String.list_of_string s) in\n    FStar_Pprint.dquotes uu___\nlet rec (p_decl : FStar_Parser_AST.decl -> FStar_Pprint.document) =\n  fun d ->\n    let qualifiers =\n      match ((d.FStar_Parser_AST.quals), (d.FStar_Parser_AST.d)) with\n      | ((FStar_Parser_AST.Assumption)::[], FStar_Parser_AST.Assume\n         (id, uu___)) ->\n          let uu___1 =\n            let uu___2 =\n              let uu___3 = FStar_Ident.string_of_id id in\n              FStar_Compiler_Util.char_at uu___3 Prims.int_zero in\n            FStar_Compiler_Effect.op_Bar_Greater uu___2\n              FStar_Compiler_Util.is_upper in\n          if uu___1\n          then\n            let uu___2 = p_qualifier FStar_Parser_AST.Assumption in\n            FStar_Pprint.op_Hat_Hat uu___2 FStar_Pprint.space\n          else p_qualifiers d.FStar_Parser_AST.quals\n   
   | uu___ -> p_qualifiers d.FStar_Parser_AST.quals in\n    let uu___ = p_attributes true d.FStar_Parser_AST.attrs in\n    let uu___1 =\n      let uu___2 = p_rawDecl d in FStar_Pprint.op_Hat_Hat qualifiers uu___2 in\n    FStar_Pprint.op_Hat_Hat uu___ uu___1\nand (p_attributes :\n  Prims.bool -> FStar_Parser_AST.attributes_ -> FStar_Pprint.document) =\n  fun isTopLevel ->\n    fun attrs ->\n      match attrs with\n      | [] -> FStar_Pprint.empty\n      | uu___ ->\n          let uu___1 =\n            let uu___2 = str (if isTopLevel then \"@@ \" else \"@@@ \") in\n            let uu___3 =\n              let uu___4 =\n                let uu___5 =\n                  let uu___6 =\n                    let uu___7 = str \"; \" in\n                    let uu___8 =\n                      FStar_Compiler_List.map\n                        (p_noSeqTermAndComment false false) attrs in\n                    FStar_Pprint.flow uu___7 uu___8 in\n                  FStar_Pprint.op_Hat_Hat uu___6 FStar_Pprint.rbracket in\n                FStar_Pprint.align uu___5 in\n              FStar_Pprint.op_Hat_Hat uu___4\n                (if isTopLevel\n                 then FStar_Pprint.hardline\n                 else FStar_Pprint.empty) in\n            FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.lbracket uu___1\nand (p_justSig : FStar_Parser_AST.decl -> FStar_Pprint.document) =\n  fun d ->\n    match d.FStar_Parser_AST.d with\n    | FStar_Parser_AST.Val (lid, t) ->\n        let uu___ =\n          let uu___1 = str \"val\" in\n          let uu___2 =\n            let uu___3 =\n              let uu___4 = p_lident lid in\n              let uu___5 =\n                FStar_Pprint.op_Hat_Hat FStar_Pprint.space FStar_Pprint.colon in\n              FStar_Pprint.op_Hat_Hat uu___4 uu___5 in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n        let uu___1 = p_typ false false t in\n        
FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | FStar_Parser_AST.TopLevelLet (uu___, lbs) ->\n        FStar_Pprint.separate_map FStar_Pprint.hardline\n          (fun lb ->\n             let uu___1 = let uu___2 = str \"let\" in p_letlhs uu___2 lb false in\n             FStar_Pprint.group uu___1) lbs\n    | uu___ -> FStar_Pprint.empty\nand (p_list :\n  (FStar_Ident.ident -> FStar_Pprint.document) ->\n    FStar_Pprint.document ->\n      FStar_Ident.ident Prims.list -> FStar_Pprint.document)\n  =\n  fun f ->\n    fun sep ->\n      fun l ->\n        let rec p_list' uu___ =\n          match uu___ with\n          | [] -> FStar_Pprint.empty\n          | x::[] -> f x\n          | x::xs ->\n              let uu___1 = f x in\n              let uu___2 =\n                let uu___3 = p_list' xs in FStar_Pprint.op_Hat_Hat sep uu___3 in\n              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n        let uu___ = str \"[\" in\n        let uu___1 =\n          let uu___2 = p_list' l in\n          let uu___3 = str \"]\" in FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\nand (p_rawDecl : FStar_Parser_AST.decl -> FStar_Pprint.document) =\n  fun d ->\n    match d.FStar_Parser_AST.d with\n    | FStar_Parser_AST.Open uid ->\n        let uu___ =\n          let uu___1 = str \"open\" in\n          let uu___2 = p_quident uid in\n          FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n        FStar_Pprint.group uu___\n    | FStar_Parser_AST.Include uid ->\n        let uu___ =\n          let uu___1 = str \"include\" in\n          let uu___2 = p_quident uid in\n          FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n        FStar_Pprint.group uu___\n    | FStar_Parser_AST.Friend uid ->\n        let uu___ =\n          let uu___1 = str \"friend\" in\n          let uu___2 = p_quident uid in\n          FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n        FStar_Pprint.group uu___\n    | FStar_Parser_AST.ModuleAbbrev (uid1, uid2) ->\n        let uu___ =\n     
     let uu___1 = str \"module\" in\n          let uu___2 =\n            let uu___3 =\n              let uu___4 = p_uident uid1 in\n              let uu___5 =\n                FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                  FStar_Pprint.equals in\n              FStar_Pprint.op_Hat_Hat uu___4 uu___5 in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n        let uu___1 = p_quident uid2 in op_Hat_Slash_Plus_Hat uu___ uu___1\n    | FStar_Parser_AST.TopLevelModule uid ->\n        let uu___ =\n          let uu___1 = str \"module\" in\n          let uu___2 =\n            let uu___3 = p_quident uid in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n        FStar_Pprint.group uu___\n    | FStar_Parser_AST.Tycon\n        (true, uu___, (FStar_Parser_AST.TyconAbbrev\n         (uid, tpars, FStar_Pervasives_Native.None, t))::[])\n        ->\n        let effect_prefix_doc =\n          let uu___1 = str \"effect\" in\n          let uu___2 =\n            let uu___3 = p_uident uid in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n        let uu___1 =\n          let uu___2 = p_typars tpars in\n          FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one\n            effect_prefix_doc uu___2 FStar_Pprint.equals in\n        let uu___2 = p_typ false false t in\n        op_Hat_Slash_Plus_Hat uu___1 uu___2\n    | FStar_Parser_AST.Tycon (false, tc, tcdefs) ->\n        let s = if tc then str \"class\" else str \"type\" in\n        let uu___ =\n          let uu___1 = FStar_Compiler_List.hd tcdefs in\n          p_typeDeclWithKw s uu___1 in\n        let uu___1 =\n          let uu___2 = FStar_Compiler_List.tl tcdefs in\n          FStar_Compiler_Effect.op_Less_Bar\n            (FStar_Pprint.concat_map\n               (fun x ->\n                  let uu___3 =\n          
          let uu___4 = str \"and\" in p_typeDeclWithKw uu___4 x in\n                  FStar_Pprint.op_Hat_Hat break1 uu___3)) uu___2 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | FStar_Parser_AST.TopLevelLet (q, lbs) ->\n        let let_doc =\n          let uu___ = str \"let\" in\n          let uu___1 = p_letqualifier q in\n          FStar_Pprint.op_Hat_Hat uu___ uu___1 in\n        let uu___ = str \"and\" in\n        separate_map_with_comments_kw let_doc uu___ p_letbinding lbs\n          (fun uu___1 ->\n             match uu___1 with\n             | (p, t) ->\n                 let uu___2 =\n                   FStar_Compiler_Range.union_ranges\n                     p.FStar_Parser_AST.prange t.FStar_Parser_AST.range in\n                 { r = uu___2; has_qs = false; has_attrs = false })\n    | FStar_Parser_AST.Val (lid, t) ->\n        let uu___ =\n          let uu___1 = str \"val\" in\n          let uu___2 =\n            let uu___3 =\n              let uu___4 = p_lident lid in\n              let uu___5 = sig_as_binders_if_possible t false in\n              FStar_Pprint.op_Hat_Hat uu___4 uu___5 in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n        FStar_Compiler_Effect.op_Less_Bar FStar_Pprint.group uu___\n    | FStar_Parser_AST.Assume (id, t) ->\n        let decl_keyword =\n          let uu___ =\n            let uu___1 =\n              let uu___2 = FStar_Ident.string_of_id id in\n              FStar_Compiler_Util.char_at uu___2 Prims.int_zero in\n            FStar_Compiler_Effect.op_Bar_Greater uu___1\n              FStar_Compiler_Util.is_upper in\n          if uu___\n          then FStar_Pprint.empty\n          else\n            (let uu___2 = str \"val\" in\n             FStar_Pprint.op_Hat_Hat uu___2 FStar_Pprint.space) in\n        let uu___ =\n          let uu___1 = p_ident id in\n          let uu___2 =\n            let uu___3 =\n              let uu___4 =\n                let 
uu___5 = p_typ false false t in\n                FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___5 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.colon uu___4 in\n            FStar_Pprint.group uu___3 in\n          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n        FStar_Pprint.op_Hat_Hat decl_keyword uu___\n    | FStar_Parser_AST.Exception (uid, t_opt) ->\n        let uu___ = str \"exception\" in\n        let uu___1 =\n          let uu___2 =\n            let uu___3 = p_uident uid in\n            let uu___4 =\n              FStar_Pprint.optional\n                (fun t ->\n                   let uu___5 =\n                     let uu___6 = str \"of\" in\n                     let uu___7 = p_typ false false t in\n                     op_Hat_Slash_Plus_Hat uu___6 uu___7 in\n                   FStar_Pprint.op_Hat_Hat break1 uu___5) t_opt in\n            FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | FStar_Parser_AST.NewEffect ne ->\n        let uu___ = str \"new_effect\" in\n        let uu___1 =\n          let uu___2 = p_newEffect ne in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | FStar_Parser_AST.SubEffect se ->\n        let uu___ = str \"sub_effect\" in\n        let uu___1 =\n          let uu___2 = p_subEffect se in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | FStar_Parser_AST.LayeredEffect ne ->\n        let uu___ = str \"layered_effect\" in\n        let uu___1 =\n          let uu___2 = p_newEffect ne in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | FStar_Parser_AST.Polymonadic_bind (l1, l2, l3, t) ->\n        let uu___ = str \"polymonadic_bind\" in\n        let uu___1 =\n          let uu___2 =\n            let uu___3 = p_quident l1 in\n    
        let uu___4 =\n              let uu___5 =\n                let uu___6 =\n                  let uu___7 = p_quident l2 in\n                  let uu___8 =\n                    let uu___9 =\n                      let uu___10 = str \"|>\" in\n                      let uu___11 =\n                        let uu___12 = p_quident l3 in\n                        let uu___13 =\n                          let uu___14 = p_simpleTerm false false t in\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.equals uu___14 in\n                        FStar_Pprint.op_Hat_Hat uu___12 uu___13 in\n                      FStar_Pprint.op_Hat_Hat uu___10 uu___11 in\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.rparen uu___9 in\n                  FStar_Pprint.op_Hat_Hat uu___7 uu___8 in\n                FStar_Pprint.op_Hat_Hat break1 uu___6 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.comma uu___5 in\n            FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen uu___2 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | FStar_Parser_AST.Pragma p -> p_pragma p\n    | FStar_Parser_AST.Tycon (true, uu___, uu___1) ->\n        failwith\n          \"Effect abbreviation is expected to be defined by an abbreviation\"\n    | FStar_Parser_AST.Splice (ids, t) ->\n        let uu___ = str \"%splice\" in\n        let uu___1 =\n          let uu___2 = let uu___3 = str \";\" in p_list p_uident uu___3 ids in\n          let uu___3 =\n            let uu___4 = p_term false false t in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in\n          FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\nand (p_pragma : FStar_Parser_AST.pragma -> FStar_Pprint.document) =\n  fun uu___ ->\n    match uu___ with\n    | FStar_Parser_AST.SetOptions s ->\n        let uu___1 = str \"#set-options\" in\n        let uu___2 =\n          let uu___3 = let uu___4 = str s in FStar_Pprint.dquotes uu___4 
in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n        FStar_Pprint.op_Hat_Hat uu___1 uu___2\n    | FStar_Parser_AST.ResetOptions s_opt ->\n        let uu___1 = str \"#reset-options\" in\n        let uu___2 =\n          FStar_Pprint.optional\n            (fun s ->\n               let uu___3 = let uu___4 = str s in FStar_Pprint.dquotes uu___4 in\n               FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3) s_opt in\n        FStar_Pprint.op_Hat_Hat uu___1 uu___2\n    | FStar_Parser_AST.PushOptions s_opt ->\n        let uu___1 = str \"#push-options\" in\n        let uu___2 =\n          FStar_Pprint.optional\n            (fun s ->\n               let uu___3 = let uu___4 = str s in FStar_Pprint.dquotes uu___4 in\n               FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3) s_opt in\n        FStar_Pprint.op_Hat_Hat uu___1 uu___2\n    | FStar_Parser_AST.PopOptions -> str \"#pop-options\"\n    | FStar_Parser_AST.RestartSolver -> str \"#restart-solver\"\n    | FStar_Parser_AST.PrintEffectsGraph -> str \"#print-effects-graph\"\nand (p_typars : FStar_Parser_AST.binder Prims.list -> FStar_Pprint.document)\n  = fun bs -> p_binders true bs\nand (p_typeDeclWithKw :\n  FStar_Pprint.document -> FStar_Parser_AST.tycon -> FStar_Pprint.document) =\n  fun kw ->\n    fun typedecl ->\n      let uu___ = p_typeDecl kw typedecl in\n      match uu___ with\n      | (comm, decl, body, pre) ->\n          if comm = FStar_Pprint.empty\n          then let uu___1 = pre body in FStar_Pprint.op_Hat_Hat decl uu___1\n          else\n            (let uu___2 =\n               let uu___3 =\n                 let uu___4 =\n                   let uu___5 = pre body in\n                   FStar_Pprint.op_Hat_Slash_Hat uu___5 comm in\n                 FStar_Pprint.op_Hat_Hat decl uu___4 in\n               let uu___4 =\n                 let uu___5 =\n                   let uu___6 =\n                     let uu___7 =\n                       let uu___8 =\n                         
FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline body in\n                       FStar_Pprint.op_Hat_Hat comm uu___8 in\n                     FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___7 in\n                   FStar_Pprint.nest (Prims.of_int (2)) uu___6 in\n                 FStar_Pprint.op_Hat_Hat decl uu___5 in\n               FStar_Pprint.ifflat uu___3 uu___4 in\n             FStar_Compiler_Effect.op_Less_Bar FStar_Pprint.group uu___2)\nand (p_typeDecl :\n  FStar_Pprint.document ->\n    FStar_Parser_AST.tycon ->\n      (FStar_Pprint.document * FStar_Pprint.document * FStar_Pprint.document\n        * (FStar_Pprint.document -> FStar_Pprint.document)))\n  =\n  fun pre ->\n    fun uu___ ->\n      match uu___ with\n      | FStar_Parser_AST.TyconAbstract (lid, bs, typ_opt) ->\n          let uu___1 = p_typeDeclPrefix pre false lid bs typ_opt in\n          (FStar_Pprint.empty, uu___1, FStar_Pprint.empty,\n            FStar_Pervasives.id)\n      | FStar_Parser_AST.TyconAbbrev (lid, bs, typ_opt, t) ->\n          let uu___1 = p_typ_sep false false t in\n          (match uu___1 with\n           | (comm, doc) ->\n               let uu___2 = p_typeDeclPrefix pre true lid bs typ_opt in\n               (comm, uu___2, doc, jump2))\n      | FStar_Parser_AST.TyconRecord\n          (lid, bs, typ_opt, attrs, record_field_decls) ->\n          let uu___1 = p_typeDeclPrefix pre true lid bs typ_opt in\n          let uu___2 =\n            let uu___3 = p_attributes false attrs in\n            let uu___4 = p_typeDeclRecord record_field_decls in\n            FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n          (FStar_Pprint.empty, uu___1, uu___2,\n            ((fun d -> FStar_Pprint.op_Hat_Hat FStar_Pprint.space d)))\n      | FStar_Parser_AST.TyconVariant (lid, bs, typ_opt, ct_decls) ->\n          let p_constructorBranchAndComments uu___1 =\n            match uu___1 with\n            | (uid, payload, attrs) ->\n                let range =\n                  let uu___2 =\n                 
   let uu___3 = FStar_Ident.range_of_id uid in\n                    let uu___4 =\n                      FStar_Compiler_Util.bind_opt payload\n                        (fun uu___5 ->\n                           match uu___5 with\n                           | FStar_Parser_AST.VpOfNotation t ->\n                               FStar_Pervasives_Native.Some\n                                 (t.FStar_Parser_AST.range)\n                           | FStar_Parser_AST.VpArbitrary t ->\n                               FStar_Pervasives_Native.Some\n                                 (t.FStar_Parser_AST.range)\n                           | FStar_Parser_AST.VpRecord (record, uu___6) ->\n                               FStar_Pervasives_Native.None) in\n                    FStar_Compiler_Util.dflt uu___3 uu___4 in\n                  FStar_Compiler_Range.extend_to_end_of_line uu___2 in\n                let uu___2 =\n                  with_comment_sep p_constructorBranch (uid, payload, attrs)\n                    range in\n                (match uu___2 with\n                 | (comm, ctor) ->\n                     inline_comment_or_above comm ctor FStar_Pprint.empty) in\n          let datacon_doc =\n            FStar_Pprint.separate_map FStar_Pprint.hardline\n              p_constructorBranchAndComments ct_decls in\n          let uu___1 = p_typeDeclPrefix pre true lid bs typ_opt in\n          (FStar_Pprint.empty, uu___1, datacon_doc, jump2)\nand (p_typeDeclRecord :\n  FStar_Parser_AST.tycon_record -> FStar_Pprint.document) =\n  fun fields ->\n    let p_recordField ps uu___ =\n      match uu___ with\n      | (lid, aq, attrs, t) ->\n          let uu___1 =\n            let uu___2 =\n              FStar_Compiler_Range.extend_to_end_of_line\n                t.FStar_Parser_AST.range in\n            with_comment_sep (p_recordFieldDecl ps) (lid, aq, attrs, t)\n              uu___2 in\n          (match uu___1 with\n           | (comm, field) ->\n               let sep = if ps then 
FStar_Pprint.semi else FStar_Pprint.empty in\n               inline_comment_or_above comm field sep) in\n    let uu___ = separate_map_last FStar_Pprint.hardline p_recordField fields in\n    FStar_Compiler_Effect.op_Bar_Greater uu___ braces_with_nesting\nand (p_typeDeclPrefix :\n  FStar_Pprint.document ->\n    Prims.bool ->\n      FStar_Ident.ident ->\n        FStar_Parser_AST.binder Prims.list ->\n          FStar_Parser_AST.knd FStar_Pervasives_Native.option ->\n            FStar_Pprint.document)\n  =\n  fun kw ->\n    fun eq ->\n      fun lid ->\n        fun bs ->\n          fun typ_opt ->\n            let with_kw cont =\n              let lid_doc = p_ident lid in\n              let kw_lid =\n                let uu___ = FStar_Pprint.op_Hat_Slash_Hat kw lid_doc in\n                FStar_Pprint.group uu___ in\n              cont kw_lid in\n            let typ =\n              let maybe_eq =\n                if eq then FStar_Pprint.equals else FStar_Pprint.empty in\n              match typ_opt with\n              | FStar_Pervasives_Native.None -> maybe_eq\n              | FStar_Pervasives_Native.Some t ->\n                  let uu___ =\n                    let uu___1 =\n                      let uu___2 = p_typ false false t in\n                      FStar_Pprint.op_Hat_Slash_Hat uu___2 maybe_eq in\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___1 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.colon uu___ in\n            if bs = []\n            then with_kw (fun n -> prefix2 n typ)\n            else\n              (let binders = p_binders_list true bs in\n               with_kw\n                 (fun n ->\n                    let uu___1 =\n                      let uu___2 = FStar_Pprint.flow break1 binders in\n                      prefix2 n uu___2 in\n                    prefix2 uu___1 typ))\nand (p_recordFieldDecl :\n  Prims.bool ->\n    (FStar_Ident.ident * FStar_Parser_AST.aqual *\n      FStar_Parser_AST.attributes_ * 
FStar_Parser_AST.term) ->\n      FStar_Pprint.document)\n  =\n  fun ps ->\n    fun uu___ ->\n      match uu___ with\n      | (lid, aq, attrs, t) ->\n          let uu___1 =\n            let uu___2 = FStar_Pprint.optional p_aqual aq in\n            let uu___3 =\n              let uu___4 = p_attributes false attrs in\n              let uu___5 =\n                let uu___6 = p_lident lid in\n                let uu___7 =\n                  let uu___8 = p_typ ps false t in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.colon uu___8 in\n                FStar_Pprint.op_Hat_Hat uu___6 uu___7 in\n              FStar_Pprint.op_Hat_Hat uu___4 uu___5 in\n            FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n          FStar_Pprint.group uu___1\nand (p_constructorBranch :\n  (FStar_Ident.ident * FStar_Parser_AST.constructor_payload\n    FStar_Pervasives_Native.option * FStar_Parser_AST.attributes_) ->\n    FStar_Pprint.document)\n  =\n  fun uu___ ->\n    match uu___ with\n    | (uid, variant, attrs) ->\n        let h isOf t =\n          let uu___1 = if isOf then str \"of\" else FStar_Pprint.colon in\n          let uu___2 =\n            let uu___3 = p_typ false false t in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n        let uu___1 =\n          let uu___2 =\n            let uu___3 =\n              let uu___4 =\n                let uu___5 = p_attributes false attrs in\n                let uu___6 = p_uident uid in\n                FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.bar uu___3 in\n          FStar_Pprint.group uu___2 in\n        let uu___2 =\n          default_or_map FStar_Pprint.empty\n            (fun payload ->\n               let uu___3 =\n                 let uu___4 =\n                   match payload with\n                   | FStar_Parser_AST.VpOfNotation t -> h true t\n  
                 | FStar_Parser_AST.VpArbitrary t -> h false t\n                   | FStar_Parser_AST.VpRecord (r, t) ->\n                       let uu___5 = p_typeDeclRecord r in\n                       let uu___6 =\n                         default_or_map FStar_Pprint.empty (h false) t in\n                       FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n                 FStar_Pprint.group uu___4 in\n               FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3) variant in\n        FStar_Pprint.op_Hat_Hat uu___1 uu___2\nand (p_letlhs :\n  FStar_Pprint.document ->\n    (FStar_Parser_AST.pattern * FStar_Parser_AST.term) ->\n      Prims.bool -> FStar_Pprint.document)\n  =\n  fun kw ->\n    fun uu___ ->\n      fun inner_let ->\n        match uu___ with\n        | (pat, uu___1) ->\n            let uu___2 =\n              match pat.FStar_Parser_AST.pat with\n              | FStar_Parser_AST.PatAscribed\n                  (pat1, (t, FStar_Pervasives_Native.None)) ->\n                  (pat1,\n                    (FStar_Pervasives_Native.Some (t, FStar_Pprint.empty)))\n              | FStar_Parser_AST.PatAscribed\n                  (pat1, (t, FStar_Pervasives_Native.Some tac)) ->\n                  let uu___3 =\n                    let uu___4 =\n                      let uu___5 =\n                        let uu___6 =\n                          let uu___7 =\n                            let uu___8 = str \"by\" in\n                            let uu___9 =\n                              let uu___10 = p_atomicTerm (maybe_unthunk tac) in\n                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                                uu___10 in\n                            FStar_Pprint.op_Hat_Hat uu___8 uu___9 in\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in\n                        FStar_Pprint.group uu___6 in\n                      (t, uu___5) in\n                    FStar_Pervasives_Native.Some uu___4 in\n                  (pat1, 
uu___3)\n              | uu___3 -> (pat, FStar_Pervasives_Native.None) in\n            (match uu___2 with\n             | (pat1, ascr) ->\n                 (match pat1.FStar_Parser_AST.pat with\n                  | FStar_Parser_AST.PatApp\n                      ({\n                         FStar_Parser_AST.pat = FStar_Parser_AST.PatVar\n                           (lid, uu___3, uu___4);\n                         FStar_Parser_AST.prange = uu___5;_},\n                       pats)\n                      ->\n                      let ascr_doc =\n                        match ascr with\n                        | FStar_Pervasives_Native.Some (t, tac) ->\n                            let uu___6 = sig_as_binders_if_possible t true in\n                            FStar_Pprint.op_Hat_Hat uu___6 tac\n                        | FStar_Pervasives_Native.None -> FStar_Pprint.empty in\n                      let uu___6 =\n                        if inner_let\n                        then\n                          let uu___7 = pats_as_binders_if_possible pats in\n                          match uu___7 with | (bs, style) -> (bs, style)\n                        else\n                          (let uu___8 = pats_as_binders_if_possible pats in\n                           match uu___8 with | (bs, style) -> (bs, style)) in\n                      (match uu___6 with\n                       | (terms, style) ->\n                           let uu___7 =\n                             let uu___8 =\n                               let uu___9 =\n                                 let uu___10 = p_lident lid in\n                                 let uu___11 =\n                                   format_sig style terms ascr_doc true true in\n                                 FStar_Pprint.op_Hat_Hat uu___10 uu___11 in\n                               FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                                 uu___9 in\n                             FStar_Pprint.op_Hat_Hat kw uu___8 in\n         
                  FStar_Compiler_Effect.op_Less_Bar\n                             FStar_Pprint.group uu___7)\n                  | uu___3 ->\n                      let ascr_doc =\n                        match ascr with\n                        | FStar_Pervasives_Native.Some (t, tac) ->\n                            let uu___4 =\n                              let uu___5 =\n                                let uu___6 =\n                                  p_typ_top\n                                    (Arrows\n                                       ((Prims.of_int (2)),\n                                         (Prims.of_int (2)))) false false t in\n                                FStar_Pprint.op_Hat_Hat FStar_Pprint.colon\n                                  uu___6 in\n                              FStar_Pprint.group uu___5 in\n                            FStar_Pprint.op_Hat_Hat uu___4 tac\n                        | FStar_Pervasives_Native.None -> FStar_Pprint.empty in\n                      let uu___4 =\n                        let uu___5 =\n                          let uu___6 =\n                            let uu___7 = p_tuplePattern pat1 in\n                            FStar_Pprint.op_Hat_Slash_Hat kw uu___7 in\n                          FStar_Pprint.group uu___6 in\n                        FStar_Pprint.op_Hat_Hat uu___5 ascr_doc in\n                      FStar_Pprint.group uu___4))\nand (p_letbinding :\n  FStar_Pprint.document ->\n    (FStar_Parser_AST.pattern * FStar_Parser_AST.term) ->\n      FStar_Pprint.document)\n  =\n  fun kw ->\n    fun uu___ ->\n      match uu___ with\n      | (pat, e) ->\n          let doc_pat = p_letlhs kw (pat, e) false in\n          let uu___1 = p_term_sep false false e in\n          (match uu___1 with\n           | (comm, doc_expr) ->\n               let doc_expr1 =\n                 inline_comment_or_above comm doc_expr FStar_Pprint.empty in\n               let uu___2 =\n                 let uu___3 =\n                   
FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.equals\n                     doc_expr1 in\n                 FStar_Pprint.op_Hat_Slash_Hat doc_pat uu___3 in\n               let uu___3 =\n                 let uu___4 =\n                   let uu___5 =\n                     let uu___6 =\n                       let uu___7 = jump2 doc_expr1 in\n                       FStar_Pprint.op_Hat_Hat FStar_Pprint.equals uu___7 in\n                     FStar_Pprint.group uu___6 in\n                   FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___5 in\n                 FStar_Pprint.op_Hat_Hat doc_pat uu___4 in\n               FStar_Pprint.ifflat uu___2 uu___3)\nand (p_term_list :\n  Prims.bool ->\n    Prims.bool -> FStar_Parser_AST.term Prims.list -> FStar_Pprint.document)\n  =\n  fun ps ->\n    fun pb ->\n      fun l ->\n        let rec aux uu___ =\n          match uu___ with\n          | [] -> FStar_Pprint.empty\n          | x::[] -> p_term ps pb x\n          | x::xs ->\n              let uu___1 = p_term ps pb x in\n              let uu___2 =\n                let uu___3 = str \";\" in\n                let uu___4 = aux xs in FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n        let uu___ = str \"[\" in\n        let uu___1 =\n          let uu___2 = aux l in\n          let uu___3 = str \"]\" in FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\nand (p_newEffect : FStar_Parser_AST.effect_decl -> FStar_Pprint.document) =\n  fun uu___ ->\n    match uu___ with\n    | FStar_Parser_AST.RedefineEffect (lid, bs, t) ->\n        p_effectRedefinition lid bs t\n    | FStar_Parser_AST.DefineEffect (lid, bs, t, eff_decls) ->\n        p_effectDefinition lid bs t eff_decls\nand (p_effectRedefinition :\n  FStar_Ident.ident ->\n    FStar_Parser_AST.binder Prims.list ->\n      FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun uid ->\n    fun bs ->\n      fun t ->\n        let uu___ = p_uident uid in\n        
let uu___1 = p_binders true bs in\n        let uu___2 =\n          let uu___3 = p_simpleTerm false false t in\n          prefix2 FStar_Pprint.equals uu___3 in\n        surround_maybe_empty (Prims.of_int (2)) Prims.int_one uu___ uu___1\n          uu___2\nand (p_effectDefinition :\n  FStar_Ident.ident ->\n    FStar_Parser_AST.binder Prims.list ->\n      FStar_Parser_AST.term ->\n        FStar_Parser_AST.decl Prims.list -> FStar_Pprint.document)\n  =\n  fun uid ->\n    fun bs ->\n      fun t ->\n        fun eff_decls ->\n          let binders = p_binders true bs in\n          let uu___ =\n            let uu___1 =\n              let uu___2 =\n                let uu___3 = p_uident uid in\n                let uu___4 = p_binders true bs in\n                let uu___5 =\n                  let uu___6 = p_typ false false t in\n                  prefix2 FStar_Pprint.colon uu___6 in\n                surround_maybe_empty (Prims.of_int (2)) Prims.int_one uu___3\n                  uu___4 uu___5 in\n              FStar_Pprint.group uu___2 in\n            let uu___2 =\n              let uu___3 = str \"with\" in\n              let uu___4 =\n                let uu___5 =\n                  let uu___6 =\n                    let uu___7 =\n                      let uu___8 =\n                        let uu___9 =\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.semi\n                            FStar_Pprint.space in\n                        FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___9 in\n                      separate_map_last uu___8 p_effectDecl eff_decls in\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___6 in\n                FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___5 in\n              FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n            FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n          braces_with_nesting uu___\nand (p_effectDecl :\n  Prims.bool -> 
FStar_Parser_AST.decl -> FStar_Pprint.document) =\n  fun ps ->\n    fun d ->\n      match d.FStar_Parser_AST.d with\n      | FStar_Parser_AST.Tycon\n          (false, uu___, (FStar_Parser_AST.TyconAbbrev\n           (lid, [], FStar_Pervasives_Native.None, e))::[])\n          ->\n          let uu___1 =\n            let uu___2 = p_lident lid in\n            let uu___3 =\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space FStar_Pprint.equals in\n            FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n          let uu___2 = p_simpleTerm ps false e in prefix2 uu___1 uu___2\n      | uu___ ->\n          let uu___1 =\n            let uu___2 = FStar_Parser_AST.decl_to_string d in\n            FStar_Compiler_Util.format1\n              \"Not a declaration of an effect member... or at least I hope so : %s\"\n              uu___2 in\n          failwith uu___1\nand (p_subEffect : FStar_Parser_AST.lift -> FStar_Pprint.document) =\n  fun lift ->\n    let lift_op_doc =\n      let lifts =\n        match lift.FStar_Parser_AST.lift_op with\n        | FStar_Parser_AST.NonReifiableLift t -> [(\"lift_wp\", t)]\n        | FStar_Parser_AST.ReifiableLift (t1, t2) ->\n            [(\"lift_wp\", t1); (\"lift\", t2)]\n        | FStar_Parser_AST.LiftForFree t -> [(\"lift\", t)] in\n      let p_lift ps uu___ =\n        match uu___ with\n        | (kwd, t) ->\n            let uu___1 =\n              let uu___2 = str kwd in\n              let uu___3 =\n                FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                  FStar_Pprint.equals in\n              FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n            let uu___2 = p_simpleTerm ps false t in prefix2 uu___1 uu___2 in\n      separate_break_map_last FStar_Pprint.semi p_lift lifts in\n    let uu___ =\n      let uu___1 =\n        let uu___2 = p_quident lift.FStar_Parser_AST.msource in\n        let uu___3 =\n          let uu___4 = str \"~>\" in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in\n        
FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n      let uu___2 = p_quident lift.FStar_Parser_AST.mdest in\n      prefix2 uu___1 uu___2 in\n    let uu___1 =\n      let uu___2 = braces_with_nesting lift_op_doc in\n      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n    FStar_Pprint.op_Hat_Hat uu___ uu___1\nand (p_qualifier : FStar_Parser_AST.qualifier -> FStar_Pprint.document) =\n  fun uu___ ->\n    match uu___ with\n    | FStar_Parser_AST.Private -> str \"private\"\n    | FStar_Parser_AST.Noeq -> str \"noeq\"\n    | FStar_Parser_AST.Unopteq -> str \"unopteq\"\n    | FStar_Parser_AST.Assumption -> str \"assume\"\n    | FStar_Parser_AST.DefaultEffect -> str \"default\"\n    | FStar_Parser_AST.TotalEffect -> str \"total\"\n    | FStar_Parser_AST.Effect_qual -> FStar_Pprint.empty\n    | FStar_Parser_AST.New -> str \"new\"\n    | FStar_Parser_AST.Inline -> str \"inline\"\n    | FStar_Parser_AST.Visible -> FStar_Pprint.empty\n    | FStar_Parser_AST.Unfold_for_unification_and_vcgen -> str \"unfold\"\n    | FStar_Parser_AST.Inline_for_extraction -> str \"inline_for_extraction\"\n    | FStar_Parser_AST.Irreducible -> str \"irreducible\"\n    | FStar_Parser_AST.NoExtract -> str \"noextract\"\n    | FStar_Parser_AST.Reifiable -> str \"reifiable\"\n    | FStar_Parser_AST.Reflectable -> str \"reflectable\"\n    | FStar_Parser_AST.Opaque -> str \"opaque\"\n    | FStar_Parser_AST.Logic -> str \"logic\"\nand (p_qualifiers : FStar_Parser_AST.qualifiers -> FStar_Pprint.document) =\n  fun qs ->\n    match qs with\n    | [] -> FStar_Pprint.empty\n    | q::[] ->\n        let uu___ = p_qualifier q in\n        FStar_Pprint.op_Hat_Hat uu___ FStar_Pprint.hardline\n    | uu___ ->\n        let uu___1 =\n          let uu___2 = FStar_Compiler_List.map p_qualifier qs in\n          FStar_Pprint.flow break1 uu___2 in\n        FStar_Pprint.op_Hat_Hat uu___1 FStar_Pprint.hardline\nand (p_letqualifier :\n  FStar_Parser_AST.let_qualifier -> FStar_Pprint.document) =\n  fun uu___ ->\n    match uu___ 
with\n    | FStar_Parser_AST.Rec ->\n        let uu___1 = str \"rec\" in\n        FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___1\n    | FStar_Parser_AST.NoLetQualifier -> FStar_Pprint.empty\nand (p_aqual : FStar_Parser_AST.arg_qualifier -> FStar_Pprint.document) =\n  fun uu___ ->\n    match uu___ with\n    | FStar_Parser_AST.Implicit -> str \"#\"\n    | FStar_Parser_AST.Equality -> str \"$\"\n    | FStar_Parser_AST.Meta t ->\n        let t1 =\n          match t.FStar_Parser_AST.tm with\n          | FStar_Parser_AST.Abs (uu___1, e) -> e\n          | uu___1 ->\n              FStar_Parser_AST.mk_term\n                (FStar_Parser_AST.App\n                   (t,\n                     (FStar_Parser_AST.unit_const t.FStar_Parser_AST.range),\n                     FStar_Parser_AST.Nothing)) t.FStar_Parser_AST.range\n                FStar_Parser_AST.Expr in\n        let uu___1 = str \"#[\" in\n        let uu___2 =\n          let uu___3 = p_term false false t1 in\n          let uu___4 =\n            let uu___5 = str \"]\" in FStar_Pprint.op_Hat_Hat uu___5 break1 in\n          FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n        FStar_Pprint.op_Hat_Hat uu___1 uu___2\n    | FStar_Parser_AST.TypeClassArg -> FStar_Pprint.empty\nand (p_disjunctivePattern :\n  FStar_Parser_AST.pattern -> FStar_Pprint.document) =\n  fun p ->\n    match p.FStar_Parser_AST.pat with\n    | FStar_Parser_AST.PatOr pats ->\n        let uu___ =\n          let uu___1 =\n            let uu___2 =\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.bar FStar_Pprint.space in\n            FStar_Pprint.op_Hat_Hat break1 uu___2 in\n          FStar_Pprint.separate_map uu___1 p_tuplePattern pats in\n        FStar_Pprint.group uu___\n    | uu___ -> p_tuplePattern p\nand (p_tuplePattern : FStar_Parser_AST.pattern -> FStar_Pprint.document) =\n  fun p ->\n    match p.FStar_Parser_AST.pat with\n    | FStar_Parser_AST.PatTuple (pats, false) ->\n        let uu___ =\n          let uu___1 = FStar_Pprint.op_Hat_Hat 
FStar_Pprint.comma break1 in\n          FStar_Pprint.separate_map uu___1 p_constructorPattern pats in\n        FStar_Pprint.group uu___\n    | uu___ -> p_constructorPattern p\nand (p_constructorPattern :\n  FStar_Parser_AST.pattern -> FStar_Pprint.document) =\n  fun p ->\n    match p.FStar_Parser_AST.pat with\n    | FStar_Parser_AST.PatApp\n        ({ FStar_Parser_AST.pat = FStar_Parser_AST.PatName maybe_cons_lid;\n           FStar_Parser_AST.prange = uu___;_},\n         hd::tl::[])\n        when\n        FStar_Ident.lid_equals maybe_cons_lid FStar_Parser_Const.cons_lid ->\n        let uu___1 =\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.colon FStar_Pprint.colon in\n        let uu___2 = p_constructorPattern hd in\n        let uu___3 = p_constructorPattern tl in infix0 uu___1 uu___2 uu___3\n    | FStar_Parser_AST.PatApp\n        ({ FStar_Parser_AST.pat = FStar_Parser_AST.PatName uid;\n           FStar_Parser_AST.prange = uu___;_},\n         pats)\n        ->\n        let uu___1 = p_quident uid in\n        let uu___2 = FStar_Pprint.separate_map break1 p_atomicPattern pats in\n        prefix2 uu___1 uu___2\n    | uu___ -> p_atomicPattern p\nand (p_atomicPattern : FStar_Parser_AST.pattern -> FStar_Pprint.document) =\n  fun p ->\n    match p.FStar_Parser_AST.pat with\n    | FStar_Parser_AST.PatAscribed (pat, (t, FStar_Pervasives_Native.None))\n        ->\n        (match ((pat.FStar_Parser_AST.pat), (t.FStar_Parser_AST.tm)) with\n         | (FStar_Parser_AST.PatVar (lid, aqual, attrs),\n            FStar_Parser_AST.Refine\n            ({ FStar_Parser_AST.b = FStar_Parser_AST.Annotated (lid', t1);\n               FStar_Parser_AST.brange = uu___;\n               FStar_Parser_AST.blevel = uu___1;\n               FStar_Parser_AST.aqual = uu___2;\n               FStar_Parser_AST.battributes = uu___3;_},\n             phi)) when\n             let uu___4 = FStar_Ident.string_of_id lid in\n             let uu___5 = FStar_Ident.string_of_id lid' in uu___4 = uu___5 ->\n        
     let uu___4 =\n               let uu___5 = p_ident lid in\n               p_refinement aqual attrs uu___5 t1 phi in\n             soft_parens_with_nesting uu___4\n         | (FStar_Parser_AST.PatWild (aqual, attrs), FStar_Parser_AST.Refine\n            ({ FStar_Parser_AST.b = FStar_Parser_AST.NoName t1;\n               FStar_Parser_AST.brange = uu___;\n               FStar_Parser_AST.blevel = uu___1;\n               FStar_Parser_AST.aqual = uu___2;\n               FStar_Parser_AST.battributes = uu___3;_},\n             phi)) ->\n             let uu___4 =\n               p_refinement aqual attrs FStar_Pprint.underscore t1 phi in\n             soft_parens_with_nesting uu___4\n         | (FStar_Parser_AST.PatVar (uu___, aqual, uu___1), uu___2) ->\n             let wrap =\n               if\n                 aqual =\n                   (FStar_Pervasives_Native.Some\n                      FStar_Parser_AST.TypeClassArg)\n               then tc_arg\n               else soft_parens_with_nesting in\n             let uu___3 =\n               let uu___4 = p_tuplePattern pat in\n               let uu___5 =\n                 let uu___6 = p_tmEqNoRefinement t in\n                 FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon uu___6 in\n               FStar_Pprint.op_Hat_Hat uu___4 uu___5 in\n             wrap uu___3\n         | (FStar_Parser_AST.PatWild (aqual, uu___), uu___1) ->\n             let wrap =\n               if\n                 aqual =\n                   (FStar_Pervasives_Native.Some\n                      FStar_Parser_AST.TypeClassArg)\n               then tc_arg\n               else soft_parens_with_nesting in\n             let uu___2 =\n               let uu___3 = p_tuplePattern pat in\n               let uu___4 =\n                 let uu___5 = p_tmEqNoRefinement t in\n                 FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon uu___5 in\n               FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n             wrap uu___2\n         | uu___ ->\n         
    let uu___1 =\n               let uu___2 = p_tuplePattern pat in\n               let uu___3 =\n                 let uu___4 = p_tmEqNoRefinement t in\n                 FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon uu___4 in\n               FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n             soft_parens_with_nesting uu___1)\n    | FStar_Parser_AST.PatList pats ->\n        let uu___ = separate_break_map FStar_Pprint.semi p_tuplePattern pats in\n        FStar_Pprint.surround (Prims.of_int (2)) Prims.int_zero\n          FStar_Pprint.lbracket uu___ FStar_Pprint.rbracket\n    | FStar_Parser_AST.PatRecord pats ->\n        let p_recordFieldPat uu___ =\n          match uu___ with\n          | (lid, pat) ->\n              let uu___1 = p_qlident lid in\n              let uu___2 = p_tuplePattern pat in\n              infix2 FStar_Pprint.equals uu___1 uu___2 in\n        let uu___ =\n          separate_break_map FStar_Pprint.semi p_recordFieldPat pats in\n        soft_braces_with_nesting uu___\n    | FStar_Parser_AST.PatTuple (pats, true) ->\n        let uu___ =\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen FStar_Pprint.bar in\n        let uu___1 =\n          separate_break_map FStar_Pprint.comma p_constructorPattern pats in\n        let uu___2 =\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.bar FStar_Pprint.rparen in\n        FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one uu___ uu___1\n          uu___2\n    | FStar_Parser_AST.PatTvar (tv, arg_qualifier_opt, attrs) -> p_tvar tv\n    | FStar_Parser_AST.PatOp op ->\n        let uu___ =\n          let uu___1 =\n            let uu___2 =\n              let uu___3 = FStar_Ident.string_of_id op in str uu___3 in\n            let uu___3 =\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space FStar_Pprint.rparen in\n            FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___1 in\n        FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen uu___\n    | 
FStar_Parser_AST.PatWild (aqual, attrs) ->\n        let uu___ = FStar_Pprint.optional p_aqual aqual in\n        let uu___1 =\n          let uu___2 = p_attributes false attrs in\n          FStar_Pprint.op_Hat_Hat uu___2 FStar_Pprint.underscore in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | FStar_Parser_AST.PatConst c -> p_constant c\n    | FStar_Parser_AST.PatVQuote e ->\n        let uu___ =\n          let uu___1 = str \"`%\" in\n          let uu___2 = p_noSeqTermAndComment false false e in\n          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n        FStar_Pprint.group uu___\n    | FStar_Parser_AST.PatVar (lid, aqual, attrs) ->\n        let uu___ = FStar_Pprint.optional p_aqual aqual in\n        let uu___1 =\n          let uu___2 = p_attributes false attrs in\n          let uu___3 = p_lident lid in FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | FStar_Parser_AST.PatName uid -> p_quident uid\n    | FStar_Parser_AST.PatOr uu___ -> failwith \"Inner or pattern !\"\n    | FStar_Parser_AST.PatApp\n        ({ FStar_Parser_AST.pat = FStar_Parser_AST.PatName uu___;\n           FStar_Parser_AST.prange = uu___1;_},\n         uu___2)\n        -> let uu___3 = p_tuplePattern p in soft_parens_with_nesting uu___3\n    | FStar_Parser_AST.PatTuple (uu___, false) ->\n        let uu___1 = p_tuplePattern p in soft_parens_with_nesting uu___1\n    | uu___ ->\n        let uu___1 =\n          let uu___2 = FStar_Parser_AST.pat_to_string p in\n          FStar_Compiler_Util.format1 \"Invalid pattern %s\" uu___2 in\n        failwith uu___1\nand (is_typ_tuple : FStar_Parser_AST.term -> Prims.bool) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Op (id, uu___) when\n        let uu___1 = FStar_Ident.string_of_id id in uu___1 = \"*\" -> true\n    | uu___ -> false\nand (p_binder :\n  Prims.bool -> FStar_Parser_AST.binder -> FStar_Pprint.document) =\n  fun is_atomic ->\n    fun b ->\n      let is_tc = is_tc_binder b 
in\n      let uu___ = p_binder' false (is_atomic && (Prims.op_Negation is_tc)) b in\n      match uu___ with\n      | (b', t') ->\n          let d =\n            match t' with\n            | FStar_Pervasives_Native.Some (typ, catf1) -> catf1 b' typ\n            | FStar_Pervasives_Native.None -> b' in\n          if is_tc then tc_arg d else d\nand (p_binder' :\n  Prims.bool ->\n    Prims.bool ->\n      FStar_Parser_AST.binder ->\n        (FStar_Pprint.document * (FStar_Pprint.document * catf)\n          FStar_Pervasives_Native.option))\n  =\n  fun no_pars ->\n    fun is_atomic ->\n      fun b ->\n        match b.FStar_Parser_AST.b with\n        | FStar_Parser_AST.Variable lid ->\n            let uu___ =\n              let uu___1 =\n                FStar_Pprint.optional p_aqual b.FStar_Parser_AST.aqual in\n              let uu___2 =\n                let uu___3 =\n                  p_attributes false b.FStar_Parser_AST.battributes in\n                let uu___4 = p_lident lid in\n                FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n            (uu___, FStar_Pervasives_Native.None)\n        | FStar_Parser_AST.TVariable lid ->\n            let uu___ =\n              let uu___1 = p_attributes false b.FStar_Parser_AST.battributes in\n              let uu___2 = p_lident lid in\n              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n            (uu___, FStar_Pervasives_Native.None)\n        | FStar_Parser_AST.Annotated (lid, t) ->\n            let uu___ =\n              match t.FStar_Parser_AST.tm with\n              | FStar_Parser_AST.Refine\n                  ({\n                     FStar_Parser_AST.b = FStar_Parser_AST.Annotated\n                       (lid', t1);\n                     FStar_Parser_AST.brange = uu___1;\n                     FStar_Parser_AST.blevel = uu___2;\n                     FStar_Parser_AST.aqual = uu___3;\n                     FStar_Parser_AST.battributes = uu___4;_},\n                   
phi)\n                  when\n                  let uu___5 = FStar_Ident.string_of_id lid in\n                  let uu___6 = FStar_Ident.string_of_id lid' in\n                  uu___5 = uu___6 ->\n                  let uu___5 = p_lident lid in\n                  p_refinement' b.FStar_Parser_AST.aqual\n                    b.FStar_Parser_AST.battributes uu___5 t1 phi\n              | uu___1 ->\n                  let t' =\n                    let uu___2 = is_typ_tuple t in\n                    if uu___2\n                    then\n                      let uu___3 = p_tmFormula t in\n                      soft_parens_with_nesting uu___3\n                    else p_tmFormula t in\n                  let uu___2 =\n                    let uu___3 =\n                      FStar_Pprint.optional p_aqual b.FStar_Parser_AST.aqual in\n                    let uu___4 =\n                      let uu___5 =\n                        p_attributes false b.FStar_Parser_AST.battributes in\n                      let uu___6 = p_lident lid in\n                      FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n                    FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n                  (uu___2, t') in\n            (match uu___ with\n             | (b', t') ->\n                 let catf1 =\n                   if\n                     is_atomic ||\n                       ((is_meta_qualifier b.FStar_Parser_AST.aqual) &&\n                          (Prims.op_Negation no_pars))\n                   then\n                     fun x ->\n                       fun y ->\n                         let uu___1 =\n                           let uu___2 =\n                             let uu___3 = cat_with_colon x y in\n                             FStar_Pprint.op_Hat_Hat uu___3\n                               FStar_Pprint.rparen in\n                           FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen uu___2 in\n                         FStar_Pprint.group uu___1\n                   else\n                     
(fun x ->\n                        fun y ->\n                          let uu___2 = cat_with_colon x y in\n                          FStar_Pprint.group uu___2) in\n                 (b', (FStar_Pervasives_Native.Some (t', catf1))))\n        | FStar_Parser_AST.TAnnotated uu___ ->\n            failwith \"Is this still used ?\"\n        | FStar_Parser_AST.NoName t ->\n            (match t.FStar_Parser_AST.tm with\n             | FStar_Parser_AST.Refine\n                 ({ FStar_Parser_AST.b = FStar_Parser_AST.NoName t1;\n                    FStar_Parser_AST.brange = uu___;\n                    FStar_Parser_AST.blevel = uu___1;\n                    FStar_Parser_AST.aqual = uu___2;\n                    FStar_Parser_AST.battributes = uu___3;_},\n                  phi)\n                 ->\n                 let uu___4 =\n                   p_refinement' b.FStar_Parser_AST.aqual\n                     b.FStar_Parser_AST.battributes FStar_Pprint.underscore\n                     t1 phi in\n                 (match uu___4 with\n                  | (b', t') ->\n                      (b',\n                        (FStar_Pervasives_Native.Some (t', cat_with_colon))))\n             | uu___ ->\n                 let pref =\n                   let uu___1 =\n                     FStar_Pprint.optional p_aqual b.FStar_Parser_AST.aqual in\n                   let uu___2 =\n                     p_attributes false b.FStar_Parser_AST.battributes in\n                   FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n                 let p_Tm = if is_atomic then p_atomicTerm else p_appTerm in\n                 let uu___1 =\n                   let uu___2 = p_Tm t in FStar_Pprint.op_Hat_Hat pref uu___2 in\n                 (uu___1, FStar_Pervasives_Native.None))\nand (p_refinement :\n  FStar_Parser_AST.arg_qualifier FStar_Pervasives_Native.option ->\n    FStar_Parser_AST.term Prims.list ->\n      FStar_Pprint.document ->\n        FStar_Parser_AST.term ->\n          FStar_Parser_AST.term -> 
FStar_Pprint.document)\n  =\n  fun aqual_opt ->\n    fun attrs ->\n      fun binder ->\n        fun t ->\n          fun phi ->\n            let uu___ = p_refinement' aqual_opt attrs binder t phi in\n            match uu___ with | (b, typ) -> cat_with_colon b typ\nand (p_refinement' :\n  FStar_Parser_AST.arg_qualifier FStar_Pervasives_Native.option ->\n    FStar_Parser_AST.term Prims.list ->\n      FStar_Pprint.document ->\n        FStar_Parser_AST.term ->\n          FStar_Parser_AST.term ->\n            (FStar_Pprint.document * FStar_Pprint.document))\n  =\n  fun aqual_opt ->\n    fun attrs ->\n      fun binder ->\n        fun t ->\n          fun phi ->\n            let is_t_atomic =\n              match t.FStar_Parser_AST.tm with\n              | FStar_Parser_AST.Construct uu___ -> false\n              | FStar_Parser_AST.App uu___ -> false\n              | FStar_Parser_AST.Op uu___ -> false\n              | uu___ -> true in\n            let uu___ = p_noSeqTerm false false phi in\n            match uu___ with\n            | (comm, phi1) ->\n                let phi2 =\n                  if comm = FStar_Pprint.empty\n                  then phi1\n                  else\n                    (let uu___2 =\n                       FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline phi1 in\n                     FStar_Pprint.op_Hat_Hat comm uu___2) in\n                let jump_break =\n                  if is_t_atomic then Prims.int_zero else Prims.int_one in\n                let uu___1 =\n                  let uu___2 = FStar_Pprint.optional p_aqual aqual_opt in\n                  let uu___3 =\n                    let uu___4 = p_attributes false attrs in\n                    FStar_Pprint.op_Hat_Hat uu___4 binder in\n                  FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n                let uu___2 =\n                  let uu___3 = p_appTerm t in\n                  let uu___4 =\n                    let uu___5 =\n                      let uu___6 =\n                        let 
uu___7 = soft_braces_with_nesting_tight phi2 in\n                        let uu___8 = soft_braces_with_nesting phi2 in\n                        FStar_Pprint.ifflat uu___7 uu___8 in\n                      FStar_Pprint.group uu___6 in\n                    FStar_Pprint.jump (Prims.of_int (2)) jump_break uu___5 in\n                  FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n                (uu___1, uu___2)\nand (p_binders_list :\n  Prims.bool ->\n    FStar_Parser_AST.binder Prims.list -> FStar_Pprint.document Prims.list)\n  =\n  fun is_atomic -> fun bs -> FStar_Compiler_List.map (p_binder is_atomic) bs\nand (p_binders :\n  Prims.bool -> FStar_Parser_AST.binder Prims.list -> FStar_Pprint.document)\n  =\n  fun is_atomic ->\n    fun bs ->\n      let uu___ = p_binders_list is_atomic bs in\n      separate_or_flow break1 uu___\nand (p_binders_sep :\n  FStar_Parser_AST.binder Prims.list -> FStar_Pprint.document) =\n  fun bs ->\n    let uu___ = p_binders_list true bs in\n    FStar_Pprint.separate_map FStar_Pprint.space (fun x -> x) uu___\nand (string_of_id_or_underscore : FStar_Ident.ident -> FStar_Pprint.document)\n  =\n  fun lid ->\n    let uu___ =\n      (let uu___1 = FStar_Ident.string_of_id lid in\n       FStar_Compiler_Util.starts_with uu___1 FStar_Ident.reserved_prefix) &&\n        (let uu___1 = false in\n         Prims.op_Negation uu___1) in\n    if uu___\n    then FStar_Pprint.underscore\n    else (let uu___2 = FStar_Ident.string_of_id lid in str uu___2)\nand (text_of_lid_or_underscore : FStar_Ident.lident -> FStar_Pprint.document)\n  =\n  fun lid ->\n    let uu___ =\n      (let uu___1 =\n         let uu___2 = FStar_Ident.ident_of_lid lid in\n         FStar_Ident.string_of_id uu___2 in\n       FStar_Compiler_Util.starts_with uu___1 FStar_Ident.reserved_prefix) &&\n        (let uu___1 = false in\n         Prims.op_Negation uu___1) in\n    if uu___\n    then FStar_Pprint.underscore\n    else (let uu___2 = FStar_Ident.string_of_lid lid in str uu___2)\nand (p_qlident : 
FStar_Ident.lid -> FStar_Pprint.document) =\n  fun lid -> text_of_lid_or_underscore lid\nand (p_quident : FStar_Ident.lid -> FStar_Pprint.document) =\n  fun lid -> text_of_lid_or_underscore lid\nand (p_ident : FStar_Ident.ident -> FStar_Pprint.document) =\n  fun lid -> string_of_id_or_underscore lid\nand (p_lident : FStar_Ident.ident -> FStar_Pprint.document) =\n  fun lid -> string_of_id_or_underscore lid\nand (p_uident : FStar_Ident.ident -> FStar_Pprint.document) =\n  fun lid -> string_of_id_or_underscore lid\nand (p_tvar : FStar_Ident.ident -> FStar_Pprint.document) =\n  fun lid -> string_of_id_or_underscore lid\nand (paren_if : Prims.bool -> FStar_Pprint.document -> FStar_Pprint.document)\n  = fun b -> if b then soft_parens_with_nesting else (fun x -> x)\nand (inline_comment_or_above :\n  FStar_Pprint.document ->\n    FStar_Pprint.document -> FStar_Pprint.document -> FStar_Pprint.document)\n  =\n  fun comm ->\n    fun doc ->\n      fun sep ->\n        if comm = FStar_Pprint.empty\n        then\n          let uu___ = FStar_Pprint.op_Hat_Hat doc sep in\n          FStar_Pprint.group uu___\n        else\n          (let uu___1 =\n             let uu___2 =\n               let uu___3 =\n                 let uu___4 =\n                   let uu___5 = FStar_Pprint.op_Hat_Hat break1 comm in\n                   FStar_Pprint.op_Hat_Hat sep uu___5 in\n                 FStar_Pprint.op_Hat_Hat doc uu___4 in\n               FStar_Pprint.group uu___3 in\n             let uu___3 =\n               let uu___4 =\n                 let uu___5 = FStar_Pprint.op_Hat_Hat doc sep in\n                 FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___5 in\n               FStar_Pprint.op_Hat_Hat comm uu___4 in\n             FStar_Pprint.ifflat uu___2 uu___3 in\n           FStar_Compiler_Effect.op_Less_Bar FStar_Pprint.group uu___1)\nand (p_term :\n  Prims.bool -> Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun ps ->\n    fun pb ->\n      fun e ->\n        match 
e.FStar_Parser_AST.tm with\n        | FStar_Parser_AST.Seq (e1, e2) ->\n            let uu___ = p_noSeqTerm true false e1 in\n            (match uu___ with\n             | (comm, t1) ->\n                 let uu___1 =\n                   inline_comment_or_above comm t1 FStar_Pprint.semi in\n                 let uu___2 =\n                   let uu___3 = p_term ps pb e2 in\n                   FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___3 in\n                 FStar_Pprint.op_Hat_Hat uu___1 uu___2)\n        | FStar_Parser_AST.Bind (x, e1, e2) ->\n            let uu___ =\n              let uu___1 =\n                let uu___2 =\n                  let uu___3 = p_lident x in\n                  let uu___4 =\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                      FStar_Pprint.long_left_arrow in\n                  FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n                let uu___3 =\n                  let uu___4 = p_noSeqTermAndComment true false e1 in\n                  let uu___5 =\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                      FStar_Pprint.semi in\n                  FStar_Pprint.op_Hat_Hat uu___4 uu___5 in\n                op_Hat_Slash_Plus_Hat uu___2 uu___3 in\n              FStar_Pprint.group uu___1 in\n            let uu___1 = p_term ps pb e2 in\n            FStar_Pprint.op_Hat_Slash_Hat uu___ uu___1\n        | uu___ ->\n            let uu___1 = p_noSeqTermAndComment ps pb e in\n            FStar_Pprint.group uu___1\nand (p_term_sep :\n  Prims.bool ->\n    Prims.bool ->\n      FStar_Parser_AST.term ->\n        (FStar_Pprint.document * FStar_Pprint.document))\n  =\n  fun ps ->\n    fun pb ->\n      fun e ->\n        match e.FStar_Parser_AST.tm with\n        | FStar_Parser_AST.Seq (e1, e2) ->\n            let uu___ = p_noSeqTerm true false e1 in\n            (match uu___ with\n             | (comm, t1) ->\n                 let uu___1 =\n                   let uu___2 =\n                     let 
uu___3 =\n                       FStar_Pprint.op_Hat_Hat t1 FStar_Pprint.semi in\n                     FStar_Pprint.group uu___3 in\n                   let uu___3 =\n                     let uu___4 = p_term ps pb e2 in\n                     FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___4 in\n                   FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n                 (comm, uu___1))\n        | FStar_Parser_AST.Bind (x, e1, e2) ->\n            let uu___ =\n              let uu___1 =\n                let uu___2 =\n                  let uu___3 =\n                    let uu___4 = p_lident x in\n                    let uu___5 =\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                        FStar_Pprint.long_left_arrow in\n                    FStar_Pprint.op_Hat_Hat uu___4 uu___5 in\n                  let uu___4 =\n                    let uu___5 = p_noSeqTermAndComment true false e1 in\n                    let uu___6 =\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                        FStar_Pprint.semi in\n                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n                  op_Hat_Slash_Plus_Hat uu___3 uu___4 in\n                FStar_Pprint.group uu___2 in\n              let uu___2 = p_term ps pb e2 in\n              FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n            (FStar_Pprint.empty, uu___)\n        | uu___ -> p_noSeqTerm ps pb e\nand (p_noSeqTerm :\n  Prims.bool ->\n    Prims.bool ->\n      FStar_Parser_AST.term ->\n        (FStar_Pprint.document * FStar_Pprint.document))\n  =\n  fun ps ->\n    fun pb ->\n      fun e ->\n        with_comment_sep (p_noSeqTerm' ps pb) e e.FStar_Parser_AST.range\nand (p_noSeqTermAndComment :\n  Prims.bool -> Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun ps ->\n    fun pb ->\n      fun e -> with_comment (p_noSeqTerm' ps pb) e e.FStar_Parser_AST.range\nand (p_noSeqTerm' :\n  Prims.bool -> Prims.bool -> FStar_Parser_AST.term -> 
FStar_Pprint.document)\n  =\n  fun ps ->\n    fun pb ->\n      fun e ->\n        match e.FStar_Parser_AST.tm with\n        | FStar_Parser_AST.Ascribed\n            (e1, t, FStar_Pervasives_Native.None, use_eq) ->\n            let uu___ =\n              let uu___1 = p_tmIff e1 in\n              let uu___2 =\n                let uu___3 =\n                  let uu___4 = p_typ ps pb t in\n                  FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon uu___4 in\n                FStar_Pprint.op_Hat_Hat\n                  (if use_eq\n                   then FStar_Pprint.dollar\n                   else FStar_Pprint.langle) uu___3 in\n              FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n            FStar_Pprint.group uu___\n        | FStar_Parser_AST.Ascribed\n            (e1, t, FStar_Pervasives_Native.Some tac, use_eq) ->\n            let uu___ =\n              let uu___1 = p_tmIff e1 in\n              let uu___2 =\n                let uu___3 =\n                  let uu___4 =\n                    let uu___5 = p_typ false false t in\n                    let uu___6 =\n                      let uu___7 = str \"by\" in\n                      let uu___8 = p_typ ps pb (maybe_unthunk tac) in\n                      FStar_Pprint.op_Hat_Slash_Hat uu___7 uu___8 in\n                    FStar_Pprint.op_Hat_Slash_Hat uu___5 uu___6 in\n                  FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon uu___4 in\n                FStar_Pprint.op_Hat_Hat\n                  (if use_eq\n                   then FStar_Pprint.dollar\n                   else FStar_Pprint.langle) uu___3 in\n              FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n            FStar_Pprint.group uu___\n        | FStar_Parser_AST.Op (id, e1::e2::e3::[]) when\n            let uu___ = FStar_Ident.string_of_id id in uu___ = \".()<-\" ->\n            let uu___ =\n              let uu___1 =\n                let uu___2 =\n                  let uu___3 = p_atomicTermNotQUident e1 in\n                  let 
uu___4 =\n                    let uu___5 =\n                      let uu___6 =\n                        let uu___7 = p_term false false e2 in\n                        soft_parens_with_nesting uu___7 in\n                      let uu___7 =\n                        FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                          FStar_Pprint.larrow in\n                      FStar_Pprint.op_Hat_Hat uu___6 uu___7 in\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___5 in\n                  FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n                FStar_Pprint.group uu___2 in\n              let uu___2 =\n                let uu___3 = p_noSeqTermAndComment ps pb e3 in jump2 uu___3 in\n              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n            FStar_Pprint.group uu___\n        | FStar_Parser_AST.Op (id, e1::e2::e3::[]) when\n            let uu___ = FStar_Ident.string_of_id id in uu___ = \".[]<-\" ->\n            let uu___ =\n              let uu___1 =\n                let uu___2 =\n                  let uu___3 = p_atomicTermNotQUident e1 in\n                  let uu___4 =\n                    let uu___5 =\n                      let uu___6 =\n                        let uu___7 = p_term false false e2 in\n                        soft_brackets_with_nesting uu___7 in\n                      let uu___7 =\n                        FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                          FStar_Pprint.larrow in\n                      FStar_Pprint.op_Hat_Hat uu___6 uu___7 in\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___5 in\n                  FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n                FStar_Pprint.group uu___2 in\n              let uu___2 =\n                let uu___3 = p_noSeqTermAndComment ps pb e3 in jump2 uu___3 in\n              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n            FStar_Pprint.group uu___\n        | FStar_Parser_AST.Requires (e1, wtf) ->\n            let uu___1 =\n              let 
uu___2 = str \"requires\" in\n              let uu___3 = p_typ ps pb e1 in\n              FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in\n            FStar_Pprint.group uu___1\n        | FStar_Parser_AST.Ensures (e1, wtf) ->\n            let uu___1 =\n              let uu___2 = str \"ensures\" in\n              let uu___3 = p_typ ps pb e1 in\n              FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in\n            FStar_Pprint.group uu___1\n        | FStar_Parser_AST.WFOrder (rel, e1) -> p_dec_wf ps pb rel e1\n        | FStar_Parser_AST.LexList l ->\n            let uu___ =\n              let uu___1 = str \"%\" in\n              let uu___2 = p_term_list ps pb l in\n              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n            FStar_Pprint.group uu___\n        | FStar_Parser_AST.Decreases (e1, wtf) ->\n            let uu___1 =\n              let uu___2 = str \"decreases\" in\n              let uu___3 = p_typ ps pb e1 in\n              FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in\n            FStar_Pprint.group uu___1\n        | FStar_Parser_AST.Attributes es ->\n            let uu___ =\n              let uu___1 = str \"attributes\" in\n              let uu___2 = FStar_Pprint.separate_map break1 p_atomicTerm es in\n              FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n            FStar_Pprint.group uu___\n        | FStar_Parser_AST.If (e1, op_opt, ret_opt, e2, e3) ->\n            if is_unit e3\n            then\n              let uu___ =\n                let uu___1 =\n                  let uu___2 =\n                    let uu___3 =\n                      let uu___4 =\n                        let uu___5 =\n                          let uu___6 =\n                            FStar_Compiler_Util.map_opt op_opt\n                              FStar_Ident.string_of_id in\n                          FStar_Compiler_Util.bind_opt uu___6\n                            (FStar_Parser_AST.strip_prefix \"let\") in\n                        FStar_Compiler_Util.dflt \"\" 
uu___5 in\n                      Prims.op_Hat \"if\" uu___4 in\n                    str uu___3 in\n                  let uu___3 = p_noSeqTermAndComment false false e1 in\n                  op_Hat_Slash_Plus_Hat uu___2 uu___3 in\n                let uu___2 =\n                  let uu___3 = str \"then\" in\n                  let uu___4 = p_noSeqTermAndComment ps pb e2 in\n                  op_Hat_Slash_Plus_Hat uu___3 uu___4 in\n                FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n              FStar_Pprint.group uu___\n            else\n              (let e2_doc =\n                 match e2.FStar_Parser_AST.tm with\n                 | FStar_Parser_AST.If (uu___1, uu___2, uu___3, uu___4, e31)\n                     when is_unit e31 ->\n                     let uu___5 = p_noSeqTermAndComment false false e2 in\n                     soft_parens_with_nesting uu___5\n                 | uu___1 -> p_noSeqTermAndComment false false e2 in\n               match ret_opt with\n               | FStar_Pervasives_Native.None ->\n                   let uu___1 =\n                     let uu___2 =\n                       let uu___3 = str \"if\" in\n                       let uu___4 = p_noSeqTermAndComment false false e1 in\n                       op_Hat_Slash_Plus_Hat uu___3 uu___4 in\n                     let uu___3 =\n                       let uu___4 =\n                         let uu___5 = str \"then\" in\n                         op_Hat_Slash_Plus_Hat uu___5 e2_doc in\n                       let uu___5 =\n                         let uu___6 = str \"else\" in\n                         let uu___7 = p_noSeqTermAndComment ps pb e3 in\n                         op_Hat_Slash_Plus_Hat uu___6 uu___7 in\n                       FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in\n                     FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in\n                   FStar_Pprint.group uu___1\n               | FStar_Pervasives_Native.Some (as_opt, ret, use_eq) ->\n                   let 
uu___1 =\n                     let uu___2 =\n                       let uu___3 = str \"if\" in\n                       let uu___4 = p_noSeqTermAndComment false false e1 in\n                       op_Hat_Slash_Plus_Hat uu___3 uu___4 in\n                     let uu___3 =\n                       let uu___4 =\n                         let uu___5 =\n                           match as_opt with\n                           | FStar_Pervasives_Native.None ->\n                               FStar_Pprint.empty\n                           | FStar_Pervasives_Native.Some as_ident ->\n                               let uu___6 = str \"as\" in\n                               let uu___7 = p_ident as_ident in\n                               FStar_Pprint.op_Hat_Slash_Hat uu___6 uu___7 in\n                         let uu___6 =\n                           let uu___7 =\n                             str (if use_eq then \"returns$\" else \"returns\") in\n                           let uu___8 = p_tmIff ret in\n                           op_Hat_Slash_Plus_Hat uu___7 uu___8 in\n                         FStar_Pprint.op_Hat_Slash_Hat uu___5 uu___6 in\n                       let uu___5 =\n                         let uu___6 =\n                           let uu___7 = str \"then\" in\n                           op_Hat_Slash_Plus_Hat uu___7 e2_doc in\n                         let uu___7 =\n                           let uu___8 = str \"else\" in\n                           let uu___9 = p_noSeqTermAndComment ps pb e3 in\n                           op_Hat_Slash_Plus_Hat uu___8 uu___9 in\n                         FStar_Pprint.op_Hat_Slash_Hat uu___6 uu___7 in\n                       FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in\n                     FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in\n                   FStar_Pprint.group uu___1)\n        | FStar_Parser_AST.TryWith (e1, branches) ->\n            let uu___ =\n              let uu___1 =\n                let uu___2 =\n                  let 
uu___3 = str \"try\" in\n                  let uu___4 = p_noSeqTermAndComment false false e1 in\n                  prefix2 uu___3 uu___4 in\n                let uu___3 =\n                  let uu___4 = str \"with\" in\n                  let uu___5 =\n                    separate_map_last FStar_Pprint.hardline p_patternBranch\n                      branches in\n                  FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in\n                FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in\n              FStar_Pprint.group uu___1 in\n            let uu___1 = paren_if (ps || pb) in uu___1 uu___\n        | FStar_Parser_AST.Match (e1, op_opt, ret_opt, branches) ->\n            let match_doc =\n              let uu___ =\n                let uu___1 =\n                  let uu___2 =\n                    let uu___3 =\n                      FStar_Compiler_Util.map_opt op_opt\n                        FStar_Ident.string_of_id in\n                    FStar_Compiler_Util.bind_opt uu___3\n                      (FStar_Parser_AST.strip_prefix \"let\") in\n                  FStar_Compiler_Util.dflt \"\" uu___2 in\n                Prims.op_Hat \"match\" uu___1 in\n              str uu___ in\n            let uu___ =\n              let uu___1 =\n                match ret_opt with\n                | FStar_Pervasives_Native.None ->\n                    let uu___2 =\n                      let uu___3 = p_noSeqTermAndComment false false e1 in\n                      let uu___4 = str \"with\" in\n                      FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one\n                        match_doc uu___3 uu___4 in\n                    FStar_Pprint.group uu___2\n                | FStar_Pervasives_Native.Some (as_opt, ret, use_eq) ->\n                    let uu___2 =\n                      let uu___3 =\n                        let uu___4 = p_noSeqTermAndComment false false e1 in\n                        let uu___5 =\n                          let uu___6 =\n                            
match as_opt with\n                            | FStar_Pervasives_Native.None ->\n                                FStar_Pprint.empty\n                            | FStar_Pervasives_Native.Some as_ident ->\n                                let uu___7 = str \"as\" in\n                                let uu___8 = p_ident as_ident in\n                                op_Hat_Slash_Plus_Hat uu___7 uu___8 in\n                          let uu___7 =\n                            let uu___8 =\n                              str (if use_eq then \"returns$\" else \"returns\") in\n                            let uu___9 = p_tmIff ret in\n                            op_Hat_Slash_Plus_Hat uu___8 uu___9 in\n                          op_Hat_Slash_Plus_Hat uu___6 uu___7 in\n                        op_Hat_Slash_Plus_Hat uu___4 uu___5 in\n                      let uu___4 = str \"with\" in\n                      FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one\n                        match_doc uu___3 uu___4 in\n                    FStar_Pprint.group uu___2 in\n              let uu___2 =\n                separate_map_last FStar_Pprint.hardline p_patternBranch\n                  branches in\n              FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n            let uu___1 = paren_if (ps || pb) in uu___1 uu___\n        | FStar_Parser_AST.LetOpen (uid, e1) ->\n            let uu___ =\n              let uu___1 =\n                let uu___2 =\n                  let uu___3 = str \"let open\" in\n                  let uu___4 = p_quident uid in\n                  let uu___5 = str \"in\" in\n                  FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one\n                    uu___3 uu___4 uu___5 in\n                let uu___3 = p_term false pb e1 in\n                FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in\n              FStar_Pprint.group uu___1 in\n            let uu___1 = paren_if ps in uu___1 uu___\n        | FStar_Parser_AST.LetOpenRecord (r, rty, e1) ->\n            let 
uu___ =\n              let uu___1 =\n                let uu___2 =\n                  let uu___3 = str \"let open\" in\n                  let uu___4 = p_term false pb r in\n                  let uu___5 = str \"as\" in\n                  FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one\n                    uu___3 uu___4 uu___5 in\n                let uu___3 =\n                  let uu___4 = p_term false pb rty in\n                  let uu___5 =\n                    let uu___6 = str \"in\" in\n                    let uu___7 = p_term false pb e1 in\n                    FStar_Pprint.op_Hat_Slash_Hat uu___6 uu___7 in\n                  FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in\n                FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in\n              FStar_Pprint.group uu___1 in\n            let uu___1 = paren_if ps in uu___1 uu___\n        | FStar_Parser_AST.LetOperator (lets, body) ->\n            let p_let uu___ is_last =\n              match uu___ with\n              | (id, pat, e1) ->\n                  let doc_let_or_and =\n                    let uu___1 = FStar_Ident.string_of_id id in str uu___1 in\n                  let doc_pat = p_letlhs doc_let_or_and (pat, e1) true in\n                  (match ((pat.FStar_Parser_AST.pat),\n                           (e1.FStar_Parser_AST.tm))\n                   with\n                   | (FStar_Parser_AST.PatVar (pid, uu___1, uu___2),\n                      FStar_Parser_AST.Name tid) when\n                       let uu___3 = FStar_Ident.string_of_id pid in\n                       let uu___4 =\n                         let uu___5 = FStar_Ident.path_of_lid tid in\n                         FStar_Compiler_List.last uu___5 in\n                       uu___3 = uu___4 ->\n                       let uu___3 =\n                         if is_last then str \"in\" else FStar_Pprint.empty in\n                       FStar_Pprint.op_Hat_Slash_Hat doc_pat uu___3\n                   | (FStar_Parser_AST.PatVar (pid, uu___1, 
uu___2),\n                      FStar_Parser_AST.Var tid) when\n                       let uu___3 = FStar_Ident.string_of_id pid in\n                       let uu___4 =\n                         let uu___5 = FStar_Ident.path_of_lid tid in\n                         FStar_Compiler_List.last uu___5 in\n                       uu___3 = uu___4 ->\n                       let uu___3 =\n                         if is_last then str \"in\" else FStar_Pprint.empty in\n                       FStar_Pprint.op_Hat_Slash_Hat doc_pat uu___3\n                   | uu___1 ->\n                       let uu___2 = p_term_sep false false e1 in\n                       (match uu___2 with\n                        | (comm, doc_expr) ->\n                            let doc_expr1 =\n                              inline_comment_or_above comm doc_expr\n                                FStar_Pprint.empty in\n                            if is_last\n                            then\n                              let uu___3 =\n                                FStar_Pprint.flow break1\n                                  [doc_pat; FStar_Pprint.equals] in\n                              let uu___4 = str \"in\" in\n                              FStar_Pprint.surround (Prims.of_int (2))\n                                Prims.int_one uu___3 doc_expr1 uu___4\n                            else\n                              (let uu___4 =\n                                 FStar_Pprint.flow break1\n                                   [doc_pat; FStar_Pprint.equals; doc_expr1] in\n                               FStar_Pprint.hang (Prims.of_int (2)) uu___4))) in\n            let l = FStar_Compiler_List.length lets in\n            let lets_docs =\n              FStar_Compiler_List.mapi\n                (fun i ->\n                   fun lb ->\n                     let uu___ = p_let lb (i = (l - Prims.int_one)) in\n                     FStar_Pprint.group uu___) lets in\n            let lets_doc =\n              let uu___ = 
FStar_Pprint.separate break1 lets_docs in\n              FStar_Pprint.group uu___ in\n            let r =\n              let uu___ =\n                let uu___1 =\n                  let uu___2 =\n                    let uu___3 = p_term false pb body in\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___3 in\n                  FStar_Pprint.op_Hat_Hat lets_doc uu___2 in\n                FStar_Pprint.group uu___1 in\n              let uu___1 = paren_if ps in uu___1 uu___ in\n            r\n        | FStar_Parser_AST.Let (q, lbs, e1) ->\n            let p_lb q1 uu___ is_last =\n              match uu___ with\n              | (a, (pat, e2)) ->\n                  let attrs = p_attrs_opt true a in\n                  let doc_let_or_and =\n                    match q1 with\n                    | FStar_Pervasives_Native.Some (FStar_Parser_AST.Rec) ->\n                        let uu___1 =\n                          let uu___2 = str \"let\" in\n                          let uu___3 = str \"rec\" in\n                          FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in\n                        FStar_Pprint.group uu___1\n                    | FStar_Pervasives_Native.Some\n                        (FStar_Parser_AST.NoLetQualifier) -> str \"let\"\n                    | uu___1 -> str \"and\" in\n                  let doc_pat = p_letlhs doc_let_or_and (pat, e2) true in\n                  let uu___1 = p_term_sep false false e2 in\n                  (match uu___1 with\n                   | (comm, doc_expr) ->\n                       let doc_expr1 =\n                         inline_comment_or_above comm doc_expr\n                           FStar_Pprint.empty in\n                       let uu___2 =\n                         if is_last\n                         then\n                           let uu___3 =\n                             FStar_Pprint.flow break1\n                               [doc_pat; FStar_Pprint.equals] in\n                           let uu___4 = 
str \"in\" in\n                           FStar_Pprint.surround (Prims.of_int (2))\n                             Prims.int_one uu___3 doc_expr1 uu___4\n                         else\n                           (let uu___4 =\n                              FStar_Pprint.flow break1\n                                [doc_pat; FStar_Pprint.equals; doc_expr1] in\n                            FStar_Pprint.hang (Prims.of_int (2)) uu___4) in\n                       FStar_Pprint.op_Hat_Hat attrs uu___2) in\n            let l = FStar_Compiler_List.length lbs in\n            let lbs_docs =\n              FStar_Compiler_List.mapi\n                (fun i ->\n                   fun lb ->\n                     if i = Prims.int_zero\n                     then\n                       let uu___ =\n                         p_lb (FStar_Pervasives_Native.Some q) lb\n                           (i = (l - Prims.int_one)) in\n                       FStar_Pprint.group uu___\n                     else\n                       (let uu___1 =\n                          p_lb FStar_Pervasives_Native.None lb\n                            (i = (l - Prims.int_one)) in\n                        FStar_Pprint.group uu___1)) lbs in\n            let lbs_doc =\n              let uu___ = FStar_Pprint.separate break1 lbs_docs in\n              FStar_Pprint.group uu___ in\n            let uu___ =\n              let uu___1 =\n                let uu___2 =\n                  let uu___3 = p_term false pb e1 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___3 in\n                FStar_Pprint.op_Hat_Hat lbs_doc uu___2 in\n              FStar_Pprint.group uu___1 in\n            let uu___1 = paren_if ps in uu___1 uu___\n        | FStar_Parser_AST.Abs\n            ({\n               FStar_Parser_AST.pat = FStar_Parser_AST.PatVar\n                 (x, typ_opt, uu___);\n               FStar_Parser_AST.prange = uu___1;_}::[],\n             {\n               FStar_Parser_AST.tm = FStar_Parser_AST.Match\n 
                (maybe_x, FStar_Pervasives_Native.None,\n                  FStar_Pervasives_Native.None, branches);\n               FStar_Parser_AST.range = uu___2;\n               FStar_Parser_AST.level = uu___3;_})\n            when matches_var maybe_x x ->\n            let uu___4 =\n              let uu___5 =\n                let uu___6 = str \"function\" in\n                let uu___7 =\n                  separate_map_last FStar_Pprint.hardline p_patternBranch\n                    branches in\n                FStar_Pprint.op_Hat_Slash_Hat uu___6 uu___7 in\n              FStar_Pprint.group uu___5 in\n            let uu___5 = paren_if (ps || pb) in uu___5 uu___4\n        | FStar_Parser_AST.Quote (e1, FStar_Parser_AST.Dynamic) ->\n            let uu___ =\n              let uu___1 = str \"quote\" in\n              let uu___2 = p_noSeqTermAndComment ps pb e1 in\n              FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n            FStar_Pprint.group uu___\n        | FStar_Parser_AST.Quote (e1, FStar_Parser_AST.Static) ->\n            let uu___ =\n              let uu___1 = str \"`\" in\n              let uu___2 = p_noSeqTermAndComment ps pb e1 in\n              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n            FStar_Pprint.group uu___\n        | FStar_Parser_AST.VQuote e1 ->\n            let uu___ =\n              let uu___1 = str \"`%\" in\n              let uu___2 = p_noSeqTermAndComment ps pb e1 in\n              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n            FStar_Pprint.group uu___\n        | FStar_Parser_AST.Antiquote\n            {\n              FStar_Parser_AST.tm = FStar_Parser_AST.Quote\n                (e1, FStar_Parser_AST.Dynamic);\n              FStar_Parser_AST.range = uu___;\n              FStar_Parser_AST.level = uu___1;_}\n            ->\n            let uu___2 =\n              let uu___3 = str \"`@\" in\n              let uu___4 = p_noSeqTermAndComment ps pb e1 in\n              FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n            
FStar_Pprint.group uu___2\n        | FStar_Parser_AST.Antiquote e1 ->\n            let uu___ =\n              let uu___1 = str \"`#\" in\n              let uu___2 = p_noSeqTermAndComment ps pb e1 in\n              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n            FStar_Pprint.group uu___\n        | FStar_Parser_AST.CalcProof (rel, init, steps) ->\n            let head =\n              let uu___ = str \"calc\" in\n              let uu___1 =\n                let uu___2 =\n                  let uu___3 = p_noSeqTermAndComment false false rel in\n                  let uu___4 =\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                      FStar_Pprint.lbrace in\n                  FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n                FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n              FStar_Pprint.op_Hat_Hat uu___ uu___1 in\n            let bot = FStar_Pprint.rbrace in\n            let uu___ = FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline bot in\n            let uu___1 =\n              let uu___2 =\n                let uu___3 =\n                  let uu___4 = p_noSeqTermAndComment false false init in\n                  let uu___5 =\n                    let uu___6 = str \";\" in\n                    let uu___7 =\n                      let uu___8 =\n                        separate_map_last FStar_Pprint.hardline p_calcStep\n                          steps in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___8 in\n                    FStar_Pprint.op_Hat_Hat uu___6 uu___7 in\n                  FStar_Pprint.op_Hat_Hat uu___4 uu___5 in\n                FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___3 in\n              FStar_Compiler_Effect.op_Less_Bar\n                (FStar_Pprint.nest (Prims.of_int (2))) uu___2 in\n            FStar_Pprint.enclose head uu___ uu___1\n        | FStar_Parser_AST.IntroForall (xs, p, e1) ->\n            let p1 = p_noSeqTermAndComment false false p in\n            let e2 = 
p_noSeqTermAndComment false false e1 in\n            let xs1 = p_binders_sep xs in\n            let uu___ = str \"introduce forall\" in\n            let uu___1 =\n              let uu___2 =\n                let uu___3 =\n                  let uu___4 =\n                    let uu___5 = str \".\" in\n                    let uu___6 =\n                      let uu___7 =\n                        let uu___8 =\n                          let uu___9 =\n                            let uu___10 = str \"with\" in\n                            let uu___11 =\n                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space e2 in\n                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline\n                            uu___9 in\n                        FStar_Pprint.op_Hat_Hat p1 uu___8 in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in\n                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in\n                FStar_Pprint.op_Hat_Hat xs1 uu___3 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n            FStar_Pprint.op_Hat_Hat uu___ uu___1\n        | FStar_Parser_AST.IntroExists (xs, p, vs, e1) ->\n            let p1 = p_noSeqTermAndComment false false p in\n            let e2 = p_noSeqTermAndComment false false e1 in\n            let xs1 = p_binders_sep xs in\n            let uu___ = str \"introduce\" in\n            let uu___1 =\n              let uu___2 =\n                let uu___3 = str \"exists\" in\n                let uu___4 =\n                  let uu___5 =\n                    let uu___6 =\n                      let uu___7 = str \".\" in\n                      let uu___8 =\n                        let uu___9 =\n                          let uu___10 =\n                            let uu___11 = str \"with\" in\n                            let uu___12 =\n      
                        let uu___13 =\n                                let uu___14 =\n                                  FStar_Pprint.separate_map\n                                    FStar_Pprint.space p_atomicTerm vs in\n                                let uu___15 =\n                                  let uu___16 =\n                                    let uu___17 = str \"and\" in\n                                    let uu___18 =\n                                      FStar_Pprint.op_Hat_Hat\n                                        FStar_Pprint.space e2 in\n                                    FStar_Pprint.op_Hat_Hat uu___17 uu___18 in\n                                  FStar_Pprint.op_Hat_Hat\n                                    FStar_Pprint.hardline uu___16 in\n                                FStar_Pprint.op_Hat_Hat uu___14 uu___15 in\n                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                                uu___13 in\n                            FStar_Pprint.op_Hat_Hat uu___11 uu___12 in\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline\n                            uu___10 in\n                        FStar_Pprint.op_Hat_Hat p1 uu___9 in\n                      FStar_Pprint.op_Hat_Hat uu___7 uu___8 in\n                    FStar_Pprint.op_Hat_Hat xs1 uu___6 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___5 in\n                FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n            FStar_Pprint.op_Hat_Hat uu___ uu___1\n        | FStar_Parser_AST.IntroImplies (p, q, x, e1) ->\n            let p1 = p_tmFormula p in\n            let q1 = p_tmFormula q in\n            let e2 = p_noSeqTermAndComment false false e1 in\n            let x1 = p_binders_sep [x] in\n            let uu___ = str \"introduce\" in\n            let uu___1 =\n              let uu___2 =\n                let uu___3 =\n                  let uu___4 =\n            
        let uu___5 = str \"==>\" in\n                    let uu___6 =\n                      let uu___7 =\n                        let uu___8 =\n                          let uu___9 =\n                            let uu___10 = str \"with\" in\n                            let uu___11 =\n                              let uu___12 =\n                                let uu___13 =\n                                  let uu___14 = str \".\" in\n                                  let uu___15 =\n                                    FStar_Pprint.op_Hat_Hat\n                                      FStar_Pprint.space e2 in\n                                  FStar_Pprint.op_Hat_Hat uu___14 uu___15 in\n                                FStar_Pprint.op_Hat_Hat x1 uu___13 in\n                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                                uu___12 in\n                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline\n                            uu___9 in\n                        FStar_Pprint.op_Hat_Hat q1 uu___8 in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in\n                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in\n                FStar_Pprint.op_Hat_Hat p1 uu___3 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n            FStar_Pprint.op_Hat_Hat uu___ uu___1\n        | FStar_Parser_AST.IntroOr (b, p, q, e1) ->\n            let p1 = p_tmFormula p in\n            let q1 = p_tmFormula q in\n            let e2 = p_noSeqTermAndComment false false e1 in\n            let uu___ = str \"introduce\" in\n            let uu___1 =\n              let uu___2 =\n                let uu___3 =\n                  let uu___4 =\n                    let uu___5 = str \"\\\\/\" in\n                    let uu___6 =\n                      let uu___7 =\n              
          let uu___8 =\n                          let uu___9 =\n                            let uu___10 = str \"with\" in\n                            let uu___11 =\n                              let uu___12 =\n                                let uu___13 =\n                                  if b then str \"Left\" else str \"Right\" in\n                                let uu___14 =\n                                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                                    e2 in\n                                FStar_Pprint.op_Hat_Hat uu___13 uu___14 in\n                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                                uu___12 in\n                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline\n                            uu___9 in\n                        FStar_Pprint.op_Hat_Hat q1 uu___8 in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in\n                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in\n                FStar_Pprint.op_Hat_Hat p1 uu___3 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n            FStar_Pprint.op_Hat_Hat uu___ uu___1\n        | FStar_Parser_AST.IntroAnd (p, q, e1, e2) ->\n            let p1 = p_tmFormula p in\n            let q1 = p_tmTuple q in\n            let e11 = p_noSeqTermAndComment false false e1 in\n            let e21 = p_noSeqTermAndComment false false e2 in\n            let uu___ = str \"introduce\" in\n            let uu___1 =\n              let uu___2 =\n                let uu___3 =\n                  let uu___4 =\n                    let uu___5 = str \"/\\\\\" in\n                    let uu___6 =\n                      let uu___7 =\n                        let uu___8 =\n                          let uu___9 =\n                            let uu___10 = str 
\"with\" in\n                            let uu___11 =\n                              let uu___12 =\n                                let uu___13 =\n                                  let uu___14 =\n                                    let uu___15 = str \"and\" in\n                                    let uu___16 =\n                                      FStar_Pprint.op_Hat_Hat\n                                        FStar_Pprint.space e21 in\n                                    FStar_Pprint.op_Hat_Hat uu___15 uu___16 in\n                                  FStar_Pprint.op_Hat_Hat\n                                    FStar_Pprint.hardline uu___14 in\n                                FStar_Pprint.op_Hat_Hat e11 uu___13 in\n                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                                uu___12 in\n                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline\n                            uu___9 in\n                        FStar_Pprint.op_Hat_Hat q1 uu___8 in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in\n                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in\n                FStar_Pprint.op_Hat_Hat p1 uu___3 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n            FStar_Pprint.op_Hat_Hat uu___ uu___1\n        | FStar_Parser_AST.ElimForall (xs, p, vs) ->\n            let xs1 = p_binders_sep xs in\n            let p1 = p_noSeqTermAndComment false false p in\n            let vs1 =\n              FStar_Pprint.separate_map FStar_Pprint.space p_atomicTerm vs in\n            let uu___ = str \"eliminate\" in\n            let uu___1 =\n              let uu___2 =\n                let uu___3 = str \"forall\" in\n                let uu___4 =\n                  let uu___5 =\n                    let uu___6 =\n                      
let uu___7 = str \".\" in\n                      let uu___8 =\n                        let uu___9 =\n                          let uu___10 =\n                            let uu___11 =\n                              let uu___12 = str \"with\" in\n                              let uu___13 =\n                                FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                                  vs1 in\n                              FStar_Pprint.op_Hat_Hat uu___12 uu___13 in\n                            FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline\n                              uu___11 in\n                          FStar_Pprint.op_Hat_Hat p1 uu___10 in\n                        FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___9 in\n                      FStar_Pprint.op_Hat_Hat uu___7 uu___8 in\n                    FStar_Pprint.op_Hat_Hat xs1 uu___6 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___5 in\n                FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n            FStar_Pprint.op_Hat_Hat uu___ uu___1\n        | FStar_Parser_AST.ElimExists (bs, p, q, b, e1) ->\n            let head =\n              let uu___ = str \"eliminate exists\" in\n              let uu___1 =\n                let uu___2 =\n                  let uu___3 = p_binders_sep bs in\n                  let uu___4 = str \".\" in\n                  FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n                FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n              FStar_Pprint.op_Hat_Hat uu___ uu___1 in\n            let p1 = p_noSeqTermAndComment false false p in\n            let q1 = p_noSeqTermAndComment false false q in\n            let e2 = p_noSeqTermAndComment false false e1 in\n            let uu___ =\n              let uu___1 =\n                let uu___2 =\n                  let uu___3 =\n                    let uu___4 = str \"returns\" in\n                    let uu___5 =\n                     
 let uu___6 =\n                        let uu___7 =\n                          let uu___8 =\n                            let uu___9 = str \"with\" in\n                            let uu___10 =\n                              let uu___11 =\n                                let uu___12 = p_binders_sep [b] in\n                                let uu___13 =\n                                  let uu___14 = str \".\" in\n                                  let uu___15 =\n                                    FStar_Pprint.op_Hat_Hat\n                                      FStar_Pprint.hardline e2 in\n                                  FStar_Pprint.op_Hat_Hat uu___14 uu___15 in\n                                FStar_Pprint.op_Hat_Hat uu___12 uu___13 in\n                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                                uu___11 in\n                            FStar_Pprint.op_Hat_Hat uu___9 uu___10 in\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline\n                            uu___8 in\n                        FStar_Pprint.op_Hat_Hat q1 uu___7 in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___6 in\n                    FStar_Pprint.op_Hat_Hat uu___4 uu___5 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___3 in\n                FStar_Pprint.op_Hat_Hat p1 uu___2 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___1 in\n            FStar_Pprint.op_Hat_Hat head uu___\n        | FStar_Parser_AST.ElimImplies (p, q, e1) ->\n            let p1 = p_tmFormula p in\n            let q1 = p_tmFormula q in\n            let e2 = p_noSeqTermAndComment false false e1 in\n            let uu___ = str \"eliminate\" in\n            let uu___1 =\n              let uu___2 =\n                let uu___3 =\n                  let uu___4 =\n                    let uu___5 = str \"==>\" in\n                    let uu___6 =\n                      let uu___7 =\n                        let 
uu___8 =\n                          let uu___9 =\n                            let uu___10 = str \"with\" in\n                            let uu___11 =\n                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space e2 in\n                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline\n                            uu___9 in\n                        FStar_Pprint.op_Hat_Hat q1 uu___8 in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in\n                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in\n                FStar_Pprint.op_Hat_Hat p1 uu___3 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n            FStar_Pprint.op_Hat_Hat uu___ uu___1\n        | FStar_Parser_AST.ElimOr (p, q, r, x, e1, y, e2) ->\n            let p1 = p_tmFormula p in\n            let q1 = p_tmFormula q in\n            let r1 = p_noSeqTermAndComment false false r in\n            let x1 = p_binders_sep [x] in\n            let e11 = p_noSeqTermAndComment false false e1 in\n            let y1 = p_binders_sep [y] in\n            let e21 = p_noSeqTermAndComment false false e2 in\n            let uu___ = str \"eliminate\" in\n            let uu___1 =\n              let uu___2 =\n                let uu___3 =\n                  let uu___4 =\n                    let uu___5 = str \"\\\\/\" in\n                    let uu___6 =\n                      let uu___7 =\n                        let uu___8 =\n                          let uu___9 =\n                            let uu___10 = str \"returns\" in\n                            let uu___11 =\n                              let uu___12 =\n                                let uu___13 =\n                                  let uu___14 =\n                                    let uu___15 = str \"with\" in\n                                    let uu___16 =\n  
                                    let uu___17 =\n                                        let uu___18 =\n                                          let uu___19 =\n                                            let uu___20 = str \".\" in\n                                            let uu___21 =\n                                              let uu___22 =\n                                                let uu___23 =\n                                                  let uu___24 =\n                                                    let uu___25 = str \"and\" in\n                                                    let uu___26 =\n                                                      let uu___27 =\n                                                        let uu___28 =\n                                                          let uu___29 =\n                                                            let uu___30 =\n                                                              str \".\" in\n                                                            let uu___31 =\n                                                              FStar_Pprint.op_Hat_Hat\n                                                                FStar_Pprint.space\n                                                                e21 in\n                                                            FStar_Pprint.op_Hat_Hat\n                                                              uu___30 uu___31 in\n                                                          FStar_Pprint.op_Hat_Hat\n                                                            FStar_Pprint.space\n                                                            uu___29 in\n                                                        FStar_Pprint.op_Hat_Hat\n                                                          y1 uu___28 in\n                                                      FStar_Pprint.op_Hat_Hat\n                                                   
     FStar_Pprint.space\n                                                        uu___27 in\n                                                    FStar_Pprint.op_Hat_Hat\n                                                      uu___25 uu___26 in\n                                                  FStar_Pprint.op_Hat_Hat\n                                                    FStar_Pprint.hardline\n                                                    uu___24 in\n                                                FStar_Pprint.op_Hat_Hat e11\n                                                  uu___23 in\n                                              FStar_Pprint.op_Hat_Hat\n                                                FStar_Pprint.space uu___22 in\n                                            FStar_Pprint.op_Hat_Hat uu___20\n                                              uu___21 in\n                                          FStar_Pprint.op_Hat_Hat\n                                            FStar_Pprint.space uu___19 in\n                                        FStar_Pprint.op_Hat_Hat x1 uu___18 in\n                                      FStar_Pprint.op_Hat_Hat\n                                        FStar_Pprint.space uu___17 in\n                                    FStar_Pprint.op_Hat_Hat uu___15 uu___16 in\n                                  FStar_Pprint.op_Hat_Hat\n                                    FStar_Pprint.hardline uu___14 in\n                                FStar_Pprint.op_Hat_Hat r1 uu___13 in\n                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                                uu___12 in\n                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline\n                            uu___9 in\n                        FStar_Pprint.op_Hat_Hat q1 uu___8 in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in\n                    
FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in\n                FStar_Pprint.op_Hat_Hat p1 uu___3 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n            FStar_Pprint.op_Hat_Hat uu___ uu___1\n        | FStar_Parser_AST.ElimAnd (p, q, r, x, y, e1) ->\n            let p1 = p_tmFormula p in\n            let q1 = p_tmTuple q in\n            let r1 = p_noSeqTermAndComment false false r in\n            let xy = p_binders_sep [x; y] in\n            let e2 = p_noSeqTermAndComment false false e1 in\n            let uu___ = str \"eliminate\" in\n            let uu___1 =\n              let uu___2 =\n                let uu___3 =\n                  let uu___4 =\n                    let uu___5 = str \"/\\\\\" in\n                    let uu___6 =\n                      let uu___7 =\n                        let uu___8 =\n                          let uu___9 =\n                            let uu___10 = str \"returns\" in\n                            let uu___11 =\n                              let uu___12 =\n                                let uu___13 =\n                                  let uu___14 =\n                                    let uu___15 = str \"with\" in\n                                    let uu___16 =\n                                      let uu___17 =\n                                        let uu___18 =\n                                          let uu___19 =\n                                            let uu___20 = str \".\" in\n                                            let uu___21 =\n                                              FStar_Pprint.op_Hat_Hat\n                                                FStar_Pprint.space e2 in\n                                            FStar_Pprint.op_Hat_Hat uu___20\n                                              uu___21 in\n                                          FStar_Pprint.op_Hat_Hat\n                                          
  FStar_Pprint.space uu___19 in\n                                        FStar_Pprint.op_Hat_Hat xy uu___18 in\n                                      FStar_Pprint.op_Hat_Hat\n                                        FStar_Pprint.space uu___17 in\n                                    FStar_Pprint.op_Hat_Hat uu___15 uu___16 in\n                                  FStar_Pprint.op_Hat_Hat\n                                    FStar_Pprint.hardline uu___14 in\n                                FStar_Pprint.op_Hat_Hat r1 uu___13 in\n                              FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                                uu___12 in\n                            FStar_Pprint.op_Hat_Hat uu___10 uu___11 in\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline\n                            uu___9 in\n                        FStar_Pprint.op_Hat_Hat q1 uu___8 in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in\n                    FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___4 in\n                FStar_Pprint.op_Hat_Hat p1 uu___3 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n            FStar_Pprint.op_Hat_Hat uu___ uu___1\n        | uu___ -> p_typ ps pb e\nand (p_dec_wf :\n  Prims.bool ->\n    Prims.bool ->\n      FStar_Parser_AST.term -> FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun ps ->\n    fun pb ->\n      fun rel ->\n        fun e ->\n          let uu___ =\n            let uu___1 = str \"{:well-founded \" in\n            let uu___2 =\n              let uu___3 = p_typ ps pb rel in\n              let uu___4 =\n                let uu___5 = p_typ ps pb e in\n                let uu___6 = str \" }\" in\n                FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n              FStar_Pprint.op_Hat_Slash_Hat uu___3 uu___4 in\n            FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n          FStar_Pprint.group uu___\nand 
(p_calcStep :\n  Prims.bool -> FStar_Parser_AST.calc_step -> FStar_Pprint.document) =\n  fun uu___ ->\n    fun uu___1 ->\n      match uu___1 with\n      | FStar_Parser_AST.CalcStep (rel, just, next) ->\n          let uu___2 =\n            let uu___3 = p_noSeqTermAndComment false false rel in\n            let uu___4 =\n              let uu___5 =\n                let uu___6 =\n                  let uu___7 =\n                    let uu___8 = p_noSeqTermAndComment false false just in\n                    let uu___9 =\n                      let uu___10 =\n                        let uu___11 =\n                          let uu___12 =\n                            let uu___13 =\n                              p_noSeqTermAndComment false false next in\n                            let uu___14 = str \";\" in\n                            FStar_Pprint.op_Hat_Hat uu___13 uu___14 in\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline\n                            uu___12 in\n                        FStar_Pprint.op_Hat_Hat FStar_Pprint.rbrace uu___11 in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___10 in\n                    FStar_Pprint.op_Hat_Hat uu___8 uu___9 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___7 in\n                FStar_Pprint.op_Hat_Hat FStar_Pprint.lbrace uu___6 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___5 in\n            FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n          FStar_Pprint.group uu___2\nand (p_attrs_opt :\n  Prims.bool ->\n    FStar_Parser_AST.term Prims.list FStar_Pervasives_Native.option ->\n      FStar_Pprint.document)\n  =\n  fun isTopLevel ->\n    fun uu___ ->\n      match uu___ with\n      | FStar_Pervasives_Native.None -> FStar_Pprint.empty\n      | FStar_Pervasives_Native.Some terms ->\n          let uu___1 =\n            let uu___2 = str (if isTopLevel then \"[@@\" else \"[@@@\") in\n            let uu___3 =\n              let uu___4 =\n                
let uu___5 = str \"; \" in\n                FStar_Pprint.separate_map uu___5\n                  (p_noSeqTermAndComment false false) terms in\n              let uu___5 = str \"]\" in\n              FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in\n            FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in\n          FStar_Pprint.group uu___1\nand (p_typ :\n  Prims.bool -> Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun ps ->\n    fun pb -> fun e -> with_comment (p_typ' ps pb) e e.FStar_Parser_AST.range\nand (p_typ_sep :\n  Prims.bool ->\n    Prims.bool ->\n      FStar_Parser_AST.term ->\n        (FStar_Pprint.document * FStar_Pprint.document))\n  =\n  fun ps ->\n    fun pb ->\n      fun e -> with_comment_sep (p_typ' ps pb) e e.FStar_Parser_AST.range\nand (p_typ' :\n  Prims.bool -> Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun ps ->\n    fun pb ->\n      fun e ->\n        match e.FStar_Parser_AST.tm with\n        | FStar_Parser_AST.QForall (bs, (uu___, trigger), e1) ->\n            let binders_doc = p_binders true bs in\n            let term_doc = p_noSeqTermAndComment ps pb e1 in\n            (match trigger with\n             | [] ->\n                 let uu___1 =\n                   let uu___2 =\n                     let uu___3 = p_quantifier e in\n                     FStar_Pprint.op_Hat_Hat uu___3 FStar_Pprint.space in\n                   FStar_Pprint.soft_surround (Prims.of_int (2))\n                     Prims.int_zero uu___2 binders_doc FStar_Pprint.dot in\n                 prefix2 uu___1 term_doc\n             | pats ->\n                 let uu___1 =\n                   let uu___2 =\n                     let uu___3 =\n                       let uu___4 =\n                         let uu___5 = p_quantifier e in\n                         FStar_Pprint.op_Hat_Hat uu___5 FStar_Pprint.space in\n                       FStar_Pprint.soft_surround (Prims.of_int (2))\n                         Prims.int_zero uu___4 
binders_doc FStar_Pprint.dot in\n                     let uu___4 = p_trigger trigger in prefix2 uu___3 uu___4 in\n                   FStar_Pprint.group uu___2 in\n                 prefix2 uu___1 term_doc)\n        | FStar_Parser_AST.QExists (bs, (uu___, trigger), e1) ->\n            let binders_doc = p_binders true bs in\n            let term_doc = p_noSeqTermAndComment ps pb e1 in\n            (match trigger with\n             | [] ->\n                 let uu___1 =\n                   let uu___2 =\n                     let uu___3 = p_quantifier e in\n                     FStar_Pprint.op_Hat_Hat uu___3 FStar_Pprint.space in\n                   FStar_Pprint.soft_surround (Prims.of_int (2))\n                     Prims.int_zero uu___2 binders_doc FStar_Pprint.dot in\n                 prefix2 uu___1 term_doc\n             | pats ->\n                 let uu___1 =\n                   let uu___2 =\n                     let uu___3 =\n                       let uu___4 =\n                         let uu___5 = p_quantifier e in\n                         FStar_Pprint.op_Hat_Hat uu___5 FStar_Pprint.space in\n                       FStar_Pprint.soft_surround (Prims.of_int (2))\n                         Prims.int_zero uu___4 binders_doc FStar_Pprint.dot in\n                     let uu___4 = p_trigger trigger in prefix2 uu___3 uu___4 in\n                   FStar_Pprint.group uu___2 in\n                 prefix2 uu___1 term_doc)\n        | uu___ -> p_simpleTerm ps pb e\nand (p_typ_top :\n  annotation_style ->\n    Prims.bool ->\n      Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun style ->\n    fun ps ->\n      fun pb ->\n        fun e ->\n          with_comment (p_typ_top' style ps pb) e e.FStar_Parser_AST.range\nand (p_typ_top' :\n  annotation_style ->\n    Prims.bool ->\n      Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun style ->\n    fun ps -> fun pb -> fun e -> p_tmArrow style true p_tmFormula e\nand 
(sig_as_binders_if_possible :\n  FStar_Parser_AST.term -> Prims.bool -> FStar_Pprint.document) =\n  fun t ->\n    fun extra_space ->\n      let s = if extra_space then FStar_Pprint.space else FStar_Pprint.empty in\n      let uu___ = all_binders_annot t in\n      if uu___\n      then\n        let uu___1 =\n          p_typ_top (Binders ((Prims.of_int (4)), Prims.int_zero, true))\n            false false t in\n        FStar_Pprint.op_Hat_Hat s uu___1\n      else\n        (let uu___2 =\n           let uu___3 =\n             let uu___4 =\n               p_typ_top (Arrows ((Prims.of_int (2)), (Prims.of_int (2))))\n                 false false t in\n             FStar_Pprint.op_Hat_Hat s uu___4 in\n           FStar_Pprint.op_Hat_Hat FStar_Pprint.colon uu___3 in\n         FStar_Pprint.group uu___2)\nand (collapse_pats :\n  (FStar_Pprint.document * FStar_Pprint.document * Prims.bool * Prims.bool)\n    Prims.list -> FStar_Pprint.document Prims.list)\n  =\n  fun pats ->\n    let fold_fun bs x =\n      let uu___ = x in\n      match uu___ with\n      | (b1, t1, tc1, j1) ->\n          (match bs with\n           | [] -> [([b1], t1, tc1, j1)]\n           | hd::tl ->\n               let uu___1 = hd in\n               (match uu___1 with\n                | (b2s, t2, tc2, j2) ->\n                    if ((t1 = t2) && j1) && j2\n                    then\n                      ((FStar_Compiler_List.op_At b2s [b1]), t1, false, true)\n                      :: tl\n                    else ([b1], t1, tc1, j1) :: hd :: tl)) in\n    let p_collapsed_binder cb =\n      let uu___ = cb in\n      match uu___ with\n      | (bs, typ, istcarg, uu___1) ->\n          let body =\n            match bs with\n            | [] -> failwith \"Impossible\"\n            | hd::tl ->\n                let uu___2 =\n                  FStar_Compiler_List.fold_left\n                    (fun x ->\n                       fun y ->\n                         let uu___3 =\n                           FStar_Pprint.op_Hat_Hat 
FStar_Pprint.space y in\n                         FStar_Pprint.op_Hat_Hat x uu___3) hd tl in\n                cat_with_colon uu___2 typ in\n          if istcarg then tc_arg body else soft_parens_with_nesting body in\n    let binders =\n      FStar_Compiler_List.fold_left fold_fun []\n        (FStar_Compiler_List.rev pats) in\n    map_rev p_collapsed_binder binders\nand (pats_as_binders_if_possible :\n  FStar_Parser_AST.pattern Prims.list ->\n    (FStar_Pprint.document Prims.list * annotation_style))\n  =\n  fun pats ->\n    let all_binders p =\n      match p.FStar_Parser_AST.pat with\n      | FStar_Parser_AST.PatAscribed (pat, (t, FStar_Pervasives_Native.None))\n          ->\n          (match ((pat.FStar_Parser_AST.pat), (t.FStar_Parser_AST.tm)) with\n           | (FStar_Parser_AST.PatVar (lid, aqual, attrs),\n              FStar_Parser_AST.Refine\n              ({ FStar_Parser_AST.b = FStar_Parser_AST.Annotated (lid', t1);\n                 FStar_Parser_AST.brange = uu___;\n                 FStar_Parser_AST.blevel = uu___1;\n                 FStar_Parser_AST.aqual = uu___2;\n                 FStar_Parser_AST.battributes = uu___3;_},\n               phi)) when\n               let uu___4 = FStar_Ident.string_of_id lid in\n               let uu___5 = FStar_Ident.string_of_id lid' in uu___4 = uu___5\n               ->\n               let uu___4 =\n                 let uu___5 = p_ident lid in\n                 p_refinement' aqual attrs uu___5 t1 phi in\n               (match uu___4 with\n                | (x, y) -> FStar_Pervasives_Native.Some (x, y, false, false))\n           | (FStar_Parser_AST.PatVar (lid, aqual, attrs), uu___) ->\n               let is_tc =\n                 aqual =\n                   (FStar_Pervasives_Native.Some\n                      FStar_Parser_AST.TypeClassArg) in\n               let is_meta =\n                 match aqual with\n                 | FStar_Pervasives_Native.Some (FStar_Parser_AST.Meta\n                     uu___1) -> true\n     
            | uu___1 -> false in\n               let uu___1 =\n                 let uu___2 =\n                   let uu___3 = FStar_Pprint.optional p_aqual aqual in\n                   let uu___4 =\n                     let uu___5 = p_attributes false attrs in\n                     let uu___6 = p_ident lid in\n                     FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n                   FStar_Pprint.op_Hat_Hat uu___3 uu___4 in\n                 let uu___3 = p_tmEqNoRefinement t in\n                 (uu___2, uu___3, is_tc,\n                   ((Prims.op_Negation is_tc) && (Prims.op_Negation is_meta))) in\n               FStar_Pervasives_Native.Some uu___1\n           | uu___ -> FStar_Pervasives_Native.None)\n      | uu___ -> FStar_Pervasives_Native.None in\n    let uu___ = map_if_all all_binders pats in\n    match uu___ with\n    | FStar_Pervasives_Native.Some bs ->\n        let uu___1 = collapse_pats bs in\n        (uu___1, (Binders ((Prims.of_int (4)), Prims.int_zero, true)))\n    | FStar_Pervasives_Native.None ->\n        let uu___1 = FStar_Compiler_List.map p_atomicPattern pats in\n        (uu___1, (Binders ((Prims.of_int (4)), Prims.int_zero, false)))\nand (p_quantifier : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.QForall uu___ -> str \"forall\"\n    | FStar_Parser_AST.QExists uu___ -> str \"exists\"\n    | uu___ ->\n        failwith \"Imposible : p_quantifier called on a non-quantifier term\"\nand (p_trigger :\n  FStar_Parser_AST.term Prims.list Prims.list -> FStar_Pprint.document) =\n  fun uu___ ->\n    match uu___ with\n    | [] -> FStar_Pprint.empty\n    | pats ->\n        let uu___1 =\n          let uu___2 =\n            let uu___3 =\n              let uu___4 = str \"pattern\" in\n              let uu___5 =\n                let uu___6 =\n                  let uu___7 = p_disjunctivePats pats in\n                  FStar_Pprint.jump (Prims.of_int (2)) Prims.int_zero uu___7 
in\n                FStar_Pprint.op_Hat_Hat uu___6 FStar_Pprint.rbrace in\n              FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.colon uu___3 in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.lbrace uu___2 in\n        FStar_Pprint.group uu___1\nand (p_disjunctivePats :\n  FStar_Parser_AST.term Prims.list Prims.list -> FStar_Pprint.document) =\n  fun pats ->\n    let uu___ = str \"\\\\/\" in\n    FStar_Pprint.separate_map uu___ p_conjunctivePats pats\nand (p_conjunctivePats :\n  FStar_Parser_AST.term Prims.list -> FStar_Pprint.document) =\n  fun pats ->\n    let uu___ =\n      let uu___1 = FStar_Pprint.op_Hat_Hat FStar_Pprint.semi break1 in\n      FStar_Pprint.separate_map uu___1 p_appTerm pats in\n    FStar_Pprint.group uu___\nand (p_simpleTerm :\n  Prims.bool -> Prims.bool -> FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun ps ->\n    fun pb ->\n      fun e ->\n        match e.FStar_Parser_AST.tm with\n        | FStar_Parser_AST.Abs (pats, e1) ->\n            let uu___ = p_term_sep false pb e1 in\n            (match uu___ with\n             | (comm, doc) ->\n                 let prefix =\n                   let uu___1 = str \"fun\" in\n                   let uu___2 =\n                     let uu___3 =\n                       FStar_Pprint.separate_map break1 p_atomicPattern pats in\n                     FStar_Pprint.op_Hat_Slash_Hat uu___3 FStar_Pprint.rarrow in\n                   op_Hat_Slash_Plus_Hat uu___1 uu___2 in\n                 let uu___1 =\n                   if comm <> FStar_Pprint.empty\n                   then\n                     let uu___2 =\n                       let uu___3 =\n                         let uu___4 =\n                           FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline doc in\n                         FStar_Pprint.op_Hat_Hat comm uu___4 in\n                       FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline uu___3 in\n                     
FStar_Pprint.op_Hat_Hat prefix uu___2\n                   else\n                     (let uu___3 = op_Hat_Slash_Plus_Hat prefix doc in\n                      FStar_Pprint.group uu___3) in\n                 let uu___2 = paren_if ps in uu___2 uu___1)\n        | uu___ -> p_tmIff e\nand (p_maybeFocusArrow : Prims.bool -> FStar_Pprint.document) =\n  fun b -> if b then str \"~>\" else FStar_Pprint.rarrow\nand (p_patternBranch :\n  Prims.bool ->\n    (FStar_Parser_AST.pattern * FStar_Parser_AST.term\n      FStar_Pervasives_Native.option * FStar_Parser_AST.term) ->\n      FStar_Pprint.document)\n  =\n  fun pb ->\n    fun uu___ ->\n      match uu___ with\n      | (pat, when_opt, e) ->\n          let one_pattern_branch p =\n            let branch =\n              match when_opt with\n              | FStar_Pervasives_Native.None ->\n                  let uu___1 =\n                    let uu___2 =\n                      let uu___3 =\n                        let uu___4 = p_tuplePattern p in\n                        let uu___5 =\n                          FStar_Pprint.op_Hat_Hat FStar_Pprint.space\n                            FStar_Pprint.rarrow in\n                        FStar_Pprint.op_Hat_Hat uu___4 uu___5 in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.bar uu___2 in\n                  FStar_Pprint.group uu___1\n              | FStar_Pervasives_Native.Some f ->\n                  let uu___1 =\n                    let uu___2 =\n                      let uu___3 =\n                        let uu___4 =\n                          let uu___5 =\n                            let uu___6 = p_tuplePattern p in\n                            let uu___7 = str \"when\" in\n                            FStar_Pprint.op_Hat_Slash_Hat uu___6 uu___7 in\n                          FStar_Pprint.group uu___5 in\n                        let uu___5 =\n                          let uu___6 =\n                          
  let uu___7 = p_tmFormula f in\n                            [uu___7; FStar_Pprint.rarrow] in\n                          FStar_Pprint.flow break1 uu___6 in\n                        FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.bar uu___2 in\n                  FStar_Pprint.hang (Prims.of_int (2)) uu___1 in\n            let uu___1 = p_term_sep false pb e in\n            match uu___1 with\n            | (comm, doc) ->\n                if pb\n                then\n                  (if comm = FStar_Pprint.empty\n                   then\n                     let uu___2 = op_Hat_Slash_Plus_Hat branch doc in\n                     FStar_Pprint.group uu___2\n                   else\n                     (let uu___3 =\n                        let uu___4 =\n                          let uu___5 =\n                            let uu___6 =\n                              let uu___7 =\n                                FStar_Pprint.op_Hat_Hat break1 comm in\n                              FStar_Pprint.op_Hat_Hat doc uu___7 in\n                            op_Hat_Slash_Plus_Hat branch uu___6 in\n                          FStar_Pprint.group uu___5 in\n                        let uu___5 =\n                          let uu___6 =\n                            let uu___7 =\n                              inline_comment_or_above comm doc\n                                FStar_Pprint.empty in\n                            jump2 uu___7 in\n                          FStar_Pprint.op_Hat_Hat branch uu___6 in\n                        FStar_Pprint.ifflat uu___4 uu___5 in\n                      FStar_Pprint.group uu___3))\n                else\n                  if comm <> FStar_Pprint.empty\n                  then\n                    (let uu___3 =\n                       let uu___4 =\n                         FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline doc in\n      
                 FStar_Pprint.op_Hat_Hat comm uu___4 in\n                     op_Hat_Slash_Plus_Hat branch uu___3)\n                  else op_Hat_Slash_Plus_Hat branch doc in\n          (match pat.FStar_Parser_AST.pat with\n           | FStar_Parser_AST.PatOr pats ->\n               (match FStar_Compiler_List.rev pats with\n                | hd::tl ->\n                    let last_pat_branch = one_pattern_branch hd in\n                    let uu___1 =\n                      let uu___2 =\n                        let uu___3 =\n                          let uu___4 =\n                            let uu___5 =\n                              let uu___6 =\n                                FStar_Pprint.op_Hat_Hat FStar_Pprint.bar\n                                  FStar_Pprint.space in\n                              FStar_Pprint.op_Hat_Hat break1 uu___6 in\n                            FStar_Pprint.separate_map uu___5 p_tuplePattern\n                              (FStar_Compiler_List.rev tl) in\n                          FStar_Pprint.op_Hat_Slash_Hat uu___4\n                            last_pat_branch in\n                        FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.bar uu___2 in\n                    FStar_Pprint.group uu___1\n                | [] ->\n                    failwith \"Impossible: disjunctive pattern can't be empty\")\n           | uu___1 -> one_pattern_branch pat)\nand (p_tmIff : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Op (id, e1::e2::[]) when\n        let uu___ = FStar_Ident.string_of_id id in uu___ = \"<==>\" ->\n        let uu___ = str \"<==>\" in\n        let uu___1 = p_tmImplies e1 in\n        let uu___2 = p_tmIff e2 in infix0 uu___ uu___1 uu___2\n    | uu___ -> p_tmImplies e\nand (p_tmImplies : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | 
FStar_Parser_AST.Op (id, e1::e2::[]) when\n        let uu___ = FStar_Ident.string_of_id id in uu___ = \"==>\" ->\n        let uu___ = str \"==>\" in\n        let uu___1 =\n          p_tmArrow (Arrows ((Prims.of_int (2)), (Prims.of_int (2)))) false\n            p_tmFormula e1 in\n        let uu___2 = p_tmImplies e2 in infix0 uu___ uu___1 uu___2\n    | uu___ ->\n        p_tmArrow (Arrows ((Prims.of_int (2)), (Prims.of_int (2)))) false\n          p_tmFormula e\nand (format_sig :\n  annotation_style ->\n    FStar_Pprint.document Prims.list ->\n      FStar_Pprint.document ->\n        Prims.bool -> Prims.bool -> FStar_Pprint.document)\n  =\n  fun style ->\n    fun terms ->\n      fun ret_d ->\n        fun no_last_op ->\n          fun flat_space ->\n            let uu___ =\n              match style with\n              | Arrows (n, ln) ->\n                  let uu___1 =\n                    let uu___2 =\n                      FStar_Pprint.op_Hat_Hat FStar_Pprint.rarrow break1 in\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n                  let uu___2 =\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.rarrow\n                      FStar_Pprint.space in\n                  (n, ln, uu___1, uu___2)\n              | Binders (n, ln, parens) ->\n                  let uu___1 =\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.colon\n                      FStar_Pprint.space in\n                  (n, ln, break1, uu___1) in\n            match uu___ with\n            | (n, last_n, sep, last_op) ->\n                let last_op1 =\n                  if\n                    ((FStar_Compiler_List.length terms) > Prims.int_zero) &&\n                      (Prims.op_Negation no_last_op)\n                  then last_op\n                  else FStar_Pprint.empty in\n                let one_line_space =\n                  if\n                    (Prims.op_Negation (ret_d = FStar_Pprint.empty)) ||\n                      (Prims.op_Negation 
no_last_op)\n                  then FStar_Pprint.space\n                  else FStar_Pprint.empty in\n                let single_line_arg_indent =\n                  FStar_Pprint.repeat n FStar_Pprint.space in\n                let fs =\n                  if flat_space\n                  then FStar_Pprint.space\n                  else FStar_Pprint.empty in\n                (match FStar_Compiler_List.length terms with\n                 | uu___1 when uu___1 = Prims.int_zero -> ret_d\n                 | uu___1 ->\n                     let uu___2 =\n                       let uu___3 =\n                         let uu___4 =\n                           let uu___5 = FStar_Pprint.separate sep terms in\n                           let uu___6 =\n                             let uu___7 =\n                               FStar_Pprint.op_Hat_Hat last_op1 ret_d in\n                             FStar_Pprint.op_Hat_Hat one_line_space uu___7 in\n                           FStar_Pprint.op_Hat_Hat uu___5 uu___6 in\n                         FStar_Pprint.op_Hat_Hat fs uu___4 in\n                       let uu___4 =\n                         let uu___5 =\n                           let uu___6 =\n                             let uu___7 =\n                               let uu___8 = FStar_Pprint.separate sep terms in\n                               FStar_Pprint.op_Hat_Hat fs uu___8 in\n                             let uu___8 =\n                               let uu___9 =\n                                 let uu___10 =\n                                   let uu___11 =\n                                     FStar_Pprint.op_Hat_Hat sep\n                                       single_line_arg_indent in\n                                   let uu___12 =\n                                     FStar_Compiler_List.map\n                                       (fun x ->\n                                          let uu___13 =\n                                            FStar_Pprint.hang\n                   
                           (Prims.of_int (2)) x in\n                                          FStar_Pprint.align uu___13) terms in\n                                   FStar_Pprint.separate uu___11 uu___12 in\n                                 FStar_Pprint.op_Hat_Hat\n                                   single_line_arg_indent uu___10 in\n                               jump2 uu___9 in\n                             FStar_Pprint.ifflat uu___7 uu___8 in\n                           FStar_Pprint.group uu___6 in\n                         let uu___6 =\n                           let uu___7 =\n                             let uu___8 =\n                               FStar_Pprint.op_Hat_Hat last_op1 ret_d in\n                             FStar_Pprint.hang last_n uu___8 in\n                           FStar_Pprint.align uu___7 in\n                         FStar_Pprint.prefix n Prims.int_one uu___5 uu___6 in\n                       FStar_Pprint.ifflat uu___3 uu___4 in\n                     FStar_Pprint.group uu___2)\nand (p_tmArrow :\n  annotation_style ->\n    Prims.bool ->\n      (FStar_Parser_AST.term -> FStar_Pprint.document) ->\n        FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun style ->\n    fun flat_space ->\n      fun p_Tm ->\n        fun e ->\n          let uu___ =\n            match style with\n            | Arrows uu___1 -> p_tmArrow' p_Tm e\n            | Binders uu___1 -> collapse_binders style p_Tm e in\n          match uu___ with\n          | (terms, ret_d) -> format_sig style terms ret_d false flat_space\nand (p_tmArrow' :\n  (FStar_Parser_AST.term -> FStar_Pprint.document) ->\n    FStar_Parser_AST.term ->\n      (FStar_Pprint.document Prims.list * FStar_Pprint.document))\n  =\n  fun p_Tm ->\n    fun e ->\n      match e.FStar_Parser_AST.tm with\n      | FStar_Parser_AST.Product (bs, tgt) ->\n          let bs_ds = FStar_Compiler_List.map (fun b -> p_binder false b) bs in\n          let uu___ = p_tmArrow' p_Tm tgt in\n          (match uu___ with\n    
       | (bs_ds', ret) -> ((FStar_Compiler_List.op_At bs_ds bs_ds'), ret))\n      | uu___ -> let uu___1 = p_Tm e in ([], uu___1)\nand (collapse_binders :\n  annotation_style ->\n    (FStar_Parser_AST.term -> FStar_Pprint.document) ->\n      FStar_Parser_AST.term ->\n        (FStar_Pprint.document Prims.list * FStar_Pprint.document))\n  =\n  fun style ->\n    fun p_Tm ->\n      fun e ->\n        let atomize =\n          match style with | Binders (uu___, uu___1, a) -> a | uu___ -> false in\n        let wrap is_tc doc =\n          if is_tc\n          then tc_arg doc\n          else if atomize then soft_parens_with_nesting doc else doc in\n        let rec accumulate_binders p_Tm1 e1 =\n          match e1.FStar_Parser_AST.tm with\n          | FStar_Parser_AST.Product (bs, tgt) ->\n              let bs_ds =\n                FStar_Compiler_List.map\n                  (fun b ->\n                     let uu___ = p_binder' true false b in\n                     let uu___1 = is_tc_binder b in\n                     let uu___2 = is_joinable_binder b in\n                     (uu___, uu___1, uu___2)) bs in\n              let uu___ = accumulate_binders p_Tm1 tgt in\n              (match uu___ with\n               | (bs_ds', ret) ->\n                   ((FStar_Compiler_List.op_At bs_ds bs_ds'), ret))\n          | uu___ -> let uu___1 = p_Tm1 e1 in ([], uu___1) in\n        let fold_fun bs x =\n          let uu___ = x in\n          match uu___ with\n          | ((b1, t1), tc1, j1) ->\n              (match bs with\n               | [] -> [([b1], t1, tc1, j1)]\n               | hd::tl ->\n                   let uu___1 = hd in\n                   (match uu___1 with\n                    | (b2s, t2, tc2, j2) ->\n                        (match (t1, t2) with\n                         | (FStar_Pervasives_Native.Some (typ1, catf1),\n                            FStar_Pervasives_Native.Some (typ2, uu___2)) when\n                             ((typ1 = typ2) && j1) && j2 ->\n                        
     ((FStar_Compiler_List.op_At b2s [b1]), t1,\n                               false, true)\n                             :: tl\n                         | uu___2 -> ([b1], t1, tc1, j1) :: bs))) in\n        let p_collapsed_binder cb =\n          let uu___ = cb in\n          match uu___ with\n          | (bs, t, is_tc, uu___1) ->\n              (match t with\n               | FStar_Pervasives_Native.None ->\n                   (match bs with\n                    | b::[] -> wrap is_tc b\n                    | uu___2 -> failwith \"Impossible\")\n               | FStar_Pervasives_Native.Some (typ, f) ->\n                   (match bs with\n                    | [] -> failwith \"Impossible\"\n                    | hd::tl ->\n                        let uu___2 =\n                          let uu___3 =\n                            FStar_Compiler_List.fold_left\n                              (fun x ->\n                                 fun y ->\n                                   let uu___4 =\n                                     FStar_Pprint.op_Hat_Hat\n                                       FStar_Pprint.space y in\n                                   FStar_Pprint.op_Hat_Hat x uu___4) hd tl in\n                          f uu___3 typ in\n                        FStar_Compiler_Effect.op_Less_Bar (wrap is_tc) uu___2)) in\n        let uu___ = accumulate_binders p_Tm e in\n        match uu___ with\n        | (bs_ds, ret_d) ->\n            let binders = FStar_Compiler_List.fold_left fold_fun [] bs_ds in\n            let uu___1 = map_rev p_collapsed_binder binders in\n            (uu___1, ret_d)\nand (p_tmFormula : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e ->\n    let conj =\n      let uu___ =\n        let uu___1 = str \"/\\\\\" in FStar_Pprint.op_Hat_Hat uu___1 break1 in\n      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___ in\n    let disj =\n      let uu___ =\n        let uu___1 = str \"\\\\/\" in FStar_Pprint.op_Hat_Hat uu___1 break1 in\n      
FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___ in\n    let formula = p_tmDisjunction e in\n    FStar_Pprint.flow_map disj\n      (fun d -> FStar_Pprint.flow_map conj (fun x -> FStar_Pprint.group x) d)\n      formula\nand (p_tmDisjunction :\n  FStar_Parser_AST.term -> FStar_Pprint.document Prims.list Prims.list) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Op (id, e1::e2::[]) when\n        let uu___ = FStar_Ident.string_of_id id in uu___ = \"\\\\/\" ->\n        let uu___ = p_tmDisjunction e1 in\n        let uu___1 = let uu___2 = p_tmConjunction e2 in [uu___2] in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | uu___ -> let uu___1 = p_tmConjunction e in [uu___1]\nand (p_tmConjunction :\n  FStar_Parser_AST.term -> FStar_Pprint.document Prims.list) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Op (id, e1::e2::[]) when\n        let uu___ = FStar_Ident.string_of_id id in uu___ = \"/\\\\\" ->\n        let uu___ = p_tmConjunction e1 in\n        let uu___1 = let uu___2 = p_tmTuple e2 in [uu___2] in\n        FStar_Compiler_List.op_At uu___ uu___1\n    | uu___ -> let uu___1 = p_tmTuple e in [uu___1]\nand (p_tmTuple : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e -> with_comment p_tmTuple' e e.FStar_Parser_AST.range\nand (p_tmTuple' : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Construct (lid, args) when\n        (is_tuple_constructor lid) && (all1_explicit args) ->\n        let uu___ = FStar_Pprint.op_Hat_Hat FStar_Pprint.comma break1 in\n        FStar_Pprint.separate_map uu___\n          (fun uu___1 -> match uu___1 with | (e1, uu___2) -> p_tmEq e1) args\n    | uu___ -> p_tmEq e\nand (paren_if_gt :\n  Prims.int -> Prims.int -> FStar_Pprint.document -> FStar_Pprint.document) =\n  fun curr ->\n    fun mine ->\n      fun doc ->\n        if mine <= curr\n        then doc\n        else\n          (let uu___1 =\n           
  let uu___2 = FStar_Pprint.op_Hat_Hat doc FStar_Pprint.rparen in\n             FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen uu___2 in\n           FStar_Pprint.group uu___1)\nand (p_tmEqWith :\n  (FStar_Parser_AST.term -> FStar_Pprint.document) ->\n    FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun p_X ->\n    fun e ->\n      let n =\n        max_level\n          (FStar_Compiler_List.op_At [colon_equals; pipe_right]\n             operatorInfix0ad12) in\n      p_tmEqWith' p_X n e\nand (p_tmEqWith' :\n  (FStar_Parser_AST.term -> FStar_Pprint.document) ->\n    Prims.int -> FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun p_X ->\n    fun curr ->\n      fun e ->\n        match e.FStar_Parser_AST.tm with\n        | FStar_Parser_AST.Op (op, e1::e2::[]) when\n            (let uu___ =\n               (let uu___1 = FStar_Ident.string_of_id op in uu___1 = \"==>\")\n                 ||\n                 (let uu___1 = FStar_Ident.string_of_id op in uu___1 = \"<==>\") in\n             Prims.op_Negation uu___) &&\n              (((is_operatorInfix0ad12 op) ||\n                  (let uu___ = FStar_Ident.string_of_id op in uu___ = \"=\"))\n                 || (let uu___ = FStar_Ident.string_of_id op in uu___ = \"|>\"))\n            ->\n            let op1 = FStar_Ident.string_of_id op in\n            let uu___ = levels op1 in\n            (match uu___ with\n             | (left, mine, right) ->\n                 let uu___1 =\n                   let uu___2 = FStar_Compiler_Effect.op_Less_Bar str op1 in\n                   let uu___3 = p_tmEqWith' p_X left e1 in\n                   let uu___4 = p_tmEqWith' p_X right e2 in\n                   infix0 uu___2 uu___3 uu___4 in\n                 paren_if_gt curr mine uu___1)\n        | FStar_Parser_AST.Op (id, e1::e2::[]) when\n            let uu___ = FStar_Ident.string_of_id id in uu___ = \":=\" ->\n            let uu___ =\n              let uu___1 = p_tmEqWith p_X e1 in\n              let uu___2 =\n                
let uu___3 =\n                  let uu___4 =\n                    let uu___5 = p_tmEqWith p_X e2 in\n                    op_Hat_Slash_Plus_Hat FStar_Pprint.equals uu___5 in\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.colon uu___4 in\n                FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n              FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n            FStar_Pprint.group uu___\n        | FStar_Parser_AST.Op (id, e1::[]) when\n            let uu___ = FStar_Ident.string_of_id id in uu___ = \"-\" ->\n            let uu___ = levels \"-\" in\n            (match uu___ with\n             | (left, mine, right) ->\n                 let uu___1 = p_tmEqWith' p_X mine e1 in\n                 FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.minus uu___1)\n        | uu___ -> p_tmNoEqWith p_X e\nand (p_tmNoEqWith :\n  (FStar_Parser_AST.term -> FStar_Pprint.document) ->\n    FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun p_X ->\n    fun e ->\n      let n = max_level [colon_colon; amp; opinfix3; opinfix4] in\n      p_tmNoEqWith' false p_X n e\nand (p_tmNoEqWith' :\n  Prims.bool ->\n    (FStar_Parser_AST.term -> FStar_Pprint.document) ->\n      Prims.int -> FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun inside_tuple ->\n    fun p_X ->\n      fun curr ->\n        fun e ->\n          match e.FStar_Parser_AST.tm with\n          | FStar_Parser_AST.Construct (lid, (e1, uu___)::(e2, uu___1)::[])\n              when\n              (FStar_Ident.lid_equals lid FStar_Parser_Const.cons_lid) &&\n                (let uu___2 = is_list e in Prims.op_Negation uu___2)\n              ->\n              let op = \"::\" in\n              let uu___2 = levels op in\n              (match uu___2 with\n               | (left, mine, right) ->\n                   let uu___3 =\n                     let uu___4 = str op in\n                     let uu___5 = p_tmNoEqWith' false p_X left e1 in\n                     let uu___6 = p_tmNoEqWith' false p_X right e2 in\n   
                  infix0 uu___4 uu___5 uu___6 in\n                   paren_if_gt curr mine uu___3)\n          | FStar_Parser_AST.Sum (binders, res) ->\n              let op = \"&\" in\n              let uu___ = levels op in\n              (match uu___ with\n               | (left, mine, right) ->\n                   let p_dsumfst bt =\n                     match bt with\n                     | FStar_Pervasives.Inl b ->\n                         let uu___1 = p_binder false b in\n                         let uu___2 =\n                           let uu___3 =\n                             let uu___4 = str op in\n                             FStar_Pprint.op_Hat_Hat uu___4 break1 in\n                           FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n                         FStar_Pprint.op_Hat_Hat uu___1 uu___2\n                     | FStar_Pervasives.Inr t ->\n                         let uu___1 = p_tmNoEqWith' false p_X left t in\n                         let uu___2 =\n                           let uu___3 =\n                             let uu___4 = str op in\n                             FStar_Pprint.op_Hat_Hat uu___4 break1 in\n                           FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___3 in\n                         FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n                   let uu___1 =\n                     let uu___2 = FStar_Pprint.concat_map p_dsumfst binders in\n                     let uu___3 = p_tmNoEqWith' false p_X right res in\n                     FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n                   paren_if_gt curr mine uu___1)\n          | FStar_Parser_AST.Op (id, e1::e2::[]) when\n              (let uu___ = FStar_Ident.string_of_id id in uu___ = \"*\") &&\n                (FStar_Compiler_Effect.op_Bang unfold_tuples)\n              ->\n              let op = \"*\" in\n              let uu___ = levels op in\n              (match uu___ with\n               | (left, mine, right) ->\n                   if 
inside_tuple\n                   then\n                     let uu___1 = str op in\n                     let uu___2 = p_tmNoEqWith' true p_X left e1 in\n                     let uu___3 = p_tmNoEqWith' true p_X right e2 in\n                     infix0 uu___1 uu___2 uu___3\n                   else\n                     (let uu___2 =\n                        let uu___3 = str op in\n                        let uu___4 = p_tmNoEqWith' true p_X left e1 in\n                        let uu___5 = p_tmNoEqWith' true p_X right e2 in\n                        infix0 uu___3 uu___4 uu___5 in\n                      paren_if_gt curr mine uu___2))\n          | FStar_Parser_AST.Op (op, e1::e2::[]) when is_operatorInfix34 op\n              ->\n              let op1 = FStar_Ident.string_of_id op in\n              let uu___ = levels op1 in\n              (match uu___ with\n               | (left, mine, right) ->\n                   let uu___1 =\n                     let uu___2 = str op1 in\n                     let uu___3 = p_tmNoEqWith' false p_X left e1 in\n                     let uu___4 = p_tmNoEqWith' false p_X right e2 in\n                     infix0 uu___2 uu___3 uu___4 in\n                   paren_if_gt curr mine uu___1)\n          | FStar_Parser_AST.Record (with_opt, record_fields) ->\n              let uu___ =\n                let uu___1 =\n                  default_or_map FStar_Pprint.empty p_with_clause with_opt in\n                let uu___2 =\n                  let uu___3 =\n                    FStar_Pprint.op_Hat_Hat FStar_Pprint.semi break1 in\n                  separate_map_last uu___3 p_simpleDef record_fields in\n                FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n              braces_with_nesting uu___\n          | FStar_Parser_AST.Op (id, e1::[]) when\n              let uu___ = FStar_Ident.string_of_id id in uu___ = \"~\" ->\n              let uu___ =\n                let uu___1 = str \"~\" in\n                let uu___2 = p_atomicTerm e1 in\n                
FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n              FStar_Pprint.group uu___\n          | FStar_Parser_AST.Paren p when inside_tuple ->\n              (match p.FStar_Parser_AST.tm with\n               | FStar_Parser_AST.Op (id, e1::e2::[]) when\n                   let uu___ = FStar_Ident.string_of_id id in uu___ = \"*\" ->\n                   let op = \"*\" in\n                   let uu___ = levels op in\n                   (match uu___ with\n                    | (left, mine, right) ->\n                        let uu___1 =\n                          let uu___2 = str op in\n                          let uu___3 = p_tmNoEqWith' true p_X left e1 in\n                          let uu___4 = p_tmNoEqWith' true p_X right e2 in\n                          infix0 uu___2 uu___3 uu___4 in\n                        paren_if_gt curr mine uu___1)\n               | uu___ -> p_X e)\n          | uu___ -> p_X e\nand (p_tmEqNoRefinement : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e -> p_tmEqWith p_appTerm e\nand (p_tmEq : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e -> p_tmEqWith p_tmRefinement e\nand (p_tmNoEq : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e -> p_tmNoEqWith p_tmRefinement e\nand (p_tmRefinement : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.NamedTyp (lid, e1) ->\n        let uu___ =\n          let uu___1 = p_lident lid in\n          let uu___2 =\n            let uu___3 = p_appTerm e1 in\n            FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.colon uu___3 in\n          FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n        FStar_Pprint.group uu___\n    | FStar_Parser_AST.Refine (b, phi) -> p_refinedBinder b phi\n    | uu___ -> p_appTerm e\nand (p_with_clause : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e ->\n    let uu___ = p_appTerm e in\n    let uu___1 =\n      let uu___2 =\n        let uu___3 = str \"with\" in 
FStar_Pprint.op_Hat_Hat uu___3 break1 in\n      FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___2 in\n    FStar_Pprint.op_Hat_Hat uu___ uu___1\nand (p_refinedBinder :\n  FStar_Parser_AST.binder -> FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun b ->\n    fun phi ->\n      match b.FStar_Parser_AST.b with\n      | FStar_Parser_AST.Annotated (lid, t) ->\n          let uu___ = p_lident lid in\n          p_refinement b.FStar_Parser_AST.aqual\n            b.FStar_Parser_AST.battributes uu___ t phi\n      | FStar_Parser_AST.Variable lid ->\n          let uu___ = p_lident lid in\n          let uu___1 =\n            let uu___2 = FStar_Ident.range_of_id lid in\n            FStar_Parser_AST.mk_term FStar_Parser_AST.Wild uu___2\n              FStar_Parser_AST.Type_level in\n          p_refinement b.FStar_Parser_AST.aqual\n            b.FStar_Parser_AST.battributes uu___ uu___1 phi\n      | FStar_Parser_AST.TAnnotated uu___ -> failwith \"Is this still used ?\"\n      | FStar_Parser_AST.TVariable uu___ ->\n          let uu___1 =\n            let uu___2 = FStar_Parser_AST.binder_to_string b in\n            FStar_Compiler_Util.format1\n              \"Impossible: a refined binder ought to be annotated (%s)\"\n              uu___2 in\n          failwith uu___1\n      | FStar_Parser_AST.NoName uu___ ->\n          let uu___1 =\n            let uu___2 = FStar_Parser_AST.binder_to_string b in\n            FStar_Compiler_Util.format1\n              \"Impossible: a refined binder ought to be annotated (%s)\"\n              uu___2 in\n          failwith uu___1\nand (p_simpleDef :\n  Prims.bool ->\n    (FStar_Ident.lid * FStar_Parser_AST.term) -> FStar_Pprint.document)\n  =\n  fun ps ->\n    fun uu___ ->\n      match uu___ with\n      | (lid, e) ->\n          let uu___1 =\n            let uu___2 = p_qlident lid in\n            let uu___3 =\n              let uu___4 = p_noSeqTermAndComment ps false e in\n              FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.equals uu___4 in\n 
           FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3 in\n          FStar_Pprint.group uu___1\nand (p_appTerm : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.App uu___ when is_general_application e ->\n        let uu___1 = head_and_args e in\n        (match uu___1 with\n         | (head, args) ->\n             (match args with\n              | e1::e2::[] when\n                  (FStar_Pervasives_Native.snd e1) = FStar_Parser_AST.Infix\n                  ->\n                  let uu___2 = p_argTerm e1 in\n                  let uu___3 =\n                    let uu___4 =\n                      let uu___5 =\n                        let uu___6 = str \"`\" in\n                        let uu___7 =\n                          let uu___8 = p_indexingTerm head in\n                          let uu___9 = str \"`\" in\n                          FStar_Pprint.op_Hat_Hat uu___8 uu___9 in\n                        FStar_Pprint.op_Hat_Hat uu___6 uu___7 in\n                      FStar_Pprint.group uu___5 in\n                    let uu___5 = p_argTerm e2 in\n                    FStar_Pprint.op_Hat_Slash_Hat uu___4 uu___5 in\n                  FStar_Pprint.op_Hat_Slash_Hat uu___2 uu___3\n              | uu___2 ->\n                  let uu___3 =\n                    let uu___4 = p_indexingTerm head in (uu___4, args) in\n                  (match uu___3 with\n                   | (head_doc, args1) ->\n                       let uu___4 =\n                         let uu___5 =\n                           FStar_Pprint.op_Hat_Hat head_doc\n                             FStar_Pprint.space in\n                         soft_surround_map_or_flow (Prims.of_int (2))\n                           Prims.int_zero head_doc uu___5 break1\n                           FStar_Pprint.empty p_argTerm args1 in\n                       FStar_Pprint.group uu___4)))\n    | FStar_Parser_AST.Construct (lid, args) when\n        
(is_general_construction e) &&\n          (let uu___ = (is_dtuple_constructor lid) && (all1_explicit args) in\n           Prims.op_Negation uu___)\n        ->\n        (match args with\n         | [] -> p_quident lid\n         | arg::[] ->\n             let uu___ =\n               let uu___1 = p_quident lid in\n               let uu___2 = p_argTerm arg in\n               FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n             FStar_Pprint.group uu___\n         | hd::tl ->\n             let uu___ =\n               let uu___1 =\n                 let uu___2 =\n                   let uu___3 = p_quident lid in\n                   let uu___4 = p_argTerm hd in prefix2 uu___3 uu___4 in\n                 FStar_Pprint.group uu___2 in\n               let uu___2 =\n                 let uu___3 = FStar_Pprint.separate_map break1 p_argTerm tl in\n                 jump2 uu___3 in\n               FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n             FStar_Pprint.group uu___)\n    | uu___ -> p_indexingTerm e\nand (p_argTerm :\n  (FStar_Parser_AST.term * FStar_Parser_AST.imp) -> FStar_Pprint.document) =\n  fun arg_imp ->\n    match arg_imp with\n    | (u, FStar_Parser_AST.UnivApp) -> p_universe u\n    | (e, FStar_Parser_AST.FsTypApp) ->\n        (FStar_Errors.log_issue e.FStar_Parser_AST.range\n           (FStar_Errors_Codes.Warning_UnexpectedFsTypApp,\n             \"Unexpected FsTypApp, output might not be formatted correctly.\");\n         (let uu___1 = p_indexingTerm e in\n          FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one\n            FStar_Pprint.langle uu___1 FStar_Pprint.rangle))\n    | (e, FStar_Parser_AST.Hash) ->\n        let uu___ = str \"#\" in\n        let uu___1 = p_indexingTerm e in FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | (e, FStar_Parser_AST.HashBrace t) ->\n        let uu___ = str \"#[\" in\n        let uu___1 =\n          let uu___2 = p_indexingTerm t in\n          let uu___3 =\n            let uu___4 = str \"]\" in\n            let uu___5 = 
p_indexingTerm e in\n            FStar_Pprint.op_Hat_Hat uu___4 uu___5 in\n          FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | (e, FStar_Parser_AST.Infix) -> p_indexingTerm e\n    | (e, FStar_Parser_AST.Nothing) -> p_indexingTerm e\nand (p_indexingTerm_aux :\n  (FStar_Parser_AST.term -> FStar_Pprint.document) ->\n    FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun exit ->\n    fun e ->\n      match e.FStar_Parser_AST.tm with\n      | FStar_Parser_AST.Op (id, e1::e2::[]) when\n          let uu___ = FStar_Ident.string_of_id id in uu___ = \".()\" ->\n          let uu___ =\n            let uu___1 = p_indexingTerm_aux p_atomicTermNotQUident e1 in\n            let uu___2 =\n              let uu___3 =\n                let uu___4 = p_term false false e2 in\n                soft_parens_with_nesting uu___4 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___3 in\n            FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n          FStar_Pprint.group uu___\n      | FStar_Parser_AST.Op (id, e1::e2::[]) when\n          let uu___ = FStar_Ident.string_of_id id in uu___ = \".[]\" ->\n          let uu___ =\n            let uu___1 = p_indexingTerm_aux p_atomicTermNotQUident e1 in\n            let uu___2 =\n              let uu___3 =\n                let uu___4 = p_term false false e2 in\n                soft_brackets_with_nesting uu___4 in\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___3 in\n            FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n          FStar_Pprint.group uu___\n      | uu___ -> exit e\nand (p_indexingTerm : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e -> p_indexingTerm_aux p_atomicTerm e\nand (p_atomicTerm : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.LetOpen (lid, e1) ->\n        let uu___ = p_quident lid in\n        let uu___1 =\n          let uu___2 =\n            let uu___3 = p_term 
false false e1 in\n            soft_parens_with_nesting uu___3 in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___2 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | FStar_Parser_AST.Name lid -> p_quident lid\n    | FStar_Parser_AST.Construct (lid, []) when is_general_construction e ->\n        p_quident lid\n    | FStar_Parser_AST.Op (op, e1::[]) when is_general_prefix_op op ->\n        let uu___ = let uu___1 = FStar_Ident.string_of_id op in str uu___1 in\n        let uu___1 = p_atomicTerm e1 in FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | uu___ -> p_atomicTermNotQUident e\nand (p_atomicTermNotQUident : FStar_Parser_AST.term -> FStar_Pprint.document)\n  =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Wild -> FStar_Pprint.underscore\n    | FStar_Parser_AST.Var lid when\n        FStar_Ident.lid_equals lid FStar_Parser_Const.assert_lid ->\n        str \"assert\"\n    | FStar_Parser_AST.Var lid when\n        FStar_Ident.lid_equals lid FStar_Parser_Const.assume_lid ->\n        str \"assume\"\n    | FStar_Parser_AST.Tvar tv -> p_tvar tv\n    | FStar_Parser_AST.Const c -> p_constant c\n    | FStar_Parser_AST.Name lid when\n        FStar_Ident.lid_equals lid FStar_Parser_Const.true_lid -> str \"True\"\n    | FStar_Parser_AST.Name lid when\n        FStar_Ident.lid_equals lid FStar_Parser_Const.false_lid ->\n        str \"False\"\n    | FStar_Parser_AST.Op (op, e1::[]) when is_general_prefix_op op ->\n        let uu___ = let uu___1 = FStar_Ident.string_of_id op in str uu___1 in\n        let uu___1 = p_atomicTermNotQUident e1 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | FStar_Parser_AST.Op (op, []) ->\n        let uu___ =\n          let uu___1 =\n            let uu___2 =\n              let uu___3 = FStar_Ident.string_of_id op in str uu___3 in\n            let uu___3 =\n              FStar_Pprint.op_Hat_Hat FStar_Pprint.space FStar_Pprint.rparen in\n            FStar_Pprint.op_Hat_Hat uu___2 uu___3 in\n          
FStar_Pprint.op_Hat_Hat FStar_Pprint.space uu___1 in\n        FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen uu___\n    | FStar_Parser_AST.Construct (lid, args) when\n        (is_dtuple_constructor lid) && (all1_explicit args) ->\n        let uu___ =\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.lparen FStar_Pprint.bar in\n        let uu___1 =\n          let uu___2 = FStar_Pprint.op_Hat_Hat FStar_Pprint.comma break1 in\n          FStar_Pprint.separate_map uu___2\n            (fun uu___3 -> match uu___3 with | (e1, uu___4) -> p_tmEq e1)\n            args in\n        let uu___2 =\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.bar FStar_Pprint.rparen in\n        FStar_Pprint.surround (Prims.of_int (2)) Prims.int_one uu___ uu___1\n          uu___2\n    | FStar_Parser_AST.Project (e1, lid) ->\n        let uu___ =\n          let uu___1 = p_atomicTermNotQUident e1 in\n          let uu___2 =\n            let uu___3 = p_qlident lid in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___3 in\n          FStar_Pprint.prefix (Prims.of_int (2)) Prims.int_zero uu___1 uu___2 in\n        FStar_Pprint.group uu___\n    | uu___ -> p_projectionLHS e\nand (p_projectionLHS : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e ->\n    match e.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Var lid -> p_qlident lid\n    | FStar_Parser_AST.Projector (constr_lid, field_lid) ->\n        let uu___ = p_quident constr_lid in\n        let uu___1 =\n          let uu___2 =\n            let uu___3 = p_lident field_lid in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___3 in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.qmark uu___2 in\n        FStar_Pprint.op_Hat_Hat uu___ uu___1\n    | FStar_Parser_AST.Discrim constr_lid ->\n        let uu___ = p_quident constr_lid in\n        FStar_Pprint.op_Hat_Hat uu___ FStar_Pprint.qmark\n    | FStar_Parser_AST.Paren e1 ->\n        let uu___ = p_term_sep false false e1 in\n        (match uu___ with\n         | (comm, t) ->\n           
  let doc = soft_parens_with_nesting t in\n             if comm = FStar_Pprint.empty\n             then doc\n             else\n               (let uu___2 =\n                  FStar_Pprint.op_Hat_Hat FStar_Pprint.hardline doc in\n                FStar_Pprint.op_Hat_Hat comm uu___2))\n    | uu___ when is_array e ->\n        let es = extract_from_list e in\n        let uu___1 =\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.lbracket FStar_Pprint.bar in\n        let uu___2 =\n          let uu___3 = FStar_Pprint.op_Hat_Hat FStar_Pprint.semi break1 in\n          separate_map_or_flow_last uu___3\n            (fun ps -> p_noSeqTermAndComment ps false) es in\n        let uu___3 =\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.bar FStar_Pprint.rbracket in\n        FStar_Pprint.surround (Prims.of_int (2)) Prims.int_zero uu___1 uu___2\n          uu___3\n    | uu___ when is_list e ->\n        let uu___1 =\n          let uu___2 = FStar_Pprint.op_Hat_Hat FStar_Pprint.semi break1 in\n          let uu___3 = extract_from_list e in\n          separate_map_or_flow_last uu___2\n            (fun ps -> p_noSeqTermAndComment ps false) uu___3 in\n        FStar_Pprint.surround (Prims.of_int (2)) Prims.int_zero\n          FStar_Pprint.lbracket uu___1 FStar_Pprint.rbracket\n    | uu___ when is_ref_set e ->\n        let es = extract_from_ref_set e in\n        let uu___1 =\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.bang FStar_Pprint.lbrace in\n        let uu___2 =\n          let uu___3 = FStar_Pprint.op_Hat_Hat FStar_Pprint.comma break1 in\n          separate_map_or_flow uu___3 p_appTerm es in\n        FStar_Pprint.surround (Prims.of_int (2)) Prims.int_zero uu___1 uu___2\n          FStar_Pprint.rbrace\n    | FStar_Parser_AST.Labeled (e1, s, b) ->\n        let uu___ = str (Prims.op_Hat \"(*\" (Prims.op_Hat s \"*)\")) in\n        let uu___1 = p_term false false e1 in\n        FStar_Pprint.op_Hat_Slash_Hat uu___ uu___1\n    | FStar_Parser_AST.Op (op, args) when\n        let uu___ = 
handleable_op op args in Prims.op_Negation uu___ ->\n        let uu___ =\n          let uu___1 =\n            let uu___2 = FStar_Ident.string_of_id op in\n            let uu___3 =\n              let uu___4 =\n                let uu___5 =\n                  FStar_Compiler_Util.string_of_int\n                    (FStar_Compiler_List.length args) in\n                Prims.op_Hat uu___5\n                  \" arguments couldn't be handled by the pretty printer\" in\n              Prims.op_Hat \" with \" uu___4 in\n            Prims.op_Hat uu___2 uu___3 in\n          Prims.op_Hat \"Operation \" uu___1 in\n        failwith uu___\n    | FStar_Parser_AST.Uvar id ->\n        failwith \"Unexpected universe variable out of universe context\"\n    | FStar_Parser_AST.Wild ->\n        let uu___ = p_term false false e in soft_parens_with_nesting uu___\n    | FStar_Parser_AST.Const uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Op uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Tvar uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Var uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Name uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Construct uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Abs uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.App uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Let uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.LetOperator uu___ ->\n        let uu___1 = p_term false false e in 
soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.LetOpen uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.LetOpenRecord uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Seq uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Bind uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.If uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Match uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.TryWith uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Ascribed uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Record uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Project uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Product uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Sum uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.QForall uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.QExists uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Refine uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.NamedTyp uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Requires uu___ ->\n        let uu___1 = p_term 
false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Ensures uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Decreases uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Attributes uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Quote uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.VQuote uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Antiquote uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.CalcProof uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.ElimExists uu___ ->\n        let uu___1 = p_term false false e in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.LexList l ->\n        let uu___ =\n          let uu___1 = str \"%\" in\n          let uu___2 = p_term_list false false l in\n          FStar_Pprint.op_Hat_Hat uu___1 uu___2 in\n        FStar_Pprint.group uu___\n    | FStar_Parser_AST.WFOrder (rel, e1) -> p_dec_wf false false rel e1\nand (p_constant : FStar_Const.sconst -> FStar_Pprint.document) =\n  fun uu___ ->\n    match uu___ with\n    | FStar_Const.Const_effect -> str \"Effect\"\n    | FStar_Const.Const_unit -> str \"()\"\n    | FStar_Const.Const_bool b -> FStar_Pprint.doc_of_bool b\n    | FStar_Const.Const_real r -> str (Prims.op_Hat r \"R\")\n    | FStar_Const.Const_char x -> p_char_literal x\n    | FStar_Const.Const_string (s, uu___1) -> p_string_literal s\n    | FStar_Const.Const_int (repr, sign_width_opt) ->\n        let signedness uu___1 =\n          match uu___1 with\n          | FStar_Const.Unsigned -> str \"u\"\n          | FStar_Const.Signed -> FStar_Pprint.empty in\n        let width 
uu___1 =\n          match uu___1 with\n          | FStar_Const.Int8 -> str \"y\"\n          | FStar_Const.Int16 -> str \"s\"\n          | FStar_Const.Int32 -> str \"l\"\n          | FStar_Const.Int64 -> str \"L\" in\n        let suffix uu___1 =\n          match uu___1 with\n          | (s, w) ->\n              (match (s, w) with\n               | (uu___2, FStar_Const.Sizet) -> str \"sz\"\n               | uu___2 ->\n                   let uu___3 = signedness s in\n                   let uu___4 = width w in\n                   FStar_Pprint.op_Hat_Hat uu___3 uu___4) in\n        let ending = default_or_map FStar_Pprint.empty suffix sign_width_opt in\n        let uu___1 = str repr in\n        let result = FStar_Pprint.op_Hat_Hat uu___1 ending in\n        let ( ^^ ) = FStar_Pprint.op_Hat_Hat in\n        if String.get repr 0 == '-'\n        then str \"(\" ^^ result ^^ str \")\" else result\n    | FStar_Const.Const_range_of -> str \"range_of\"\n    | FStar_Const.Const_set_range_of -> str \"set_range_of\"\n    | FStar_Const.Const_range r ->\n        let uu___1 = FStar_Compiler_Range.string_of_range r in str uu___1\n    | FStar_Const.Const_reify uu___1 -> str \"reify\"\n    | FStar_Const.Const_reflect lid ->\n        let uu___1 = p_quident lid in\n        let uu___2 =\n          let uu___3 =\n            let uu___4 = str \"reflect\" in\n            FStar_Pprint.op_Hat_Hat FStar_Pprint.dot uu___4 in\n          FStar_Pprint.op_Hat_Hat FStar_Pprint.qmark uu___3 in\n        FStar_Pprint.op_Hat_Hat uu___1 uu___2\nand (p_universe : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun u ->\n    let uu___ = str \"u#\" in\n    let uu___1 = p_atomicUniverse u in FStar_Pprint.op_Hat_Hat uu___ uu___1\nand (p_universeFrom : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun u ->\n    match u.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Op (id, u1::u2::[]) when\n        let uu___ = FStar_Ident.string_of_id id in uu___ = \"+\" ->\n        let uu___ =\n          let uu___1 = 
p_universeFrom u1 in\n          let uu___2 =\n            let uu___3 = p_universeFrom u2 in\n            FStar_Pprint.op_Hat_Slash_Hat FStar_Pprint.plus uu___3 in\n          FStar_Pprint.op_Hat_Slash_Hat uu___1 uu___2 in\n        FStar_Pprint.group uu___\n    | FStar_Parser_AST.App uu___ ->\n        let uu___1 = head_and_args u in\n        (match uu___1 with\n         | (head, args) ->\n             (match head.FStar_Parser_AST.tm with\n              | FStar_Parser_AST.Var maybe_max_lid when\n                  FStar_Ident.lid_equals maybe_max_lid\n                    FStar_Parser_Const.max_lid\n                  ->\n                  let uu___2 =\n                    let uu___3 = p_qlident FStar_Parser_Const.max_lid in\n                    let uu___4 =\n                      FStar_Pprint.separate_map FStar_Pprint.space\n                        (fun uu___5 ->\n                           match uu___5 with\n                           | (u1, uu___6) -> p_atomicUniverse u1) args in\n                    op_Hat_Slash_Plus_Hat uu___3 uu___4 in\n                  FStar_Pprint.group uu___2\n              | uu___2 ->\n                  let uu___3 =\n                    let uu___4 = FStar_Parser_AST.term_to_string u in\n                    FStar_Compiler_Util.format1\n                      \"Invalid term in universe context %s\" uu___4 in\n                  failwith uu___3))\n    | uu___ -> p_atomicUniverse u\nand (p_atomicUniverse : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun u ->\n    match u.FStar_Parser_AST.tm with\n    | FStar_Parser_AST.Wild -> FStar_Pprint.underscore\n    | FStar_Parser_AST.Const (FStar_Const.Const_int (r, sw)) ->\n        p_constant (FStar_Const.Const_int (r, sw))\n    | FStar_Parser_AST.Uvar id ->\n        let uu___ = FStar_Ident.string_of_id id in str uu___\n    | FStar_Parser_AST.Paren u1 ->\n        let uu___ = p_universeFrom u1 in soft_parens_with_nesting uu___\n    | FStar_Parser_AST.App uu___ ->\n        let uu___1 = p_universeFrom 
u in soft_parens_with_nesting uu___1\n    | FStar_Parser_AST.Op (id, uu___::uu___1::[]) when\n        let uu___2 = FStar_Ident.string_of_id id in uu___2 = \"+\" ->\n        let uu___2 = p_universeFrom u in soft_parens_with_nesting uu___2\n    | uu___ ->\n        let uu___1 =\n          let uu___2 = FStar_Parser_AST.term_to_string u in\n          FStar_Compiler_Util.format1 \"Invalid term in universe context %s\"\n            uu___2 in\n        failwith uu___1\nlet (term_to_document : FStar_Parser_AST.term -> FStar_Pprint.document) =\n  fun e ->\n    let old_unfold_tuples = FStar_Compiler_Effect.op_Bang unfold_tuples in\n    FStar_Compiler_Effect.op_Colon_Equals unfold_tuples false;\n    (let res = p_term false false e in\n     FStar_Compiler_Effect.op_Colon_Equals unfold_tuples old_unfold_tuples;\n     res)\nlet (signature_to_document : FStar_Parser_AST.decl -> FStar_Pprint.document)\n  = fun e -> p_justSig e\nlet (decl_to_document : FStar_Parser_AST.decl -> FStar_Pprint.document) =\n  fun e -> p_decl e\nlet (pat_to_document : FStar_Parser_AST.pattern -> FStar_Pprint.document) =\n  fun p -> p_disjunctivePattern p\nlet (binder_to_document : FStar_Parser_AST.binder -> FStar_Pprint.document) =\n  fun b -> p_binder true b\nlet (modul_to_document : FStar_Parser_AST.modul -> FStar_Pprint.document) =\n  fun m ->\n    match m with\n    | FStar_Parser_AST.Module (uu___, decls) ->\n        let uu___1 =\n          FStar_Compiler_Effect.op_Bar_Greater decls\n            (FStar_Compiler_List.map decl_to_document) in\n        FStar_Compiler_Effect.op_Bar_Greater uu___1\n          (FStar_Pprint.separate FStar_Pprint.hardline)\n    | FStar_Parser_AST.Interface (uu___, decls, uu___1) ->\n        let uu___2 =\n          FStar_Compiler_Effect.op_Bar_Greater decls\n            (FStar_Compiler_List.map decl_to_document) in\n        FStar_Compiler_Effect.op_Bar_Greater uu___2\n          (FStar_Pprint.separate FStar_Pprint.hardline)\nlet (comments_to_document :\n  (Prims.string * 
FStar_Compiler_Range.range) Prims.list ->\n    FStar_Pprint.document)\n  =\n  fun comments ->\n    FStar_Pprint.separate_map FStar_Pprint.hardline\n      (fun uu___ -> match uu___ with | (comment, range) -> str comment)\n      comments\nlet (extract_decl_range : FStar_Parser_AST.decl -> decl_meta) =\n  fun d ->\n    let has_qs =\n      match ((d.FStar_Parser_AST.quals), (d.FStar_Parser_AST.d)) with\n      | ((FStar_Parser_AST.Assumption)::[], FStar_Parser_AST.Assume\n         (id, uu___)) -> false\n      | ([], uu___) -> false\n      | uu___ -> true in\n    {\n      r = (d.FStar_Parser_AST.drange);\n      has_qs;\n      has_attrs =\n        (Prims.op_Negation\n           (FStar_Compiler_List.isEmpty d.FStar_Parser_AST.attrs))\n    }\nlet (decls_with_comments_to_document :\n  FStar_Parser_AST.decl Prims.list ->\n    (Prims.string * FStar_Compiler_Range.range) Prims.list ->\n      (FStar_Pprint.document * (Prims.string * FStar_Compiler_Range.range)\n        Prims.list))\n  =\n  fun decls ->\n    fun comments ->\n      match decls with\n      | [] -> (FStar_Pprint.empty, comments)\n      | d::ds ->\n          let uu___ = ((d :: ds), (d.FStar_Parser_AST.drange)) in\n          (match uu___ with\n           | (decls1, first_range) ->\n               (FStar_Compiler_Effect.op_Colon_Equals comment_stack comments;\n                (let initial_comment =\n                   let uu___2 =\n                     FStar_Compiler_Range.start_of_range first_range in\n                   place_comments_until_pos Prims.int_zero Prims.int_one\n                     uu___2 dummy_meta FStar_Pprint.empty false true in\n                 let doc =\n                   separate_map_with_comments FStar_Pprint.empty\n                     FStar_Pprint.empty p_decl decls1 extract_decl_range in\n                 let comments1 = FStar_Compiler_Effect.op_Bang comment_stack in\n                 FStar_Compiler_Effect.op_Colon_Equals comment_stack [];\n                 (let uu___3 = 
FStar_Pprint.op_Hat_Hat initial_comment doc in\n                  (uu___3, comments1)))))\nlet (modul_with_comments_to_document :\n  FStar_Parser_AST.modul ->\n    (Prims.string * FStar_Compiler_Range.range) Prims.list ->\n      (FStar_Pprint.document * (Prims.string * FStar_Compiler_Range.range)\n        Prims.list))\n  =\n  fun m ->\n    fun comments ->\n      let decls =\n        match m with\n        | FStar_Parser_AST.Module (uu___, decls1) -> decls1\n        | FStar_Parser_AST.Interface (uu___, decls1, uu___1) -> decls1 in\n      decls_with_comments_to_document decls comments\nlet (decl_with_comments_to_document :\n  FStar_Parser_AST.decl ->\n    (Prims.string * FStar_Compiler_Range.range) Prims.list ->\n      (FStar_Pprint.document * (Prims.string * FStar_Compiler_Range.range)\n        Prims.list))\n  = fun d -> fun comments -> decls_with_comments_to_document [d] comments\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Parser_Utf8.ml",
    "content": "(*\n   Originally part of the ulex package with the following license:\n\n   Copyright 2005 by Alain Frisch.\n\n   Permission is hereby granted, free of charge, to any person obtaining\n   a copy of this software and associated documentation files (the\n   \"Software\"), to deal in the Software without restriction, including\n   without limitation the rights to use, copy, modify, merge, publish,\n   distribute, sublicense, and/or sell copies of the Software, and to\n   permit persons to whom the Software is furnished to do so, subject to\n   the following conditions:\n\n   The above copyright notice and this permission notice shall be\n   included in all copies or substantial portions of the Software.\n\n   THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n   EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n   MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n   NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\n   LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n   OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\n   WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n*)\n\n\nexception MalFormed\n\n(* cf http://www.faqs.org/rfcs/rfc3629.html *)\n\nlet width = Array.make 256 (-1)\nlet () =\n  for i = 0 to 127 do width.(i) <- 1 done;\n  for i = 192 to 223 do width.(i) <- 2 done;\n  for i = 224 to 239 do width.(i) <- 3 done;\n  for i = 240 to 247 do width.(i) <- 4 done\n\nlet next s i =\n  match s.[i] with\n    | '\\000'..'\\127' as c ->\n        Char.code c\n    | '\\192'..'\\223' as c ->\n\tlet n1 = Char.code c in\n\tlet n2 = Char.code s.[i+1] in\n        if (n2 lsr 6 != 0b10) then raise MalFormed;\n        ((n1 land 0x1f) lsl 6) lor (n2 land 0x3f)\n    | '\\224'..'\\239' as c ->\n\tlet n1 = Char.code c in\n\tlet n2 = Char.code s.[i+1] in\n\tlet n3 = Char.code s.[i+2] in\n        if (n2 lsr 6 != 0b10) || (n3 lsr 6 != 0b10) then 
raise MalFormed;\n\tlet p =\n          ((n1 land 0x0f) lsl 12) lor ((n2 land 0x3f) lsl 6) lor (n3 land 0x3f)\n\tin\n\tif (p >= 0xd800) && (p <= 0xdf00) then raise MalFormed;\n\tp\n    | '\\240'..'\\247' as c ->\n\tlet n1 = Char.code c in\n\tlet n2 = Char.code s.[i+1] in\n\tlet n3 = Char.code s.[i+2] in\n\tlet n4 = Char.code s.[i+3] in\n        if (n2 lsr 6 != 0b10) || (n3 lsr 6 != 0b10) || (n4 lsr 6 != 0b10)\n\tthen raise MalFormed;\n        ((n1 land 0x07) lsl 18) lor ((n2 land 0x3f) lsl 12) lor\n        ((n3 land 0x3f) lsl 6) lor (n4 land 0x3f)\n    | _ -> raise MalFormed\n\n\n(* With this implementation, a truncated code point will result\n   in Stream.Failure, not in MalFormed. *)\n\nlet from_stream s =\n  match Stream.next s with\n    | '\\000'..'\\127' as c ->\n        Char.code c\n    | '\\192'..'\\223' as c ->\n\tlet n1 = Char.code c in\n\tlet n2 = Char.code (Stream.next s) in\n        if (n2 lsr 6 != 0b10) then raise MalFormed;\n        ((n1 land 0x1f) lsl 6) lor (n2 land 0x3f)\n    | '\\224'..'\\239' as c ->\n\tlet n1 = Char.code c in\n\tlet n2 = Char.code (Stream.next s) in\n\tlet n3 = Char.code (Stream.next s) in\n        if (n2 lsr 6 != 0b10) || (n3 lsr 6 != 0b10) then raise MalFormed;\n        ((n1 land 0x0f) lsl 12) lor ((n2 land 0x3f) lsl 6) lor (n3 land 0x3f)\n    | '\\240'..'\\247' as c ->\n\tlet n1 = Char.code c in\n\tlet n2 = Char.code (Stream.next s) in\n\tlet n3 = Char.code (Stream.next s) in\n\tlet n4 = Char.code (Stream.next s) in\n        if (n2 lsr 6 != 0b10) || (n3 lsr 6 != 0b10) || (n4 lsr 6 != 0b10)\n\tthen raise MalFormed;\n        ((n1 land 0x07) lsl 18) lor ((n2 land 0x3f) lsl 12) lor\n        ((n3 land 0x3f) lsl 6) lor (n4 land 0x3f)\n    | _ -> raise MalFormed\n\n\n\nlet compute_len s pos bytes =\n  let rec aux n i =\n    if i >= pos + bytes then if i = pos + bytes then n else raise MalFormed\n    else\n      let w = width.(Char.code s.[i]) in\n      if w > 0 then aux (succ n) (i + w)\n      else raise MalFormed\n  in\n  aux 0 
pos\n\nlet rec blit_to_int s spos a apos n =\n  if n > 0 then begin\n    a.(apos) <- next s spos;\n    blit_to_int s (spos + width.(Char.code s.[spos])) a (succ apos) (pred n)\n  end\n\nlet to_int_array s pos bytes =\n  let n = compute_len s pos bytes in\n  let a = Array.make n 0 in\n  blit_to_int s pos a 0 n;\n  a\n\n(**************************)\n\nlet width_code_point p =\n  if p <= 0x7f then 1\n  else if p <= 0x7ff then 2\n  else if p <= 0xffff then 3\n  else if p <= 0x10ffff then 4\n  else raise MalFormed\n\nlet store b p =\n  if p <= 0x7f then\n    Buffer.add_char b (Char.chr p)\n  else if p <= 0x7ff then (\n    Buffer.add_char b (Char.chr (0xc0 lor (p lsr 6)));\n    Buffer.add_char b (Char.chr (0x80 lor (p land 0x3f)))\n  )\n  else if p <= 0xffff then (\n    if (p >= 0xd800 && p < 0xe000) then raise MalFormed;\n    Buffer.add_char b (Char.chr (0xe0 lor (p lsr 12)));\n    Buffer.add_char b (Char.chr (0x80 lor ((p lsr 6) land 0x3f)));\n    Buffer.add_char b (Char.chr (0x80 lor (p land 0x3f)))\n  )\n  else if p <= 0x10ffff then (\n    Buffer.add_char b (Char.chr (0xf0 lor (p lsr 18)));\n    Buffer.add_char b (Char.chr (0x80 lor ((p lsr 12) land 0x3f)));\n    Buffer.add_char b (Char.chr (0x80 lor ((p lsr 6)  land 0x3f)));\n    Buffer.add_char b (Char.chr (0x80 lor (p land 0x3f)))\n  )\n  else raise MalFormed\n\n\nlet from_int_array a apos len =\n  let b = Buffer.create (len * 4) in\n  let rec aux apos len =\n    if len > 0 then (store b a.(apos); aux (succ apos) (pred len))\n    else Buffer.contents b in\n  aux apos len\n\nlet stream_from_char_stream s =\n  Stream.from\n    (fun _ ->\n       try Some (from_stream s)\n       with Stream.Failure -> None)\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Parser_Util.ml",
    "content": "open FStar_Compiler_Range\nopen Lexing\n\n(* This brings into scope enough the translation of F# type names into the\n * corresponding OCaml type names; the reason for that is that we massage\n * parse.fsy (using sed) into parse.mly; but, we don't rename types. *)\ninclude FStar_BaseTypes\ntype single = float\ntype decimal = int\ntype bytes = byte array\n\nlet parseState = ()\n\nlet pos_of_lexpos (p:position) =\n  mk_pos (Z.of_int p.pos_lnum) (Z.of_int (p.pos_cnum - p.pos_bol))\n\nlet mksyn_range (p1:position) p2 =\n  mk_range p1.pos_fname (pos_of_lexpos p1) (pos_of_lexpos p2)\n\nlet getLexerRange (lexbuf:lexbuf) =\n  mksyn_range lexbuf.lex_start_p lexbuf.lex_curr_p\n\nlet lhs () =\n  mksyn_range (Parsing.symbol_start_pos ()) (Parsing.symbol_end_pos ())\n\nlet rhs () n =\n  mksyn_range (Parsing.rhs_start_pos n) (Parsing.rhs_end_pos n)\n\nlet rhspos () n =\n  pos_of_lexpos (Parsing.rhs_start_pos n)\n\nlet rhs2 () n m =\n  mksyn_range (Parsing.rhs_start_pos n) (Parsing.rhs_end_pos m)\n\nexception WrappedError of exn * range\nexception ReportedError\nexception StopProcessing\n\nlet warningHandler = ref (fun (e:exn) -> \n                          FStar_Compiler_Util.print_string \"no warning handler installed\\n\" ; \n                          FStar_Compiler_Util.print_any e; ())\nlet errorHandler = ref (fun (e:exn) -> \n                        FStar_Compiler_Util.print_string \"no warning handler installed\\n\" ; \n                        FStar_Compiler_Util.print_any e; ())\nlet errorAndWarningCount = ref 0\nlet errorR  exn = incr errorAndWarningCount; match exn with StopProcessing | ReportedError -> raise exn | _ -> !errorHandler exn\nlet warning exn = incr errorAndWarningCount; match exn with StopProcessing | ReportedError -> raise exn | _ -> !warningHandler exn\n\nlet comments : (string * FStar_Compiler_Range.range) list ref = ref []\nlet add_comment x = comments := x :: !comments\nlet flush_comments () =\n  let lexed_comments = !comments in\n  
comments := []; lexed_comments\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Pervasives.ml",
    "content": "let id : 'a . 'a -> 'a = fun x -> x\ntype ('a, 'b) either =\n  | Inl of 'a\n  | Inr of 'b\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Pervasives_Native.ml",
    "content": "type 'a option' = 'a option =\n  | None\n  | Some of 'a[@@deriving yojson,show]\n\ntype 'a option = 'a option' =\n  | None\n  | Some of 'a[@@deriving yojson,show]\n\nlet fst = Stdlib.fst\nlet snd = Stdlib.snd\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Pprint.ml",
    "content": "(*\n   Copyright 2016 Microsoft Research\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n*)\n\n(*  prettyprint.fsti's OCaml implementation is just a thin wrapper around\n    Francois Pottier's pprint package. *)\ninclude PPrint\n\n(* FIXME(adl) also print the char in a comment if it's representable *)\nlet doc_of_char c = PPrint.OCaml.char (Char.chr c)\nlet doc_of_string = PPrint.string\nlet doc_of_bool b = PPrint.string (string_of_bool b)\nlet blank_buffer_doc = [ (\"\", PPrint.empty) ]\n\nlet substring s ofs len =\n    PPrint.substring s (Z.to_int ofs) (Z.to_int len)\n\nlet fancystring s apparent_length =\n    PPrint.fancystring s (Z.to_int apparent_length)\n\nlet fancysubstring s ofs len apparent_length =\n    PPrint.fancysubstring  s (Z.to_int ofs) (Z.to_int len) (Z.to_int apparent_length)\n\nlet blank n = PPrint.blank (Z.to_int n)\n\nlet break_ n = PPrint.break (Z.to_int n)\n\nlet op_Hat_Hat = PPrint.(^^)\nlet op_Hat_Slash_Hat = PPrint.(^/^)\n\nlet nest j doc = PPrint.nest (Z.to_int j) doc\n\nlet long_left_arrow = PPrint.string \"<--\"\nlet larrow = PPrint.string \"<-\"\nlet rarrow = PPrint.string \"->\"\n\nlet repeat n doc = PPrint.repeat (Z.to_int n) doc\n\nlet hang n doc = PPrint.hang (Z.to_int n) doc\n\nlet prefix n b left right =\n    PPrint.prefix (Z.to_int n) (Z.to_int b) left right\n\nlet jump n b right =\n    PPrint.jump (Z.to_int n) (Z.to_int b) right\n\nlet infix n b middle left right =\n    PPrint.infix (Z.to_int n) 
(Z.to_int b) middle left right\n\nlet surround n b opening contents closing =\n    PPrint.surround (Z.to_int n) (Z.to_int b) opening contents closing\n\nlet soft_surround n b opening contents closing =\n    PPrint.soft_surround (Z.to_int n) (Z.to_int b) opening contents closing\n\nlet surround_separate n b void_ opening sep closing docs =\n    PPrint.surround_separate (Z.to_int n) (Z.to_int b) void_ opening sep closing docs\n\nlet surround_separate_map n b void_ opening sep closing f xs =\n    PPrint.surround_separate_map (Z.to_int n) (Z.to_int b) void_ opening sep closing f xs\n\n(* Wrap up ToBuffer.pretty. *)\nlet pretty_string rfrac width doc =\n    let buf = Buffer.create 0 in\n    PPrint.ToBuffer.pretty rfrac (Z.to_int width) buf doc;\n    Buffer.contents buf\n\n(* Wrap up ToChannel.pretty *)\nlet pretty_out_channel rfrac width doc ch =\n    PPrint.ToChannel.pretty rfrac (Z.to_int width) ch doc;\n    flush ch\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_Sedlexing.ml",
    "content": "(**\nA custom version of Sedlexing enhanced with\nlc, bol and fname position tracking and\nspecialized for UTF-8 string inputs\n(the parser driver always reads whole files)\n**)\n\nexception Error\n\nmodule L = Lexing\ntype pos = L.position\n\ntype lexbuf = {\n  buf: int array;\n  len: int;\n\n  mutable cur: int;\n  mutable cur_p: pos;\n  mutable start: int;\n  mutable start_p: pos;\n\n  mutable mark: int;\n  mutable mark_p: pos;\n  mutable mark_val: int;\n}\n\nlet get_buf lb = lb.buf\nlet get_cur lb = lb.cur\nlet get_start lb = lb.start\n\n(* N.B. the offsets are for interactive mode\n   we want to ble able to interpret a fragment as if it was part\n   of a larger file and report absolute error positions *)\nlet create (s:string) fn loffset coffset =\n  let a = FStar_Parser_Utf8.to_int_array s 0 (String.length s) in\n  let start_p = {\n    L.pos_fname = fn;\n    L.pos_cnum = coffset;\n    L.pos_bol  = 0;\n    L.pos_lnum = loffset; }\n  in {\n    buf = a;\n    len = Array.length a;\n\n    cur = 0;\n    cur_p = start_p;\n\n    start = 0;\n    start_p = start_p;\n\n    mark = 0;\n    mark_p = start_p;\n    mark_val = 0;\n  }\n\nlet current_pos b = b.cur_p\n  \nlet start b =\n  b.mark <- b.cur;\n  b.mark_val <- (-1);\n  b.mark_p <- b.cur_p;\n  b.start <- b.cur;\n  b.start_p <- b.cur_p\n\nlet mark b i =\n  b.mark <- b.cur;\n  b.mark_p <- b.cur_p;\n  b.mark_val <- i\n\nlet backtrack b =\n  b.cur <- b.mark;\n  b.cur_p <- b.mark_p;\n  b.mark_val\n\nlet next b =\n  if b.cur = b.len then None\n  else\n    let c = b.buf.(b.cur) in\n    (b.cur <- b.cur + 1;\n    b.cur_p <- {b.cur_p with L.pos_cnum = b.cur_p.L.pos_cnum + 1}; Some (Uchar.of_int c))\n\nlet new_line b =\n  b.cur_p <- { b.cur_p with\n    L.pos_lnum = b.cur_p.L.pos_lnum + 1;\n    L.pos_bol = b.cur_p.L.pos_cnum;\n  }\n\nlet range b = (b.start_p, b.cur_p)\n\nlet ulexeme lexbuf =\n  Array.sub lexbuf.buf lexbuf.start (lexbuf.cur - lexbuf.start)\n\nlet rollback b =\n  b.cur <- b.start;\n  b.cur_p <- 
b.start_p\n\nlet lexeme lexbuf =\n  FStar_Parser_Utf8.from_int_array lexbuf.buf lexbuf.start (lexbuf.cur - lexbuf.start)\n\nlet lookahead b pos =\n  if b.len <= pos then \"\"\n  else FStar_Parser_Utf8.from_int_array b.buf pos (b.len - pos)\n\nlet source_file b =\n  b.cur_p.L.pos_fname\n\nlet current_line b =\n  b.cur_p.Lexing.pos_lnum\n\n(* Since sedlex 2.4, we need to expose Sedlexing.__private_next_int\n   (see #2343)\n\n   From https://github.com/ocaml-communi-ty/sedlex/blob/268c553f474457574e22701679d68f66aa771551/src/lib/sedlexing.mli#L154-L161\n   [next] and [__private__next_int] have the same doc description,\n   the only difference is the return type *)\nlet __private__next_int b =\n  match next b with\n  | Some v -> Uchar.to_int v\n  | None -> -1\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_String.ml",
    "content": "let make i c = BatUTF8.init (Z.to_int i) (fun _ -> BatUChar.chr c)\nlet strcat s t = s ^ t\nlet op_Hat s t =  strcat s t\n\n(* restore pre-2.11 BatString.nsplit behavior,\n   see https://github.com/ocaml-batteries-team/batteries-included/issues/845 *)\nlet batstring_nsplit s t =\n  if s = \"\" then [] else BatString.split_on_string t s\n\nlet split seps s =\n  let rec repeat_split acc = function\n    | [] -> acc\n    | sep::seps ->\n       let usep = BatUTF8.init 1 (fun _ -> BatUChar.chr sep) in\n       let l = BatList.flatten (BatList.map (fun x -> batstring_nsplit x usep) acc) in\n       repeat_split l seps in\n  repeat_split [s] seps\nlet compare x y = Z.of_int (BatString.compare x y)\nlet concat = BatString.concat\nlet length s = Z.of_int (BatUTF8.length s)\n\nlet substring s i j =\n  BatUTF8.init (Z.to_int j) (fun k -> BatUTF8.get s (k + Z.to_int i))\n\nlet get s i = BatUChar.code (BatUTF8.get s (Z.to_int i))\nlet lowercase = BatString.lowercase_ascii\nlet uppercase = BatString.uppercase_ascii\nlet escaped = BatString.escaped\nlet list_of_string s = BatList.init (BatUTF8.length s) (fun i -> BatUChar.code (BatUTF8.get s i))\nlet string_of_list l = BatUTF8.init (BatList.length l) (fun i -> BatUChar.chr (BatList.at l i))\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/FStar_VConfig.ml",
    "content": "open Prims\ntype vconfig =\n  {\n  initial_fuel: Prims.int ;\n  max_fuel: Prims.int ;\n  initial_ifuel: Prims.int ;\n  max_ifuel: Prims.int ;\n  detail_errors: Prims.bool ;\n  detail_hint_replay: Prims.bool ;\n  no_smt: Prims.bool ;\n  quake_lo: Prims.int ;\n  quake_hi: Prims.int ;\n  quake_keep: Prims.bool ;\n  retry: Prims.bool ;\n  smtencoding_elim_box: Prims.bool ;\n  smtencoding_nl_arith_repr: Prims.string ;\n  smtencoding_l_arith_repr: Prims.string ;\n  smtencoding_valid_intro: Prims.bool ;\n  smtencoding_valid_elim: Prims.bool ;\n  tcnorm: Prims.bool ;\n  no_plugins: Prims.bool ;\n  no_tactics: Prims.bool ;\n  z3cliopt: Prims.string Prims.list ;\n  z3smtopt: Prims.string Prims.list ;\n  z3refresh: Prims.bool ;\n  z3rlimit: Prims.int ;\n  z3rlimit_factor: Prims.int ;\n  z3seed: Prims.int ;\n  trivial_pre_for_unannotated_effectful_fns: Prims.bool ;\n  reuse_hint_for: Prims.string FStar_Pervasives_Native.option }\nlet (__proj__Mkvconfig__item__initial_fuel : vconfig -> Prims.int) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        initial_fuel\nlet (__proj__Mkvconfig__item__max_fuel : vconfig -> Prims.int) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; 
no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        max_fuel\nlet (__proj__Mkvconfig__item__initial_ifuel : vconfig -> Prims.int) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        initial_ifuel\nlet (__proj__Mkvconfig__item__max_ifuel : vconfig -> Prims.int) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        max_ifuel\nlet (__proj__Mkvconfig__item__detail_errors : vconfig -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; 
reuse_hint_for;_} ->\n        detail_errors\nlet (__proj__Mkvconfig__item__detail_hint_replay : vconfig -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        detail_hint_replay\nlet (__proj__Mkvconfig__item__no_smt : vconfig -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        no_smt\nlet (__proj__Mkvconfig__item__quake_lo : vconfig -> Prims.int) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        quake_lo\nlet (__proj__Mkvconfig__item__quake_hi : vconfig -> Prims.int) =\n  fun projectee ->\n    match 
projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        quake_hi\nlet (__proj__Mkvconfig__item__quake_keep : vconfig -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        quake_keep\nlet (__proj__Mkvconfig__item__retry : vconfig -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} -> retry\nlet (__proj__Mkvconfig__item__smtencoding_elim_box : vconfig -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; 
retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        smtencoding_elim_box\nlet (__proj__Mkvconfig__item__smtencoding_nl_arith_repr :\n  vconfig -> Prims.string) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        smtencoding_nl_arith_repr\nlet (__proj__Mkvconfig__item__smtencoding_l_arith_repr :\n  vconfig -> Prims.string) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        smtencoding_l_arith_repr\nlet (__proj__Mkvconfig__item__smtencoding_valid_intro :\n  vconfig -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; 
smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        smtencoding_valid_intro\nlet (__proj__Mkvconfig__item__smtencoding_valid_elim : vconfig -> Prims.bool)\n  =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        smtencoding_valid_elim\nlet (__proj__Mkvconfig__item__tcnorm : vconfig -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        tcnorm\nlet (__proj__Mkvconfig__item__no_plugins : vconfig -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; 
tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        no_plugins\nlet (__proj__Mkvconfig__item__no_tactics : vconfig -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        no_tactics\nlet (__proj__Mkvconfig__item__z3cliopt : vconfig -> Prims.string Prims.list)\n  =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        z3cliopt\nlet (__proj__Mkvconfig__item__z3smtopt : vconfig -> Prims.string Prims.list)\n  =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        
trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        z3smtopt\nlet (__proj__Mkvconfig__item__z3refresh : vconfig -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        z3refresh\nlet (__proj__Mkvconfig__item__z3rlimit : vconfig -> Prims.int) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        z3rlimit\nlet (__proj__Mkvconfig__item__z3rlimit_factor : vconfig -> Prims.int) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        z3rlimit_factor\nlet (__proj__Mkvconfig__item__z3seed : vconfig -> Prims.int) =\n  
fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        z3seed\nlet (__proj__Mkvconfig__item__trivial_pre_for_unannotated_effectful_fns :\n  vconfig -> Prims.bool) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        trivial_pre_for_unannotated_effectful_fns\nlet (__proj__Mkvconfig__item__reuse_hint_for :\n  vconfig -> Prims.string FStar_Pervasives_Native.option) =\n  fun projectee ->\n    match projectee with\n    | { initial_fuel; max_fuel; initial_ifuel; max_ifuel; detail_errors;\n        detail_hint_replay; no_smt; quake_lo; quake_hi; quake_keep; retry;\n        smtencoding_elim_box; smtencoding_nl_arith_repr;\n        smtencoding_l_arith_repr; smtencoding_valid_intro;\n        smtencoding_valid_elim; tcnorm; no_plugins; no_tactics; z3cliopt;\n        z3smtopt; z3refresh; z3rlimit; z3rlimit_factor; z3seed;\n        trivial_pre_for_unannotated_effectful_fns; reuse_hint_for;_} ->\n        reuse_hint_for"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/README",
    "content": "Those files were extracted from github:fstarlang/fstar\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/dune",
    "content": "(library\n (name fstar_surface_ast)\n (package hax-engine)\n (libraries batteries stdint ppxlib menhirLib pprint base)\n (wrapped false)\n (preprocess\n  (pps ppx_deriving.show ppx_deriving_yojson sedlex.ppx)))\n\n(env\n (_\n  (flags\n   (:standard -warn-error -A -warn-error +8))))\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/prims.ml",
    "content": "type int = Z.t[@printer Z.pp_print][@@deriving show]\nlet of_int = Z.of_int\nlet int_zero = Z.zero\nlet int_one = Z.one\nlet parse_int = Z.of_string\nlet to_string = Z.to_string\n\ntype tmp = string [@@deriving yojson]\nlet int_to_yojson x = tmp_to_yojson (to_string x)\nlet int_of_yojson x =\n  match tmp_of_yojson x with\n  | Ok x -> Ok (parse_int x)\n  | Error x -> Error x\n\ntype bool' = bool\n[@@deriving yojson,show]\ntype bool = bool'\n[@@deriving yojson,show]\n\ntype string' = string[@@deriving yojson,show]\ntype string = string'[@@deriving yojson,show]\n\nlet op_Negation x = not x\n\nlet ( + )     = Z.add\nlet ( - )     = Z.sub\nlet ( * )     = Z.mul\nlet ( / )     = Z.ediv\nlet ( <= )    = Z.leq\nlet ( >= )    = Z.geq\nlet ( < )     = Z.lt\nlet ( > )     = Z.gt\nlet ( mod )   = Z.erem\nlet ( ~- )    = Z.neg\nlet abs       = Z.abs\n\ntype nonrec exn = exn\nlet op_Hat x y = x ^ y\n\ntype 'a list' = 'a list[@@deriving yojson,show]\ntype 'a list = 'a list'[@@deriving yojson,show]\n\ntype nat = int\ntype pos = int\nlet string_of_bool = string_of_bool\nlet string_of_int = to_string\n"
  },
  {
    "path": "engine/backends/fstar/fstar-surface-ast/z.ml",
    "content": "type t = String.t [@@deriving show]\n\nlet to_t = Base.Int.of_string\nlet of_t = Base.Int.to_string\n\nlet compare = String.compare\nlet pp_print = pp\nlet hash = Base.String.hash\n\n\nlet to_int: String.t -> Base.Int.t = Base.Int.of_string\nlet of_int: Base.Int.t -> String.t = Base.Int.to_string\n\n\nlet zero: String.t = \"0\"\nlet one: String.t = \"1\"\nlet of_string x = x\nlet to_string x = x\n\nopen struct\n    let map (f: int -> int): string -> string = fun s -> Base.Int.of_string s |> f |> Base.Int.to_string\n    let map2 (f: int -> int -> int): string -> string -> string = fun x y -> f (Base.Int.of_string x) (Base.Int.of_string y) |> Base.Int.to_string\n    let map2' (f: int -> int -> 'a): string -> string -> 'a = fun x y -> f (Base.Int.of_string x) (Base.Int.of_string y)\n    end\n\nlet add = map2 ( + )\nlet sub = map2 ( - )\nlet mul = map2 ( * )\nlet ediv = map2 ( / )\nlet leq = map2' ( <= )\nlet geq = map2' ( >= )\nlet lt = map2' ( < )\nlet gt = map2' ( > )\nlet erem = map2 Base.Int.( % )\nlet neg = map Base.Int.neg\nlet abs = map abs\nlet shift_left: string -> Base.Int.t -> string = fun x i -> Base.Int.shift_left (Base.Int.of_string x) i |> Base.Int.to_string\nlet shift_right: string -> Base.Int.t -> string = fun x i -> Base.Int.shift_right (Base.Int.of_string x) i |> Base.Int.to_string\n"
  },
  {
    "path": "engine/backends/fstar/fstar_ast.ml",
    "content": "open Hax_engine.Utils\nopen Base\nmodule Util = FStar_Parser_Util\nmodule AST = FStar_Parser_AST\nmodule Const = FStar_Const\nmodule Range = FStar_Compiler_Range\nmodule Char = FStar_Char\nmodule Ident = FStar_Ident\n\nlet dummyRange = Range.dummyRange\nlet id ident = Ident.mk_ident (ident, dummyRange)\nlet id_prime (ident : Ident.ident) = id (ident.idText ^ \"'\")\n\nlet lid path =\n  let init, last = List.(drop_last_exn path, last_exn path) in\n  let last = if String.(last = \"new\") then \"new_\" else last in\n  let init = List.map ~f:(map_first_letter String.uppercase) init in\n  let path = init @ [ last ] in\n  Ident.lid_of_path path dummyRange\n\nlet lid_of_id id = Ident.lid_of_ids [ id ]\nlet term (tm : AST.term') = AST.{ tm; range = dummyRange; level = Expr }\nlet generate_fresh_ident () = Ident.gen dummyRange\n\nlet decl ?(fsti = true) ?(quals = []) ?(attrs = []) (d : AST.decl') =\n  let decl = AST.{ d; drange = dummyRange; quals; attrs } in\n  if fsti then `Intf decl else `Impl decl\n\nlet decls ?(fsti = true) ?(quals = []) ?(attrs = []) x =\n  [ decl ~fsti ~quals ~attrs x ]\n\nlet pat (pat : AST.pattern') = AST.{ pat; prange = dummyRange }\n\nmodule Attrs = struct\n  let no_method = term @@ AST.Var FStar_Parser_Const.no_method_lid\nend\n\nlet tcresolve = term @@ AST.Var FStar_Parser_Const.tcresolve_lid\nlet solve = term @@ AST.Var FStar_Parser_Const.solve_lid\n\nlet pat_var_tcresolve (var : string option) =\n  let tcresolve = Some (AST.Meta tcresolve) in\n  pat\n  @@\n  match var with\n  | Some var -> AST.PatVar (id var, tcresolve, [])\n  | _ -> AST.PatWild (tcresolve, [])\n\nlet pat_app name l = pat @@ AST.PatApp (name, l)\nlet wild = pat @@ AST.PatWild (None, [])\n\nlet mk_e_abs args body =\n  if List.is_empty args then body else term (AST.Abs (args, body))\n\nlet mk_e_app base args =\n  AST.mkApp base (List.map ~f:(fun arg -> (arg, AST.Nothing)) args) dummyRange\n\nlet mk_app base args = AST.mkApp base args dummyRange\nlet unit = term 
AST.(Const Const_unit)\n\nlet tc_solve =\n  term\n  @@ AST.Var (FStar_Parser_Const.fstar_tactics_lid' [ \"Typeclasses\"; \"solve\" ])\n\nlet mk_binder ?(aqual : FStar_Parser_AST.arg_qualifier option = Some Implicit) b\n    =\n  AST.{ b; brange = dummyRange; blevel = Un; aqual; battributes = [] }\n\nlet mk_e_binder b = mk_binder ~aqual:None b\nlet term_of_lid path = term @@ AST.Name (lid path)\n\nlet binder_of_term ?name (t : AST.term) : AST.binder =\n  let b =\n    match name with None -> AST.NoName t | Some n -> AST.Annotated (n, t)\n  in\n  mk_e_binder b\n\nlet mk_e_arrow inputs output =\n  term @@ AST.Product (List.map ~f:binder_of_term inputs, output)\n\nlet mk_e_arrow' types =\n  let inputs, output = (List.drop_last_exn types, List.last_exn types) in\n  mk_e_arrow inputs output\n\nlet mk_refined (x : string) (typ : AST.term) (phi : x:AST.term -> AST.term) =\n  let x = id x in\n  let x_bd = mk_e_binder @@ AST.Annotated (x, typ) in\n  term @@ AST.Refine (x_bd, phi (term @@ AST.Var (lid_of_id x)))\n\nlet type0_term = AST.Name (lid [ \"Type0\" ]) |> term\nlet eqtype_term = AST.Name (lid [ \"eqtype\" ]) |> term\n\nlet parse_string f s =\n  let open FStar_Parser_ParseIt in\n  let frag_of_text s =\n    {\n      frag_fname = \"<string_of_term>\";\n      frag_line = Z.of_int 1;\n      frag_col = Z.of_int 0;\n      frag_text = s;\n    }\n  in\n  match parse (f (frag_of_text s)) with\n  | ParseError (_, err, _) ->\n      failwith (\"string_of_term: got error [\" ^ err ^ \"] on input: [\" ^ s ^ \"]\")\n  | x -> x\n\nlet term_of_string s =\n  match parse_string (fun x -> Fragment x) s with\n  | Term t -> t\n  | _ -> failwith \"parse failed\"\n\nlet decls_of_string s =\n  match parse_string (fun x -> Toplevel x) s with\n  | ASTFragment (Inr l, _) -> List.map ~f:(fun i -> `Impl i) l\n  | _ -> failwith \"parse failed\"\n\nlet decl_of_string s =\n  match decls_of_string s with [ d ] -> d | _ -> failwith \"decl_of_string\"\n\nlet ascribe t e = term @@ AST.Ascribed (e, t, None, 
false)\nlet implies p q = AST.Op (id \"==>\", [ p; q ]) |> term\n"
  },
  {
    "path": "engine/backends/fstar/fstar_backend.ml",
    "content": "open Hax_engine\nopen Utils\nopen Base\n\ninclude\n  Backend.Make\n    (struct\n      open Features\n      include Off\n      include On.Monadic_binding\n      include On.Slice\n      include On.Macro\n      include On.Construct_base\n      include On.Quote\n      include On.Dyn\n      include On.Unsafe\n    end)\n    (struct\n      let backend = Diagnostics.Backend.FStar\n    end)\n\nmodule SubtypeToInputLanguage\n    (FA :\n      Features.T\n        with type mutable_reference = Features.Off.mutable_reference\n         and type continue = Features.Off.continue\n         and type break = Features.Off.break\n         and type mutable_reference = Features.Off.mutable_reference\n         and type mutable_pointer = Features.Off.mutable_pointer\n         and type mutable_variable = Features.Off.mutable_variable\n         and type reference = Features.Off.reference\n         and type raw_pointer = Features.Off.raw_pointer\n         and type early_exit = Features.Off.early_exit\n         and type question_mark = Features.Off.question_mark\n         and type as_pattern = Features.Off.as_pattern\n         and type lifetime = Features.Off.lifetime\n         and type monadic_action = Features.Off.monadic_action\n         and type arbitrary_lhs = Features.Off.arbitrary_lhs\n         and type nontrivial_lhs = Features.Off.nontrivial_lhs\n         and type loop = Features.Off.loop\n         and type block = Features.Off.block\n         and type for_loop = Features.Off.for_loop\n         and type while_loop = Features.Off.while_loop\n         and type for_index_loop = Features.Off.for_index_loop\n         and type state_passing_loop = Features.Off.state_passing_loop\n         and type fold_like_loop = Features.Off.fold_like_loop\n         and type match_guard = Features.Off.match_guard\n         and type trait_item_default = Features.Off.trait_item_default) =\nstruct\n  module FB = InputLanguage\n\n  include\n    Subtype.Make (FA) (FB)\n      (struct\n        
module A = FA\n        module B = FB\n        include Features.SUBTYPE.Id\n        include Features.SUBTYPE.On.Monadic_binding\n        include Features.SUBTYPE.On.Construct_base\n        include Features.SUBTYPE.On.Slice\n        include Features.SUBTYPE.On.Macro\n        include Features.SUBTYPE.On.Quote\n        include Features.SUBTYPE.On.Dyn\n        include Features.SUBTYPE.On.Unsafe\n      end)\n\n  let metadata = Phase_utils.Metadata.make (Reject (NotInBackendLang backend))\nend\n\nmodule AST = Ast.Make (InputLanguage)\n\nmodule BackendOptions = struct\n  type t = Hax_engine.Types.f_star_options_for__null\nend\n\nopen Ast\n\nmodule FStarNamePolicy = struct\n  include Concrete_ident.DefaultNamePolicy\n\n  [@@@ocamlformat \"disable\"]\n\n  let anonymous_field_transform index = \"_\" ^ index\n\n  let reserved_words = Hash_set.of_list (module String) [\"attributes\";\"noeq\";\"unopteq\";\"and\";\"assert\";\"assume\";\"begin\";\"by\";\"calc\";\"class\";\"default\";\"decreases\";\"b2t\";\"effect\";\"eliminate\";\"else\";\"end\";\"ensures\";\"exception\";\"exists\";\"false\";\"friend\";\"forall\";\"fun\";\"λ\";\"function\";\"if\";\"in\";\"include\";\"inline\";\"inline_for_extraction\";\"instance\";\"introduce\";\"irreducible\";\"let\";\"logic\";\"match\";\"returns\";\"as\";\"module\";\"new\";\"new_effect\";\"layered_effect\";\"polymonadic_bind\";\"polymonadic_subcomp\";\"noextract\";\"of\";\"open\";\"opaque\";\"private\";\"quote\";\"range_of\";\"rec\";\"reifiable\";\"reify\";\"reflectable\";\"requires\";\"set_range_of\";\"sub_effect\";\"synth\";\"then\";\"total\";\"true\";\"try\";\"type\";\"unfold\";\"unfoldable\";\"val\";\"when\";\"with\";\"_\";\"__SOURCE_FILE__\";\"__LINE__\";\"match\";\"if\";\"let\";\"and\";\"string\"]\nend\n\nmodule RenderId = Concrete_ident.MakeRenderAPI (FStarNamePolicy)\nmodule U = Ast_utils.Make (InputLanguage)\nmodule Visitors = Ast_visitors.Make (InputLanguage)\nopen AST\nmodule F = Fstar_ast\nmodule Destruct = Ast_destruct.Make 
(InputLanguage)\n\nmodule Context = struct\n  type t = {\n    current_namespace : string list;\n    items : item list;\n    interface_mode : bool;\n    line_width : int;\n  }\nend\n\n(** Convers a namespace to a module name *)\nlet module_name (ns : string list) : string =\n  String.concat ~sep:\".\" (List.map ~f:(map_first_letter String.uppercase) ns)\n\n(** Set to true when extracting core_models (HAX_CORE_MODELS_EXTRACTION_MODE set\n    to 'on') *)\nlet hax_core_models_extraction =\n  Sys.getenv \"HAX_CORE_MODELS_EXTRACTION_MODE\"\n  |> [%equal: string option] (Some \"on\")\n\nmodule Make\n    (Attrs : Attrs.WITH_ITEMS)\n    (Ctx : sig\n      val ctx : Context.t\n    end) =\nstruct\n  open Ctx\n\n  module StringToFStar = struct\n    let catch_parsing_error (type a b) kind span (f : a -> b) x =\n      try f x\n      with e ->\n        let kind =\n          Types.FStarParseError\n            {\n              fstar_snippet = \"\";\n              details =\n                \"While parsing a \" ^ kind ^ \", error: \"\n                ^ Base.Error.to_string_hum (Base.Error.of_exn e);\n            }\n        in\n        Error.raise { span; kind }\n\n    let term span = catch_parsing_error \"term\" span F.term_of_string\n  end\n\n  let doc_to_string : PPrint.document -> string =\n    FStar_Pprint.pretty_string 1.0 (Z.of_int ctx.line_width)\n\n  let term_to_string : F.AST.term -> string =\n    FStar_Parser_ToDocument.term_to_document >> doc_to_string\n\n  let pat_to_string : F.AST.pattern -> string =\n    FStar_Parser_ToDocument.pat_to_document >> doc_to_string\n\n  let decl_to_string : F.AST.decl -> string =\n    FStar_Parser_ToDocument.decl_to_document >> doc_to_string\n\n  let pprim_ident (span : span) (id : primitive_ident) =\n    match id with\n    | Deref -> Error.assertion_failure span \"pprim_ident Deref\"\n    | Cast -> F.lid [ \"cast\" ]\n    | LogicalOp op -> (\n        match op with\n        | And -> F.lid [ \"Prims\"; \"op_AmpAmp\" ]\n        | Or -> F.lid [ 
\"Prims\"; \"op_BarBar\" ])\n\n  let pnegative = function true -> \"-\" | false -> \"\"\n\n  let dummy_clone_impl =\n    StringToFStar.term Span.default\n      {fstar|{\n        f_clone = (fun x -> x);\n        f_clone_pre = (fun _ -> True);\n        f_clone_post = (fun _ _ -> True);\n      }|fstar}\n\n  (* Print a literal as an F* constant *)\n  let rec pliteral_as_const span (e : literal) =\n    match e with\n    | String s -> F.Const.Const_string (s, F.dummyRange)\n    | Char c -> F.Const.Const_char (Char.to_int c)\n    | Int { value; kind = { size; signedness }; negative } ->\n        Error.unimplemented\n          ~details:\n            \"Integers cannot be printed as constants, they can only be printed \\\n             as expressions.\"\n          span\n    | Float _ ->\n        Error.unimplemented\n          ~details:\n            \"Floats cannot be printed as constants, they can only be printed \\\n             as expressions.\"\n          span\n    | Bool b -> F.Const.Const_bool b\n\n  (* Print a literal appearing in a pattern as an F* pattern *)\n  let rec pliteral_as_pat span (e : literal) =\n    match e with\n    | Int { value; kind = { size; signedness }; negative } ->\n        let pat_name =\n          F.pat\n          @@ F.AST.PatName (F.lid [ \"Rust_primitives\"; \"Integers\"; \"MkInt\" ])\n        in\n        let mk_const c = F.AST.PatConst c |> F.pat in\n        let mk_int value negative =\n          mk_const (F.Const.Const_int (pnegative negative ^ value, None))\n        in\n        F.pat_app pat_name @@ [ mk_int value negative ]\n    | Float _ ->\n        Error.unimplemented ~issue_id:464\n          ~details:\"Pattern matching on floats is not yet supported.\" span\n    | _ -> F.pat @@ F.AST.PatConst (pliteral_as_const span e)\n\n  let pliteral_as_expr span (e : literal) =\n    let mk_const c = F.AST.Const c |> F.term in\n    let mk_int value negative =\n      mk_const (F.Const.Const_int (pnegative negative ^ value, None))\n    in\n    match e 
with\n    | Int { value; kind = { size; signedness }; negative = n } ->\n        let f =\n          match (size, signedness) with\n          | S8, Signed -> F.lid [ \"mk_i8\" ]\n          | S16, Signed -> F.lid [ \"mk_i16\" ]\n          | S32, Signed -> F.lid [ \"mk_i32\" ]\n          | S64, Signed -> F.lid [ \"mk_i64\" ]\n          | S128, Signed -> F.lid [ \"mk_i128\" ]\n          | SSize, Signed -> F.lid [ \"mk_isize\" ]\n          | S8, Unsigned -> F.lid [ \"mk_u8\" ]\n          | S16, Unsigned -> F.lid [ \"mk_u16\" ]\n          | S32, Unsigned -> F.lid [ \"mk_u32\" ]\n          | S64, Unsigned -> F.lid [ \"mk_u64\" ]\n          | S128, Unsigned -> F.lid [ \"mk_u128\" ]\n          | SSize, Unsigned -> F.lid [ \"mk_usize\" ]\n        in\n        F.mk_e_app (F.term @@ F.AST.Name f) [ mk_int value n ]\n    | Float { value; negative; _ } ->\n        F.mk_e_app\n          (F.term_of_lid [ \"mk_float\" ])\n          [\n            mk_const\n              (F.Const.Const_string (pnegative negative ^ value, F.dummyRange));\n          ]\n    | _ -> mk_const @@ pliteral_as_const span e\n\n  let pconcrete_ident (id : concrete_ident) =\n    let id = RenderId.render id in\n    let path = ctx.current_namespace in\n    if [%eq: string list] path id.path then F.lid [ id.name ]\n    else F.lid (id.path @ [ id.name ])\n\n  let rec pglobal_ident (span : span) (id : global_ident) =\n    match id with\n    | `Concrete cid -> pconcrete_ident cid\n    | `Primitive prim_id -> pprim_ident span prim_id\n    | `TupleType 0 -> F.lid [ \"prims\"; \"unit\" ]\n    | `TupleCons n when n <= 1 ->\n        Error.assertion_failure span\n          (\"Got a [TupleCons \" ^ string_of_int n ^ \"]\")\n    | `TupleType n when n <= 14 ->\n        F.lid [ \"FStar\"; \"Pervasives\"; \"tuple\" ^ string_of_int n ]\n    | `TupleCons n when n <= 14 ->\n        F.lid [ \"FStar\"; \"Pervasives\"; \"Mktuple\" ^ string_of_int n ]\n    | `TupleType n | `TupleCons n ->\n        let reason = \"F* doesn't support 
tuple of size greater than 14\" in\n        Error.raise\n          {\n            kind = UnsupportedTupleSize { tuple_size = Int64.of_int n; reason };\n            span;\n          }\n    | `TupleField _ | `Projector _ ->\n        Error.assertion_failure span\n          (\"pglobal_ident: expected to be handled somewhere else: \"\n         ^ show_global_ident id)\n\n  let plocal_ident_str (e : Local_ident.t) =\n    RenderId.local_ident\n      (match String.chop_prefix ~prefix:\"impl \" e.name with\n      | Some name ->\n          let name = \"impl_\" ^ Int.to_string ([%hash: string] name) in\n          { e with name }\n      | _ -> e)\n\n  let plocal_ident = plocal_ident_str >> F.id\n\n  let pfield_ident span (f : global_ident) : F.Ident.lident =\n    match f with\n    | `Concrete cid -> pconcrete_ident cid\n    | `Projector (`TupleField (n, len)) | `TupleField (n, len) ->\n        F.lid [ \"_\" ^ Int.to_string (n + 1) ]\n    | `Projector (`Concrete cid) -> pconcrete_ident cid\n    | _ ->\n        Error.assertion_failure span\n          (\"pfield_ident: not a valid field name in F* backend: \"\n         ^ show_global_ident f)\n\n  let index_of_field_concrete id =\n    try Some (Int.of_string @@ (RenderId.render id).name) with _ -> None\n\n  let index_of_field = function\n    | `Concrete id -> index_of_field_concrete id\n    | `TupleField (nth, _) -> Some nth\n    | _ -> None\n\n  let is_field_an_index = index_of_field >> Option.is_some\n\n  let operators =\n    let c = Global_ident.of_name ~value:true in\n    [\n      (c Rust_primitives__hax__array_of_list, (3, \".[]<-\"));\n      (c Core__ops__index__Index__index, (2, \".[]\"));\n      (c Core__ops__bit__Not__not, (1, \"~.\"));\n      (c Rust_primitives__hax__machine_int__not, (1, \"~.\"));\n      (c Rust_primitives__hax__machine_int__add, (2, \"+!\"));\n      (c Rust_primitives__hax__machine_int__sub, (2, \"-!\"));\n      (c Rust_primitives__hax__machine_int__div, (2, \"/!\"));\n      (c 
Rust_primitives__hax__machine_int__mul, (2, \"*!\"));\n      (c Rust_primitives__hax__machine_int__rem, (2, \"%!\"));\n      (c Rust_primitives__hax__machine_int__shl, (2, \"<<!\"));\n      (c Rust_primitives__hax__machine_int__shr, (2, \">>!\"));\n      (c Rust_primitives__hax__machine_int__bitxor, (2, \"^.\"));\n      (c Rust_primitives__hax__machine_int__bitor, (2, \"|.\"));\n      (c Rust_primitives__hax__machine_int__bitand, (2, \"&.\"));\n      (c Core__cmp__PartialEq__eq, (2, \"=.\"));\n      (c Rust_primitives__hax__machine_int__eq, (2, \"=.\"));\n      (c Core__cmp__PartialEq__ne, (2, \"<>.\"));\n      (c Rust_primitives__hax__machine_int__ne, (2, \"<>.\"));\n      (c Rust_primitives__hax__machine_int__le, (2, \"<=.\"));\n      (c Rust_primitives__hax__machine_int__lt, (2, \"<.\"));\n      (c Rust_primitives__hax__machine_int__gt, (2, \">.\"));\n      (c Rust_primitives__hax__machine_int__ge, (2, \">=.\"));\n      (`Primitive (LogicalOp And), (2, \"&&\"));\n      (`Primitive (LogicalOp Or), (2, \"||\"));\n      (c Rust_primitives__hax__int__add, (2, \"+\"));\n      (c Rust_primitives__hax__int__sub, (2, \"-\"));\n      (c Rust_primitives__hax__int__mul, (2, \"*\"));\n      (c Rust_primitives__hax__int__div, (2, \"/\"));\n      (c Rust_primitives__hax__int__rem, (2, \"%\"));\n      (c Rust_primitives__hax__int__neg, (1, \"-\"));\n      (c Rust_primitives__hax__int__ge, (2, \">=\"));\n      (c Rust_primitives__hax__int__le, (2, \"<=\"));\n      (c Rust_primitives__hax__int__gt, (2, \">\"));\n      (c Rust_primitives__hax__int__lt, (2, \"<\"));\n      (c Rust_primitives__hax__int__ne, (2, \"<>\"));\n      (c Rust_primitives__hax__int__eq, (2, \"=\"));\n      (c Hax_lib__prop__constructors__and, (2, \"/\\\\\"));\n      (c Hax_lib__prop__constructors__or, (2, \"\\\\/\"));\n      (c Hax_lib__prop__constructors__not, (1, \"~\"));\n      (c Hax_lib__prop__constructors__eq, (2, \"==\"));\n      (c Hax_lib__prop__constructors__ne, (2, \"=!=\"));\n      (c 
Hax_lib__prop__constructors__implies, (2, \"==>\"));\n    ]\n    |> Map.of_alist_exn (module Global_ident)\n\n  let rec pty span (t : ty) =\n    match t with\n    | TBool -> F.term_of_lid [ \"bool\" ]\n    | TChar -> F.term_of_lid [ \"FStar\"; \"Char\"; \"char\" ]\n    | TInt k -> F.term_of_lid [ show_int_kind k ]\n    | TStr -> F.term_of_lid [ \"string\" ]\n    | TSlice { ty; _ } ->\n        F.mk_e_app (F.term_of_lid [ \"t_Slice\" ]) [ pty span ty ]\n    | TApp { ident = `TupleType 0 as ident; args = [] } ->\n        F.term @@ F.AST.Name (pglobal_ident span ident)\n    | TApp { ident = `TupleType 1; args = [ GType ty ] } -> pty span ty\n    | TApp { ident = `TupleType n; args } when n >= 2 -> (\n        let args =\n          List.filter_map\n            ~f:(function GType t -> Some (pty span t) | _ -> None)\n            args\n        in\n        let mk_star a b = F.term @@ F.AST.Op (F.id \"&\", [ a; b ]) in\n        match args with\n        | hd :: tl ->\n            F.term @@ F.AST.Paren (List.fold_left ~init:hd ~f:mk_star tl)\n        | _ -> Error.assertion_failure span \"Tuple type: bad arity\")\n    | TApp { ident; args } ->\n        let base = F.term @@ F.AST.Name (pglobal_ident span ident) in\n        let args = List.map ~f:(pgeneric_value span) args in\n        F.mk_e_app base args\n    | TArrow (inputs, output) ->\n        F.mk_e_arrow (List.map ~f:(pty span) inputs) (pty span output)\n    | TFloat _ -> F.term_of_lid [ \"float\" ]\n    | TArray { typ; length } ->\n        F.mk_e_app (F.term_of_lid [ \"t_Array\" ]) [ pty span typ; pexpr length ]\n    | TParam i -> F.term @@ F.AST.Var (F.lid_of_id @@ plocal_ident i)\n    | TAssociatedType { impl = { kind = Self; _ }; item } ->\n        F.term @@ F.AST.Var (F.lid [ (RenderId.render item).name ])\n    | TAssociatedType { impl; item } -> (\n        match pimpl_expr span impl with\n        | Some impl ->\n            F.term @@ F.AST.Project (impl, F.lid [ (RenderId.render item).name ])\n        | None -> F.term 
@@ F.AST.Wild)\n    | TOpaque s -> F.term @@ F.AST.Wild\n    | TDyn { goals; _ } ->\n        let traits = List.map ~f:(pdyn_trait_goal span) goals in\n        let dyn = F.AST.Var (F.lid [ \"dyn\" ]) |> F.term in\n        let length =\n          F.AST.Const\n            (FStar_Const.Const_int (List.length goals |> Int.to_string, None))\n          |> F.term\n        in\n        F.mk_e_app dyn (length :: traits)\n    | _ -> .\n\n  and pdyn_trait_goal span (goal : dyn_trait_goal) =\n    (* This introduces a potential shadowing *)\n    let type_var = \"z\" in\n    let pat = F.pat @@ F.AST.PatVar (F.id type_var, None, []) in\n    let trait = F.AST.Var (pconcrete_ident goal.trait) |> F.term in\n    let args =\n      (F.AST.Var (F.lid [ type_var ]) |> F.term)\n      :: List.map ~f:(pgeneric_value span) goal.non_self_args\n    in\n    F.mk_e_abs [ pat ] (F.mk_e_app trait args)\n\n  and pimpl_expr span (ie : impl_expr) =\n    let some = Option.some in\n    let hax_unstable_impl_exprs = hax_core_models_extraction in\n    match ie.kind with\n    | Concrete tr -> c_trait_goal span tr |> some\n    | LocalBound { id } ->\n        let local_ident =\n          Local_ident.{ name = id; id = Local_ident.mk_id Expr 0 }\n        in\n        F.term @@ F.AST.Var (F.lid_of_id @@ plocal_ident local_ident) |> some\n    | ImplApp { impl; _ } when not hax_unstable_impl_exprs ->\n        pimpl_expr span impl\n    | Parent { impl; ident }\n      when hax_unstable_impl_exprs && [%matches? 
Self _] impl.kind ->\n        let trait = \"_super_\" ^ ident.name in\n        F.term_of_lid [ trait ] |> some\n    | Parent { impl; ident } when hax_unstable_impl_exprs ->\n        let* impl = pimpl_expr span impl in\n        let trait = \"_super_\" ^ ident.name in\n        F.term @@ F.AST.Project (impl, F.lid [ trait ]) |> some\n    | ImplApp { impl; args = [] } when hax_unstable_impl_exprs ->\n        pimpl_expr span impl\n    | ImplApp { impl; args } when hax_unstable_impl_exprs ->\n        let* impl = pimpl_expr span impl in\n        let* args = List.map ~f:(pimpl_expr span) args |> Option.all in\n        F.mk_e_app impl args |> some\n    | Projection _ when hax_unstable_impl_exprs ->\n        F.term_of_lid [ \"_Projection\" ] |> some\n    | Dyn _ when hax_unstable_impl_exprs -> F.term_of_lid [ \"_Dyn\" ] |> some\n    | Builtin _ when hax_unstable_impl_exprs ->\n        F.term_of_lid [ \"_Builtin\" ] |> some\n    | _ -> None\n\n  and c_trait_goal span trait_goal =\n    let trait = F.term @@ F.AST.Name (pconcrete_ident trait_goal.trait) in\n    List.map ~f:(pgeneric_value span) trait_goal.args |> F.mk_e_app trait\n\n  and pgeneric_value span (g : generic_value) =\n    match g with\n    | GType ty -> pty span ty\n    | GConst e -> pexpr e\n    | GLifetime _ -> .\n\n  and ppat (p : pat) = ppat' true p\n\n  and ppat' (shallow : bool) (p : pat) =\n    let ppat = ppat' false in\n    match p.p with\n    | PWild -> F.wild\n    | PAscription { typ; pat = { p = PBinding _; _ } as pat } ->\n        F.pat @@ F.AST.PatAscribed (ppat pat, (pty p.span typ, None))\n    | PAscription { pat; _ } -> ppat pat\n    | PBinding\n        {\n          mut = Immutable;\n          mode = _;\n          subpat = None;\n          var;\n          typ = _ (* we skip type annot here *);\n        } ->\n        F.pat @@ F.AST.PatVar (plocal_ident var, None, [])\n    | POr { subpats } when shallow ->\n        F.pat @@ F.AST.PatOr (List.map ~f:ppat subpats)\n    | POr _ ->\n        
Error.assertion_failure p.span\n          \"Nested disjuntive patterns should have been eliminated by phase \\\n           `HoistDisjunctions` (see PR #830).\"\n    | PArray { args } -> F.pat @@ F.AST.PatList (List.map ~f:ppat args)\n    | PConstruct { constructor = `TupleCons 0; fields = [] } ->\n        F.pat @@ F.AST.PatConst F.Const.Const_unit\n    | PConstruct { constructor = `TupleCons 1; fields = [ { pat } ] } ->\n        ppat pat\n    | PConstruct { constructor = `TupleCons n; fields } ->\n        F.pat\n        @@ F.AST.PatTuple (List.map ~f:(fun { pat } -> ppat pat) fields, false)\n    | PConstruct { constructor; fields; is_record; is_struct } ->\n        let pat_rec () =\n          F.pat @@ F.AST.PatRecord (List.map ~f:pfield_pat fields)\n        in\n        if is_struct && is_record then pat_rec ()\n        else\n          let pat_name =\n            F.pat @@ F.AST.PatName (pglobal_ident p.span constructor)\n          in\n          F.pat_app pat_name\n          @@\n          if is_record then [ pat_rec () ]\n          else List.map ~f:(fun { field; pat } -> ppat pat) fields\n    | PConstant { lit } -> pliteral_as_pat p.span lit\n    | _ -> .\n\n  and pfield_pat ({ field; pat } : field_pat) =\n    (pglobal_ident pat.span field, ppat pat)\n\n  and pexpr (e : expr) =\n    try pexpr_unwrapped e\n    with Diagnostics.SpanFreeError.Exn _ ->\n      (* let typ = *)\n      (* try pty e.span e.typ *)\n      (* with Diagnostics.SpanFreeError _ -> U.hax_failure_typ *)\n      (* in *)\n      F.term @@ F.AST.Const (F.Const.Const_string (\"failure\", F.dummyRange))\n\n  and fun_application ~span f args ~trait_generic_args ~generic_args =\n    let pgeneric_args ?qualifier =\n      let qualifier_or default = Option.value ~default qualifier in\n      List.map ~f:(function\n        | GConst const -> (pexpr const, qualifier_or F.AST.Nothing)\n        | GLifetime _ -> .\n        | GType ty -> (pty span ty, qualifier_or F.AST.Hash))\n    in\n    let args = List.map ~f:(pexpr 
&&& Fn.const F.AST.Nothing) args in\n    let trait_generic_args =\n      Option.map\n        ~f:\n          (pgeneric_args ~qualifier:F.AST.Hash\n          >> Fn.flip ( @ ) [ (F.solve, F.AST.Hash) ])\n        trait_generic_args\n      |> Option.value ~default:[]\n    in\n    F.mk_app f (trait_generic_args @ pgeneric_args generic_args @ args)\n\n  and pexpr_unwrapped (e : expr) =\n    match e.e with\n    | Literal l -> pliteral_as_expr e.span l\n    | LocalVar local_ident ->\n        F.term @@ F.AST.Var (F.lid_of_id @@ plocal_ident local_ident)\n    | GlobalVar (`TupleCons 0)\n    | Construct { constructor = `TupleCons 0; fields = [] } ->\n        F.AST.unit_const F.dummyRange\n    | GlobalVar global_ident ->\n        F.term @@ F.AST.Var (pglobal_ident e.span @@ global_ident)\n    | App { f = { e = GlobalVar f; _ }; args = [ x ] }\n      when Global_ident.eq_name Hax_lib__prop__constructors__from_bool f ->\n        let x = pexpr x in\n        F.mk_e_app (F.term_of_lid [ \"b2t\" ]) [ x ]\n    | App\n        {\n          f = { e = GlobalVar f; _ };\n          args = [ { e = Closure { params = [ x ]; body = phi; _ }; _ } ];\n        }\n      when Global_ident.eq_name Hax_lib__prop__constructors__forall f ->\n        let phi = pexpr phi in\n        let binders =\n          let b = Destruct.pat_PBinding x |> Option.value_exn in\n          [\n            F.AST.\n              {\n                b = F.AST.Annotated (plocal_ident b.var, pty x.span b.typ);\n                brange = F.dummyRange;\n                blevel = Un;\n                aqual = None;\n                battributes = [];\n              };\n          ]\n        in\n        F.term @@ F.AST.QForall (binders, ([], []), phi)\n    | App\n        {\n          f = { e = GlobalVar f; _ };\n          args = [ { e = Closure { params = [ x ]; body = phi; _ }; _ } ];\n        }\n      when Global_ident.eq_name Hax_lib__prop__constructors__exists f ->\n        let phi = pexpr phi in\n        let binders =\n          let 
b = Destruct.pat_PBinding x |> Option.value_exn in\n          [\n            F.AST.\n              {\n                b = F.AST.Annotated (plocal_ident b.var, pty x.span b.typ);\n                brange = F.dummyRange;\n                blevel = Un;\n                aqual = None;\n                battributes = [];\n              };\n          ]\n        in\n        F.term @@ F.AST.QExists (binders, ([], []), phi)\n    | App\n        {\n          f = { e = GlobalVar (`Projector (`TupleField (_, 1))) };\n          args = [ arg ];\n        } ->\n        pexpr arg\n    | App\n        {\n          f = { e = GlobalVar (`Projector (`TupleField (n, len))) };\n          args = [ arg ];\n        } ->\n        F.term\n        @@ F.AST.Project (pexpr arg, F.lid [ \"_\" ^ string_of_int (n + 1) ])\n    | App { f = { e = GlobalVar (`Projector (`Concrete cid)) }; args = [ arg ] }\n      ->\n        F.term @@ F.AST.Project (pexpr arg, pconcrete_ident cid)\n    | App { f = { e = GlobalVar x }; args } when Map.mem operators x ->\n        let arity, op = Map.find_exn operators x in\n        if List.length args <> arity then\n          Error.assertion_failure e.span\n            (\"pexpr: bad arity for operator application (\" ^ op ^ \")\");\n        F.term @@ F.AST.Op (F.Ident.id_of_text op, List.map ~f:pexpr args)\n    | App\n        {\n          f = { e = GlobalVar f; _ };\n          args = [ { e = Literal (String s); _ } ];\n          generic_args = _;\n        }\n      when Global_ident.eq_name Hax_lib__int__Impl_7___unsafe_from_str f ->\n        (match\n           String.chop_prefix ~prefix:\"-\" s\n           |> Option.value ~default:s\n           |> String.filter ~f:([%matches? '0' .. 
'9'] >> not)\n         with\n        | \"\" -> ()\n        | s ->\n            Error.assertion_failure e.span\n            @@ \"pexpr: expected a integer, found the following non-digit \\\n                chars: '\" ^ s ^ \"'\");\n        F.AST.Const (F.Const.Const_int (s, None)) |> F.term\n    | App { f; args; generic_args; bounds_impls = _; trait } ->\n        let trait_generic_args = Option.map ~f:snd trait in\n        fun_application (pexpr f) args ~span:e.span ~trait_generic_args\n          ~generic_args\n    | If { cond; then_; else_ } ->\n        F.term\n        @@ F.AST.If\n             ( pexpr cond,\n               None,\n               None,\n               pexpr then_,\n               Option.value_map else_ ~default:F.unit ~f:pexpr )\n    | Array l ->\n        let len = List.length l in\n        let body = F.AST.mkConsList F.dummyRange (List.map ~f:pexpr l) in\n        let array_of_list =\n          let id =\n            Concrete_ident.of_name ~value:true\n              Rust_primitives__hax__array_of_list\n          in\n          F.term @@ F.AST.Name (pconcrete_ident id)\n        in\n        let list_ident = F.id \"list\" in\n        let list = F.term_of_lid [ \"list\" ] in\n        let assert_norm =\n          F.term_of_lid [ \"FStar\"; \"Pervasives\"; \"assert_norm\" ]\n        in\n        let equality = F.term_of_lid [ \"Prims\"; \"eq2\" ] in\n        let length = F.term_of_lid [ \"List\"; \"Tot\"; \"length\" ] in\n        let length = F.mk_e_app length [ list ] in\n        let len =\n          F.term @@ F.AST.Const (F.Const.Const_int (Int.to_string len, None))\n        in\n        let array = F.mk_e_app array_of_list [ len; list ] in\n        let formula = F.mk_e_app equality [ length; len ] in\n        let assertion = F.mk_e_app assert_norm [ formula ] in\n        let pat = F.AST.PatVar (list_ident, None, []) |> F.pat in\n        let pat =\n          match l with\n          | [] ->\n              let list_ty =\n                let prims_list = 
F.term_of_lid [ \"Prims\"; \"list\" ] in\n                let inner_typ =\n                  match e.typ with\n                  | TArray { typ; _ } -> pty e.span typ\n                  | _ ->\n                      Error.assertion_failure e.span\n                        \"Malformed type for array literal\"\n                in\n                F.mk_e_app prims_list [ inner_typ ]\n              in\n              F.pat @@ F.AST.PatAscribed (pat, (list_ty, None))\n          | _ -> pat\n        in\n        F.term\n        @@ F.AST.Let\n             ( NoLetQualifier,\n               [ (None, (pat, body)) ],\n               F.term @@ F.AST.Seq (assertion, array) )\n    | Let { lhs; rhs; body; monadic = Some (monad, _) } ->\n        let p =\n          F.pat @@ F.AST.PatAscribed (ppat lhs, (pty lhs.span lhs.typ, None))\n        in\n        let op =\n          \"let\"\n          ^\n          match monad with\n          | MResult _ -> \"|\"\n          | MOption -> \"?\"\n          | MException _ -> \"!\"\n        in\n        F.term @@ F.AST.LetOperator ([ (F.id op, p, pexpr rhs) ], pexpr body)\n    | Let { lhs; rhs; body; monadic = None } ->\n        let rec ascribe_tuple_components pattern =\n          match pattern with\n          | { p = PConstruct { constructor = `TupleCons n1; fields; _ }; _ }\n            when n1 > 1 ->\n              (* F* type inference works better if the ascription is on each component intead of the whole tuple. *)\n              F.pat\n              @@ F.AST.PatTuple\n                   ( List.map\n                       ~f:(fun { pat } -> ascribe_tuple_components pat)\n                       fields,\n                     false )\n          | _ ->\n              (* TODO: temp patch that remove annotation when we see an associated type *)\n              if [%matches? 
TAssociatedType _] @@ U.remove_tuple1 pattern.typ\n              then ppat pattern\n              else\n                F.pat\n                @@ F.AST.PatAscribed\n                     (ppat pattern, (pty pattern.span pattern.typ, None))\n        in\n        F.term\n        @@ F.AST.Let\n             ( NoLetQualifier,\n               [ (None, (ascribe_tuple_components lhs, pexpr rhs)) ],\n               pexpr body )\n    | EffectAction _ -> .\n    | Match { scrutinee; arms } ->\n        F.term\n        @@ F.AST.Match (pexpr scrutinee, None, None, List.map ~f:parm arms)\n    | Ascription { e; typ } ->\n        F.term @@ F.AST.Ascribed (pexpr e, pty e.span typ, None, false)\n    | Construct { constructor = `TupleCons 1; fields = [ (_, e') ]; base } ->\n        pexpr e'\n    | Construct { constructor = `TupleCons n; fields; base = None } ->\n        F.AST.mkTuple (List.map ~f:(snd >> pexpr) fields) F.dummyRange\n    | Construct\n        { is_record = true; is_struct = true; constructor; fields; base } ->\n        F.term\n        @@ F.AST.Record\n             ( Option.map ~f:(fst >> pexpr) base,\n               List.map\n                 ~f:(fun (f, e) -> (pfield_ident e.span f, pexpr e))\n                 fields )\n    | Construct { is_record = false; constructor; fields; base } ->\n        if [%matches? 
Some _] base then\n          Diagnostics.failure ~context:(Backend FStar) ~span:e.span\n            (AssertionFailure { details = \"non-record type with base present\" });\n        F.mk_e_app (F.term @@ F.AST.Name (pglobal_ident e.span constructor))\n        @@ List.map ~f:(snd >> pexpr) fields\n    | Construct { is_record = true; constructor; fields; base } ->\n        let r =\n          F.term\n          @@ F.AST.Record\n               ( Option.map ~f:(fst >> pexpr) base,\n                 List.map\n                   ~f:(fun (f, e') -> (pglobal_ident e.span f, pexpr e'))\n                   fields )\n        in\n        F.mk_e_app\n          (F.term @@ F.AST.Name (pglobal_ident e.span constructor))\n          [ r ]\n    | Closure { params; body } ->\n        let params =\n          List.mapi\n            ~f:(fun i p ->\n              match p.p with\n              | PBinding { var; subpat = None; _ } -> (var, p)\n              | _ ->\n                  ( Local_ident.\n                      { name = \"temp_\" ^ Int.to_string i; id = mk_id Expr (-1) },\n                    p ))\n            params\n        in\n        let body =\n          let f (lid, (pat : pat)) =\n            let rhs = { e = LocalVar lid; span = pat.span; typ = pat.typ } in\n            U.make_let pat rhs\n          in\n          List.fold_right ~init:body ~f params\n        in\n        let mk_pat ((lid, pat) : local_ident * pat) =\n          ppat (U.make_var_pat lid pat.typ pat.span)\n        in\n        F.mk_e_abs (List.map ~f:mk_pat params) (pexpr body)\n    | Return { e } ->\n        F.term @@ F.AST.App (F.term_of_lid [ \"RETURN_STMT\" ], pexpr e, Nothing)\n    | MacroInvokation { macro; args; witness } ->\n        Error.raise\n        @@ {\n             kind = UnsupportedMacro { id = [%show: global_ident] macro };\n             span = e.span;\n           }\n    | Quote quote -> pquote e.span quote |> StringToFStar.term e.span\n    | _ -> .\n\n  (** Prints a `quote` to a string *)\n  and 
pquote span { contents; _ } =\n    List.map\n      ~f:(function\n        | Verbatim code -> code\n        | Expr e -> pexpr e |> term_to_string\n        | Pattern p -> ppat p |> pat_to_string\n        | Typ p -> pty span p |> term_to_string)\n      contents\n    |> String.concat\n\n  and parm { arm = { arm_pat; body } } = (ppat arm_pat, None, pexpr body)\n\n  module FStarBinder = struct\n    type kind = Implicit | Tcresolve | Explicit\n    type t = { kind : kind; ident : F.Ident.ident; typ : F.AST.term }\n\n    let make_explicit x = { x with kind = Explicit }\n\n    let implicit_to_explicit x =\n      if [%matches? Tcresolve] x.kind then x else make_explicit x\n\n    let of_generic_param span (p : generic_param) : t =\n      let ident = plocal_ident p.ident in\n      match p.kind with\n      | GPLifetime _ -> Error.assertion_failure span \"pgeneric_param:LIFETIME\"\n      | GPType -> { kind = Implicit; typ = F.type0_term; ident }\n      | GPConst { typ } -> { kind = Explicit; typ = pty span typ; ident }\n\n    let of_generic_constraint span (nth : int) (c : generic_constraint) =\n      match c with\n      | GCLifetime _ -> .\n      | GCType { goal; name } ->\n          let typ = c_trait_goal span goal in\n          Some { kind = Tcresolve; ident = F.id name; typ }\n      | GCProjection { impl = { kind = LocalBound { id }; _ }; assoc_item; typ }\n        ->\n          let proj =\n            F.term\n            @@ F.AST.Project\n                 (F.term @@ F.AST.Var (F.lid [ id ]), pconcrete_ident assoc_item)\n          in\n          let typ =\n            F.mk_refined \"_\" (F.term_of_string \"unit\") (fun ~x ->\n                F.term\n                @@ F.AST.Op (FStar_Ident.id_of_text \"==\", [ proj; pty span typ ]))\n          in\n          Some { kind = Implicit; typ; ident = FStar_Ident.id_of_text \"_\" }\n      | _ -> None\n\n    let of_generics span generics : t list =\n      List.map ~f:(of_generic_param span) generics.params\n      @ 
(generics.constraints\n        |> List.sort ~compare:(fun c1 c2 ->\n               match (c1, c2) with\n               | GCType _, GCProjection _ -> -1\n               | GCProjection _, GCType _ -> 1\n               | _ -> 0)\n        |> List.filter_mapi ~f:(of_generic_constraint span))\n\n    let of_typ span (nth : int) typ : t =\n      let ident = F.id (\"x\" ^ Int.to_string nth) in\n      { kind = Explicit; ident; typ = pty span typ }\n\n    (** Makes an F* binder from a name and an F* type *)\n    let of_named_fstar_typ span name typ : t =\n      let ident = plocal_ident name in\n      { kind = Explicit; ident; typ }\n\n    (** Makes an F* binder from a name and an hax type *)\n    let of_named_typ span name = pty span >> of_named_fstar_typ span name\n\n    let to_pattern (x : t) : F.AST.pattern =\n      let subpat =\n        match x.kind with\n        | Tcresolve ->\n            let tcresolve =\n              Some\n                (F.AST.Meta\n                   (F.term @@ F.AST.Var FStar_Parser_Const.tcresolve_lid))\n            in\n            F.pat @@ F.AST.PatVar (x.ident, tcresolve, [])\n        | _ ->\n            let aqual =\n              match x.kind with Implicit -> Some F.AST.Implicit | _ -> None\n            in\n            F.pat @@ F.AST.PatVar (x.ident, aqual, [])\n      in\n      F.pat @@ F.AST.PatAscribed (subpat, (x.typ, None))\n\n    let to_typ (x : t) : F.AST.term = x.typ\n    let to_ident (x : t) : F.Ident.ident = x.ident\n\n    let to_term (x : t) : F.AST.term =\n      F.term @@ F.AST.Var (FStar_Ident.lid_of_ns_and_id [] (to_ident x))\n\n    let to_imp (x : t) : F.AST.imp =\n      match x.kind with Tcresolve | Implicit -> Hash | Explicit -> Nothing\n\n    let to_qualified_term : t -> F.AST.term * F.AST.imp = to_term &&& to_imp\n\n    let to_qualifier (x : t) : F.AST.arg_qualifier option =\n      match x.kind with\n      | Tcresolve -> Some TypeClassArg\n      | Implicit -> Some Implicit\n      | Explicit -> None\n\n    let to_binder (x : 
t) : F.AST.binder =\n      F.AST.\n        {\n          b = F.AST.Annotated (x.ident, x.typ);\n          brange = F.dummyRange;\n          blevel = Un;\n          aqual = to_qualifier x;\n          battributes = [];\n        }\n  end\n\n  let rec pgeneric_constraint_type span (c : generic_constraint) =\n    match c with\n    | GCLifetime _ ->\n        Error.assertion_failure span \"pgeneric_constraint_bd:LIFETIME\"\n    | GCType { goal; name = _ } -> c_trait_goal span goal\n\n  let pmaybe_refined_ty span (free_variables : string list) (attrs : attrs)\n      (binder_name : string) (ty : ty) : F.AST.term =\n    match Attrs.associated_refinement_in_type span free_variables attrs with\n    | Some refinement ->\n        F.mk_refined binder_name (pty span ty) (fun ~x -> pexpr refinement)\n    | None -> pty span ty\n\n  let add_clauses_effect_type ~self ~no_tot_abbrev (attrs : attrs) typ :\n      F.AST.typ =\n    let attr_term ?keep_last_args ?map_expr kind f =\n      (* A clause on a method with a `self` produces a function whose first argument is `self_`.\n         `subst_self` will substitute that first argument `self_` into the provided local identifier `self`.\n      *)\n      let subst_self : (expr -> expr) option =\n        (* If `self` was present on the original function.  *)\n        let* self = self in\n        (* Lookup the pre/post/decreases function, get the first argument: that is `self`. 
*)\n        let* self' =\n          let* _, params, _ = Attrs.associated_fn kind attrs in\n          let* first_param = List.hd params in\n          let* { var; _ } = Destruct.pat_PBinding first_param.pat in\n          Some var\n        in\n        let f id = if [%eq: local_ident] self' id then self else id in\n        Some ((U.Mappers.rename_local_idents f)#visit_expr ())\n      in\n      Attrs.associated_expr ?keep_last_args kind attrs\n      |> Option.map\n           ~f:\n             (Option.value ~default:Fn.id subst_self\n             >> Option.value ~default:Fn.id map_expr\n             >> pexpr >> f >> F.term)\n    in\n    let extract_any_to_unit_payload =\n      let visitor =\n        object\n          inherit [_] U.Visitors.map as super\n\n          method! visit_expr () e =\n            match e.e with\n            | App { f = { e = GlobalVar f; _ }; args = [ e ]; _ }\n              when Global_ident.eq_name Hax_lib__any_to_unit f ->\n                e\n            | _ -> super#visit_expr () e\n        end\n      in\n      visitor#visit_expr ()\n    in\n    let decreases =\n      attr_term Decreases ~map_expr:extract_any_to_unit_payload (fun t ->\n          F.AST.Decreases (t, None))\n    in\n    let smtpat =\n      let smt_pat = F.term_of_lid [ \"SMTPat\" ] in\n      attr_term SMTPat ~map_expr:extract_any_to_unit_payload (fun t ->\n          let payload = F.mk_e_app smt_pat [ t ] in\n          (F.AST.mkConsList F.dummyRange [ payload ]).tm)\n    in\n    let is_lemma = Attrs.lemma attrs in\n    let prepost_bundle =\n      let trivial_pre = F.term_of_lid [ \"Prims\"; \"l_True\" ] in\n      let trivial_post =\n        if is_lemma then trivial_pre\n        else F.mk_e_abs [ F.pat @@ F.AST.PatWild (None, []) ] trivial_pre\n      in\n      let pre = attr_term Requires (fun t -> F.AST.Requires (t, None)) in\n      let post =\n        let keep_last_args = if is_lemma then 0 else 1 in\n        attr_term ~keep_last_args Ensures (fun t -> F.AST.Ensures (t, None))\n 
     in\n      if is_lemma || no_tot_abbrev || Option.is_some pre || Option.is_some post\n      then\n        Some\n          ( Option.value ~default:trivial_pre pre,\n            Option.value ~default:trivial_post post )\n      else None\n    in\n    let args =\n      (Option.map ~f:(fun (req, ens) -> [ req; ens ]) prepost_bundle\n      |> Option.value ~default:[])\n      @ Option.to_list decreases @ Option.to_list smtpat\n    in\n    match args with\n    | [] -> typ\n    | _ ->\n        let mk namespace eff = F.term_of_lid (namespace @ [ eff ]) in\n        let prims = mk [ \"Prims\" ] in\n        let eff =\n          if Option.is_some prepost_bundle then\n            if is_lemma then mk [] \"Lemma\" else prims \"Pure\"\n          else prims \"Tot\"\n        in\n        F.mk_e_app eff (if is_lemma then List.drop args 1 else typ :: args)\n\n  (** Prints doc comments out of a list of attributes *)\n  let pdoc_comments attrs =\n    attrs\n    |> List.filter_map ~f:(fun (attr : attr) ->\n           match attr.kind with\n           | DocComment { kind; body } -> Some (kind, body)\n           | _ -> None)\n    |> List.map ~f:(fun (kind, string) ->\n           match kind with\n           | DCKLine ->\n               String.split_lines string\n               |> List.map ~f:(fun s -> \"///\" ^ s)\n               |> String.concat_lines\n           | DCKBlock -> \"(**\" ^ string ^ \"*)\")\n    |> List.map ~f:(fun s -> `VerbatimIntf (s, `NoNewline))\n\n  let rec pitem (e : item) :\n      [> `Impl of F.AST.decl\n      | `Intf of F.AST.decl\n      | `VerbatimImpl of string * [ `NoNewline | `Newline ]\n      | `VerbatimIntf of string * [ `NoNewline | `Newline ]\n      | `Comment of string ]\n      list =\n    try\n      match pitem_unwrapped e with\n      | [] -> []\n      | printed_items ->\n          (* Print comments only for items that are being printed *)\n          pdoc_comments e.attrs @ printed_items\n    with Diagnostics.SpanFreeError.Exn error ->\n      let error = 
Diagnostics.SpanFreeError.payload error in\n      let error = [%show: Diagnostics.Context.t * Diagnostics.kind] error in\n      [\n        `Comment\n          (\"item error backend: \" ^ error ^ \"\\n\\nLast AST:\\n\"\n          ^ (U.LiftToFullAst.item e |> Print_rust.pitem_str));\n      ]\n\n  and pitem_unwrapped (e : item) :\n      [> `Impl of F.AST.decl\n      | `Intf of F.AST.decl\n      | `VerbatimImpl of string * [ `NoNewline | `Newline ]\n      | `VerbatimIntf of string * [ `NoNewline | `Newline ]\n      | `Comment of string ]\n      list =\n    let is_erased = Attrs.is_erased e.attrs in\n    let erased_impl name ty attrs binders =\n      let name' = F.id_prime name in\n      let pat = F.AST.PatVar (name, None, []) in\n      let term = F.term @@ F.AST.Var (F.lid_of_id @@ name') in\n      let pat, term =\n        match binders with\n        | [] -> (pat, term)\n        | _ ->\n            ( F.AST.PatApp\n                (F.pat pat, List.map ~f:FStarBinder.to_pattern binders),\n              List.fold_left binders ~init:term ~f:(fun term binder ->\n                  let binder_term, binder_imp =\n                    FStarBinder.to_qualified_term binder\n                  in\n                  F.term @@ F.AST.App (term, binder_term, binder_imp)) )\n      in\n      [\n        F.decl ~quals:[ Assumption ] ~fsti:false ~attrs\n        @@ F.AST.Assume (name', ty);\n        F.decl\n          ~quals:\n            (if ctx.interface_mode then []\n             else [ Unfold_for_unification_and_vcgen ])\n          ~fsti:false\n        @@ F.AST.TopLevelLet (NoLetQualifier, [ (F.pat @@ pat, term) ]);\n      ]\n    in\n    match e.v with\n    | Alias { name; item } ->\n        (* These should come from bundled items (in the case of cyclic module dependencies).\n           We make use of this f* feature: https://github.com/FStarLang/FStar/pull/3369 *)\n        let bundle = (RenderId.render item).path |> module_name in\n        [\n          `VerbatimImpl\n            ( 
Printf.sprintf \"include %s {%s as %s}\" bundle\n                (RenderId.render item).name (RenderId.render name).name,\n              `Newline );\n        ]\n    | Fn { name; generics; body; params } ->\n        let name = F.id @@ (RenderId.render name).name in\n        let pat = F.pat @@ F.AST.PatVar (name, None, []) in\n        let generics = FStarBinder.of_generics e.span generics in\n        let pat_args =\n          List.map ~f:FStarBinder.to_pattern generics\n          @ List.map\n              ~f:(fun { pat; typ_span; typ } ->\n                let span = Option.value ~default:e.span typ_span in\n                F.pat @@ F.AST.PatAscribed (ppat pat, (pty span typ, None)))\n              params\n        in\n        let pat = F.pat @@ F.AST.PatApp (pat, pat_args) in\n        let qualifier = F.AST.(NoLetQualifier) in\n        let impl =\n          F.decl ~fsti:false\n          @@ F.AST.TopLevelLet (qualifier, [ (pat, pexpr body) ])\n        in\n        let is_const = List.is_empty params in\n        let ty =\n          add_clauses_effect_type\n            ~self:\n              (let* hd = List.hd params in\n               let* { var; _ } = Destruct.pat_PBinding hd.pat in\n               let*? 
() = String.equal var.name \"self\" in\n               Some var)\n            ~no_tot_abbrev:(ctx.interface_mode && not is_const)\n            e.attrs (pty body.span body.typ)\n        in\n        let arrow_typ =\n          F.term\n          @@ F.AST.Product\n               ( List.map ~f:FStarBinder.to_binder generics\n                 @ List.mapi\n                     ~f:(fun i { pat; typ_span; typ } ->\n                       let name =\n                         match pat.p with\n                         | PBinding { var; _ } ->\n                             Some (RenderId.local_ident var)\n                         | _ ->\n                             (* TODO: this might generate bad code,\n                                see\n                                https://github.com/hacspec/hax/issues/402\n                             *)\n                             None\n                       in\n                       let name = Option.map ~f:F.id name in\n                       let span = Option.value ~default:e.span typ_span in\n                       pty span typ |> F.binder_of_term ?name)\n                     params,\n                 ty )\n        in\n        let pat = F.pat @@ F.AST.PatAscribed (pat, (ty, None)) in\n        let full =\n          F.decl @@ F.AST.TopLevelLet (qualifier, [ (pat, pexpr body) ])\n        in\n\n        let intf = F.decl ~fsti:true (F.AST.Val (name, arrow_typ)) in\n\n        let erased = erased_impl name arrow_typ [] generics in\n        let impl, full =\n          if is_erased then (erased, erased) else ([ impl ], [ full ])\n        in\n        if ctx.interface_mode && ((not is_const) || is_erased) then intf :: impl\n        else full\n    | TyAlias { name; generics; ty } ->\n        let pat =\n          F.pat @@ F.AST.PatVar (F.id @@ (RenderId.render name).name, None, [])\n        in\n        let ty, quals =\n          (* Adds a refinement if a refinement attribute is detected *)\n          match Attrs.associated_expr 
~keep_last_args:1 Ensures e.attrs with\n          | Some { e = Closure { params = [ binder ]; body; _ }; _ } ->\n              let binder, _ =\n                U.Expect.pbinding_simple binder |> Option.value_exn\n              in\n              let ty =\n                F.mk_refined (plocal_ident_str binder) (pty e.span ty)\n                  (fun ~x -> pexpr body)\n              in\n              (ty, [])\n          | _ -> (pty e.span ty, [ F.AST.Unfold_for_unification_and_vcgen ])\n        in\n        F.decls ~quals\n        @@ F.AST.TopLevelLet\n             ( NoLetQualifier,\n               [\n                 ( F.pat\n                   @@ F.AST.PatApp\n                        ( pat,\n                          FStarBinder.(\n                            of_generics e.span generics\n                            |> List.map ~f:to_pattern) ),\n                   ty );\n               ] )\n    | Type { name; generics; _ } when is_erased ->\n        let generics =\n          FStarBinder.of_generics e.span generics\n          |> List.map ~f:FStarBinder.implicit_to_explicit\n        in\n        let ty = F.eqtype_term in\n        let arrow_typ =\n          F.term\n          @@ F.AST.Product (List.map ~f:FStarBinder.to_binder generics, ty)\n        in\n        let name = F.id @@ (RenderId.render name).name in\n        let erased = erased_impl name arrow_typ [] generics in\n        let intf = F.decl ~fsti:true (F.AST.Val (name, arrow_typ)) in\n        if ctx.interface_mode then intf :: erased else erased\n    | Type\n        {\n          name;\n          generics;\n          variants = [ { arguments; is_record = true; _ } ];\n          is_struct = true;\n        } ->\n        F.decls\n        @@ F.AST.Tycon\n             ( false,\n               false,\n               [\n                 F.AST.TyconRecord\n                   ( F.id @@ (RenderId.render name).name,\n                     FStarBinder.of_generics e.span generics\n                     |> List.map 
~f:FStarBinder.implicit_to_explicit\n                     |> List.map ~f:FStarBinder.to_binder,\n                     None,\n                     [],\n                     List.map\n                       ~f:(fun (prev, (field, ty, attrs)) ->\n                         let fname : string = (RenderId.render field).name in\n                         let fvars =\n                           List.map prev ~f:(fun (field, _, _) ->\n                               (RenderId.render field).name)\n                         in\n                         ( F.id fname,\n                           None,\n                           [],\n                           pmaybe_refined_ty e.span fvars attrs fname ty ))\n                       (inits arguments) );\n               ] )\n    | Type { name; generics; variants; _ } ->\n        let self =\n          F.mk_e_app\n            (F.term_of_lid [ (RenderId.render name).name ])\n            (List.map\n               ~f:FStarBinder.(of_generic_param e.span >> to_ident)\n               generics.params\n            |> List.map ~f:(fun id -> F.term @@ F.AST.Name (F.lid_of_id id)))\n        in\n\n        let constructors =\n          List.map\n            ~f:(fun { name; arguments; is_record; _ } ->\n              ( F.id (RenderId.render name).name,\n                Some\n                  (let field_indexes =\n                     List.map ~f:(fst3 >> index_of_field_concrete) arguments\n                   in\n                   if is_record then\n                     F.AST.VpRecord\n                       ( List.map\n                           ~f:(fun (field, ty, attrs) ->\n                             let fname : string =\n                               (RenderId.render field).name\n                             in\n                             (F.id fname, None, [], pty e.span ty))\n                           arguments,\n                         Some self )\n                   else\n                     F.AST.VpArbitrary\n                      
 (F.term\n                       @@ F.AST.Product\n                            ( List.map\n                                ~f:(fun (_, ty, _) ->\n                                  F.mk_e_binder @@ F.AST.NoName (pty e.span ty))\n                                arguments,\n                              self ))),\n                [] ))\n            variants\n        in\n        F.decls\n        @@ F.AST.Tycon\n             ( false,\n               false,\n               [\n                 F.AST.TyconVariant\n                   ( F.id @@ (RenderId.render name).name,\n                     FStarBinder.of_generics e.span generics\n                     |> List.map ~f:FStarBinder.implicit_to_explicit\n                     |> List.map ~f:FStarBinder.to_binder,\n                     None,\n                     constructors );\n               ] )\n    | IMacroInvokation { macro; argument; span } -> (\n        let open Hacspeclib_macro_parser in\n        let unsupported_macro () =\n          Error.raise\n          @@ {\n               kind = UnsupportedMacro { id = [%show: concrete_ident] macro };\n               span = e.span;\n             }\n        in\n        match RenderId.render macro with\n        | { path = \"hacspec_lib\" :: _; name } -> (\n            let unwrap r =\n              match r with\n              | Ok r -> r\n              | Error details ->\n                  let macro_id = [%show: concrete_ident] macro in\n                  Error.raise\n                    {\n                      kind = ErrorParsingMacroInvocation { macro_id; details };\n                      span = e.span;\n                    }\n            in\n            let mk_typ_name name = \"t_\" ^ String.lowercase name in\n            match name with\n            | \"public_nat_mod\" ->\n                let o = PublicNatMod.parse argument |> unwrap in\n                (F.decls_of_string @@ \"unfold type \" ^ mk_typ_name o.type_name\n               ^ \"  = nat_mod 0x\" ^ o.modulo_value)\n      
          @ F.decls_of_string @@ \"unfold type \"\n                ^ mk_typ_name o.type_of_canvas\n                ^ \"  = lseq pub_uint8 \"\n                ^ string_of_int o.bit_size_of_field\n            | \"bytes\" ->\n                let o = Bytes.parse argument |> unwrap in\n                F.decls_of_string @@ \"unfold type \" ^ mk_typ_name o.bytes_name\n                ^ \"  = lseq uint8 \" ^ o.size\n            | \"public_bytes\" ->\n                let o = Bytes.parse argument |> unwrap in\n                F.decls_of_string @@ \"unfold type \" ^ mk_typ_name o.bytes_name\n                ^ \"  = lseq uint8 \" ^ o.size\n            | \"array\" ->\n                let o = Array.parse argument |> unwrap in\n                let typ =\n                  match o.typ with\n                  | \"U32\" -> \"uint32\"\n                  | \"U16\" -> \"uint16\"\n                  | \"U8\" -> \"uint8\"\n                  | usize -> \"uint_size\"\n                in\n                let size = o.size in\n                let array_def =\n                  F.decls_of_string @@ \"unfold type \" ^ mk_typ_name o.array_name\n                  ^ \"  = lseq \" ^ typ ^ \" \" ^ size\n                in\n                let index_def =\n                  match o.index_typ with\n                  | Some index ->\n                      F.decls_of_string @@ \"unfold type \"\n                      ^ mk_typ_name (o.array_name ^ \"_idx\")\n                      ^ \" = nat_mod \" ^ size\n                  | None -> []\n                in\n                array_def @ index_def\n            | \"unsigned_public_integer\" ->\n                let o = UnsignedPublicInteger.parse argument |> unwrap in\n                F.decls_of_string @@ \"unfold type \" ^ mk_typ_name o.integer_name\n                ^ \"  = lseq uint8 (\"\n                ^ (Int.to_string @@ ((o.bits + 7) / 8))\n                ^ \")\"\n            | _ -> unsupported_macro ())\n        | _ -> unsupported_macro ())\n    | Trait 
{ name; generics; items } ->\n        let name_str = (RenderId.render name).name in\n        let name_id = F.id @@ name_str in\n        let fields =\n          List.concat_map\n            ~f:(fun i ->\n              let name = (RenderId.render i.ti_ident).name in\n              let generics = FStarBinder.of_generics i.ti_span i.ti_generics in\n              let bds = generics |> List.map ~f:FStarBinder.to_binder in\n              let fields =\n                match i.ti_v with\n                | TIType bounds ->\n                    let t = F.type0_term in\n                    (* let constraints = *)\n                    (*   List.map *)\n                    (*     ~f:(fun implements -> *)\n                    (*       { typ = TApp { ident = i.ti_ident } }) *)\n                    (*     bounds *)\n                    (* in *)\n                    ( F.id name,\n                      None,\n                      [ F.term @@ F.AST.Var FStar_Parser_Const.no_method_lid ],\n                      t )\n                    :: List.map\n                         ~f:(fun\n                             { goal = { trait; args }; name = impl_ident_name }\n                           ->\n                           let base =\n                             F.term @@ F.AST.Name (pconcrete_ident trait)\n                           in\n                           let args =\n                             List.map ~f:(pgeneric_value e.span) args\n                           in\n                           ( F.id (name ^ \"_\" ^ impl_ident_name),\n                             (* Dodgy concatenation *)\n                             None,\n                             [],\n                             F.mk_e_app base args ))\n                         bounds\n                | TIFn ty\n                  when Attrs.find_unique_attr i.ti_attrs ~f:(function\n                         | TraitMethodNoPrePost -> Some ()\n                         | _ -> None)\n                       |> Option.is_some 
->\n                    let weakest =\n                      let h kind =\n                        Attrs.associated_fns kind i.ti_attrs\n                        |> List.hd\n                        |> Option.map ~f:(fun attr ->\n                               ( attr,\n                                 [%eq: Attr_payloads.AssocRole.t] kind Requires\n                               ))\n                      in\n                      Option.first_some (h Ensures) (h Requires)\n                      |> Option.map\n                           ~f:(fun ((generics, params, expr), is_req) ->\n                             let dummy_self =\n                               List.find generics.params\n                                 ~f:[%matches? { kind = GPType _; _ }]\n                               |> Option.value_or_thunk ~default:(fun () ->\n                                      Error.assertion_failure i.ti_span\n                                        (\"Expected a first generic of type \\\n                                          `Self`. 
Instead generics params \\\n                                          are: \"\n                                        ^ [%show: generic_param list]\n                                            generics.params))\n                               |> fun x -> x.ident\n                             in\n                             let self =\n                               Local_ident.{ name = \"Self\"; id = mk_id Typ 0 }\n                             in\n                             let renamer =\n                               let f (id : local_ident) =\n                                 if [%eq: string] dummy_self.name id.name then\n                                   self\n                                 else id\n                               in\n                               U.Mappers.rename_local_idents f\n                             in\n                             let generics =\n                               renamer#visit_generics () generics\n                             in\n                             let params =\n                               List.map ~f:(renamer#visit_param ()) params\n                             in\n                             let expr = renamer#visit_expr () expr in\n                             (generics, params, expr, is_req))\n                    in\n                    let ty =\n                      let variables =\n                        let idents_visitor = U.Reducers.collect_local_idents in\n                        idents_visitor#visit_trait_item () i\n                        :: (Option.map\n                              ~f:(fun (generics, params, expr, _) ->\n                                [\n                                  idents_visitor#visit_generics () generics;\n                                  idents_visitor#visit_expr () expr;\n                                ]\n                                @ List.map\n                                    ~f:(idents_visitor#visit_param ())\n                                    
params)\n                              weakest\n                           |> Option.value ~default:[])\n                        |> Set.union_list (module Local_ident)\n                        |> Set.to_list |> ref\n                      in\n                      let mk_fresh prefix =\n                        let v = U.fresh_local_ident_in !variables prefix in\n                        variables := v :: !variables;\n                        v\n                      in\n                      let bindings = ref [] in\n                      let f (p : param) =\n                        let name =\n                          match p.pat.p with\n                          | PBinding { var; _ } -> var\n                          | _ ->\n                              let name = mk_fresh \"x\" in\n                              let ({ span; typ; _ } : pat) = p.pat in\n                              let expr = { e = LocalVar name; span; typ } in\n                              bindings := (p.pat, expr) :: !bindings;\n                              name\n                        in\n                        FStarBinder.of_named_typ p.pat.span name p.typ\n                      in\n                      weakest\n                      |> Option.map ~f:(fun (generics, binders, expr, is_req) ->\n                             (generics, List.map ~f binders, expr, is_req))\n                      |> Option.map\n                           ~f:(fun (generics, binders, (expr : expr), is_req) ->\n                             let result_ident = mk_fresh \"pred\" in\n                             let result_bd =\n                               FStarBinder.of_named_fstar_typ expr.span\n                                 result_ident F.type0_term\n                             in\n                             let expr = U.make_lets !bindings expr in\n                             let expr = pexpr expr in\n                             let result =\n                               F.term\n                        
       @@ F.AST.Var\n                                    (plocal_ident result_ident |> F.lid_of_id)\n                             in\n                             let result =\n                               F.AST.Refine\n                                 ( FStarBinder.to_binder result_bd,\n                                   (if is_req then Fn.flip else Fn.id)\n                                     F.implies result expr )\n                               |> F.term\n                             in\n                             F.AST.Product\n                               ( List.map ~f:FStarBinder.to_binder binders,\n                                 result )\n                             |> F.term)\n                      |> Option.value_or_thunk ~default:(fun _ ->\n                             let ty = pty e.span ty in\n                             match ty.tm with\n                             | F.AST.Product (inputs, _) ->\n                                 {\n                                   ty with\n                                   tm = F.AST.Product (inputs, F.type0_term);\n                                 }\n                             | _ -> F.type0_term)\n                    in\n\n                    let ty =\n                      F.term\n                      @@ F.AST.Product\n                           (generics |> List.map ~f:FStarBinder.to_binder, ty)\n                    in\n                    [ (F.id name, None, [], ty) ]\n                | TIFn (TArrow (inputs, output)) ->\n                    let inputs =\n                      List.mapi ~f:(FStarBinder.of_typ e.span) inputs\n                    in\n                    let inputs = generics @ inputs in\n                    let output = pty e.span output in\n                    let ty_pre_post =\n                      let inputs =\n                        List.map ~f:FStarBinder.to_qualified_term inputs\n                      in\n                      let add_pre n = n ^ \"_pre\" in\n               
       let pre_name_str =\n                        (RenderId.render\n                           (Concrete_ident.with_suffix `Pre i.ti_ident))\n                          .name\n                      in\n                      let pre =\n                        F.mk_app (F.term_of_lid [ pre_name_str ]) inputs\n                      in\n                      let result = F.term_of_lid [ \"result\" ] in\n                      let add_post n = n ^ \"_post\" in\n                      let post_name_str =\n                        (RenderId.render\n                           (Concrete_ident.with_suffix `Post i.ti_ident))\n                          .name\n                      in\n                      let post =\n                        F.mk_app\n                          (F.term_of_lid [ post_name_str ])\n                          (inputs @ [ (result, Nothing) ])\n                      in\n                      let post =\n                        F.mk_e_abs\n                          [ F.pat @@ F.AST.PatVar (F.id \"result\", None, []) ]\n                          post\n                      in\n                      F.mk_e_app\n                        (F.term_of_lid [ \"Prims\"; \"Pure\" ])\n                        [ output; pre; post ]\n                    in\n                    let inputs = List.map ~f:FStarBinder.to_binder inputs in\n                    let ty = F.term @@ F.AST.Product (inputs, ty_pre_post) in\n                    [ (F.id name, None, [], ty) ]\n                | TIFn non_arrow_ty ->\n                    let inputs = generics in\n                    let output = pty e.span non_arrow_ty in\n                    let inputs = List.map ~f:FStarBinder.to_binder inputs in\n                    let ty = F.term @@ F.AST.Product (inputs, output) in\n                    [ (F.id name, None, [], ty) ]\n                | _ -> .\n              in\n              List.map ~f:Fn.id\n                (* ~f:(fun (n, q, a, ty) -> (n, q, a, F.mk_e_app bds ty)) *)\n              
  fields)\n            items\n        in\n        let constraints_fields : FStar_Parser_AST.tycon_record =\n          generics.constraints\n          |> List.filter_map ~f:(fun c ->\n                 match c with\n                 | GCType { goal = bound; name = id } ->\n                     let name = \"_super_\" ^ id in\n                     let typ = pgeneric_constraint_type e.span c in\n                     Some (F.id name, None, [ F.Attrs.no_method ], typ)\n                 | GCProjection _ ->\n                     (* TODO: Not yet implemented, see https://github.com/hacspec/hax/issues/785 *)\n                     None\n                 | _ -> .)\n        in\n        let fields : FStar_Parser_AST.tycon_record =\n          constraints_fields @ fields\n        in\n        let fields : FStar_Parser_AST.tycon_record =\n          if List.is_empty fields then\n            let marker_field = \"__marker_trait_\" ^ name_str in\n            [ (F.id marker_field, None, [], pty e.span U.unit_typ) ]\n          else fields\n        in\n        (* Binders are explicit on class definitions *)\n        let bds =\n          List.map\n            ~f:\n              FStarBinder.(\n                of_generic_param e.span >> implicit_to_explicit >> to_binder)\n            generics.params\n        in\n        let tcdef = F.AST.TyconRecord (name_id, bds, None, [], fields) in\n        let d = F.AST.Tycon (false, true, [ tcdef ]) in\n        (* This helps f* in type class resolution *)\n        let constraints_export =\n          constraints_fields\n          |> List.map ~f:(fun (super_name, _, _, typ) ->\n                 let super_name = FStar_Ident.string_of_id super_name in\n                 let tc_name = FStar_Ident.string_of_id name_id in\n                 let typ = FStar_Parser_AST.term_to_string typ in\n                 let binders = FStar_Parser_AST.binders_to_string \") (\" bds in\n                 let tc_instance =\n                   name_id\n                   :: 
FStar_Parser_AST.idents_of_binders bds\n                        FStar_Compiler_Range.dummyRange\n                   |> List.map ~f:FStar_Ident.string_of_id\n                   |> String.concat ~sep:\" \"\n                 in\n                 `VerbatimIntf\n                   ( \"[@@ FStar.Tactics.Typeclasses.tcinstance]\\nlet _ = fun (\"\n                     ^ binders ^ \") {|i: \" ^ tc_instance ^ \"|} -> i.\"\n                     ^ super_name,\n                     `Newline ))\n        in\n        `Intf { d; drange = F.dummyRange; quals = []; attrs = [] }\n        :: constraints_export\n    | Impl\n        {\n          generics;\n          self_ty = _;\n          of_trait = trait, generic_args;\n          items;\n          parent_bounds;\n        } ->\n        let name = (RenderId.render e.ident).name |> F.id in\n        let pat = F.pat @@ F.AST.PatVar (name, None, []) in\n        let generics = FStarBinder.of_generics e.span generics in\n        let pat =\n          F.pat\n          @@ F.AST.PatApp (pat, List.map ~f:FStarBinder.to_pattern generics)\n        in\n        let typ =\n          F.mk_e_app\n            (F.term @@ F.AST.Name (pconcrete_ident trait))\n            (List.map ~f:(pgeneric_value e.span) generic_args)\n        in\n        let pat = F.pat @@ F.AST.PatAscribed (pat, (typ, None)) in\n        let fields =\n          List.concat_map\n            ~f:(fun { ii_span; ii_generics; ii_v; ii_ident } ->\n              let name = (RenderId.render ii_ident).name in\n              let ii_generics =\n                {\n                  ii_generics with\n                  constraints =\n                    List.filter ~f:[%matches? 
GCType _] ii_generics.constraints;\n                }\n              in\n              match ii_v with\n              | IIFn { body; params } ->\n                  let pats =\n                    FStarBinder.(\n                      of_generics ii_span ii_generics |> List.map ~f:to_pattern)\n                    @ List.map\n                        ~f:(fun { pat; typ_span; typ } ->\n                          let span = Option.value ~default:ii_span typ_span in\n                          F.pat\n                          @@ F.AST.PatAscribed (ppat pat, (pty span typ, None)))\n                        params\n                  in\n                  [ (F.lid [ name ], F.mk_e_abs pats (pexpr body)) ]\n              | IIType { typ; parent_bounds } ->\n                  (F.lid [ name ], pty ii_span typ)\n                  :: List.map\n                       ~f:(fun (_impl_expr, impl_ident) ->\n                         (F.lid [ name ^ \"_\" ^ impl_ident.name ], F.tc_solve))\n                       parent_bounds)\n            items\n        in\n        let parent_bounds_fields =\n          List.map\n            ~f:(fun (_impl_expr, impl_ident) ->\n              (F.lid [ \"_super_\" ^ impl_ident.name ], F.tc_solve))\n            parent_bounds\n        in\n        let fields = parent_bounds_fields @ fields in\n        let fields =\n          if List.is_empty fields then\n            [\n              ( F.lid\n                  [\n                    \"__marker_trait_\"\n                    ^ List.last_exn\n                        (FStar_Ident.path_of_lid (pconcrete_ident trait));\n                  ],\n                pexpr (U.unit_expr e.span) );\n            ]\n          else fields\n        in\n        let body = F.term @@ F.AST.Record (None, fields) in\n        let tcinst = F.term @@ F.AST.Var FStar_Parser_Const.tcinstance_lid in\n        let has_type =\n          List.exists items ~f:(fun { ii_v; _ } ->\n              match ii_v with IIType _ -> true | _ -> false)\n        
in\n        let let_impl = F.AST.TopLevelLet (NoLetQualifier, [ (pat, body) ]) in\n        let generics_binders = List.map ~f:FStarBinder.to_binder generics in\n        let val_type = F.term @@ F.AST.Product (generics_binders, typ) in\n        let v = F.AST.Val (name, val_type) in\n        let intf = F.decls ~fsti:true ~attrs:[ tcinst ] v in\n        let impl =\n          if is_erased then erased_impl name val_type [ tcinst ] generics\n          else\n            F.decls\n              ~fsti:(ctx.interface_mode && has_type)\n              ~attrs:[ tcinst ] let_impl\n        in\n        let is_auto_clone =\n          List.exists\n            ~f:(function\n              | { kind = Tool { path = \"automatically_derived\"; _ }; _ } -> true\n              | _ -> false)\n            e.attrs\n          && Concrete_ident.eq_name Core__clone__Clone trait\n        in\n        let intf = if has_type && not is_erased then [] else intf in\n        if is_erased && is_auto_clone then\n          F.decls ~fsti:ctx.interface_mode\n            (F.AST.TopLevelLet (NoLetQualifier, [ (pat, dummy_clone_impl) ]))\n        else if ctx.interface_mode then intf @ impl\n        else impl\n    | Quote { quote; _ } ->\n        let fstar_opts =\n          Attrs.find_unique_attr e.attrs ~f:(function\n            | ItemQuote q -> Some q.fstar_options\n            | _ -> None)\n          |> Option.value_or_thunk ~default:(fun _ ->\n                 Error.assertion_failure e.span\n                   \"Malformed `Quote` item: could not find a ItemQuote payload\")\n          |> Option.value ~default:Types.{ intf = false; impl = true }\n        in\n        let payload = (pquote e.span quote, `Newline) in\n        if ctx.interface_mode then\n          (if fstar_opts.intf then [ `VerbatimIntf payload ] else [])\n          @ if fstar_opts.impl then [ `VerbatimImpl payload ] else []\n        else [ `VerbatimImpl payload ]\n    | HaxError details ->\n        [\n          `Comment\n            (\"item error 
backend: \" ^ details ^ \"\\n\\nLast AST:\\n\"\n            ^ (U.LiftToFullAst.item e |> Print_rust.pitem_str));\n        ]\n    | Use _ (* TODO: Not Yet Implemented *) | NotImplementedYet -> []\n    | _ -> .\nend\n\nmodule type S = sig\n  val decl_to_string : F.AST.decl -> string\n\n  val pitem :\n    item ->\n    [> `Impl of F.AST.decl\n    | `Intf of F.AST.decl\n    | `VerbatimImpl of string * [ `NoNewline | `Newline ]\n    | `VerbatimIntf of string * [ `NoNewline | `Newline ]\n    | `Comment of string ]\n    list\nend\n\nlet make (module M : Attrs.WITH_ITEMS) ctx =\n  (module Make\n            (M)\n            (struct\n              let ctx = ctx\n            end) : S)\n\nlet strings_of_item (bo : BackendOptions.t) m items (item : item) :\n    ([> `Impl of string | `Intf of string ] * [ `NoNewline | `Newline ]) list =\n  let interface_mode' : Types.inclusion_kind =\n    List.rev bo.interfaces\n    |> List.find ~f:(fun (clause : Types.inclusion_clause) ->\n           let namespace = clause.namespace in\n           (* match anything under that **module** namespace *)\n           let namespace =\n             {\n               namespace with\n               chunks = namespace.chunks @ [ Glob One; Glob Many ];\n             }\n           in\n           Concrete_ident.matches_namespace namespace item.ident)\n    |> Option.map ~f:(fun (clause : Types.inclusion_clause) -> clause.kind)\n    |> Option.value ~default:(Types.Excluded : Types.inclusion_kind)\n  in\n  let interface_mode =\n    not ([%matches? (Types.Excluded : Types.inclusion_kind)] interface_mode')\n  in\n  let (module Print) =\n    make m\n      {\n        current_namespace = (RenderId.render item.ident).path;\n        interface_mode;\n        items;\n        line_width = bo.line_width;\n      }\n  in\n  let mk_impl i = `Impl i in\n  let mk_intf = if interface_mode then fun i -> `Intf i else fun i -> `Impl i in\n  let no_impl =\n    [%matches? 
(Types.Included None' : Types.inclusion_kind)] interface_mode'\n  in\n  Print.pitem item\n  |> List.concat_map ~f:(function\n       | `Impl i -> [ (mk_impl (Print.decl_to_string i), `Newline) ]\n       | `Intf i -> [ (mk_intf (Print.decl_to_string i), `Newline) ]\n       | `VerbatimIntf (s, nl) -> [ (mk_intf s, nl) ]\n       | `VerbatimImpl (s, nl) -> [ (`Impl s, nl) ]\n       | `Comment s ->\n           let s = \"(* \" ^ s ^ \" *)\" in\n           if interface_mode then [ (`Impl s, `Newline); (`Intf s, `Newline) ]\n           else [ (`Impl s, `Newline) ])\n  |> List.filter ~f:(function `Impl _, _ when no_impl -> false | _ -> true)\n\ntype rec_prefix = NonRec | FirstMutRec | MutRec\n\nlet string_of_items ~mod_name ~bundles (bo : BackendOptions.t) m items :\n    string * string =\n  let collect_trait_goal_idents =\n    object\n      inherit [_] Visitors.reduce as super\n      inherit [_] U.Sets.Concrete_ident.monoid as _m\n\n      method! visit_trait_goal (_env : unit) x =\n        Set.singleton (module Concrete_ident) x.trait\n    end\n  in\n  let header =\n    let lines =\n      List.map ~f:(collect_trait_goal_idents#visit_item ()) items\n      |> Set.union_list (module Concrete_ident)\n      |> Set.map\n           (module String)\n           ~f:(fun i -> (RenderId.render i).path |> module_name)\n      |> Fn.flip Set.remove mod_name\n      |> Set.to_list\n      |> List.filter ~f:(fun m ->\n             (* Special treatment for modules handled specifically in our F* libraries *)\n             String.is_prefix ~prefix:\"Core_models.\" m |> not\n             && String.is_prefix ~prefix:\"Alloc.\" m |> not\n             && String.equal \"Hax_lib.Int\" m |> not)\n      |> List.map ~f:(fun mod_path -> \"let open \" ^ mod_path ^ \" in\")\n    in\n    match lines with\n    | [] -> \"\"\n    | _ ->\n        \"let _ =\"\n        ^ ([\n             \"(* This module has implicit dependencies, here we make them \\\n              explicit. 
*)\";\n             \"(* The implicit dependencies arise from typeclasses instances. *)\";\n           ]\n           @ lines @ [ \"()\" ]\n          |> List.map ~f:(( ^ ) \"\\n  \")\n          |> String.concat ~sep:\"\")\n        ^ \"\\n\\n\"\n  in\n  let map_string ~f ?(map_intf = true) (str, space) =\n    ( (match str with\n      | `Impl s -> `Impl (f s)\n      | `Intf s -> `Intf (if map_intf then f s else s)),\n      space )\n  in\n  let replace_in_strs ~pattern ~with_ =\n    List.map\n      ~f:\n        (map_string ~map_intf:false ~f:(fun str ->\n             String.substr_replace_first ~pattern ~with_ str))\n  in\n\n  (* Each of these bundles contains recursive items (mutually if the bundle has more than one element).\n     We know that these items will already be grouped together but we need to add the `rec` qualifier\n     to the first one (in the case of functions). And to replace the `let`/`type` keyword by `and`\n     for the other elements coming after. *)\n  let first_in_bundles = Array.create (List.length bundles) None in\n  let get_recursivity_prefix it =\n    match\n      List.findi bundles ~f:(fun _ bundle ->\n          List.mem bundle it ~equal:[%eq: item])\n    with\n    | Some (i, _) -> (\n        match first_in_bundles.(i) with\n        | Some first_it when [%eq: item] first_it it -> FirstMutRec\n        | Some _ -> MutRec\n        | None ->\n            first_in_bundles.(i) <- Some it;\n            FirstMutRec)\n    | None -> NonRec\n  in\n  let strings its =\n    List.concat_map\n      ~f:(fun item ->\n        let recursivity_prefix = get_recursivity_prefix item in\n        let strs = strings_of_item bo m items item in\n        match (recursivity_prefix, item.v) with\n        | FirstMutRec, Fn _ ->\n            replace_in_strs ~pattern:\"let\" ~with_:\"let rec\" strs\n        | MutRec, Fn _ -> replace_in_strs ~pattern:\"let\" ~with_:\"and\" strs\n        | MutRec, Type _ -> replace_in_strs ~pattern:\"type\" ~with_:\"and\" strs\n        | _ -> 
strs)\n      its\n    |> List.map ~f:(map_string ~f:String.strip)\n    |> List.filter\n         ~f:(fst >> ( function `Impl s | `Intf s -> String.is_empty s ) >> not)\n  in\n  let string_for filter =\n    let l =\n      List.filter_map\n        ~f:(fun (s, space) ->\n          let* s = filter s in\n          Some (s, space))\n        (strings items)\n    in\n    let n = List.length l - 1 in\n    let lines =\n      List.mapi\n        ~f:(fun i (s, space) ->\n          s\n          ^ if [%matches? `NoNewline] space || [%eq: int] i n then \"\" else \"\\n\")\n        l\n    in\n    match lines with [] -> \"\" | _ -> header ^ String.concat ~sep:\"\\n\" lines\n  in\n  let replace =\n    String.substr_replace_all ~pattern:\"_hax_panic_freedom_admit_\"\n      ~with_:\"admit () (* Panic freedom *)\"\n  in\n  ( string_for (function `Impl s -> Some (replace s) | _ -> None),\n    string_for (function `Intf s -> Some (replace s) | _ -> None) )\n\nlet fstar_headers (bo : BackendOptions.t) (mod_name : string) =\n  let opts =\n    Printf.sprintf {|#set-options \"--fuel %Ld --ifuel %Ld --z3rlimit %Ld\"|}\n      bo.fuel bo.ifuel bo.z3rlimit\n  in\n\n  List.append [ opts; \"open FStar.Mul\" ]\n    (if hax_core_models_extraction then [ \"open Rust_primitives\" ]\n     else [ \"open Core_models\" ])\n  |> String.concat ~sep:\"\\n\"\n\n(** Rewrites `unsize x` to `x <: τ` when `τ` is in the allowlist described by\n    `unsize_identity_typ` *)\nlet unsize_as_identity =\n  (* Tells if a unsize should be treated as identity by type *)\n  let rec unsize_identity_typ = function\n    | TArray _ -> true\n    | TRef { typ; _ } -> unsize_identity_typ typ\n    | _ -> false\n  in\n  let visitor =\n    object\n      inherit [_] U.Visitors.map as super\n\n      method! 
visit_expr () e =\n        match e.e with\n        | App { f = { e = GlobalVar f; _ }; args = [ x ]; _ }\n          when Global_ident.eq_name Rust_primitives__unsize f\n               && unsize_identity_typ x.typ ->\n            let x = super#visit_expr () x in\n            { e with e = Ascription { e = x; typ = e.typ } }\n        | _ -> super#visit_expr () e\n    end\n  in\n  visitor#visit_item ()\n\n(** Translate as F* (the \"legacy\" printer) *)\nlet translate_as_fstar m (bo : BackendOptions.t) ~(bundles : AST.item list list)\n    (items : AST.item list) : Types.file list =\n  U.group_items_by_namespace items\n  |> Map.to_alist\n  |> List.filter_map ~f:(fun (_, items) ->\n         let* first_item = List.hd items in\n         Some ((RenderId.render first_item.ident).path, items))\n  |> List.concat_map ~f:(fun (ns, items) ->\n         let mod_name = module_name ns in\n         let impl, intf = string_of_items ~mod_name ~bundles bo m items in\n         let make ~ext body =\n           if String.is_empty body then None\n           else\n             Some\n               Types.\n                 {\n                   path = mod_name ^ \".\" ^ ext;\n                   contents =\n                     \"module \" ^ mod_name ^ \"\\n\" ^ fstar_headers bo mod_name\n                     ^ \"\\n\\n\" ^ body ^ \"\\n\";\n                   sourcemap = None;\n                 }\n         in\n         List.filter_map ~f:Fn.id\n           [ make ~ext:\"fst\" impl; make ~ext:\"fsti\" intf ])\n\nlet translate =\n  if\n    Sys.getenv \"HAX_ENGINE_EXPERIMENTAL_RUST_PRINTER_INSTEAD_OF_FSTAR\"\n    |> Option.is_some\n  then failwith \"todo\"\n  else translate_as_fstar\n\nopen Phase_utils\nmodule DepGraphR = Dependencies.Make (Features.Rust)\n\nmodule TransformToInputLanguage =\n  [%functor_application\n    Phases.Reject.RawOrMutPointer(Features.Rust)\n  |> Phases.Reject_impl_type_method\n  |> Phases.Rewrite_local_self\n  |> Phases.Transform_hax_lib_inline\n  |> Phases.Specialize\n  |> 
Phases.Drop_sized_trait\n  |> Phases.Simplify_question_marks\n  |> Phases.And_mut_defsite\n  |> Phases.Reconstruct_asserts\n  |> Phases.Reconstruct_for_loops\n  |> Phases.Reconstruct_while_loops\n  |> Phases.Direct_and_mut\n  |> Phases.Reject.Arbitrary_lhs\n  |> Phases.Drop_blocks\n  |> Phases.Drop_match_guards\n  |> Phases.Drop_references\n  |> Phases.Explicit_conversions\n  |> Phases.Trivialize_assign_lhs\n  |> Side_effect_utils.Hoist\n  |> Phases.Hoist_disjunctive_patterns\n  |> Phases.Simplify_match_return\n  |> Phases.Local_mutation\n  |> Phases.Rewrite_control_flow\n  |> Phases.Drop_return_break_continue\n  |> Phases.Functionalize_loops\n  |> Phases.Reject.Question_mark\n  |> Phases.Reject.As_pattern\n  |> Phases.Traits_specs\n  |> Phases.Simplify_hoisting\n  |> Phases.Newtype_as_refinement\n  |> Phases.Reject.Trait_item_default\n  |> Phases.Bundle_cycles\n  |> Phases.Reorder_fields\n  |> Phases.Sort_items_namespace_wise\n  |> SubtypeToInputLanguage\n  |> Identity\n  ]\n  [@ocamlformat \"disable\"]\n\nlet post_process_items =\n  List.map ~f:unsize_as_identity\n  >> List.map ~f:unsize_as_identity\n  >> List.map ~f:U.Mappers.add_typ_ascription\n\nlet apply_phases (bo : BackendOptions.t) (items : Ast.Rust.item list) :\n    AST.item list =\n  let items =\n    (* let hax_core_extraction = *)\n    (*   Sys.getenv \"HAX_CORE_EXTRACTION_MODE\" *)\n    (*   |> [%equal: string option] (Some \"on\") *)\n    (* in *)\n    (* if hax_core_extraction then *)\n    (*   let names = *)\n    (*     Core_names.names |> List.map ~f:(Concrete_ident.of_def_id Value) *)\n    (*   in *)\n    (*   DepGraphR.ItemGraph.transitive_dependencies_of_items names items *)\n    (* else *)\n    items\n  in\n  let items = TransformToInputLanguage.ditems items |> post_process_items in\n  items\n"
  },
  {
    "path": "engine/backends/fstar/fstar_backend.mli",
    "content": "open Hax_engine.Backend\ninclude T with type BackendOptions.t = Hax_engine.Types.f_star_options_for__null\n\nval post_process_items : AST.item list -> AST.item list\n"
  },
  {
    "path": "engine/backends/lean/dune",
    "content": "(library\n (name lean_backend)\n (package hax-engine)\n (wrapped false)\n (libraries hax_engine base)\n (preprocess\n  (pps\n   ppx_yojson_conv\n   ppx_sexp_conv\n   ppx_compare\n   ppx_hash\n   ppx_deriving.show\n   ppx_deriving.eq\n   ppx_inline\n   ppx_functor_application\n   ppx_matches)))\n\n(env\n (_\n  (flags\n   (:standard -w -A))))\n"
  },
  {
    "path": "engine/backends/lean/lean_backend.ml",
    "content": "open Hax_engine\nopen Utils\nopen Base\n\ninclude\n  Backend.Make\n    (struct\n      open Features\n      include Off\n      include On.Monadic_binding\n      include On.Slice\n      include On.Macro\n      include On.Construct_base\n      include On.Quote\n      include On.Dyn\n      include On.Unsafe\n      include On.Trait_item_default\n      include On.As_pattern\n    end)\n    (struct\n      let backend = Diagnostics.Backend.FStar\n    end)\n\nmodule SubtypeToInputLanguage\n    (FA :\n      Features.T\n        with type mutable_reference = Features.Off.mutable_reference\n         and type continue = Features.Off.continue\n         and type break = Features.Off.break\n         and type mutable_reference = Features.Off.mutable_reference\n         and type mutable_pointer = Features.Off.mutable_pointer\n         and type mutable_variable = Features.Off.mutable_variable\n         and type reference = Features.Off.reference\n         and type raw_pointer = Features.Off.raw_pointer\n         and type early_exit = Features.Off.early_exit\n         and type question_mark = Features.Off.question_mark\n         and type as_pattern = Features.On.as_pattern\n         and type lifetime = Features.Off.lifetime\n         and type monadic_action = Features.Off.monadic_action\n         and type arbitrary_lhs = Features.Off.arbitrary_lhs\n         and type nontrivial_lhs = Features.Off.nontrivial_lhs\n         and type loop = Features.Off.loop\n         and type block = Features.Off.block\n         and type for_loop = Features.Off.for_loop\n         and type while_loop = Features.Off.while_loop\n         and type for_index_loop = Features.Off.for_index_loop\n         and type state_passing_loop = Features.Off.state_passing_loop\n         and type fold_like_loop = Features.Off.fold_like_loop\n         and type match_guard = Features.Off.match_guard\n         and type trait_item_default = Features.On.trait_item_default) =\nstruct\n  module FB = InputLanguage\n\n  
include\n    Subtype.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Features.SUBTYPE.Id\n        include Features.SUBTYPE.On.Monadic_binding\n        include Features.SUBTYPE.On.Construct_base\n        include Features.SUBTYPE.On.Slice\n        include Features.SUBTYPE.On.Macro\n        include Features.SUBTYPE.On.Quote\n        include Features.SUBTYPE.On.Dyn\n        include Features.SUBTYPE.On.Unsafe\n      end)\n\n  let metadata = Phase_utils.Metadata.make (Reject (NotInBackendLang backend))\nend\n\nmodule AST = Ast.Make (InputLanguage)\n\nmodule BackendOptions = struct\n  type t = Hax_engine.Types.f_star_options_for__null\nend\n\nopen Ast\nmodule U = Ast_utils.Make (InputLanguage)\nmodule Visitors = Ast_visitors.Make (InputLanguage)\nopen AST\n\nmodule Context = struct\n  type t = {\n    current_namespace : string list;\n    items : item list;\n    interface_mode : bool;\n    line_width : int;\n  }\nend\n\nopen Phase_utils\nmodule DepGraphR = Dependencies.Make (Features.Rust)\n\nmodule TransformToInputLanguage =\n  [%functor_application\n    Phases.Reject.RawOrMutPointer(Features.Rust)\n  |> Phases.Rewrite_local_self\n  |> Phases.Transform_hax_lib_inline\n  |> Phases.Specialize\n  |> Phases.Drop_sized_trait\n  |> Phases.Simplify_question_marks\n  |> Phases.And_mut_defsite\n  |> Phases.Reconstruct_asserts\n  |> Phases.Reconstruct_for_loops\n  |> Phases.Reconstruct_while_loops\n  |> Phases.Direct_and_mut\n  |> Phases.Reject.Arbitrary_lhs\n  |> Phases.Drop_blocks\n  |> Phases.Drop_match_guards\n  |> Phases.Drop_references\n  |> Phases.Trivialize_assign_lhs\n  |> Side_effect_utils.Hoist\n  |> Phases.Hoist_disjunctive_patterns\n  |> Phases.Simplify_match_return\n  |> Phases.Local_mutation\n  |> Phases.Rewrite_control_flow\n  |> Phases.Drop_return_break_continue\n  |> Phases.Functionalize_loops\n  |> Phases.Reject.Question_mark\n  |> Phases.Traits_specs\n  |> Phases.Simplify_hoisting\n  |> Phases.Newtype_as_refinement\n  
|> Phases.Reorder_fields\n  |> Phases.Sort_items\n  |> SubtypeToInputLanguage\n  |> Identity\n  ]\n  [@ocamlformat \"disable\"]\n\nlet apply_phases (items : Ast.Rust.item list) : AST.item list =\n  TransformToInputLanguage.ditems items\n"
  },
  {
    "path": "engine/backends/proverif/dune",
    "content": "(library\n (name proverif_backend)\n (package hax-engine)\n (wrapped false)\n (libraries hax_engine base hacspeclib_macro_parser)\n (preprocess\n  (pps\n   ppx_yojson_conv\n   ppx_sexp_conv\n   ppx_compare\n   ppx_hash\n   ppx_deriving.show\n   ppx_deriving.eq\n   ppx_inline\n   ppx_functor_application\n   ppx_matches)))\n\n(env\n (_\n  (flags\n   (:standard -w -A))))\n"
  },
  {
    "path": "engine/backends/proverif/proverif_backend.ml",
    "content": "open Hax_engine\nopen Utils\nopen Base\n\ninclude\n  Backend.Make\n    (struct\n      open Features\n      include Off\n      include On.Macro\n      include On.Question_mark\n      include On.Early_exit\n      include On.Slice\n      include On.Quote\n      include On.Construct_base\n    end)\n    (struct\n      let backend = Diagnostics.Backend.ProVerif\n    end)\n\nmodule SubtypeToInputLanguage\n    (FA :\n      Features.T\n      (*  type loop = Features.Off.loop *)\n      (* and type for_loop = Features.Off.for_loop *)\n      (* and type for_index_loop = Features.Off.for_index_loop *)\n      (* and type state_passing_loop = Features.Off.state_passing_loop *)\n      (* and type continue = Features.Off.continue *)\n      (* and type break = Features.Off.break *)\n      (* and type mutable_variable = Features.Off.mutable_variable *)\n      (* and type mutable_reference = Features.Off.mutable_reference *)\n      (* and type mutable_pointer = Features.Off.mutable_pointer *)\n      (* and type reference = Features.Off.reference *)\n      (* and type slice = Features.Off.slice *)\n      (* and type raw_pointer = Features.Off.raw_pointer *)\n        with type early_exit = Features.On.early_exit\n         and type slice = Features.On.slice\n         and type question_mark = Features.On.question_mark\n         and type macro = Features.On.macro\n         and type quote = Features.On.quote\n         and type construct_base = Features.On.construct_base\n(* and type as_pattern = Features.Off.as_pattern *)\n(* and type nontrivial_lhs = Features.Off.nontrivial_lhs *)\n(* and type arbitrary_lhs = Features.Off.arbitrary_lhs *)\n(* and type lifetime = Features.Off.lifetime *)\n(* and type construct_base = Features.Off.construct_base *)\n(* and type monadic_action = Features.Off.monadic_action *)\n(* and type monadic_binding = Features.Off.monadic_binding *)\n(* and type block = Features.Off.block *)) =\nstruct\n  module FB = InputLanguage\n\n  include\n    
Feature_gate.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Feature_gate.DefaultSubtype\n\n        let continue = reject\n        let loop = reject\n        let for_loop = reject\n        let while_loop = reject\n        let for_index_loop = reject\n        let state_passing_loop = reject\n        let continue = reject\n        let break = reject\n        let mutable_variable = reject\n        let mutable_reference = reject\n        let mutable_pointer = reject\n        let reference = reject\n        let raw_pointer = reject\n        let as_pattern = reject\n        let nontrivial_lhs = reject\n        let arbitrary_lhs = reject\n        let lifetime = reject\n        let monadic_action = reject\n        let monadic_binding = reject\n        let fold_like_loop = reject\n        let block = reject\n        let dyn = reject\n        let match_guard = reject\n        let trait_item_default = reject\n        let unsafe = reject\n        let metadata = Phase_reject.make_metadata (NotInBackendLang ProVerif)\n      end)\n\n  let metadata = Phase_utils.Metadata.make (Reject (NotInBackendLang backend))\nend\n\nmodule BackendOptions = struct\n  type t = Hax_engine.Types.pro_verif_options\nend\n\nopen Ast\n\nmodule ProVerifNamePolicy = struct\n  include Concrete_ident.DefaultNamePolicy\n\n  [@@@ocamlformat \"disable\"]\n\n  let reserved_words = Hash_set.of_list (module String) [\n  \"among\"; \"axiom\"; \"channel\"; \"choice\"; \"clauses\"; \"const\"; \"def\"; \"diff\"; \"do\"; \"elimtrue\"; \"else\"; \"equation\"; \"equivalence\"; \"event\"; \"expand\"; \"fail\"; \"for\"; \"forall\"; \"foreach\"; \"free\"; \"fun\"; \"get\"; \"if\"; \"implementation\"; \"in\"; \"inj-event\"; \"insert\"; \"lemma\"; \"let\"; \"letfun\"; \"letproba\"; \"new\"; \"noninterf\"; \"noselect\"; \"not\"; \"nounif\"; \"or\"; \"otherwise\"; \"out\"; \"param\"; \"phase\"; \"pred\"; \"proba\"; \"process\"; \"proof\"; \"public vars\"; \"putbegin\"; \"query\"; 
\"reduc\"; \"restriction\"; \"secret\"; \"select\"; \"set\"; \"suchthat\"; \"sync\"; \"table\"; \"then\"; \"type\"; \"weaksecret\"; \"yield\"\n  ]\nend\n\nmodule U = Ast_utils.Make (InputLanguage)\nmodule RenderId = Concrete_ident.MakeRenderAPI (ProVerifNamePolicy)\nopen AST\n\nmodule type OPTS = sig\n  val options : Hax_engine.Types.pro_verif_options\nend\n\nmodule type MAKE = sig\n  module Preamble : sig\n    val print : item list -> string\n  end\n\n  module DataTypes : sig\n    val print : item list -> string\n  end\n\n  module Letfuns : sig\n    val print : item list -> string\n  end\nend\n\nmodule Make (Options : OPTS) : MAKE = struct\n  module Print = struct\n    module GenericPrint =\n      Deprecated_generic_printer.Make (InputLanguage) (RenderId)\n\n    open Deprecated_generic_printer_base.Make (InputLanguage)\n    open PPrint\n\n    let iblock f = group >> jump 2 0 >> terminate (break 0) >> f >> group\n\n    (* TODO: Give definitions for core / known library functions, cf issues #447, #448 *)\n    let library_functions :\n        (Concrete_ident_generated.t * (AST.expr list -> document)) list =\n      []\n\n    let library_constructors :\n        (Concrete_ident_generated.t\n        * ((global_ident * AST.expr) list -> document))\n        list =\n      []\n\n    let library_constructor_patterns :\n        (Concrete_ident_generated.t * (field_pat list -> document)) list =\n      []\n\n    let library_types : (Concrete_ident_generated.t * document) list = []\n\n    let assoc_known_name name (known_name, _) =\n      Global_ident.eq_name known_name name\n\n    let translate_known_name name ~dict =\n      List.find ~f:(assoc_known_name name) dict\n\n    class print aux =\n      object (print)\n        inherit GenericPrint.print as super\n\n        (* Backend-specific utilities *)\n\n        method pv_event_def name =\n          string \"event\" ^^ space ^^ name ^^ dot ^^ hardline\n        (** Print a ProVerif event definition. 
(without arguments)*)\n\n        method pv_event_emission name =\n          string \"event\" ^^ space ^^ name ^^ semi ^^ hardline\n        (** Print a ProVerif event emission process term. (no arguments)*)\n\n        (* ProVerif syntax *)\n        method pv_comment content =\n          string \"(*\" ^^ space ^^ content ^^ space ^^ string \"*)\" ^^ hardline\n        (** Print a ProVerif comment and end the line. *)\n\n        method pv_const name typ =\n          string \"const\" ^^ space ^^ name ^^ colon ^^ space ^^ typ ^^ dot\n        (** Print a ProVerif constant declaration of the given typ (provided as\n            a document).*)\n\n        method pv_constructor ?(is_data = false) ?(is_typeconverter = false)\n            name arg_types typ =\n          let options = if is_data then [ string \"data\" ] else [] in\n          let options =\n            if is_typeconverter then string \"typeConverter\" :: options\n            else options\n          in\n          let options =\n            space ^^ string \"[\"\n            ^^ separate (comma ^^ space) options\n            ^^ string \"]\"\n          in\n          string \"fun\" ^^ space\n          ^^ align\n               (name\n               ^^ iblock parens (separate (comma ^^ break 1) arg_types)\n               ^^ hardline ^^ colon ^^ space ^^ typ ^^ options ^^ dot)\n        (** Print a ProVerif constructor. *)\n\n        method pv_type name = string \"type\" ^^ space ^^ name ^^ dot ^^ hardline\n        (** Print a ProVerif type definition. *)\n\n        method pv_letfun name args body =\n          string \"letfun\" ^^ space\n          ^^ align\n               (name\n               ^^ iblock parens (separate (comma ^^ break 1) args)\n               ^^ space ^^ equals ^^ hardline ^^ body ^^ dot)\n        (** Print a ProVerif letfun definition. *)\n\n        method pv_letfun_call name args =\n          name ^^ iblock parens (separate (comma ^^ break 1) args)\n        (** Print a ProVerif letfun call. 
*)\n\n        (* Helpers *)\n        method default_value type_name = type_name ^^ string \"_default_value\"\n        method default_letfun_name type_name = type_name ^^ string \"_default\"\n        method error_letfun_name type_name = type_name ^^ string \"_err\"\n\n        method field_accessor_prefix field_name prefix =\n          string \"accessor\" ^^ underscore ^^ prefix ^^ underscore\n          ^^ print#concrete_ident field_name\n\n        method match_arm arms_typ scrutinee { arm_pat; body } =\n          let body = print#expr_at Arm_body body in\n          match arm_pat with\n          | { p = PWild; _ } -> body\n          | { p = PConstruct { constructor; _ } }\n            when Global_ident.eq_name Core__result__Result__Err constructor ->\n              print#pv_letfun_call\n                (print#error_letfun_name (print#ty AlreadyPar arms_typ))\n                []\n          | _ ->\n              let pat =\n                match arm_pat with\n                | { p = PConstant { lit } } ->\n                    iblock parens (string \"=\" ^^ print#literal Pat lit)\n                | _ -> print#pat_at Arm_pat arm_pat |> group\n              in\n              let scrutinee = print#expr_at Expr_Match_scrutinee scrutinee in\n              string \"let\" ^^ space ^^ pat ^^ string \" = \" ^^ scrutinee\n              ^^ string \" in \" ^^ body\n\n        val mutable wildcard_index = 0\n\n        method wildcard =\n          wildcard_index <- wildcard_index + 1;\n          string \"wildcard\" ^^ OCaml.int wildcard_index\n\n        method typed_wildcard = print#wildcard ^^ string \": bitstring\"\n\n        method tuple_elem_pat' :\n            Deprecated_generic_printer_base.par_state -> pat' fn =\n          fun ctx ->\n            let wrap_parens =\n              group\n              >>\n              match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock parens\n            in\n            function\n            | PBinding { mut; mode; var; typ; subpat } ->\n    
            let p = print#local_ident var in\n                p ^^ colon ^^ space ^^ print#ty ctx typ\n            | p -> print#pat' ctx p\n\n        method tuple_elem_pat :\n            Deprecated_generic_printer_base.par_state -> pat fn =\n          fun ctx { p; span; _ } ->\n            print#with_span ~span (fun _ -> print#tuple_elem_pat' ctx p)\n\n        method tuple_elem_pat_at = print#par_state >> print#tuple_elem_pat\n\n        (* Overridden methods *)\n        method! pat' : Deprecated_generic_printer_base.par_state -> pat' fn =\n          fun ctx ->\n            let wrap_parens =\n              group\n              >>\n              match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock parens\n            in\n            fun pat ->\n              match pat with\n              | PConstant { lit } -> string \"=\" ^^ print#literal Pat lit\n              | PConstruct { constructor; fields }\n                when Global_ident.eq_name Core__option__Option__None constructor\n                ->\n                  string \"None()\"\n              | PConstruct { constructor; fields }\n              (* The `Some` constructor in ProVerif expects a\n                 bitstring argument, so we use the appropriate\n                 `_to_bitstring` type converter on the inner\n                 expression. 
*)\n                when Global_ident.eq_name Core__option__Option__Some constructor\n                ->\n                  let inner_field = List.hd_exn fields in\n                  let inner_field_type_doc =\n                    print#ty AlreadyPar inner_field.pat.typ\n                  in\n                  let inner_field_doc = print#pat ctx inner_field.pat in\n                  let inner_block =\n                    match inner_field.pat.typ with\n                    | TApp { ident = `TupleType _ }\n                    (* Tuple types should be translated without conversion from bitstring *)\n                      ->\n                        iblock parens inner_field_doc\n                    | _ ->\n                        iblock parens\n                          (inner_field_type_doc ^^ string \"_to_bitstring\"\n                          ^^ iblock parens inner_field_doc)\n                  in\n                  string \"Some\" ^^ inner_block\n              | PConstruct { constructor; fields }\n              (* We replace applications of the `Ok` constructor\n                 with their contents. *)\n                when Global_ident.eq_name Core__result__Result__Ok constructor\n                ->\n                  let inner_field = List.hd_exn fields in\n                  let inner_field_type_doc =\n                    print#ty AlreadyPar inner_field.pat.typ\n                  in\n                  let inner_field_doc = print#pat ctx inner_field.pat in\n                  inner_field_doc\n              | PConstruct { constructor; fields } -> (\n                  match\n                    translate_known_name constructor\n                      ~dict:library_constructor_patterns\n                  with\n                  | Some (_, translation) -> translation fields\n                  | None -> super#pat' ctx pat)\n              | PWild ->\n                  print#typed_wildcard\n                  (* NOTE: Wildcard translation without collisions? 
*)\n              | _ -> super#pat' ctx pat\n\n        method! ty_bool = string \"bool\"\n        method! ty_int _ = string \"nat\"\n\n        method! pat_at : Deprecated_generic_printer_base.ast_position -> pat fn\n            =\n          fun pos pat ->\n            match pat with\n            | { p = PWild } -> (\n                match pos with\n                | Param_pat -> print#wildcard\n                | _ -> print#pat (print#par_state pos) pat)\n            | _ -> print#pat (print#par_state pos) pat\n\n        method! pat_construct_tuple : pat list fn =\n          List.map ~f:(print#tuple_elem_pat_at Pat_ConstructTuple)\n          >> print#doc_construct_tuple\n\n        method! expr_app f args _generic_args =\n          let args =\n            separate_map\n              (comma ^^ break 1)\n              (print#expr_at Expr_App_arg >> group)\n              args\n          in\n          let f =\n            match f with\n            | { e = GlobalVar name; _ } -> (\n                match name with\n                | `Projector (`Concrete i) | `Concrete i ->\n                    print#concrete_ident i |> group\n                | _ -> super#expr_at Expr_App_f f |> group)\n          in\n          f ^^ iblock parens args\n\n        method! 
expr' : Deprecated_generic_printer_base.par_state -> expr' fn =\n          fun ctx e ->\n            let wrap_parens =\n              group\n              >>\n              match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock parens\n            in\n            match e with\n            | QuestionMark { e; return_typ; _ } -> print#expr ctx e\n            (* Translate known functions *)\n            | App { f = { e = GlobalVar name; _ }; args } -> (\n                let maps_to_identity fn_name =\n                  Global_ident.eq_name Core__clone__Clone__clone name\n                  || Global_ident.eq_name Rust_primitives__unsize name\n                  || Global_ident.eq_name Core__ops__deref__Deref__deref name\n                in\n                match name with\n                | `Primitive p -> (\n                    match p with\n                    | LogicalOp And ->\n                        print#expr NeedsPar (List.hd_exn args)\n                        ^^ space ^^ string \"&&\" ^^ space\n                        ^^ print#expr NeedsPar (List.nth_exn args 1)\n                    | LogicalOp Or ->\n                        print#expr NeedsPar (List.hd_exn args)\n                        ^^ space ^^ string \"||\" ^^ space\n                        ^^ print#expr NeedsPar (List.nth_exn args 1)\n                    | Cast -> print#expr NeedsPar (List.hd_exn args)\n                    | _ -> empty)\n                | _ -> (\n                    if maps_to_identity name then\n                      print#expr ctx (List.hd_exn args)\n                    else\n                      match\n                        translate_known_name name ~dict:library_functions\n                      with\n                      | Some (name, translation) -> translation args\n                      | None -> (\n                          match name with\n                          | `Projector (`Concrete name) -> (\n                              (* A projector should always have an 
argument. *)\n                              let arg = Option.value_exn (List.hd args) in\n                              match arg.typ with\n                              | TApp { ident = `Concrete concrete_ident; _ } ->\n                                  let base_name =\n                                    print#concrete_ident concrete_ident\n                                  in\n                                  print#field_accessor_prefix name base_name\n                                  ^^ iblock parens\n                                       (separate_map\n                                          (comma ^^ break 1)\n                                          (fun arg -> print#expr AlreadyPar arg)\n                                          args)\n                              | _ -> super#expr' ctx e)\n                          | _ -> super#expr' ctx e)))\n            | Construct { constructor; fields; _ }\n              when Global_ident.eq_name Core__option__Option__None constructor\n              ->\n                string \"None()\"\n            | Construct { constructor; fields; _ }\n              when Global_ident.eq_name Core__option__Option__Some constructor\n              ->\n                let inner_expr = snd (Option.value_exn (List.hd fields)) in\n                let inner_expr_type_doc = print#ty AlreadyPar inner_expr.typ in\n                let inner_expr_doc = super#expr ctx inner_expr in\n                string \"Some\"\n                ^^ iblock parens\n                     (inner_expr_type_doc ^^ string \"_to_bitstring\"\n                     ^^ iblock parens inner_expr_doc)\n            (* Translate known constructors *)\n            | Construct { constructor; fields } -> (\n                match\n                  translate_known_name constructor ~dict:library_constructors\n                with\n                | Some (name, translation) -> translation fields\n                | None -> super#expr' ctx e)\n            | Match { scrutinee; 
arms } ->\n                let first_arm = Option.value_exn (List.hd arms) in\n                let arms_typ = first_arm.arm.body.typ in\n                separate_map\n                  (hardline ^^ string \"else \")\n                  (fun { arm; span } -> print#match_arm arms_typ scrutinee arm)\n                  arms\n            | If { cond; then_; else_ } -> (\n                let if_then =\n                  (string \"if\" ^//^ nest 2 (print#expr_at Expr_If_cond cond))\n                  ^/^ string \"then\"\n                  ^//^ (print#expr_at Expr_If_then then_ |> parens |> nest 1)\n                in\n                match else_ with\n                | None -> if_then\n                | Some else_ ->\n                    if_then ^^ break 1 ^^ string \"else\" ^^ space\n                    ^^ (print#expr_at Expr_If_else else_ |> iblock parens)\n                    |> wrap_parens)\n            | Let { monadic; lhs; rhs; body } ->\n                (Option.map\n                   ~f:(fun monad -> print#expr_monadic_let ~monad)\n                   monadic\n                |> Option.value ~default:print#expr_let)\n                  ~lhs ~rhs body\n                |> wrap_parens\n            | _ -> super#expr' ctx e\n\n        method! concrete_ident = print#concrete_ident' ~under_current_ns:false\n\n        method! 
item_unwrapped item =\n          let assume_item =\n            List.rev Options.options.assume_items\n            |> List.find ~f:(fun (clause : Types.inclusion_clause) ->\n                   let namespace = clause.namespace in\n                   Concrete_ident.matches_namespace namespace item.ident)\n            |> Option.map ~f:(fun (clause : Types.inclusion_clause) ->\n                   match clause.kind with Types.Excluded -> false | _ -> true)\n            |> Option.value ~default:false\n          in\n          let fun_and_reduc base_name constructor =\n            let constructor_name = print#concrete_ident constructor.name in\n            let field_prefix = print#concrete_ident base_name in\n            let fun_args = constructor.arguments in\n            let fun_args_full =\n              separate_map\n                (comma ^^ break 1)\n                (fun (x, y, _z) ->\n                  print#concrete_ident x ^^ string \": \"\n                  ^^ print#ty_at Param_typ y)\n                fun_args\n            in\n            let fun_args_names =\n              separate_map\n                (comma ^^ break 1)\n                (fst3 >> fun x -> print#concrete_ident x)\n                fun_args\n            in\n            let fun_args_types =\n              List.map ~f:(snd3 >> print#ty_at Param_typ) fun_args\n            in\n            let fun_line =\n              print#pv_constructor ~is_data:true constructor_name fun_args_types\n                (print#concrete_ident base_name)\n            in\n            let reduc_line =\n              string \"reduc forall \" ^^ iblock Fn.id fun_args_full ^^ semi\n            in\n            let build_accessor (ident, ty, attr) =\n              print#field_accessor_prefix ident field_prefix\n              ^^ iblock parens (constructor_name ^^ iblock parens fun_args_names)\n              ^^ blank 1 ^^ equals ^^ blank 1 ^^ print#concrete_ident ident\n            in\n            let reduc_lines =\n              
separate_map (dot ^^ hardline)\n                (fun arg ->\n                  reduc_line ^^ nest 4 (hardline ^^ build_accessor arg))\n                fun_args\n            in\n            fun_line ^^ hardline ^^ reduc_lines\n            ^^ if reduc_lines == empty then empty else dot\n          in\n          match item.v with\n          (* `fn`s with empty parameter lists are really Rust consts. *)\n          | Fn { name; body; params = [] } ->\n              let const_typ =\n                match body.typ with\n                (* ProVerif does not allow `nat` constants. *)\n                | TInt _ -> string \"bitstring\"\n                | _ -> print#ty_at Item_Fn_body body.typ\n              in\n              print#pv_const (print#concrete_ident name) const_typ\n          | Fn { name; generics; body; params } ->\n              let as_constructor : attrs -> bool =\n                Attr_payloads.payloads\n                >> List.exists ~f:(fst >> [%matches? Types.PVConstructor])\n              in\n              let as_handwritten : attrs -> bool =\n                Attr_payloads.payloads\n                >> List.exists ~f:(fst >> [%matches? 
Types.PVHandwritten])\n              in\n              if as_constructor item.attrs then\n                let arg_types =\n                  List.map ~f:(fun p -> print#ty_at Param_typ p.typ) params\n                in\n                let return_typ = print#ty_at Item_Fn_body body.typ in\n                print#pv_comment (string \"marked as constructor\")\n                ^^ print#pv_constructor ~is_data:true\n                     (print#concrete_ident name)\n                     arg_types return_typ\n              else\n                let comment =\n                  if assume_item then\n                    print#pv_comment\n                      (string \"REPLACE by body of type: \"\n                      ^^ print#ty_at Item_Fn_body body.typ)\n                  else if as_handwritten item.attrs then\n                    print#pv_comment (string \"REPLACE by handwritten model\")\n                  else empty\n                in\n                let reached_event_name =\n                  string \"Reached\" ^^ underscore ^^ print#concrete_ident name\n                in\n                let exit_event_name =\n                  string \"Exit\" ^^ underscore ^^ print#concrete_ident name\n                in\n                let body =\n                  if assume_item || as_handwritten item.attrs then\n                    let body_type = print#ty_at Item_Fn_body body.typ in\n                    print#pv_letfun_call\n                      (print#default_letfun_name body_type)\n                      []\n                  else print#expr_at Item_Fn_body body\n                in\n                comment\n                ^^ print#pv_letfun\n                     (print#concrete_ident name)\n                     (List.map ~f:print#param params)\n                     body\n          | Type { name; generics; variants; is_struct } ->\n              let type_name_doc = print#concrete_ident name in\n              let type_line = print#pv_type type_name_doc in\n              let 
to_bitstring_converter_line =\n                print#pv_constructor ~is_typeconverter:true\n                  (type_name_doc ^^ string \"_to_bitstring\")\n                  [ type_name_doc ] (string \"bitstring\")\n              in\n              let from_bitstring_converter_line =\n                print#pv_constructor ~is_typeconverter:true\n                  (type_name_doc ^^ string \"_from_bitstring\")\n                  [ string \"bitstring\" ]\n                  type_name_doc\n              in\n              let default_line =\n                let const_name = print#default_value type_name_doc in\n                print#pv_const const_name type_name_doc\n                ^^ hardline\n                ^^ print#pv_letfun\n                     (print#default_letfun_name type_name_doc)\n                     [] const_name\n              in\n              let err_line =\n                print#pv_letfun\n                  (print#error_letfun_name type_name_doc)\n                  []\n                  (string \"let x = construct_fail() in \"\n                  ^^ print#default_value type_name_doc)\n              in\n              let default_lines =\n                type_line ^^ hardline ^^ to_bitstring_converter_line ^^ hardline\n                ^^ from_bitstring_converter_line ^^ hardline ^^ default_line\n                ^^ hardline ^^ err_line ^^ hardline\n              in\n              let destructor_lines =\n                if is_struct then\n                  let struct_constructor = List.hd variants in\n                  match struct_constructor with\n                  | None -> empty\n                  | Some constructor -> fun_and_reduc name constructor\n                else\n                  separate_map hardline\n                    (fun variant -> fun_and_reduc name variant)\n                    variants\n              in\n              if\n                Attrs.find_unique_attr item.attrs\n                  ~f:\n                    ([%eq: 
Types.ha_payload] Erased >> Fn.flip Option.some_if ())\n                |> Option.is_some\n              then default_lines\n              else default_lines ^^ destructor_lines\n          | Quote { quote; _ } -> print#quote quote\n          | _ -> empty\n\n        method! expr_let : lhs:pat -> rhs:expr -> expr fn =\n          fun ~lhs ~rhs body ->\n            string \"let\" ^^ space\n            ^^ iblock Fn.id (print#pat_at Expr_Let_lhs lhs)\n            ^^ space ^^ equals ^^ space\n            ^^ iblock parens (print#expr_at Expr_Let_rhs rhs |> group)\n            ^^ space ^^ string \"in\" ^^ hardline\n            ^^ (print#expr_at Expr_Let_body body |> group)\n\n        method! concrete_ident' ~(under_current_ns : bool) : concrete_ident fn =\n          fun id ->\n            if under_current_ns then print#name_of_concrete_ident id\n            else\n              let path = print#namespace_of_concrete_ident id in\n              separate_map (underscore ^^ underscore) utf8string path\n              ^^ underscore ^^ underscore\n              ^^ print#name_of_concrete_ident id\n\n        method! doc_construct_inductive :\n            is_record:bool ->\n            is_struct:bool ->\n            constructor:concrete_ident ->\n            base:document option ->\n            (global_ident * document) list fn =\n          fun ~is_record ~is_struct:_ ~constructor ~base:_ args ->\n            if is_record then\n              print#concrete_ident constructor\n              ^^ iblock parens\n                   (separate_map\n                      (break 0 ^^ comma)\n                      (fun (field, body) -> iblock Fn.id body |> group)\n                      args)\n            else\n              print#concrete_ident constructor\n              ^^ iblock parens (separate_map (comma ^^ break 1) snd args)\n\n        method! 
generic_values : generic_value list fn =\n          function\n          | [] -> empty\n          | values ->\n              string \"_of\" ^^ underscore\n              ^^ separate_map underscore print#generic_value values\n\n        method! ty_app f args =\n          print#concrete_ident f ^^ print#generic_values args\n\n        method! ty_tuple _ _ = string \"bitstring\"\n\n        method! local_ident e =\n          match String.chop_prefix ~prefix:\"impl \" e.name with\n          | Some name ->\n              let name =\n                \"impl_\"\n                ^ String.tr ~target:'+' ~replacement:'_'\n                    (String.tr ~target:' ' ~replacement:'_' name)\n              in\n              string name\n          | _ -> super#local_ident e\n\n        method! expr ctx e =\n          match e.e with\n          | App { f = { e = GlobalVar name; _ }; args }\n            when Global_ident.eq_name Core__convert__Into__into name ->\n              print#ty ctx e.typ ^^ string \"_from_bitstring\"\n              ^^ iblock parens (print#expr ctx (List.hd_exn args))\n          | App { f = { e = GlobalVar name; _ }; args }\n            when Global_ident.eq_name Rust_primitives__hax__never_to_any name ->\n              print#ty ctx e.typ ^^ string \"_err()\"\n          | _ -> (\n              match e.typ with\n              | TApp { ident }\n                when Global_ident.eq_name Core__result__Result ident -> (\n                  match e.e with\n                  | Construct { constructor; fields }\n                    when Global_ident.eq_name Core__result__Result__Ok\n                           constructor ->\n                      let inner_expr =\n                        snd (Option.value_exn (List.hd fields))\n                      in\n                      let inner_expr_doc = super#expr ctx inner_expr in\n                      inner_expr_doc\n                  | Construct { constructor; _ }\n                    when Global_ident.eq_name 
Core__result__Result__Err\n                           constructor ->\n                      print#ty ctx e.typ ^^ string \"_err()\"\n                  | _ -> super#expr ctx e (*This cannot happen*))\n              | _ -> super#expr ctx e)\n\n        method! ty : Deprecated_generic_printer_base.par_state -> ty fn =\n          fun ctx ty ->\n            match ty with\n            | TBool -> print#ty_bool\n            | TParam i -> print#local_ident i\n            | TInt kind -> print#ty_int kind\n            (* Translate known types, no args at the moment *)\n            | TApp { ident; args }\n              when Global_ident.eq_name Alloc__vec__Vec ident ->\n                string \"bitstring\"\n            | TApp { ident; args }\n              when Global_ident.eq_name Core__option__Option ident ->\n                string \"Option\"\n            | TApp { ident; args }\n              when Global_ident.eq_name Core__result__Result ident -> (\n                (* print first of args*)\n                let result_ok_type = List.hd_exn args in\n                match result_ok_type with\n                | GType typ -> print#ty ctx typ\n                | GConst e -> print#expr ctx e\n                | _ -> empty (* Do not tranlsate lifetimes *))\n            | TApp { ident; args } -> super#ty ctx ty\n            (*(\n                match translate_known_name ident ~dict:library_types with\n                | Some (_, translation) -> translation\n                | None -> super#ty ctx ty)*)\n            | _ -> string \"bitstring\"\n      end\n\n    type proverif_aux_info = CrateFns of AST.item list | NoAuxInfo\n\n    include Api (struct\n      type aux_info = proverif_aux_info\n\n      let new_print aux = (new print aux :> print_object)\n    end)\n  end\n\n  let filter_crate_functions (items : AST.item list) =\n    List.filter\n      ~f:(fun item ->\n        [%matches? Fn _] item.v\n        || [%matches? 
Quote { origin = { item_kind = `Fn; _ }; _ }] item.v)\n      items\n\n  let is_process_read : attrs -> bool =\n    Attr_payloads.payloads\n    >> List.exists ~f:(fst >> [%matches? Types.ProcessRead])\n\n  let is_process_write : attrs -> bool =\n    Attr_payloads.payloads\n    >> List.exists ~f:(fst >> [%matches? Types.ProcessWrite])\n\n  let is_process_init : attrs -> bool =\n    Attr_payloads.payloads\n    >> List.exists ~f:(fst >> [%matches? Types.ProcessInit])\n\n  let is_process item =\n    is_process_read item.attrs\n    || is_process_write item.attrs\n    || is_process_init item.attrs\n\n  module type Subprinter = sig\n    val print : AST.item list -> string\n  end\n\n  module MkSubprinter (Section : sig\n    val banner : string\n    val preamble : AST.item list -> string\n    val contents : AST.item list -> string\n  end) =\n  struct\n    let hline = \"(*****************************************)\\n\"\n    let banner = hline ^ \"(* \" ^ Section.banner ^ \" *)\\n\" ^ hline ^ \"\\n\"\n\n    let print items =\n      banner ^ Section.preamble items ^ Section.contents items ^ \"\\n\\n\"\n  end\n\n  module Preamble = MkSubprinter (struct\n    let banner = \"Preamble\"\n\n    let preamble items =\n      \"channel c.\\n\\n\\\n       fun construct_fail() : bitstring\\n\\\n       reduc construct_fail() = fail.\\n\\n\\\n       type Option.\\n\\\n       fun Some(bitstring): Option [data].\\n\\\n       fun None(): Option [data].\\n\\\n       letfun Option_err() = let x = construct_fail() in None().\\n\\n\\\n       const empty: bitstring.\\n\\\n       letfun bitstring_default() = empty.\\n\\\n       letfun bitstring_err() = let x = construct_fail() in \\\n       bitstring_default().\\n\\n\\\n       letfun nat_default() = 0.\\n\\\n       fun nat_to_bitstring(nat): bitstring.\\n\\\n       letfun nat_err() = let x = construct_fail() in nat_default().\\n\\n\\\n       letfun bool_default() = false.\\n\"\n\n    let contents items = \"\"\n  end)\n\n  module DataTypes = 
MkSubprinter (struct\n    let banner = \"Types and Constructors\"\n    let preamble items = \"\"\n\n    let filter_data_types items =\n      List.filter\n        ~f:(fun item ->\n          [%matches? Type _] item.v\n          || [%matches? Quote { origin = { item_kind = `Type; _ }; _ }] item.v)\n        items\n\n    let contents items =\n      let contents, _ = Print.items NoAuxInfo (filter_data_types items) in\n      contents\n  end)\n\n  module Letfuns = MkSubprinter (struct\n    let banner = \"Functions\"\n    let preamble items = \"\"\n\n    let contents items =\n      let process_letfuns, pure_letfuns =\n        List.partition_tf ~f:is_process (filter_crate_functions items)\n      in\n      let pure_letfuns_print, _ =\n        Print.items (CrateFns (filter_crate_functions items)) pure_letfuns\n      in\n      let process_letfuns_print, _ =\n        Print.items (CrateFns (filter_crate_functions items)) process_letfuns\n      in\n      pure_letfuns_print ^ process_letfuns_print\n  end)\nend\n\nlet translate m (bo : BackendOptions.t) ~(bundles : AST.item list list)\n    (items : AST.item list) : Types.file list =\n  let (module M : MAKE) =\n    (module Make (struct\n      let options = bo\n    end))\n  in\n  let lib_contents =\n    M.Preamble.print items ^ M.DataTypes.print items ^ M.Letfuns.print items\n  in\n  let lib_file =\n    Types.{ path = \"lib.pvl\"; contents = lib_contents; sourcemap = None }\n  in\n  [ lib_file ]\n\nopen Phase_utils\nmodule DepGraph = Dependencies.Make (InputLanguage)\nmodule DepGraphR = Dependencies.Make (Features.Rust)\n\nmodule TransformToInputLanguage =\n  [%functor_application\n  Phases.Reject.Unsafe(Features.Rust)\n  |> Phases.Reject.RawOrMutPointer\n  |> Phases.Transform_hax_lib_inline\n  |> Phases.Simplify_question_marks\n  |> Phases.And_mut_defsite\n  |> Phases.Reconstruct_for_loops\n  |> Phases.Direct_and_mut\n  |> Phases.Reject.Arbitrary_lhs\n  |> Phases.Drop_blocks\n  |> Phases.Drop_references\n  |> 
Phases.Trivialize_assign_lhs\n  |> Side_effect_utils.Hoist\n  |> Phases.Simplify_match_return\n  |> Phases.Local_mutation\n  |> Phases.Reject.Continue\n  |> Phases.Reject.Dyn\n  |> Phases.Reorder_fields\n  |> Phases.Bundle_cycles\n  |> Phases.Sort_items_namespace_wise\n  |> SubtypeToInputLanguage\n  |> Identity\n  ]\n  [@ocamlformat \"disable\"]\n\nlet apply_phases (bo : BackendOptions.t) (items : Ast.Rust.item list) :\n    AST.item list =\n  TransformToInputLanguage.ditems items\n"
  },
  {
    "path": "engine/backends/proverif/proverif_backend.mli",
    "content": "open Hax_engine.Backend\ninclude T with type BackendOptions.t = Hax_engine.Types.pro_verif_options\n"
  },
  {
    "path": "engine/bin/dune",
    "content": "(library\n (name lib)\n (modules lib)\n (wrapped false)\n (libraries\n  hax_engine\n  fstar_backend\n  lean_backend\n  coq_backend\n  ssprove_backend\n  easycrypt_backend\n  proverif_backend\n  logs\n  core)\n (preprocess\n  (pps\n   ppx_yojson_conv\n   ppx_deriving.show\n   ppx_deriving.eq\n   ppx_matches\n   ppx_string)))\n\n(executable\n (public_name hax-engine)\n (name native_driver)\n (modules native_driver)\n (libraries lib))\n\n; The following line is commented: by default, we don't want to\n; generate javascript.\n\n; (include dune-js)\n\n(env\n (_\n  (flags\n   (:standard -g -warn-error -A -warn-error +8 -w -33))))\n"
  },
  {
    "path": "engine/bin/dune-js",
    "content": "(executable\n (optional)\n (name js_driver)\n (modes js)\n (modules js_driver)\n (js_of_ocaml\n  (javascript_files js_stubs/mutex.js js_stubs/stdint.js js_stubs/unix.js))\n (libraries js_of_ocaml lib))\n"
  },
  {
    "path": "engine/bin/js_driver.ml",
    "content": "open Base\nopen Js_of_ocaml\n\n(* Strings are slow with js_of_ocaml. Thus, parsing a string into a\n   `Yojson.Safe.t` is extremly slow using yojson itself. Instead, I\n   wrote a very simple and stupid `yojson_of_string_via_js` that (1)\n   parses the json out of a JS string into a JS object (2) make a\n   yojson AST. This is something like x100 faster. Without this hack,\n   the performance is too bad to be bearable. *)\nlet yojson_of_string_via_js (s : string) : Yojson.Safe.t =\n  let f =\n    Js.Unsafe.js_expr\n      {js|\n(function (mkNull, mkBool, mkBigint, mkInt, mkFloat, mkString, mkDict, mkArray){\n  function isInt(n) {\n    return n % 1 === 0;\n  }\n  function f(x){\n    let t = typeof x;\n    if (t === 'undefined' || x === null) {\n      return mkNull;\n    } else if (t === 'boolean') {\n      return mkBool(x);\n    } else if (t === 'object') {\n      if (x instanceof Array) {\n        return mkArray(x.map(f));\n      } else {\n        let data = Object.entries(x).map(function(o) {\n          let key = o[0];\n          let val = f(o[1]);\n          return [key, val];\n        });\n        return mkDict(data);\n      }\n    } else if (t === 'number') {\n      return mkBigint(x.toString());\n      if (isInt(x)) {\n        return mkInt(x);\n      } else {\n        return mkFloat(x);\n      }\n    } else if (t === 'bigint') {\n      return mkBigint(x.toString());\n    } else if (t === 'string') {\n      return mkString(x);\n    } else {\n      throw (\"Cannot deal with \" + JSON.stringify(x));\n    }\n  };\n  return (function(str){\n    let json = JSON.parse(str);\n    let result = f(json);\n    return result;\n  });\n})\n|js}\n  in\n  let open Js in\n  let open Unsafe in\n  let wrap (type a) (f : a t -> Yojson.Safe.t) =\n    inject (callback (fun x -> f (coerce x)))\n  in\n  let to_list x = to_array x |> Array.to_list in\n  let fn =\n    fun_call f\n      [|\n        inject `Null;\n        wrap (fun x -> `Bool (to_bool x));\n        wrap 
(fun x -> `Intlit (to_bytestring x));\n        wrap (fun x -> `Int (float_of_number x |> Float.to_int));\n        wrap (fun x -> `Float (float_of_number x));\n        wrap (fun x -> `String (to_bytestring x));\n        wrap (fun x ->\n            `Assoc\n              (to_list x\n              |> List.map ~f:(fun x ->\n                     match to_list x with\n                     | [ key; json ] -> (to_string key, Stdlib.Obj.magic json)\n                     | _ -> failwith \"Assoc\")));\n        wrap (fun x -> `List (to_list x));\n      |]\n  in\n  fun_call fn [| string s |> coerce |] |> Obj.magic\n\nlet _ =\n  Hax_engine.Hax_io.init\n    (module struct\n      let read_json () =\n        let line = Stdio.In_channel.input_line Stdio.In_channel.stdin in\n        Option.map ~f:yojson_of_string_via_js line\n\n      let write_json msg =\n        let open Stdio.Out_channel in\n        Yojson.Safe.to_channel stdout msg;\n        output_char stdout '\\n';\n        flush stdout\n    end);\n  Lib.main ()\n"
  },
  {
    "path": "engine/bin/js_stubs/mutex.js",
    "content": "// Whether to log.\nvar v_log = false;\nfunction ll(s) { if (v_log) console.log(s); }\n\n//Provides: caml_condition_broadcast\nfunction caml_condition_broadcast() {\n  return 0;\n}\n\n//Provides: caml_condition_new\nfunction caml_condition_new() {\n  return 0;\n}\n\n//Provides: caml_condition_signal\nfunction caml_condition_signal() {\n  return 0;\n}\n\n//Provides: caml_condition_wait\nfunction caml_condition_wait() {\n  return 0;\n}\n\n//Provides: caml_thread_initialize\nfunction caml_thread_initialize() {\n  return 0;\n}\n\n//Provides: caml_thread_new\nfunction caml_thread_new() {\n  return 0;\n}\n\n//Provides: caml_thread_self\nfunction caml_thread_self() {\n  return [0,0];\n}\n\n//Provides: caml_thread_uncaught_exception\nfunction caml_thread_uncaught_exception() {\n  return 0;\n}\n\n//Provides: caml_thread_yield\nfunction caml_thread_yield() {\n  return 0;\n}\n\n//Provides: caml_mutex_lock\nfunction caml_mutex_lock() {\n  return 0;\n}\n\n//Provides: caml_mutex_new\nfunction caml_mutex_new() {\n  return 0;\n}\n\n//Provides: caml_mutex_unlock\nfunction caml_mutex_unlock() {\n  return 0;\n}\n\n//Provides: caml_thread_cleanup\nfunction caml_thread_cleanup() {\n  return 0;\n}\n\n//Provides: caml_thread_exit\nfunction caml_thread_exit() {\n  return 0;\n}\n\n//Provides: caml_thread_id\nfunction caml_thread_id() {\n  return 0;\n}\n"
  },
  {
    "path": "engine/bin/js_stubs/stdint.js",
    "content": "\n// Provides: int8_of_nativeint\nfunction int8_of_nativeint(){};\n// Provides: int8_of_float\nfunction int8_of_float(){};\n// Provides: int8_of_int16\nfunction int8_of_int16(){};\n// Provides: int8_of_int24\nfunction int8_of_int24(){};\n// Provides: int8_of_int32\nfunction int8_of_int32(){};\n// Provides: int8_of_int40\nfunction int8_of_int40(){};\n// Provides: int8_of_int48\nfunction int8_of_int48(){};\n// Provides: int8_of_int56\nfunction int8_of_int56(){};\n// Provides: int8_of_int64\nfunction int8_of_int64(){};\n// Provides: int8_of_int128\nfunction int8_of_int128(){};\n// Provides: int8_of_uint8\nfunction int8_of_uint8(){};\n// Provides: int8_of_uint16\nfunction int8_of_uint16(){};\n// Provides: int8_of_uint24\nfunction int8_of_uint24(){};\n// Provides: int8_of_uint32\nfunction int8_of_uint32(){};\n// Provides: int8_of_uint40\nfunction int8_of_uint40(){};\n// Provides: int8_of_uint48\nfunction int8_of_uint48(){};\n// Provides: int8_of_uint56\nfunction int8_of_uint56(){};\n// Provides: int8_of_uint64\nfunction int8_of_uint64(){};\n// Provides: int8_of_uint128\nfunction int8_of_uint128(){};\n// Provides: nativeint_of_int8\nfunction nativeint_of_int8(){};\n// Provides: float_of_int8\nfunction float_of_int8(){};\n// Provides: int16_of_int8\nfunction int16_of_int8(){};\n// Provides: int24_of_int8\nfunction int24_of_int8(){};\n// Provides: int32_of_int8\nfunction int32_of_int8(){};\n// Provides: int40_of_int8\nfunction int40_of_int8(){};\n// Provides: int48_of_int8\nfunction int48_of_int8(){};\n// Provides: int56_of_int8\nfunction int56_of_int8(){};\n// Provides: int64_of_int8\nfunction int64_of_int8(){};\n// Provides: int128_of_int8\nfunction int128_of_int8(){};\n// Provides: uint8_of_int8\nfunction uint8_of_int8(){};\n// Provides: uint16_of_int8\nfunction uint16_of_int8(){};\n// Provides: uint24_of_int8\nfunction uint24_of_int8(){};\n// Provides: uint32_of_int8\nfunction uint32_of_int8(){};\n// Provides: uint40_of_int8\nfunction 
uint40_of_int8(){};\n// Provides: uint48_of_int8\nfunction uint48_of_int8(){};\n// Provides: uint56_of_int8\nfunction uint56_of_int8(){};\n// Provides: uint64_of_int8\nfunction uint64_of_int8(){};\n// Provides: uint128_of_int8\nfunction uint128_of_int8(){};\n// Provides: int8_bits_of_float\nfunction int8_bits_of_float(){};\n// Provides: int8_float_of_bits\nfunction int8_float_of_bits(){};\n// Provides: int16_of_nativeint\nfunction int16_of_nativeint(){};\n// Provides: int16_of_float\nfunction int16_of_float(){};\n// Provides: int16_of_int24\nfunction int16_of_int24(){};\n// Provides: int16_of_int32\nfunction int16_of_int32(){};\n// Provides: int16_of_int40\nfunction int16_of_int40(){};\n// Provides: int16_of_int48\nfunction int16_of_int48(){};\n// Provides: int16_of_int56\nfunction int16_of_int56(){};\n// Provides: int16_of_int64\nfunction int16_of_int64(){};\n// Provides: int16_of_int128\nfunction int16_of_int128(){};\n// Provides: int16_of_uint8\nfunction int16_of_uint8(){};\n// Provides: int16_of_uint16\nfunction int16_of_uint16(){};\n// Provides: int16_of_uint24\nfunction int16_of_uint24(){};\n// Provides: int16_of_uint32\nfunction int16_of_uint32(){};\n// Provides: int16_of_uint40\nfunction int16_of_uint40(){};\n// Provides: int16_of_uint48\nfunction int16_of_uint48(){};\n// Provides: int16_of_uint56\nfunction int16_of_uint56(){};\n// Provides: int16_of_uint64\nfunction int16_of_uint64(){};\n// Provides: int16_of_uint128\nfunction int16_of_uint128(){};\n// Provides: nativeint_of_int16\nfunction nativeint_of_int16(){};\n// Provides: float_of_int16\nfunction float_of_int16(){};\n// Provides: int24_of_int16\nfunction int24_of_int16(){};\n// Provides: int32_of_int16\nfunction int32_of_int16(){};\n// Provides: int40_of_int16\nfunction int40_of_int16(){};\n// Provides: int48_of_int16\nfunction int48_of_int16(){};\n// Provides: int56_of_int16\nfunction int56_of_int16(){};\n// Provides: int64_of_int16\nfunction int64_of_int16(){};\n// Provides: 
int128_of_int16\nfunction int128_of_int16(){};\n// Provides: uint8_of_int16\nfunction uint8_of_int16(){};\n// Provides: uint16_of_int16\nfunction uint16_of_int16(){};\n// Provides: uint24_of_int16\nfunction uint24_of_int16(){};\n// Provides: uint32_of_int16\nfunction uint32_of_int16(){};\n// Provides: uint40_of_int16\nfunction uint40_of_int16(){};\n// Provides: uint48_of_int16\nfunction uint48_of_int16(){};\n// Provides: uint56_of_int16\nfunction uint56_of_int16(){};\n// Provides: uint64_of_int16\nfunction uint64_of_int16(){};\n// Provides: uint128_of_int16\nfunction uint128_of_int16(){};\n// Provides: int16_bits_of_float\nfunction int16_bits_of_float(){};\n// Provides: int16_float_of_bits\nfunction int16_float_of_bits(){};\n// Provides: int24_of_nativeint\nfunction int24_of_nativeint(){};\n// Provides: int24_of_float\nfunction int24_of_float(){};\n// Provides: int24_of_int32\nfunction int24_of_int32(){};\n// Provides: int24_of_int40\nfunction int24_of_int40(){};\n// Provides: int24_of_int48\nfunction int24_of_int48(){};\n// Provides: int24_of_int56\nfunction int24_of_int56(){};\n// Provides: int24_of_int64\nfunction int24_of_int64(){};\n// Provides: int24_of_int128\nfunction int24_of_int128(){};\n// Provides: int24_of_uint8\nfunction int24_of_uint8(){};\n// Provides: int24_of_uint16\nfunction int24_of_uint16(){};\n// Provides: int24_of_uint24\nfunction int24_of_uint24(){};\n// Provides: int24_of_uint32\nfunction int24_of_uint32(){};\n// Provides: int24_of_uint40\nfunction int24_of_uint40(){};\n// Provides: int24_of_uint48\nfunction int24_of_uint48(){};\n// Provides: int24_of_uint56\nfunction int24_of_uint56(){};\n// Provides: int24_of_uint64\nfunction int24_of_uint64(){};\n// Provides: int24_of_uint128\nfunction int24_of_uint128(){};\n// Provides: nativeint_of_int24\nfunction nativeint_of_int24(){};\n// Provides: float_of_int24\nfunction float_of_int24(){};\n// Provides: int32_of_int24\nfunction int32_of_int24(){};\n// Provides: int40_of_int24\nfunction 
int40_of_int24(){};\n// Provides: int48_of_int24\nfunction int48_of_int24(){};\n// Provides: int56_of_int24\nfunction int56_of_int24(){};\n// Provides: int64_of_int24\nfunction int64_of_int24(){};\n// Provides: int128_of_int24\nfunction int128_of_int24(){};\n// Provides: uint8_of_int24\nfunction uint8_of_int24(){};\n// Provides: uint16_of_int24\nfunction uint16_of_int24(){};\n// Provides: uint24_of_int24\nfunction uint24_of_int24(){};\n// Provides: uint32_of_int24\nfunction uint32_of_int24(){};\n// Provides: uint40_of_int24\nfunction uint40_of_int24(){};\n// Provides: uint48_of_int24\nfunction uint48_of_int24(){};\n// Provides: uint56_of_int24\nfunction uint56_of_int24(){};\n// Provides: uint64_of_int24\nfunction uint64_of_int24(){};\n// Provides: uint128_of_int24\nfunction uint128_of_int24(){};\n// Provides: int32_of_int40\nfunction int32_of_int40(){};\n// Provides: int32_of_int48\nfunction int32_of_int48(){};\n// Provides: int32_of_int56\nfunction int32_of_int56(){};\n// Provides: int32_of_int64\nfunction int32_of_int64(){};\n// Provides: int32_of_int128\nfunction int32_of_int128(){};\n// Provides: int32_of_uint8\nfunction int32_of_uint8(){};\n// Provides: int32_of_uint16\nfunction int32_of_uint16(){};\n// Provides: int32_of_uint24\nfunction int32_of_uint24(){};\n// Provides: int32_of_uint32\nfunction int32_of_uint32(){};\n// Provides: int32_of_uint40\nfunction int32_of_uint40(){};\n// Provides: int32_of_uint48\nfunction int32_of_uint48(){};\n// Provides: int32_of_uint56\nfunction int32_of_uint56(){};\n// Provides: int32_of_uint64\nfunction int32_of_uint64(){};\n// Provides: int32_of_uint128\nfunction int32_of_uint128(){};\n// Provides: int40_of_int32\nfunction int40_of_int32(){};\n// Provides: int48_of_int32\nfunction int48_of_int32(){};\n// Provides: int56_of_int32\nfunction int56_of_int32(){};\n// Provides: int64_of_int32\nfunction int64_of_int32(){};\n// Provides: int128_of_int32\nfunction int128_of_int32(){};\n// Provides: uint8_of_int32\nfunction 
uint8_of_int32(){};\n// Provides: uint16_of_int32\nfunction uint16_of_int32(){};\n// Provides: uint24_of_int32\nfunction uint24_of_int32(){};\n// Provides: uint32_of_int32\nfunction uint32_of_int32(){};\n// Provides: uint40_of_int32\nfunction uint40_of_int32(){};\n// Provides: uint48_of_int32\nfunction uint48_of_int32(){};\n// Provides: uint56_of_int32\nfunction uint56_of_int32(){};\n// Provides: uint64_of_int32\nfunction uint64_of_int32(){};\n// Provides: uint128_of_int32\nfunction uint128_of_int32(){};\n// Provides: int64_of_int40\nfunction int64_of_int40(){};\n// Provides: int64_of_int48\nfunction int64_of_int48(){};\n// Provides: int64_of_int56\nfunction int64_of_int56(){};\n// Provides: int64_of_int128\nfunction int64_of_int128(){};\n// Provides: int64_of_uint8\nfunction int64_of_uint8(){};\n// Provides: int64_of_uint16\nfunction int64_of_uint16(){};\n// Provides: int64_of_uint24\nfunction int64_of_uint24(){};\n// Provides: int64_of_uint32\nfunction int64_of_uint32(){};\n// Provides: int64_of_uint40\nfunction int64_of_uint40(){};\n// Provides: int64_of_uint48\nfunction int64_of_uint48(){};\n// Provides: int64_of_uint56\nfunction int64_of_uint56(){};\n// Provides: int64_of_uint64\nfunction int64_of_uint64(){};\n// Provides: int64_of_uint128\nfunction int64_of_uint128(){};\n// Provides: int40_of_int64\nfunction int40_of_int64(){};\n// Provides: int48_of_int64\nfunction int48_of_int64(){};\n// Provides: int56_of_int64\nfunction int56_of_int64(){};\n// Provides: int128_of_int64\nfunction int128_of_int64(){};\n// Provides: uint8_of_int64\nfunction uint8_of_int64(){};\n// Provides: uint16_of_int64\nfunction uint16_of_int64(){};\n// Provides: uint24_of_int64\nfunction uint24_of_int64(){};\n// Provides: uint32_of_int64\nfunction uint32_of_int64(){};\n// Provides: uint40_of_int64\nfunction uint40_of_int64(){};\n// Provides: uint48_of_int64\nfunction uint48_of_int64(){};\n// Provides: uint56_of_int64\nfunction uint56_of_int64(){};\n// Provides: uint64_of_int64\nfunction 
uint64_of_int64(){};\n// Provides: uint128_of_int64\nfunction uint128_of_int64(){};\n// Provides: int40_mul\nfunction int40_mul(){};\n// Provides: int40_div\nfunction int40_div(){};\n// Provides: uint40_xor\nfunction uint40_xor(){};\n// Provides: int40_shift_right\nfunction int40_shift_right(){};\n// Provides: uint40_shift_right\nfunction uint40_shift_right(){};\n// Provides: int40_of_int\nfunction int40_of_int(){};\n// Provides: int40_of_nativeint\nfunction int40_of_nativeint(){};\n// Provides: int40_of_float\nfunction int40_of_float(){};\n// Provides: int40_of_int48\nfunction int40_of_int48(){};\n// Provides: int40_of_int56\nfunction int40_of_int56(){};\n// Provides: int40_of_int128\nfunction int40_of_int128(){};\n// Provides: int40_of_uint8\nfunction int40_of_uint8(){};\n// Provides: int40_of_uint16\nfunction int40_of_uint16(){};\n// Provides: int40_of_uint24\nfunction int40_of_uint24(){};\n// Provides: int40_of_uint32\nfunction int40_of_uint32(){};\n// Provides: int40_of_uint40\nfunction int40_of_uint40(){};\n// Provides: int40_of_uint48\nfunction int40_of_uint48(){};\n// Provides: int40_of_uint56\nfunction int40_of_uint56(){};\n// Provides: int40_of_uint64\nfunction int40_of_uint64(){};\n// Provides: int40_of_uint128\nfunction int40_of_uint128(){};\n// Provides: int_of_int40\nfunction int_of_int40(){};\n// Provides: nativeint_of_int40\nfunction nativeint_of_int40(){};\n// Provides: float_of_int40\nfunction float_of_int40(){};\n// Provides: int48_of_int40\nfunction int48_of_int40(){};\n// Provides: int56_of_int40\nfunction int56_of_int40(){};\n// Provides: int128_of_int40\nfunction int128_of_int40(){};\n// Provides: uint8_of_int40\nfunction uint8_of_int40(){};\n// Provides: uint16_of_int40\nfunction uint16_of_int40(){};\n// Provides: uint24_of_int40\nfunction uint24_of_int40(){};\n// Provides: uint32_of_int40\nfunction uint32_of_int40(){};\n// Provides: uint40_of_int40\nfunction uint40_of_int40(){};\n// Provides: uint48_of_int40\nfunction 
uint48_of_int40(){};\n// Provides: uint56_of_int40\nfunction uint56_of_int40(){};\n// Provides: uint64_of_int40\nfunction uint64_of_int40(){};\n// Provides: uint128_of_int40\nfunction uint128_of_int40(){};\n// Provides: int40_max_int\nfunction int40_max_int(){};\n// Provides: int40_min_int\nfunction int40_min_int(){};\n// Provides: int48_mul\nfunction int48_mul(){};\n// Provides: int48_div\nfunction int48_div(){};\n// Provides: uint48_xor\nfunction uint48_xor(){};\n// Provides: int48_shift_right\nfunction int48_shift_right(){};\n// Provides: uint48_shift_right\nfunction uint48_shift_right(){};\n// Provides: int48_of_int\nfunction int48_of_int(){};\n// Provides: int48_of_nativeint\nfunction int48_of_nativeint(){};\n// Provides: int48_of_float\nfunction int48_of_float(){};\n// Provides: int48_of_int56\nfunction int48_of_int56(){};\n// Provides: int48_of_int128\nfunction int48_of_int128(){};\n// Provides: int48_of_uint8\nfunction int48_of_uint8(){};\n// Provides: int48_of_uint16\nfunction int48_of_uint16(){};\n// Provides: int48_of_uint24\nfunction int48_of_uint24(){};\n// Provides: int48_of_uint32\nfunction int48_of_uint32(){};\n// Provides: int48_of_uint40\nfunction int48_of_uint40(){};\n// Provides: int48_of_uint48\nfunction int48_of_uint48(){};\n// Provides: int48_of_uint56\nfunction int48_of_uint56(){};\n// Provides: int48_of_uint64\nfunction int48_of_uint64(){};\n// Provides: int48_of_uint128\nfunction int48_of_uint128(){};\n// Provides: int_of_int48\nfunction int_of_int48(){};\n// Provides: nativeint_of_int48\nfunction nativeint_of_int48(){};\n// Provides: float_of_int48\nfunction float_of_int48(){};\n// Provides: int56_of_int48\nfunction int56_of_int48(){};\n// Provides: int128_of_int48\nfunction int128_of_int48(){};\n// Provides: uint8_of_int48\nfunction uint8_of_int48(){};\n// Provides: uint16_of_int48\nfunction uint16_of_int48(){};\n// Provides: uint24_of_int48\nfunction uint24_of_int48(){};\n// Provides: uint32_of_int48\nfunction uint32_of_int48(){};\n// 
Provides: uint40_of_int48\nfunction uint40_of_int48(){};\n// Provides: uint48_of_int48\nfunction uint48_of_int48(){};\n// Provides: uint56_of_int48\nfunction uint56_of_int48(){};\n// Provides: uint64_of_int48\nfunction uint64_of_int48(){};\n// Provides: uint128_of_int48\nfunction uint128_of_int48(){};\n// Provides: int48_max_int\nfunction int48_max_int(){};\n// Provides: int48_min_int\nfunction int48_min_int(){};\n// Provides: int56_mul\nfunction int56_mul(){};\n// Provides: int56_div\nfunction int56_div(){};\n// Provides: uint56_xor\nfunction uint56_xor(){};\n// Provides: int56_shift_right\nfunction int56_shift_right(){};\n// Provides: uint56_shift_right\nfunction uint56_shift_right(){};\n// Provides: int56_of_int\nfunction int56_of_int(){};\n// Provides: int56_of_nativeint\nfunction int56_of_nativeint(){};\n// Provides: int56_of_float\nfunction int56_of_float(){};\n// Provides: int56_of_int128\nfunction int56_of_int128(){};\n// Provides: int56_of_uint8\nfunction int56_of_uint8(){};\n// Provides: int56_of_uint16\nfunction int56_of_uint16(){};\n// Provides: int56_of_uint24\nfunction int56_of_uint24(){};\n// Provides: int56_of_uint32\nfunction int56_of_uint32(){};\n// Provides: int56_of_uint40\nfunction int56_of_uint40(){};\n// Provides: int56_of_uint48\nfunction int56_of_uint48(){};\n// Provides: int56_of_uint56\nfunction int56_of_uint56(){};\n// Provides: int56_of_uint64\nfunction int56_of_uint64(){};\n// Provides: int56_of_uint128\nfunction int56_of_uint128(){};\n// Provides: int_of_int56\nfunction int_of_int56(){};\n// Provides: nativeint_of_int56\nfunction nativeint_of_int56(){};\n// Provides: float_of_int56\nfunction float_of_int56(){};\n// Provides: int128_of_int56\nfunction int128_of_int56(){};\n// Provides: uint8_of_int56\nfunction uint8_of_int56(){};\n// Provides: uint16_of_int56\nfunction uint16_of_int56(){};\n// Provides: uint24_of_int56\nfunction uint24_of_int56(){};\n// Provides: uint32_of_int56\nfunction uint32_of_int56(){};\n// Provides: 
uint40_of_int56\nfunction uint40_of_int56(){};\n// Provides: uint48_of_int56\nfunction uint48_of_int56(){};\n// Provides: uint56_of_int56\nfunction uint56_of_int56(){};\n// Provides: uint64_of_int56\nfunction uint64_of_int56(){};\n// Provides: uint128_of_int56\nfunction uint128_of_int56(){};\n// Provides: int56_max_int\nfunction int56_max_int(){};\n// Provides: int56_min_int\nfunction int56_min_int(){};\n// Provides: int128_add\nfunction int128_add(){};\n// Provides: int128_sub\nfunction int128_sub(){};\n// Provides: int128_mul\nfunction int128_mul(){};\n// Provides: int128_div\nfunction int128_div(){};\n// Provides: int128_mod\nfunction int128_mod(){};\n// Provides: int128_and\nfunction int128_and(){};\n// Provides: int128_or\nfunction int128_or(){};\n// Provides: int128_xor\nfunction int128_xor(){};\n// Provides: int128_shift_left\nfunction int128_shift_left(){};\n// Provides: int128_shift_right\nfunction int128_shift_right(){};\n// Provides: int128_abs\nfunction int128_abs(){};\n// Provides: int128_neg\nfunction int128_neg(){};\n// Provides: int128_of_int\nfunction int128_of_int(){};\n// Provides: int128_of_nativeint\nfunction int128_of_nativeint(){};\n// Provides: int128_of_float\nfunction int128_of_float(){};\n// Provides: int128_of_uint8\nfunction int128_of_uint8(){};\n// Provides: int128_of_uint16\nfunction int128_of_uint16(){};\n// Provides: int128_of_uint24\nfunction int128_of_uint24(){};\n// Provides: int128_of_uint32\nfunction int128_of_uint32(){};\n// Provides: int128_of_uint40\nfunction int128_of_uint40(){};\n// Provides: int128_of_uint48\nfunction int128_of_uint48(){};\n// Provides: int128_of_uint56\nfunction int128_of_uint56(){};\n// Provides: int128_of_uint64\nfunction int128_of_uint64(){};\n// Provides: int128_of_uint128\nfunction int128_of_uint128(){};\n// Provides: int_of_int128\nfunction int_of_int128(){};\n// Provides: nativeint_of_int128\nfunction nativeint_of_int128(){};\n// Provides: float_of_int128\nfunction float_of_int128(){};\n// 
Provides: uint8_of_int128\nfunction uint8_of_int128(){};\n// Provides: uint16_of_int128\nfunction uint16_of_int128(){};\n// Provides: uint24_of_int128\nfunction uint24_of_int128(){};\n// Provides: uint32_of_int128\nfunction uint32_of_int128(){};\n// Provides: uint40_of_int128\nfunction uint40_of_int128(){};\n// Provides: uint48_of_int128\nfunction uint48_of_int128(){};\n// Provides: uint56_of_int128\nfunction uint56_of_int128(){};\n// Provides: uint64_of_int128\nfunction uint64_of_int128(){};\n// Provides: uint128_of_int128\nfunction uint128_of_int128(){};\n// Provides: int128_max_int\nfunction int128_max_int(){};\n// Provides: int128_min_int\nfunction int128_min_int(){};\n// Provides: int128_init_custom_ops\nfunction int128_init_custom_ops(){};\n// Provides: uint8_of_nativeint\nfunction uint8_of_nativeint(){};\n// Provides: uint8_of_float\nfunction uint8_of_float(){};\n// Provides: uint8_of_uint16\nfunction uint8_of_uint16(){};\n// Provides: uint8_of_uint24\nfunction uint8_of_uint24(){};\n// Provides: uint8_of_uint32\nfunction uint8_of_uint32(){};\n// Provides: uint8_of_uint40\nfunction uint8_of_uint40(){};\n// Provides: uint8_of_uint48\nfunction uint8_of_uint48(){};\n// Provides: uint8_of_uint56\nfunction uint8_of_uint56(){};\n// Provides: uint8_of_uint64\nfunction uint8_of_uint64(){};\n// Provides: uint8_of_uint128\nfunction uint8_of_uint128(){};\n// Provides: nativeint_of_uint8\nfunction nativeint_of_uint8(){};\n// Provides: float_of_uint8\nfunction float_of_uint8(){};\n// Provides: uint16_of_uint8\nfunction uint16_of_uint8(){};\n// Provides: uint24_of_uint8\nfunction uint24_of_uint8(){};\n// Provides: uint32_of_uint8\nfunction uint32_of_uint8(){};\n// Provides: uint40_of_uint8\nfunction uint40_of_uint8(){};\n// Provides: uint48_of_uint8\nfunction uint48_of_uint8(){};\n// Provides: uint56_of_uint8\nfunction uint56_of_uint8(){};\n// Provides: uint64_of_uint8\nfunction uint64_of_uint8(){};\n// Provides: uint128_of_uint8\nfunction uint128_of_uint8(){};\n// 
Provides: uint16_of_nativeint\nfunction uint16_of_nativeint(){};\n// Provides: uint16_of_float\nfunction uint16_of_float(){};\n// Provides: uint16_of_uint24\nfunction uint16_of_uint24(){};\n// Provides: uint16_of_uint32\nfunction uint16_of_uint32(){};\n// Provides: uint16_of_uint40\nfunction uint16_of_uint40(){};\n// Provides: uint16_of_uint48\nfunction uint16_of_uint48(){};\n// Provides: uint16_of_uint56\nfunction uint16_of_uint56(){};\n// Provides: uint16_of_uint64\nfunction uint16_of_uint64(){};\n// Provides: uint16_of_uint128\nfunction uint16_of_uint128(){};\n// Provides: nativeint_of_uint16\nfunction nativeint_of_uint16(){};\n// Provides: float_of_uint16\nfunction float_of_uint16(){};\n// Provides: uint24_of_uint16\nfunction uint24_of_uint16(){};\n// Provides: uint32_of_uint16\nfunction uint32_of_uint16(){};\n// Provides: uint40_of_uint16\nfunction uint40_of_uint16(){};\n// Provides: uint48_of_uint16\nfunction uint48_of_uint16(){};\n// Provides: uint56_of_uint16\nfunction uint56_of_uint16(){};\n// Provides: uint64_of_uint16\nfunction uint64_of_uint16(){};\n// Provides: uint128_of_uint16\nfunction uint128_of_uint16(){};\n// Provides: uint24_of_nativeint\nfunction uint24_of_nativeint(){};\n// Provides: uint24_of_float\nfunction uint24_of_float(){};\n// Provides: uint24_of_uint32\nfunction uint24_of_uint32(){};\n// Provides: uint24_of_uint40\nfunction uint24_of_uint40(){};\n// Provides: uint24_of_uint48\nfunction uint24_of_uint48(){};\n// Provides: uint24_of_uint56\nfunction uint24_of_uint56(){};\n// Provides: uint24_of_uint64\nfunction uint24_of_uint64(){};\n// Provides: uint24_of_uint128\nfunction uint24_of_uint128(){};\n// Provides: nativeint_of_uint24\nfunction nativeint_of_uint24(){};\n// Provides: float_of_uint24\nfunction float_of_uint24(){};\n// Provides: uint32_of_uint24\nfunction uint32_of_uint24(){};\n// Provides: uint40_of_uint24\nfunction uint40_of_uint24(){};\n// Provides: uint48_of_uint24\nfunction uint48_of_uint24(){};\n// Provides: 
uint56_of_uint24\nfunction uint56_of_uint24(){};\n// Provides: uint64_of_uint24\nfunction uint64_of_uint24(){};\n// Provides: uint128_of_uint24\nfunction uint128_of_uint24(){};\n// Provides: uint32_add\nfunction uint32_add(){};\n// Provides: uint32_sub\nfunction uint32_sub(){};\n// Provides: uint32_mul\nfunction uint32_mul(){};\n// Provides: uint32_div\nfunction uint32_div(){};\n// Provides: uint32_mod\nfunction uint32_mod(){};\n// Provides: uint32_and\nfunction uint32_and(){};\n// Provides: uint32_or\nfunction uint32_or(){};\n// Provides: uint32_xor\nfunction uint32_xor(){};\n// Provides: uint32_shift_left\nfunction uint32_shift_left(){};\n// Provides: uint32_shift_right\nfunction uint32_shift_right(){};\n// Provides: uint32_neg\nfunction uint32_neg(){};\n// Provides: uint32_of_int\nfunction uint32_of_int(){};\n// Provides: uint32_of_nativeint\nfunction uint32_of_nativeint(){};\n// Provides: uint32_of_float\nfunction uint32_of_float(){};\n// Provides: uint32_of_uint40\nfunction uint32_of_uint40(){};\n// Provides: uint32_of_uint48\nfunction uint32_of_uint48(){};\n// Provides: uint32_of_uint56\nfunction uint32_of_uint56(){};\n// Provides: uint32_of_uint64\nfunction uint32_of_uint64(){};\n// Provides: uint32_of_uint128\nfunction uint32_of_uint128(){};\n// Provides: int_of_uint32\nfunction int_of_uint32(){};\n// Provides: nativeint_of_uint32\nfunction nativeint_of_uint32(){};\n// Provides: float_of_uint32\nfunction float_of_uint32(){};\n// Provides: uint40_of_uint32\nfunction uint40_of_uint32(){};\n// Provides: uint48_of_uint32\nfunction uint48_of_uint32(){};\n// Provides: uint56_of_uint32\nfunction uint56_of_uint32(){};\n// Provides: uint64_of_uint32\nfunction uint64_of_uint32(){};\n// Provides: uint128_of_uint32\nfunction uint128_of_uint32(){};\n// Provides: uint32_max_int\nfunction uint32_max_int(){};\n// Provides: uint32_init_custom_ops\nfunction uint32_init_custom_ops(){};\n// Provides: uint64_add\nfunction uint64_add(){};\n// Provides: uint64_sub\nfunction 
uint64_sub(){};\n// Provides: uint64_mul\nfunction uint64_mul(){};\n// Provides: uint64_div\nfunction uint64_div(){};\n// Provides: uint64_mod\nfunction uint64_mod(){};\n// Provides: uint64_and\nfunction uint64_and(){};\n// Provides: uint64_or\nfunction uint64_or(){};\n// Provides: uint64_xor\nfunction uint64_xor(){};\n// Provides: uint64_shift_left\nfunction uint64_shift_left(){};\n// Provides: uint64_shift_right\nfunction uint64_shift_right(){};\n// Provides: uint64_neg\nfunction uint64_neg(){};\n// Provides: uint64_of_int\nfunction uint64_of_int(){};\n// Provides: uint64_of_nativeint\nfunction uint64_of_nativeint(){};\n// Provides: uint64_of_float\nfunction uint64_of_float(){};\n// Provides: uint64_of_uint40\nfunction uint64_of_uint40(){};\n// Provides: uint64_of_uint48\nfunction uint64_of_uint48(){};\n// Provides: uint64_of_uint56\nfunction uint64_of_uint56(){};\n// Provides: uint64_of_uint128\nfunction uint64_of_uint128(){};\n// Provides: int_of_uint64\nfunction int_of_uint64(){};\n// Provides: nativeint_of_uint64\nfunction nativeint_of_uint64(){};\n// Provides: float_of_uint64\nfunction float_of_uint64(){};\n// Provides: uint40_of_uint64\nfunction uint40_of_uint64(){};\n// Provides: uint48_of_uint64\nfunction uint48_of_uint64(){};\n// Provides: uint56_of_uint64\nfunction uint56_of_uint64(){};\n// Provides: uint128_of_uint64\nfunction uint128_of_uint64(){};\n// Provides: uint64_max_int\nfunction uint64_max_int(){};\n// Provides: uint64_init_custom_ops\nfunction uint64_init_custom_ops(){};\n// Provides: uint40_mul\nfunction uint40_mul(){};\n// Provides: uint40_div\nfunction uint40_div(){};\n// Provides: uint40_neg\nfunction uint40_neg(){};\n// Provides: uint40_of_int\nfunction uint40_of_int(){};\n// Provides: uint40_of_nativeint\nfunction uint40_of_nativeint(){};\n// Provides: uint40_of_float\nfunction uint40_of_float(){};\n// Provides: uint40_of_uint48\nfunction uint40_of_uint48(){};\n// Provides: uint40_of_uint56\nfunction uint40_of_uint56(){};\n// Provides: 
uint40_of_uint128\nfunction uint40_of_uint128(){};\n// Provides: int_of_uint40\nfunction int_of_uint40(){};\n// Provides: nativeint_of_uint40\nfunction nativeint_of_uint40(){};\n// Provides: float_of_uint40\nfunction float_of_uint40(){};\n// Provides: uint48_of_uint40\nfunction uint48_of_uint40(){};\n// Provides: uint56_of_uint40\nfunction uint56_of_uint40(){};\n// Provides: uint128_of_uint40\nfunction uint128_of_uint40(){};\n// Provides: uint48_mul\nfunction uint48_mul(){};\n// Provides: uint48_div\nfunction uint48_div(){};\n// Provides: uint48_neg\nfunction uint48_neg(){};\n// Provides: uint48_of_int\nfunction uint48_of_int(){};\n// Provides: uint48_of_nativeint\nfunction uint48_of_nativeint(){};\n// Provides: uint48_of_float\nfunction uint48_of_float(){};\n// Provides: uint48_of_uint56\nfunction uint48_of_uint56(){};\n// Provides: uint48_of_uint128\nfunction uint48_of_uint128(){};\n// Provides: int_of_uint48\nfunction int_of_uint48(){};\n// Provides: nativeint_of_uint48\nfunction nativeint_of_uint48(){};\n// Provides: float_of_uint48\nfunction float_of_uint48(){};\n// Provides: uint56_of_uint48\nfunction uint56_of_uint48(){};\n// Provides: uint128_of_uint48\nfunction uint128_of_uint48(){};\n// Provides: uint56_mul\nfunction uint56_mul(){};\n// Provides: uint56_div\nfunction uint56_div(){};\n// Provides: uint56_neg\nfunction uint56_neg(){};\n// Provides: uint56_of_int\nfunction uint56_of_int(){};\n// Provides: uint56_of_nativeint\nfunction uint56_of_nativeint(){};\n// Provides: uint56_of_float\nfunction uint56_of_float(){};\n// Provides: uint56_of_uint128\nfunction uint56_of_uint128(){};\n// Provides: int_of_uint56\nfunction int_of_uint56(){};\n// Provides: nativeint_of_uint56\nfunction nativeint_of_uint56(){};\n// Provides: float_of_uint56\nfunction float_of_uint56(){};\n// Provides: uint128_of_uint56\nfunction uint128_of_uint56(){};\n// Provides: uint128_add\nfunction uint128_add(){};\n// Provides: uint128_sub\nfunction uint128_sub(){};\n// Provides: 
uint128_mul\nfunction uint128_mul(){};\n// Provides: uint128_div\nfunction uint128_div(){};\n// Provides: uint128_mod\nfunction uint128_mod(){};\n// Provides: uint128_and\nfunction uint128_and(){};\n// Provides: uint128_or\nfunction uint128_or(){};\n// Provides: uint128_xor\nfunction uint128_xor(){};\n// Provides: uint128_shift_left\nfunction uint128_shift_left(){};\n// Provides: uint128_shift_right\nfunction uint128_shift_right(){};\n// Provides: uint128_of_int\nfunction uint128_of_int(){};\n// Provides: uint128_of_nativeint\nfunction uint128_of_nativeint(){};\n// Provides: uint128_of_float\nfunction uint128_of_float(){};\n// Provides: int_of_uint128\nfunction int_of_uint128(){};\n// Provides: nativeint_of_uint128\nfunction nativeint_of_uint128(){};\n// Provides: float_of_uint128\nfunction float_of_uint128(){};\n// Provides: uint128_max_int\nfunction uint128_max_int(){};\n// Provides: uint128_init_custom_ops\nfunction uint128_init_custom_ops(){};\n"
  },
  {
    "path": "engine/bin/js_stubs/unix.js",
    "content": "// //Provides: unix_ll\n// function unix_ll(s, args) { \n//   // if (unix_ll.log) joo_global_object.console.warn(s, args); \n//   // if (unix_ll.trap) throw new Error(\"unix trap: '\"+ s + \"' not implemented\");\n// }\n// // unix_ll.log = true;       // whether to log calls\n// // unix_ll.trap = false;     // whether to halt on calls\n\n// //Provides: caml_raise_unix_error\n// //Requires: caml_named_value, caml_raise_with_arg, caml_new_string\n// function caml_raise_unix_error(msg) {\n//   var tag = caml_named_value(\"Unix.Unix_error\");\n//   // var util = require('util');\n//   // console.log(util.inspect(chan, {showHidden: false, depth: null}));\n//   caml_raise_with_arg (tag, caml_new_string (msg));\n// }\n\n// //Provides: unix_access\n// //Requires: unix_ll\n// function unix_access() {\n//   unix_ll(\"unix_access\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_alarm\n// //Requires: unix_ll\n// function unix_alarm() {\n//   unix_ll(\"unix_alarm\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_bind\n// //Requires: unix_ll\n// function unix_bind() {\n//   unix_ll(\"unix_bind\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_close\n// //Requires: unix_ll\n// function unix_close() {\n//   unix_ll(\"unix_close\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_connect\n// //Requires: unix_ll\n// function unix_connect() {\n//   unix_ll(\"unix_connect\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_dup\n// //Requires: unix_ll\n// function unix_dup() {\n//   unix_ll(\"unix_dup\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_dup2\n// //Requires: unix_ll\n// function unix_dup2() {\n//   unix_ll(\"unix_dup2\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_environment\n// //Requires: unix_ll\n// function unix_environment() {\n//   unix_ll(\"unix_environment\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_error_message\n// //Requires: unix_ll\n// function 
unix_error_message() {\n//   unix_ll(\"unix_error_message\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_execve\n// //Requires: unix_ll\n// function unix_execve() {\n//   unix_ll(\"unix_execve\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_execvp\n// //Requires: unix_ll\n// function unix_execvp() {\n//   unix_ll(\"unix_execvp\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_execvpe\n// //Requires: unix_ll\n// function unix_execvpe() {\n//   unix_ll(\"unix_execvpe\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_getcwd\n// //Requires: unix_ll\n// function unix_getcwd() {\n//   unix_ll(\"unix_getcwd\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_fork\n// //Requires: unix_ll\n// function unix_fork() {\n//   unix_ll(\"unix_fork\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_getpid\n// //Requires: unix_ll\n// function unix_getpid() {\n//   unix_ll(\"unix_getpid\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_getpwnam\n// //Requires: unix_ll\n// function unix_getpwnam() {\n//   unix_ll(\"unix_getpwnam\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_getsockname\n// //Requires: unix_ll\n// function unix_getsockname() {\n//   unix_ll(\"unix_getsockname\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_kill\n// //Requires: unix_ll\n// function unix_kill() {\n//   unix_ll(\"unix_kill\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_listen\n// //Requires: unix_ll\n// function unix_listen() {\n//   unix_ll(\"unix_listen\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_pipe\n// //Requires: unix_ll\n// function unix_pipe() {\n//   unix_ll(\"unix_pipe\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_read\n// //Requires: unix_ll\n// function unix_read() {\n//   unix_ll(\"unix_read\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_opendir\n// //Requires: unix_ll\n// function unix_opendir(dir) {\n//   unix_ll(\"unix_opendir\", 
arguments);\n\n//   // caml_raise_unix_error(\"opendir\", arguments);\n//   return [];\n// }\n\n// //Provides: unix_readdir\n// //Requires: unix_ll, caml_raise_constant, caml_global_data\n// function unix_readdir(dir) {\n//   unix_ll(\"unix_readdir\", arguments);\n\n//   // caml_raise_unix_error(\"readdir\", arguments);\n//   caml_raise_constant(caml_global_data.End_of_file);\n//   return [];\n// }\n\n// //Provides: unix_closedir\n// //Requires: unix_ll\n// function unix_closedir() {\n//   unix_ll(\"unix_closedir\", arguments);\n//   return [];\n// }\n\n// //Provides: unix_select\n// //Requires: unix_ll\n// function unix_select() {\n//   unix_ll(\"unix_select\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_set_close_on_exec\n// //Requires: unix_ll\n// function unix_set_close_on_exec() {\n//   unix_ll(\"unix_set_close_on_exec\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_set_nonblock\n// //Requires: unix_ll\n// function unix_set_nonblock() {\n//   unix_ll(\"unix_set_nonblock\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_sleep\n// //Requires: unix_ll\n// function unix_sleep() {\n//   unix_ll(\"unix_sleep\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_socket\n// //Requires: unix_ll\n// function unix_socket() {\n//   unix_ll(\"unix_socket\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_string_of_inet_addr\n// //Requires: unix_ll\n// function unix_string_of_inet_addr() {\n//   unix_ll(\"unix_string_of_inet_addr\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_times\n// //Requires: unix_ll\n// function unix_times() {\n//   unix_ll(\"unix_times\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_wait\n// //Requires: unix_ll\n// function unix_wait() {\n//   unix_ll(\"unix_wait\", arguments);\n//   return 0;\n// }\n\n// //Provides: unix_waitpid\n// //Requires: unix_ll\n// function unix_waitpid() {\n//   unix_ll(\"unix_waitpid\", arguments);\n//   return 0;\n// }\n\n// // Provides: 
unix_accept\n// // Requires: unix_ll\n// function unix_accept()                 { unix_ll(\"unix_accept\", arguments); }\n// // Provides: unix_chdir\n// // Requires: unix_ll\n// function unix_chdir()                  { unix_ll(\"unix_chdir\", arguments); }\n// // Provides: unix_chmod\n// // Requires: unix_ll\n// function unix_chmod()                  { unix_ll(\"unix_chmod\", arguments); }\n// // Provides: unix_chown\n// // Requires: unix_ll\n// function unix_chown()                  { unix_ll(\"unix_chown\", arguments); }\n// // Provides: unix_chroot\n// // Requires: unix_ll\n// function unix_chroot()                 { unix_ll(\"unix_chroot\", arguments); }\n// // Provides: unix_clear_close_on_exec\n// // Requires: unix_ll\n// function unix_clear_close_on_exec()    { unix_ll(\"unix_clear_close_on_exec\", arguments); }\n// // Provides: unix_clear_nonblock\n// // Requires: unix_ll\n// function unix_clear_nonblock()         { unix_ll(\"unix_clear_nonblock\", arguments); }\n// // Provides: unix_environment_unsafe\n// // Requires: unix_ll\n// function unix_environment_unsafe()     { unix_ll(\"unix_environment_unsafe\", arguments); }\n// // Provides: unix_execv\n// // Requires: unix_ll\n// function unix_execv()                  { unix_ll(\"unix_execv\", arguments); }\n// // Provides: unix_fchmod\n// // Requires: unix_ll\n// function unix_fchmod()                 { unix_ll(\"unix_fchmod\", arguments); }\n// // Provides: unix_fchown\n// // Requires: unix_ll\n// function unix_fchown()                 { unix_ll(\"unix_fchown\", arguments); }\n// // Provides: unix_fstat\n// // Requires: unix_ll\n// function unix_fstat()                 { unix_ll(\"unix_fstat\", arguments); }\n// // Provides: unix_fstat_64\n// // Requires: unix_ll\n// function unix_fstat_64()              { unix_ll(\"unix_fstat_64\", arguments); }\n// // Provides: unix_ftruncate\n// // Requires: unix_ll\n// function unix_ftruncate()             { unix_ll(\"unix_ftruncate\", arguments); }\n// // Provides: 
unix_ftruncate_64\n// // Requires: unix_ll\n// function unix_ftruncate_64()          { unix_ll(\"unix_ftruncate_64\", arguments); }\n// // Provides: unix_getaddrinfo\n// // Requires: unix_ll\n// function unix_getaddrinfo()           { unix_ll(\"unix_getaddrinfo\", arguments); }\n// // Provides: unix_getegid\n// // Requires: unix_ll\n// function unix_getegid()               { unix_ll(\"unix_getegid\", arguments); }\n// // Provides: unix_geteuid\n// // Requires: unix_ll\n// function unix_geteuid()               { unix_ll(\"unix_geteuid\", arguments); }\n// // Provides: unix_getgid\n// // Requires: unix_ll\n// function unix_getgid()                { unix_ll(\"unix_getgid\", arguments); }\n// // Provides: unix_getgrgid\n// // Requires: unix_ll\n// function unix_getgrgid()              { unix_ll(\"unix_getgrgid\", arguments); }\n// // Provides: unix_getgrnam\n// // Requires: unix_ll\n// function unix_getgrnam()              { unix_ll(\"unix_getgrnam\", arguments); }\n// // Provides: unix_getgroups\n// // Requires: unix_ll\n// function unix_getgroups()             { unix_ll(\"unix_getgroups\", arguments); }\n// // Provides: unix_gethostbyaddr\n// // Requires: unix_ll\n// function unix_gethostbyaddr()         { unix_ll(\"unix_gethostbyaddr\", arguments); }\n// // Provides: unix_gethostbyname\n// // Requires: unix_ll\n// function unix_gethostbyname()         { unix_ll(\"unix_gethostbyname\", arguments); }\n// // Provides: unix_gethostname\n// // Requires: unix_ll\n// function unix_gethostname()           { unix_ll(\"unix_gethostname\", arguments); }\n// // Provides: unix_getitimer\n// // Requires: unix_ll\n// function unix_getitimer()             { unix_ll(\"unix_getitimer\", arguments); }\n// // Provides: unix_getlogin\n// // Requires: unix_ll\n// function unix_getlogin()              { unix_ll(\"unix_getlogin\", arguments); }\n// // Provides: unix_getnameinfo\n// // Requires: unix_ll\n// function unix_getnameinfo()           { unix_ll(\"unix_getnameinfo\", arguments); 
}\n// // Provides: unix_getpeername\n// // Requires: unix_ll\n// function unix_getpeername()           { unix_ll(\"unix_getpeername\", arguments); }\n// // Provides: unix_getppid\n// // Requires: unix_ll\n// function unix_getppid()               { unix_ll(\"unix_getppid\", arguments); }\n// // Provides: unix_getprotobyname\n// // Requires: unix_ll\n// function unix_getprotobyname()        { unix_ll(\"unix_getprotobyname\", arguments); }\n// // Provides: unix_getprotobynumber\n// // Requires: unix_ll\n// function unix_getprotobynumber()      { unix_ll(\"unix_getprotobynumber\", arguments); }\n// // Provides: unix_getservbyname\n// // Requires: unix_ll\n// function unix_getservbyname()         { unix_ll(\"unix_getservbyname\", arguments); }\n// // Provides: unix_getservbyport\n// // Requires: unix_ll\n// function unix_getservbyport()         { unix_ll(\"unix_getservbyport\", arguments); }\n// // Provides: unix_getsockopt\n// // Requires: unix_ll\n// function unix_getsockopt()            { unix_ll(\"unix_getsockopt\", arguments); }\n// // Provides: unix_initgroups\n// // Requires: unix_ll\n// function unix_initgroups()            { unix_ll(\"unix_initgroups\", arguments); }\n// // Provides: unix_link\n// // Requires: unix_ll\n// function unix_link()                  { unix_ll(\"unix_link\", arguments); }\n// // Provides: unix_lockf\n// // Requires: unix_ll\n// function unix_lockf()                 { unix_ll(\"unix_lockf\", arguments); }\n// // Provides: unix_lseek\n// // Requires: unix_ll\n// function unix_lseek()                 { unix_ll(\"unix_lseek\", arguments); }\n// // Provides: unix_lseek_64\n// // Requires: unix_ll\n// function unix_lseek_64()              { unix_ll(\"unix_lseek_64\", arguments); }\n// // Provides: unix_mkfifo\n// // Requires: unix_ll\n// function unix_mkfifo()                { unix_ll(\"unix_mkfifo\", arguments); }\n// // Provides: unix_nice\n// // Requires: unix_ll\n// function unix_nice()                  { unix_ll(\"unix_nice\", 
arguments); }\n// // Provides: unix_open\n// // Requires: unix_ll\n// function unix_open()                  { unix_ll(\"unix_open\", arguments); }\n// // Provides: unix_putenv\n// // Requires: unix_ll\n// function unix_putenv()                { unix_ll(\"unix_putenv\", arguments); }\n// // Provides: unix_recv\n// // Requires: unix_ll\n// function unix_recv()                  { unix_ll(\"unix_recv\", arguments); }\n// // Provides: unix_recvfrom\n// // Requires: unix_ll\n// function unix_recvfrom()              { unix_ll(\"unix_recvfrom\", arguments); }\n// // Provides: unix_rename\n// // Requires: unix_ll\n// function unix_rename()                { unix_ll(\"unix_rename\", arguments); }\n// // Provides: unix_rewinddir\n// // Requires: unix_ll\n// function unix_rewinddir()             { unix_ll(\"unix_rewinddir\", arguments); }\n// // Provides: unix_send\n// // Requires: unix_ll\n// function unix_send()                  { unix_ll(\"unix_send\", arguments); }\n// // Provides: unix_sendto\n// // Requires: unix_ll\n// function unix_sendto()                { unix_ll(\"unix_sendto\", arguments); }\n// // Provides: unix_setgid\n// // Requires: unix_ll\n// function unix_setgid()                { unix_ll(\"unix_setgid\", arguments); }\n// // Provides: unix_setgroups\n// // Requires: unix_ll\n// function unix_setgroups()             { unix_ll(\"unix_setgroups\", arguments); }\n// // Provides: unix_setitimer\n// // Requires: unix_ll\n// function unix_setitimer()             { unix_ll(\"unix_setitimer\", arguments); }\n// // Provides: unix_setsid\n// // Requires: unix_ll\n// function unix_setsid()                { unix_ll(\"unix_setsid\", arguments); }\n// // Provides: unix_setsockopt\n// // Requires: unix_ll\n// function unix_setsockopt()            { unix_ll(\"unix_setsockopt\", arguments); }\n// // Provides: unix_setuid\n// // Requires: unix_ll\n// function unix_setuid()                { unix_ll(\"unix_setuid\", arguments); }\n// // Provides: unix_shutdown\n// // Requires: 
unix_ll\n// function unix_shutdown()              { unix_ll(\"unix_shutdown\", arguments); }\n// // Provides: unix_sigpending\n// // Requires: unix_ll\n// function unix_sigpending()            { unix_ll(\"unix_sigpending\", arguments); }\n// // Provides: unix_sigprocmask\n// // Requires: unix_ll\n// function unix_sigprocmask()           { unix_ll(\"unix_sigprocmask\", arguments); }\n// // Provides: unix_sigsuspend\n// // Requires: unix_ll\n// function unix_sigsuspend()            { unix_ll(\"unix_sigsuspend\", arguments); }\n// // Provides: unix_single_write\n// // Requires: unix_ll\n// function unix_single_write()          { unix_ll(\"unix_single_write\", arguments); }\n// // Provides: unix_socketpair\n// // Requires: unix_ll\n// function unix_socketpair()            { unix_ll(\"unix_socketpair\", arguments); }\n// // Provides: unix_tcdrain\n// // Requires: unix_ll\n// function unix_tcdrain()               { unix_ll(\"unix_tcdrain\", arguments); }\n// // Provides: unix_tcflow\n// // Requires: unix_ll\n// function unix_tcflow()                { unix_ll(\"unix_tcflow\", arguments); }\n// // Provides: unix_tcflush\n// // Requires: unix_ll\n// function unix_tcflush()               { unix_ll(\"unix_tcflush\", arguments); }\n// // Provides: unix_tcgetattr\n// // Requires: unix_ll\n// function unix_tcgetattr()             { unix_ll(\"unix_tcgetattr\", arguments); }\n// // Provides: unix_tcsendbreak\n// // Requires: unix_ll\n// function unix_tcsendbreak()           { unix_ll(\"unix_tcsendbreak\", arguments); }\n// // Provides: unix_tcsetattr\n// // Requires: unix_ll\n// function unix_tcsetattr()             { unix_ll(\"unix_tcsetattr\", arguments); }\n// // Provides: unix_truncate\n// // Requires: unix_ll\n// function unix_truncate()              { unix_ll(\"unix_truncate\", arguments); }\n// // Provides: unix_truncate_64\n// // Requires: unix_ll\n// function unix_truncate_64()           { unix_ll(\"unix_truncate_64\", arguments); }\n// // Provides: unix_umask\n// // 
Requires: unix_ll\n// function unix_umask()                 { unix_ll(\"unix_umask\", arguments); }\n// // Provides: unix_utimes\n// // Requires: unix_ll\n// function unix_utimes()                { unix_ll(\"unix_utimes\", arguments); }\n// // Provides: unix_write\n// // Requires: unix_ll\n// function unix_write()                 { unix_ll(\"unix_write\", arguments); }\n// // Provides: unix_exit\n// // Requires: unix_ll\n// function unix_exit()                  { unix_ll(\"unix_exit\", arguments); }\n// // Provides: unix_spawn\n// // Requires: unix_ll\n// function unix_spawn()                 { unix_ll(\"unix_spawn\", arguments); }\n// // Provides: unix_fsync\n// // Requires: unix_ll\n// function unix_fsync()                 { unix_ll(\"unix_fsync\", arguments); }\n// // Provides: unix_inchannel_of_filedescr\n// // Requires: unix_ll\n// function unix_inchannel_of_filedescr()  { unix_ll(\"unix_inchannel_of_filedescr\", arguments); }\n// // Provides: unix_outchannel_of_filedescr\n// // Requires: unix_ll\n// function unix_outchannel_of_filedescr() { unix_ll(\"unix_outchannel_of_filedescr\", arguments); }\n// Provides: caml_mutex_try_lock\n// // Requires: unix_ll\nfunction caml_mutex_try_lock()       {  }\n// Provides: caml_thread_join\nfunction caml_thread_join()          {  }\n// Provides: caml_thread_sigmask\nfunction caml_thread_sigmask()       {  }\n// Provides: caml_unix_map_file_bytecode\nfunction caml_unix_map_file_bytecode() {  }\n// Provides: caml_wait_signal\nfunction caml_wait_signal()          {  }\n"
  },
  {
    "path": "engine/bin/lib.ml",
    "content": "open Hax_engine\nopen Base\nopen Stdio\nopen Utils\n\nlet setup_logs (options : Types.engine_options) =\n  let level : Logs.level option =\n    match options.backend.verbose with\n    | 0 -> None\n    | 1 -> Some Info\n    | _ -> Some Debug\n  in\n  Logs.set_level level;\n  Logs.set_reporter @@ Logs.format_reporter ()\n\nmodule Deps = Dependencies.Make (Features.Rust)\n\nmodule Error : Phase_utils.ERROR = Phase_utils.MakeError (struct\n  let ctx = Diagnostics.Context.ThirImport\nend)\n\nmodule Attrs = Attr_payloads.MakeBase (Error)\n\nlet import_thir_items (include_clauses : Types.inclusion_clause list)\n    (items : Types.item_for__thir_body list) : Ast.Rust.item list =\n  let imported_items =\n    List.map\n      ~f:(fun item ->\n        let ident = Concrete_ident.(of_def_id ~value:true item.owner_id) in\n        let most_precise_clause =\n          (* Computes the include clause that apply to `item`, if any *)\n          List.filter\n            ~f:(fun clause ->\n              Concrete_ident.matches_namespace clause.Types.namespace ident)\n            include_clauses\n          |> List.last\n        in\n        let type_only =\n          (* Shall we drop the body? *)\n          Option.map\n            ~f:(fun clause -> [%matches? 
Types.SignatureOnly] clause.kind)\n            most_precise_clause\n          |> Option.value ~default:false\n        in\n        Import_thir.import_item ~type_only item)\n      items\n    |> List.map ~f:snd\n  in\n  Logs.info (fun m -> m \"Items translated\");\n  let items = List.concat_map ~f:fst imported_items in\n  let associated_items =\n    let assoc_items = Deps.uid_associated_items items in\n    fun (item : Deps.AST.item) -> assoc_items item.attrs\n  in\n  (* Build a map from idents to error reports *)\n  let ident_to_reports =\n    List.concat_map\n      ~f:(fun (items, reports) ->\n        List.map ~f:(fun (item : Ast.Rust.item) -> (item.ident, reports)) items)\n      imported_items\n    |> Map.of_alist_exn (module Concrete_ident)\n  in\n  let items = Deps.filter_by_inclusion_clauses include_clauses items in\n  let items =\n    items\n    @ (List.concat_map ~f:associated_items items\n      |> List.filter ~f:(List.mem ~equal:[%eq: Deps.AST.item] items >> not))\n  in\n  let items =\n    List.filter\n      ~f:(fun i ->\n        match Attrs.status i.attrs with Included _ -> true | _ -> false)\n      items\n  in\n  Hax_io.write\n    (ItemProcessed (List.filter_map ~f:(fun i -> Span.owner_hint i.span) items));\n  (* Extract error reports for the items we actually extract *)\n  let reports =\n    List.concat_map\n      ~f:(fun (item : Ast.Rust.item) ->\n        Map.find_exn ident_to_reports item.ident)\n      items\n    |> List.dedup_and_sort ~compare:Diagnostics.compare\n  in\n  (* Report every error *)\n  List.iter ~f:Diagnostics.Core.report reports;\n  items\n\nlet run (options : Types.engine_options) : Types.output =\n  setup_logs options;\n  if options.backend.debug_engine |> Option.is_some then\n    Phase_utils.DebugBindPhase.enable ();\n  let run (type options_type)\n      (module M : Backend.T with type BackendOptions.t = options_type)\n      (backend_options : options_type) : Types.file list =\n    let open M in\n    Concrete_ident.ImplInfoStore.init\n  
    (Concrete_ident_generated.impl_infos @ options.impl_infos);\n    let include_clauses =\n      options.backend.translation_options.include_namespaces\n    in\n    let input =\n      match options.input with\n      | Types.FullDef _ ->\n          failwith \"Internal error: the ocaml engine does not support FullDef\"\n      | Types.Legacy i -> i\n    in\n    let items =\n      Profiling.profile ThirImport (List.length input) (fun _ ->\n          import_thir_items include_clauses input)\n    in\n    let items =\n      if options.backend.extract_type_aliases then items\n      else\n        List.filter\n          ~f:(function { v = TyAlias _; _ } -> false | _ -> true)\n          items\n    in\n    Logs.info (fun m ->\n        m \"Applying phase for backend %s\"\n          ([%show: Diagnostics.Backend.t] M.backend));\n    let items = apply_phases backend_options items in\n    let with_items = Attrs.with_items items in\n    let bundles, _ =\n      let module DepGraph = Dependencies.Make (InputLanguage) in\n      DepGraph.recursive_bundles items\n    in\n    let items =\n      List.filter items ~f:(fun (i : AST.item) ->\n          Attrs.late_skip i.attrs |> not)\n    in\n    Logs.info (fun m ->\n        m \"Translating items with backend %s\"\n          ([%show: Diagnostics.Backend.t] M.backend));\n    let items =\n      Profiling.profile (Backend M.backend) (List.length items) (fun _ ->\n          translate with_items backend_options items ~bundles)\n    in\n    items\n  in\n  let diagnostics, files =\n    Diagnostics.try_ (fun () ->\n        match options.backend.backend with\n        | ProVerif opts -> run (module Proverif_backend) opts\n        | Fstar opts -> run (module Fstar_backend) opts\n        | Coq -> run (module Coq_backend) ()\n        | Ssprove -> run (module Ssprove_backend) ()\n        | Easycrypt -> run (module Easycrypt_backend) ()\n        | GenerateRustEngineNames ->\n            failwith\n              \"The OCaml hax engine should never be called 
with \\\n               `GenerateRustEngineNames`, it is an rust engine only internal \\\n               command.\"\n        | backend ->\n            failwith\n              (\"The OCaml hax engine should never be called with backend `\"\n              ^ [%show: Types.backend_for__null] backend\n              ^ \"`. This backend uses the newer rust engine. Please report \\\n                 this issue on our GitHub repository: \\\n                 https://github.com/cryspen/hax.\"))\n  in\n  {\n    diagnostics = List.map ~f:Diagnostics.to_thir_diagnostic diagnostics;\n    files = Option.value ~default:[] files;\n    debug_json = [];\n  }\n\n(** Shallow parses a `id_table::Node<T>` (or a raw `T`) JSON *)\nlet parse_id_table_node (json : Yojson.Safe.t) :\n    (int64 * Yojson.Safe.t) list * Yojson.Safe.t =\n  let expect_uint64 = function\n    | `Intlit str -> Some (Int64.of_string str)\n    | `Int id -> Some (Int.to_int64 id)\n    | _ -> None\n  in\n  let table, value =\n    match json with\n    | `List [ table; value ] -> (table, value)\n    | _ -> failwith \"parse_id_table_node: expected a tuple at top-level\"\n  in\n  let table =\n    match table with\n    | `List json_list -> json_list\n    | _ -> failwith \"parse_id_table_node: `map` is supposed to be a list\"\n  in\n  let table =\n    List.map\n      ~f:(function\n        | `List [ id; `Assoc [ (_, contents) ] ] ->\n            let id =\n              expect_uint64 id\n              |> Option.value_exn\n                   ~message:\"parse_id_table_node: id: expected int64\"\n            in\n            (id, contents)\n        | _ -> failwith \"parse_id_table_node: expected a list of size two\")\n      table\n  in\n  (table, value)\n\nlet load_table ?(check_version = true) : Yojson.Safe.t =\n  let table, json =\n    Hax_io.read_json () |> Option.value_exn |> parse_id_table_node\n  in\n  (if check_version then\n     let version =\n       try Yojson.Safe.Util.(member \"hax_version\" json |> to_string)\n       with 
_ -> \"unknown\"\n     in\n     if String.equal version Types.hax_version |> not then (\n       prerr_endline\n         [%string\n           {|\nThe versions of `hax-engine` and of `cargo-hax` are different:\n  - `hax-engine` version: %{Types.hax_version}\n  - `cargo-hax`  version: %{version}\n\nPlease reinstall hax.\n|}];\n       Stdlib.exit 1));\n  table\n  |> List.iter ~f:(fun (id, json) ->\n         Hashtbl.add_exn Types.cache_map ~key:id ~data:(`JSON json));\n  json\n\nlet parse_options () =\n  let json = load_table ~check_version:true in\n  let options = [%of_yojson: Types.engine_options] json in\n  Profiling.enabled := options.backend.profile;\n  options\n\nlet send_debug_strings =\n  Phase_utils.DebugBindPhase.export\n  >> List.iter ~f:(fun json -> DebugString json |> Hax_io.write)\n\n(** Entrypoint of the engine. Assumes `Hax_io.init` was called. *)\nlet engine () =\n  let options = Profiling.profile (Other \"parse_options\") 1 parse_options in\n  Printexc.record_backtrace true;\n  let result =\n    try Ok (run options) with\n    | Hax_engine.Diagnostics.SpanFreeError.Exn exn ->\n        Error\n          ( Failure\n              (\"Uncatched hax exception (please report, this should not \\\n                appear): \"\n              ^ [%show: Hax_engine.Diagnostics.SpanFreeError.t] exn),\n            Printexc.get_raw_backtrace () )\n    | e -> Error (e, Printexc.get_raw_backtrace ())\n  in\n  match result with\n  | Ok results ->\n      List.iter\n        ~f:(fun diag -> Diagnostic diag |> Hax_io.write)\n        results.diagnostics;\n      List.iter ~f:(fun file -> File file |> Hax_io.write) results.files;\n\n      send_debug_strings ();\n      Hax_io.close ();\n\n      Logs.info (fun m -> m \"Exiting Hax engine (success)\")\n  | Error (exn, bt) ->\n      Logs.info (fun m -> m \"Exiting Hax engine (with an unexpected failure)\");\n      Printexc.raise_with_backtrace exn bt\n\nmodule ExportFullAst = Export_ast.Make (Features.Full)\nmodule ExportRustAst = 
Export_ast.Make (Features.Rust)\nmodule ExportLeanAst = Export_ast.Make (Lean_backend.InputLanguage)\n\nlet driver_for_rust_engine_inner (query : Rust_engine_types.query) :\n    Rust_engine_types.response =\n  Profiling.enabled := query.profiling;\n  if query.debug_bind_phase then Phase_utils.DebugBindPhase.enable ();\n  match query.kind with\n  | Types.ImportThir { input; translation_options } ->\n      let imported_items =\n        import_thir_items translation_options.include_namespaces input\n      in\n      let rust_ast_items =\n        List.concat_map ~f:ExportRustAst.ditem imported_items\n      in\n      Rust_engine_types.ImportThir { output = rust_ast_items }\n  | Types.ApplyPhases { input; phases } ->\n      let items = List.concat_map ~f:Import_ast.ditem input in\n      let module Phase =\n        (val List.map\n               ~f:(fun name ->\n                 Untyped_phases.phase_of_name name |> Option.value_exn)\n               phases\n             |> Untyped_phases.bind_list)\n      in\n      let items = Phase.ditems items in\n      let output = List.concat_map ~f:ExportFullAst.ditem items in\n      Rust_engine_types.ApplyPhases { output }\n  | Types.Print { printer = Fstar backend_options; input } ->\n      let open Fstar_backend in\n      let items = List.concat_map ~f:Import_ast.ditem input in\n\n      let items : AST.item list = Stdlib.Obj.magic items in\n      let items = post_process_items items in\n      let with_items = Attrs.with_items items in\n      let bundles, _ =\n        let module DepGraph = Dependencies.Make (InputLanguage) in\n        DepGraph.recursive_bundles items\n      in\n      let items =\n        List.filter items ~f:(fun (i : AST.item) ->\n            Attrs.late_skip i.attrs |> not)\n      in\n      Logs.info (fun m ->\n          m \"Translating items with backend %s\"\n            ([%show: Diagnostics.Backend.t] Fstar_backend.backend));\n      let files =\n        Profiling.profile (Backend Fstar_backend.backend) 
(List.length items)\n          (fun _ -> translate with_items backend_options items ~bundles)\n      in\n      List.iter ~f:(fun file -> File file |> Hax_io.write) files;\n      Rust_engine_types.PrintOk\n  | Types.Print _ ->\n      failwith\n        \"Using the Ocaml engine for Printing only is reserved to the F* backend\"\n\n(** Entry point for interacting with the Rust hax engine *)\nlet driver_for_rust_engine () : unit =\n  let query : Rust_engine_types.query =\n    let json = load_table ~check_version:false in\n    [%of_yojson: Rust_engine_types.query] json\n  in\n  Concrete_ident.ImplInfoStore.init\n    (Concrete_ident_generated.impl_infos @ query.impl_infos);\n  let response = driver_for_rust_engine_inner query in\n  send_debug_strings ();\n  Hax_io.write_json ([%yojson_of: Rust_engine_types.response] response);\n  Hax_io.write_json ([%yojson_of: Types.from_engine] Exit)\n"
  },
  {
    "path": "engine/bin/lib.mli",
    "content": "val engine : unit -> unit\nval driver_for_rust_engine : unit -> unit\n"
  },
  {
    "path": "engine/bin/native_driver.ml",
    "content": "open Hax_engine\nopen Base\n\nlet _ =\n  Hax_io.init\n    (module struct\n      let stdin_json_stream =\n        ref (Yojson.Safe.seq_from_channel In_channel.stdin)\n\n      let read_json () =\n        match Stdlib.Seq.uncons !stdin_json_stream with\n        | Some (json, stream) ->\n            stdin_json_stream := stream;\n            Some json\n        | None -> None\n\n      let write_json msg =\n        let open Stdio.Out_channel in\n        Yojson.Safe.to_channel stdout msg;\n        output_char stdout '\\n';\n        flush stdout\n    end);\n  match Sys.get_argv () with\n  | [| _; \"driver_rust_engine\" |] -> Lib.driver_for_rust_engine ()\n  | _ -> Lib.engine ()\n"
  },
  {
    "path": "engine/default.nix",
    "content": "{ ocamlPackages, fetchzip, hax-rust-frontend, hax-engine-names-extract, rustc\n, nodejs, jq, closurecompiler, gnused, lib, removeReferencesTo, fetchFromGitHub\n}:\nlet\n  non_empty_list = ocamlPackages.buildDunePackage rec {\n    pname = \"non_empty_list\";\n    version = \"0.1\";\n    src = fetchzip {\n      url =\n        \"https://github.com/johnyob/ocaml-non-empty-list/archive/refs/tags/${version}.zip\";\n      sha256 = \"sha256-BJlEi0yG2DRT5vuU9ulucMD5MPFt9duWgcNO1YsigiA=\";\n    };\n    buildInputs = with ocamlPackages; [ base ppxlib ppx_deriving ];\n    duneVersion = \"3\";\n    minimalOCamlVersion = \"4.08\";\n    doCheck = false;\n  };\n  ppx_matches = ocamlPackages.buildDunePackage rec {\n    pname = \"ppx_matches\";\n    version = \"0.1\";\n\n    src = fetchzip {\n      url =\n        \"https://github.com/wrbs/ppx_matches/archive/refs/tags/${version}.zip\";\n      sha256 = \"sha256-nAmWF8MgW0odKkRiFcHGsvJyIxNHaZpnOlNPsef89Fo=\";\n    };\n\n    buildInputs = [ ocamlPackages.ppxlib ];\n    duneVersion = \"3\";\n    minimalOCamlVersion = \"4.04\";\n    doCheck = false;\n  };\n  hax-engine = ocamlPackages.buildDunePackage {\n    pname = \"hax-engine\";\n    version = \"0.0.1\";\n    duneVersion = \"3\";\n    src = lib.sourceFilesBySuffices ./. 
[\n      \".ml\"\n      \".mli\"\n      \".js\"\n      \"dune\"\n      \"dune-js\"\n      \"dune-project\"\n      \"sh\"\n      \"rs\"\n      \"mld\"\n    ];\n    buildInputs = with ocamlPackages;\n      [\n        base\n        ppx_yojson_conv\n        yojson\n        ppx_sexp_conv\n        ppx_hash\n        pprint\n        non_empty_list\n        ppx_deriving_yojson\n        ppx_matches\n        ppx_let\n        ppx_enumerate\n        cmdliner\n        angstrom\n        ppx_string\n        logs\n        core\n        stdio\n        re\n        js_of_ocaml\n        ocamlgraph\n      ] ++\n      # F* dependencies\n      [ batteries menhirLib ppx_deriving ppxlib sedlex stdint ];\n    nativeBuildInputs = [\n      rustc\n      hax-rust-frontend\n      hax-engine-names-extract\n      nodejs\n      ocamlPackages.js_of_ocaml-compiler\n      jq\n      removeReferencesTo\n    ];\n    strictDeps = true;\n    installPhase = ''\n      dune install --prefix=$bin --libdir=$lib/lib/ocaml/${ocamlPackages.ocaml.version}/site-lib/\n      find \"$bin\" -type f -exec remove-references-to -t ${ocamlPackages.ocaml} '{}' +\n    '';\n\n    outputs = [ \"out\" \"bin\" \"lib\" ];\n    passthru = {\n      docs = hax-engine.overrideAttrs (old: {\n        name = \"hax-engine-docs\";\n        nativeBuildInputs = old.nativeBuildInputs ++ [ ocamlPackages.odoc ];\n        buildPhase = \"dune build @doc\";\n        installPhase = \"cp -rf _build/default/_doc/_html $out\";\n        outputs = [ \"out\" ];\n      });\n      js = hax-engine.overrideAttrs (old: {\n        name = \"hax-engine.js\";\n        nativeBuildInputs = old.nativeBuildInputs ++ [ closurecompiler gnused ];\n        outputs = [ \"out\" ];\n        buildPhase = ''\n          # Enable JS build\n          sed -i \"s/; (include dune-js)/(include dune-js)/g\" bin/dune\n          # Compile JS target\n          dune build bin/js_driver.bc.js\n          # Optimize the size of the JS file\n          closure-compiler --js 
_build/default/bin/js_driver.bc.js --js_output_file hax-engine.js\n          # Add a shebang & make executable\n          sed -i '1 i #!/usr/bin/env node' hax-engine.js\n          chmod +x hax-engine.js\n        '';\n        checkPhase = \"true\";\n        installPhase = \"cp hax-engine.js $out\";\n      });\n    };\n  };\nin hax-engine.overrideAttrs (_: { name = \"hax-engine\"; })\n"
  },
  {
    "path": "engine/doc/dune",
    "content": "(documentation\n (package hax-engine)\n (mld_files index))\n"
  },
  {
    "path": "engine/doc/index.mld",
    "content": "{0 Hax Engine}\n\nThe engine of hax is written in OCaml, and has the following structure:\n{ul {- the {!module-Hax_engine} library (located in `/engine/lib`)}\n    {- the {!module-Native_driver} binary (located in `/engine/bin`)}\n    {- the backends (located in `/engine/backends`):\n       {ul {- {!module-Fstar_backend}}\n           {- {!module-Coq_ast}}\n           {- {!module-Easycrypt_ast}}\n       }\n    }\n    {- utilities and PPXs:\n       {ul {- {!module-Hacspeclib_macro_parser}}\n           {- {!module-Ppx_functor_application}}\n           {- {!module-Ppx_generate_features}}\n           {- {!module-Ppx_inline}}\n        }\n    }\n}\n\n"
  },
  {
    "path": "engine/dune-project",
    "content": "(lang dune 3.0)\n\n(name hax-engine)\n\n(version 0.3.6)\n\n(generate_opam_files true)\n\n(source\n (github hacspec/hax))\n\n(authors \"Hax Authors\")\n\n(maintainers \"Hax Authors\")\n\n(license \"Apache-2.0\")\n\n(documentation https://hacspec.org/hax/)\n\n(package\n (name hax-engine)\n (synopsis \"The engine of hax, a Rust verification tool\")\n (description \"Hax is divided in two: a frontend (written in Rust) and an engine (written in OCaml). This is the engine.\")\n (depends\n        ocaml\n        dune\n        (base (>= \"0.16.2\"))\n        core\n        yojson\n        non_empty_list\n        pprint\n        ppx_deriving_yojson\n        ppx_yojson_conv\n        ppx_sexp_conv\n        ppx_compare\n        ppx_hash\n        ppx_deriving\n        cmdliner\n        angstrom\n        re\n        ppx_matches\n        ppx_string\n        logs\n        (ocamlgraph (>= \"2.2.0\"))\n\n        js_of_ocaml-compiler\n        js_of_ocaml\n        js_of_ocaml-ppx\n        \n        ; F*-specific dependencies\n        batteries\n        stdint\n        ppxlib\n        menhirLib\n        sedlex\n        )\n (tags\n  (topics rust verification)))\n\n"
  },
  {
    "path": "engine/hax-engine.opam",
    "content": "# This file is generated by dune, edit dune-project instead\nopam-version: \"2.0\"\nversion: \"0.3.6\"\nsynopsis: \"The engine of hax, a Rust verification tool\"\ndescription:\n  \"Hax is divided in two: a frontend (written in Rust) and an engine (written in OCaml). This is the engine.\"\nmaintainer: [\"Hax Authors\"]\nauthors: [\"Hax Authors\"]\nlicense: \"Apache-2.0\"\ntags: [\"topics\" \"rust\" \"verification\"]\nhomepage: \"https://github.com/hacspec/hax\"\ndoc: \"https://hacspec.org/hax/\"\nbug-reports: \"https://github.com/hacspec/hax/issues\"\ndepends: [\n  \"ocaml\"\n  \"dune\" {>= \"3.0\"}\n  \"base\" {>= \"0.16.2\"}\n  \"core\"\n  \"yojson\"\n  \"non_empty_list\"\n  \"pprint\"\n  \"ppx_deriving_yojson\"\n  \"ppx_yojson_conv\"\n  \"ppx_sexp_conv\"\n  \"ppx_compare\"\n  \"ppx_hash\"\n  \"ppx_deriving\"\n  \"cmdliner\"\n  \"angstrom\"\n  \"re\"\n  \"ppx_matches\"\n  \"ppx_string\"\n  \"logs\"\n  \"ocamlgraph\" {>= \"2.2.0\"}\n  \"js_of_ocaml-compiler\"\n  \"js_of_ocaml\"\n  \"js_of_ocaml-ppx\"\n  \"batteries\"\n  \"stdint\"\n  \"ppxlib\"\n  \"menhirLib\"\n  \"sedlex\"\n  \"odoc\" {with-doc}\n]\nbuild: [\n  [\"dune\" \"subst\"] {dev}\n  [\n    \"dune\"\n    \"build\"\n    \"-p\"\n    name\n    \"-j\"\n    jobs\n    \"@install\"\n    \"@runtest\" {with-test}\n    \"@doc\" {with-doc}\n  ]\n]\ndev-repo: \"git+https://github.com/hacspec/hax.git\"\ndepexts: [\n  [\"nodejs\"] {}\n]\n"
  },
  {
    "path": "engine/hax-engine.opam.template",
    "content": "depexts: [\n  [\"nodejs\"] {}\n]\n"
  },
  {
    "path": "engine/lib/analyses/function_dependency.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make (F : Features.T) = struct\n  module FA = F\n  module A = Ast.Make (F)\n  module U = Ast_utils.Make (F)\n  open Ast\n\n  (* TODO: Swap to Concrete_ident see: https://github.com/hacspec/hax/issues/375 *)\n  type analysis_data = concrete_ident list Map.M(String).t\n  type id_order = int\n\n  let analyse (items : A.item list) : analysis_data =\n    let temp_list = List.concat_map ~f:U.functions_of_item items in\n    List.fold_left\n      ~init:(Map.empty (module String))\n      ~f:(fun y (name, body) ->\n        Map.set y\n          ~key:([%show: Concrete_ident.View.t] (Concrete_ident.to_view name))\n          ~data:\n            (Set.to_list\n               (U.Reducers.collect_concrete_idents#visit_expr () body)))\n      temp_list\nend\n"
  },
  {
    "path": "engine/lib/analyses/mutable_variables.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make (F : Features.T) = struct\n  module FA = F\n  module A = Ast.Make (F)\n  module U = Ast_utils.Make (F)\n  module Visitors = Ast_visitors.Make (F)\n  open Ast\n\n  type id_order = int\n\n  (* TODO: Swap to Concrete_ident see: https://github.com/hacspec/hax/issues/375 *)\n  type pre_data = concrete_ident list Map.M(String).t\n\n  (* TODO: Swap to Concrete_ident see: https://github.com/hacspec/hax/issues/375 *)\n  type analysis_data =\n    (Local_ident.t list * (U.TypedLocalIdent.t * id_order) list)\n    (* external mut_vars and new variables (e.g. needs def / local) *)\n    Map.M(String).t\n\n  let id_to_string = Concrete_ident.to_view >> [%show: Concrete_ident.View.t]\n\n  module LocalIdentOrData (Ty : sig\n    type ty [@@deriving compare, sexp]\n  end) =\n  struct\n    module W = struct\n      module T = struct\n        type t = Data of Ty.ty | Identifier of Local_ident.t\n        [@@deriving compare, sexp]\n      end\n\n      include T\n      module C = Base.Comparator.Make (T)\n      include C\n    end\n\n    include W\n    include Set.M (W)\n\n    class type ['s] monoid = object\n      method zero : 's\n      method plus : 's -> 's -> 's\n    end\n\n    class ['s, 't] prod_monoid (fst : 's monoid)\n      (snd : 't monoid) (* : ['s * 't] monoid *) =\n      object\n        method fst = fst\n        method snd = snd\n        method zero : 's * 't = (fst#zero, snd#zero)\n\n        method plus : 's * 't -> 's * 't -> 's * 't =\n          fst#plus *** snd#plus >> uncurry ( *** )\n      end\n\n    class set_monoid : [(T.t, W.comparator_witness) Set.t] monoid =\n      object\n        method zero = Set.empty (module W)\n        method plus = Set.union\n      end\n\n    class ['a] map_monoid :\n      [(Local_ident.t, 'a list, Local_ident.comparator_witness) Map.t] monoid =\n      object\n        method zero = Map.empty (module Local_ident)\n\n        method plus =\n          let combine ~key:_ = ( @ ) in\n      
    Map.merge_skewed ~combine\n      end\n\n    let analyse_expr (data : analysis_data)\n        (env : W.t list Map.M(Local_ident).t) (expr : A.expr) :\n        W.t list * W.t list Map.M(Local_ident).t =\n      let mut_var_set, new_env =\n        (object\n           inherit [_] Visitors.reduce as super\n\n           inherit\n             [_, _] prod_monoid\n               (object\n                  inherit set_monoid\n               end)\n               (object\n                  inherit [_] map_monoid\n               end) as m\n\n           (* method! visit_PBinding env mut _ var _typ subpat = *)\n           (*   m#plus *)\n           (*     (m#plus *)\n           (*        (match mut with *)\n           (*         | Mutable _ -> *)\n           (*           (Set.empty (module W), Map.singleton (module LocalIdent) var ([Identifier var])) *)\n           (*         | _ -> m#zero) *)\n           (*        (Option.value_map subpat ~default:m#zero *)\n           (*           ~f:(fst >> super#visit_pat env))) *)\n           (*     (Option.value_map (Map.find env var) ~default:m#zero ~f:(fun x -> (Set.of_list (module W) x, Map.empty (module LocalIdent)))) *)\n\n           method! visit_expr' env e =\n             match e with\n             | Let { lhs = pat; rhs = expr; body; _ } ->\n                 let new_set, new_env = super#visit_expr env expr in\n                 m#plus\n                   (super#visit_expr\n                      (m#snd#plus (m#snd#plus env new_env)\n                         (Map.of_alist_exn\n                            (module Local_ident)\n                            (List.map\n                               ~f:(fun v -> (v, Set.to_list new_set))\n                               (Set.to_list (U.Reducers.variables_of_pat pat)))))\n                      body)\n                   (new_set, m#snd#zero)\n             | _ -> super#visit_expr' env e\n\n           method! 
visit_local_ident (env : W.t list Map.M(Local_ident).t) ident\n               =\n             Option.value_map (Map.find env ident) ~default:m#zero ~f:(fun x ->\n                 (Set.of_list (module W) x, m#snd#zero))\n\n           (* NO-OP? *)\n           method! visit_global_ident (env : W.t list Map.M(Local_ident).t)\n               (x : Global_ident.t) =\n             match x with\n             | `Concrete cid ->\n                 Option.value_map ~default:m#zero\n                   ~f:(fun (x, _) ->\n                     ( Set.of_list\n                         (module W)\n                         (List.map ~f:(fun x -> W.Identifier x) x),\n                       m#snd#zero ))\n                   (Map.find data (id_to_string cid))\n             | _ -> super#visit_global_ident env x\n\n           method! visit_concrete_ident (_env : W.t list Map.M(Local_ident).t)\n               (cid : Concrete_ident.t) =\n             Option.value_map ~default:m#zero\n               ~f:(fun (x, _) ->\n                 ( Set.of_list\n                     (module W)\n                     (List.map ~f:(fun x -> W.Identifier x) x),\n                   m#snd#zero ))\n               (Map.find data (id_to_string cid))\n        end)\n          #visit_expr\n          env expr\n      in\n      (Set.to_list mut_var_set, new_env)\n  end\n\n  let rec analyse (func_dep : pre_data) (items : A.item list) : analysis_data =\n    let (mut_var_list, _) :\n        (concrete_ident * (U.TypedLocalIdent.t * id_order) list) list * _ =\n      List.fold_left ~init:([], 0)\n        ~f:(fun (y, count) (name, body) ->\n          let items, count = analyse_function_body body count in\n          (y @ [ (name, items) ], count))\n        (List.concat_map ~f:U.functions_of_item items)\n    in\n    let mut_map (* Concrete_ident *) :\n        (Local_ident.t list * (U.TypedLocalIdent.t * id_order) list)\n        Map.M(String).t =\n      List.fold_left\n        ~init:(Map.empty (module String (* Concrete_ident 
*)))\n        ~f:(fun y (x_name, x_items) ->\n          Map.set y ~key:(id_to_string x_name)\n            ~data:\n              ( List.map ~f:(fst >> fst) x_items\n                @ Option.value_map ~default:[]\n                    ~f:\n                      (List.filter_map ~f:(id_to_string >> Map.find y)\n                      >> List.concat_map ~f:fst)\n                    (Map.find func_dep (id_to_string x_name)),\n                x_items ))\n        mut_var_list\n    in\n    mut_map\n\n  and analyse_function_body (x : A.expr) (i : id_order) :\n      (U.TypedLocalIdent.t * id_order) list * id_order =\n    let mut_var_list =\n      Set.to_list\n        ((object (self)\n            inherit [_] Visitors.reduce as super\n            inherit [_] U.Sets.TypedLocalIdent.monoid as m\n\n            method! visit_pat' () pat' =\n              match pat' with\n              | PBinding { mut; var; typ; subpat; _ } ->\n                  m#plus\n                    (match mut with\n                    | Mutable _ ->\n                        Set.singleton (module U.TypedLocalIdent) (var, typ)\n                    | Immutable -> Set.empty (module U.TypedLocalIdent))\n                    (Option.value_map subpat ~default:m#zero\n                       ~f:(fst >> self#visit_pat ()))\n              | _ -> super#visit_pat' () pat'\n         end)\n           #visit_expr\n           () x)\n    in\n    number_list mut_var_list i\n\n  (* State monad *)\n  and number_list (l : 'a list) (i : int) : ('a * int) list * int =\n    List.fold_left ~init:([], i) ~f:(fun (y, i) x -> (y @ [ (x, i) ], i + 1)) l\nend\n"
  },
  {
    "path": "engine/lib/analyses.ml",
    "content": "module Function_dependency = Function_dependency.Make\nmodule Mutable_variables = Mutable_variables.Make\n"
  },
  {
    "path": "engine/lib/ast.ml",
    "content": "open! Prelude\n\ntype todo = string [@@deriving show, yojson, hash, compare, sexp, hash, eq]\ntype span = Span.t [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\ntype concrete_ident = Concrete_ident.t\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\ntype logical_op = And | Or\n\nand primitive_ident = Deref | Cast | LogicalOp of logical_op\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\nmodule Global_ident = struct\n  module T = struct\n    type t =\n      [ `Concrete of concrete_ident\n      | `Primitive of primitive_ident\n      | `TupleType of int\n      | `TupleCons of int\n      | `TupleField of int * int\n      | `Projector of [ `Concrete of concrete_ident | `TupleField of int * int ]\n      ]\n    [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n  end\n\n  module M = struct\n    include Base.Comparator.Make (T)\n    include T\n  end\n\n  include M\n  module Map = Map.M (M)\n\n  let of_name ~value n = `Concrete (Concrete_ident.of_name ~value n)\n\n  let eq_name name (x : t) : bool =\n    match x with `Concrete x -> Concrete_ident.eq_name name x | _ -> false\n\n  let to_string : t -> string = [%show: t]\nend\n\ntype global_ident = Global_ident.t\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\ntype attr_kind =\n  | Tool of { path : string; tokens : string }\n  | DocComment of { kind : doc_comment_kind; body : string }\n\nand attr = { kind : attr_kind; span : span }\nand doc_comment_kind = DCKLine | DCKBlock\nand attrs = attr list [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\ntype local_ident = Local_ident.t\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\ntype size = S8 | S16 | S32 | S64 | S128 | SSize\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\nlet int_of_size = function\n  | S8 -> Some 8\n  | S16 -> Some 16\n  | S32 -> Some 32\n  | S64 -> Some 64\n  | S128 -> Some 128\n  | _ -> None\n\nlet string_of_size = int_of_size >> Option.map 
~f:Int.to_string\n\ntype signedness = Signed | Unsigned\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\ntype int_kind = { size : size; signedness : signedness }\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\nlet show_int_kind { size; signedness } =\n  (match signedness with Signed -> \"i\" | Unsigned -> \"u\")\n  ^ (int_of_size size\n    |> Option.map ~f:Int.to_string\n    |> Option.value ~default:\"size\")\n\ntype float_kind = F16 | F32 | F64 | F128\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\nlet show_float_kind = function\n  | F16 -> \"f16\"\n  | F32 -> \"f32\"\n  | F64 -> \"f64\"\n  | F128 -> \"f128\"\n\ntype literal =\n  | String of string\n  | Char of char\n  | Int of { value : string; negative : bool; kind : int_kind }\n  | Float of { value : string; negative : bool; kind : float_kind }\n  | Bool of bool\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\ntype 'mut_witness mutability = Mutable of 'mut_witness | Immutable\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\ntype item_kind =\n  [ `Fn\n  | `TyAlias\n  | `Type\n  | `IMacroInvokation\n  | `Trait\n  | `Impl\n  | `Alias\n  | `Use\n  | `Quote\n  | `HaxError\n  | `NotImplementedYet ]\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n(** Describes the (shallow) kind of an item. *)\n\ntype item_quote_origin_position = [ `Before | `After | `Replace ]\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\ntype item_quote_origin = {\n  item_kind : item_kind;\n  item_ident : concrete_ident;\n  position : item_quote_origin_position;\n}\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n(** From where does a quote item comes from? 
*)\n\nmodule Make =\nfunctor\n  (F : Features.T)\n  ->\n  struct\n    type safety_kind = Safe | Unsafe of F.unsafe\n    [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\n    type borrow_kind = Shared | Unique | Mut of F.mutable_reference\n    [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\n    type binding_mode = ByValue | ByRef of (borrow_kind * F.reference)\n    [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\n    type ty =\n      | TBool\n      | TChar\n      | TInt of int_kind\n      | TFloat of float_kind\n      | TStr\n      | TApp of { ident : global_ident; args : generic_value list }\n      | TArray of { typ : ty; length : expr }\n      | TSlice of { witness : F.slice; ty : ty }\n      | TRawPointer of { witness : F.raw_pointer } (* todo *)\n      | TRef of {\n          witness : F.reference;\n          region : todo;\n          typ : ty;\n          mut : F.mutable_reference mutability;\n        }\n      | TParam of local_ident\n      | TArrow of ty list * ty\n      | TAssociatedType of { impl : impl_expr; item : concrete_ident }\n      | TOpaque of concrete_ident\n      | TDyn of { witness : F.dyn; goals : dyn_trait_goal list }\n\n    and generic_value =\n      | GLifetime of { lt : todo; witness : F.lifetime }\n      | GType of ty\n      | GConst of expr\n\n    and impl_expr = { kind : impl_expr_kind; goal : trait_goal }\n\n    and impl_expr_kind =\n      | Self\n      | Concrete of trait_goal\n      | LocalBound of { id : string }\n      | Parent of { impl : impl_expr; ident : impl_ident }\n      | Projection of {\n          impl : impl_expr;\n          item : concrete_ident;\n          ident : impl_ident;\n        }\n      | ImplApp of { impl : impl_expr; args : impl_expr list }\n      | Dyn\n      | Builtin of trait_goal\n\n    and trait_goal = { trait : concrete_ident; args : generic_value list }\n    (** A fully applied trait: [Foo<SomeTy, T0, ..., Tn>] (or `SomeTy: Foo<T0,\n        ..., Tn>`). 
An `impl_expr` \"inhabits\" a `trait_goal`. *)\n\n    and dyn_trait_goal = {\n      trait : concrete_ident;\n      non_self_args : generic_value list;\n    }\n    (** A dyn trait: [Foo<_, T0, ..., Tn>]. The generic arguments are known but\n        the actual type implementing the trait is known only dynamically. *)\n\n    and impl_ident = { goal : trait_goal; name : string }\n    (** An impl identifier [{goal; name}] can be:\n        - An in-scope variable [name] that inhabits [goal].\n        - A field of some other impl expression [i]: [i.name] inhabits [goal].\n          This corresponds to parent bounds or associated type bounds.\n        - An argument that introduces a variable [name] that inhabits [goal]. *)\n\n    and projection_predicate = {\n      impl : impl_expr;\n      assoc_item : concrete_ident;\n      typ : ty;\n    }\n    (** Expresses a constraints over an associated type. For instance:\n        [ fn f<T : Foo<S = String>>(...) ^^^^^^^^^^ ] (provided the trait `Foo`\n        has an associated type `S`). *)\n\n    (* TODO: ADD SPAN! *)\n    and pat' =\n      | PWild\n      | PAscription of { typ : ty; typ_span : span; pat : pat }\n      | PConstruct of {\n          constructor : global_ident;\n          is_record : bool; (* are fields named? *)\n          is_struct : bool; (* a struct has one constructor *)\n          fields : field_pat list;\n        }\n      (* An or-pattern, e.g. `p | q`.\n         Invariant: `List.length subpats >= 2`. 
*)\n      | POr of { subpats : pat list }\n      | PArray of { args : pat list }\n      | PDeref of { subpat : pat; witness : F.reference }\n      | PConstant of { lit : literal }\n      | PBinding of {\n          mut : F.mutable_variable mutability;\n          mode : binding_mode;\n          var : local_ident;\n          typ : ty;\n          subpat : (pat * F.as_pattern) option;\n        }\n\n    and pat = { p : pat'; span : span; typ : ty }\n    and field_pat = { field : global_ident; pat : pat }\n\n    (* This marker describes what control flow is present in a loop.\n       It is added by phase `DropReturnBreakContinue` and the\n       information is used in `FunctionalizeLoops`. We need it because\n       we replace the control flow nodes of the AST by some encoding\n       in the `ControlFlow` enum. *)\n    and cf_kind = BreakOnly | BreakOrReturn\n\n    and expr' =\n      (* pure fragment *)\n      | If of { cond : expr; then_ : expr; else_ : expr option }\n      | App of {\n          f : expr;\n          args : expr list (* ; f_span: span *);\n          generic_args : generic_value list;\n          bounds_impls : impl_expr list;\n          trait : (impl_expr * generic_value list) option;\n        }\n      | Literal of literal\n      | Array of expr list\n      | Construct of {\n          constructor : global_ident;\n          is_record : bool; (* are fields named? 
*)\n          is_struct : bool; (* a struct has one constructor *)\n          fields : (global_ident * expr) list;\n          base : (expr * F.construct_base) option;\n        }\n      | Match of { scrutinee : expr; arms : arm list }\n      | Let of {\n          monadic : (supported_monads * F.monadic_binding) option;\n          lhs : pat;\n          rhs : expr;\n          body : expr;\n        }\n      | Block of { e : expr; safety_mode : safety_kind; witness : F.block }\n        (* Corresponds to `{e}`: this is important for places *)\n      | LocalVar of local_ident\n      | GlobalVar of global_ident\n      | Ascription of { e : expr; typ : ty }\n      (* Macro *)\n      | MacroInvokation of {\n          macro : global_ident;\n          args : string;\n          witness : F.macro;\n        }\n      (* Mut *)\n      | Assign of { lhs : lhs; e : expr; witness : F.mutable_variable }\n      (* Loop *)\n      | Loop of {\n          body : expr;\n          kind : loop_kind;\n          state : loop_state option;\n          control_flow : (cf_kind * F.fold_like_loop) option;\n          label : string option;\n          witness : F.loop;\n        }\n      (* ControlFlow *)\n      | Break of {\n          e : expr;\n          acc : (expr * F.state_passing_loop) option;\n          label : string option;\n          witness : F.break * F.loop;\n        }\n      | Return of { e : expr; witness : F.early_exit }\n      | QuestionMark of { e : expr; return_typ : ty; witness : F.question_mark }\n          (** The expression `e?`. In opposition to Rust, no implicit coercion\n              is applied on the (potential) error payload of `e`. Coercion\n              should be made explicit within `e`. 
*)\n      | Continue of {\n          acc : (expr * F.state_passing_loop) option;\n          label : string option;\n          witness : F.continue * F.loop;\n        }\n      (* Mem *)\n      | Borrow of { kind : borrow_kind; e : expr; witness : F.reference }\n      (* Raw borrow *)\n      | AddressOf of {\n          mut : F.mutable_pointer mutability;\n          e : expr;\n          witness : F.raw_pointer;\n        }\n      | Closure of { params : pat list; body : expr; captures : expr list }\n      | EffectAction of { action : F.monadic_action; argument : expr }\n      | Quote of quote\n          (** A quotation is an inlined piece of backend code interleaved with\n              Rust code *)\n\n    and expr = { e : expr'; span : span; typ : ty }\n    and quote = { contents : quote_content list; witness : F.quote }\n\n    and quote_content =\n      | Verbatim of string\n      | Expr of expr\n      | Pattern of pat\n      | Typ of ty\n\n    and supported_monads =\n      | MException of ty\n          (** a exception monad, which we use to handle early returns *)\n      | MResult of ty  (** the [Result] monad *)\n      | MOption  (** the [Option] monad *)\n\n    and loop_kind =\n      | UnconditionalLoop\n      | WhileLoop of { condition : expr; witness : F.while_loop }\n      | ForLoop of { pat : pat; it : expr; witness : F.for_loop }\n      | ForIndexLoop of {\n          start : expr;\n          end_ : expr;\n          var : local_ident;\n          var_typ : ty;\n          witness : F.for_index_loop;\n        }\n\n    and loop_state = { init : expr; bpat : pat; witness : F.state_passing_loop }\n\n    (* | WhileLoop of { *)\n    (*     condition: expr; *)\n    (*     witness : F.while_loop; *)\n    (*   } *)\n\n    (* TODO: LHS should be places or \"compositions\" of places, see [assignee expression] in https://doc.rust-lang.org/reference/expressions.html#place-expressions-and-value-expressions (issue #222) *)\n    and lhs =\n      | LhsLocalVar of { var : 
Local_ident.t; typ : ty }\n      | LhsVecRef of { e : lhs; typ : ty; witness : F.nontrivial_lhs }\n      | LhsArbitraryExpr of { e : expr; witness : F.arbitrary_lhs }\n      | LhsFieldAccessor of {\n          e : lhs;\n          typ : ty;\n          (* TODO: change type (see #316) *)\n          field : global_ident;\n          witness : F.nontrivial_lhs;\n        }\n      | LhsArrayAccessor of {\n          e : lhs;\n          typ : ty;\n          index : expr;\n          witness : F.nontrivial_lhs;\n        }\n\n    (* A guard is a condition on a pattern like: *)\n    (* match x {.. if guard => .., ..}*)\n    and guard = { guard : guard'; span : span }\n\n    (* Only if-let guards are supported for now but other variants like regular if *)\n    (* could be added later (regular if guards are for now desugared as IfLet) *)\n    and guard' = IfLet of { lhs : pat; rhs : expr; witness : F.match_guard }\n\n    (* OCaml + visitors is not happy with `pat`... hence `arm_pat`... *)\n    and arm' = { arm_pat : pat; body : expr; guard : guard option }\n\n    and arm = { arm : arm'; span : span }\n    [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\n    type generic_param = {\n      ident : local_ident;\n      span : span;\n      attrs : attrs;\n      kind : generic_param_kind;\n    }\n\n    and generic_param_kind =\n      | GPLifetime of { witness : F.lifetime }\n      | GPType\n      | GPConst of { typ : ty }\n\n    and generic_constraint =\n      | GCLifetime of todo * F.lifetime\n      | GCType of impl_ident\n      | GCProjection of projection_predicate\n          (** Trait or lifetime constraints. For instance, `A` and `B` in `fn\n              f<T: A + B>()`. 
*)\n    [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\n    type param = { pat : pat; typ : ty; typ_span : span option; attrs : attrs }\n\n    and generics = {\n      params : generic_param list;\n      constraints : generic_constraint list;\n    }\n\n    and variant = {\n      name : concrete_ident;\n      arguments : (concrete_ident * ty * attrs) list;\n      is_record : bool;\n      attrs : attrs;\n    }\n\n    and item' =\n      (* Todo: topological sort, rec bundles *)\n      | Fn of {\n          name : concrete_ident;\n          generics : generics;\n          body : expr;\n          params : param list;\n          safety : safety_kind;\n        }\n      | TyAlias of { name : concrete_ident; generics : generics; ty : ty }\n      | Type of {\n          name : concrete_ident;\n          generics : generics;\n          variants : variant list;\n          is_struct : bool;\n        }\n      | IMacroInvokation of {\n          macro : concrete_ident;\n          argument : string;\n          span : span;\n          witness : F.macro;\n        }\n      | Trait of {\n          name : concrete_ident;\n          generics : generics;\n          items : trait_item list;\n          safety : safety_kind;\n        }\n      | Impl of {\n          generics : generics;\n          self_ty : ty;\n          of_trait : concrete_ident * generic_value list;\n          items : impl_item list;\n          parent_bounds : (impl_expr * impl_ident) list;\n          safety : safety_kind;\n        }\n      | Alias of { name : concrete_ident; item : concrete_ident }\n          (** `Alias {name; item}` is basically a `use\n              <item> as _;` where `name` is the renamed ident. 
*)\n      | Use of {\n          path : string list;\n          is_external : bool;\n          rename : string option;\n        }\n      | Quote of { quote : quote; origin : item_quote_origin }\n      | HaxError of string\n      | NotImplementedYet\n\n    and item = { v : item'; span : span; ident : concrete_ident; attrs : attrs }\n\n    and impl_item' =\n      | IIType of { typ : ty; parent_bounds : (impl_expr * impl_ident) list }\n      | IIFn of { body : expr; params : param list }\n\n    and impl_item = {\n      ii_span : span;\n      ii_generics : generics;\n      ii_v : impl_item';\n      ii_ident : concrete_ident;\n      ii_attrs : attrs;\n    }\n\n    and trait_item' =\n      | TIType of impl_ident list\n      | TIFn of ty\n      | TIDefault of {\n          params : param list;\n          body : expr;\n          witness : F.trait_item_default;\n        }\n\n    and trait_item = {\n      (* TODO: why do I need to prefix by `ti_` here? I guess visitors fail or something *)\n      ti_span : span;\n      ti_generics : generics;\n      ti_v : trait_item';\n      ti_ident : concrete_ident;\n      ti_attrs : attrs;\n    }\n    [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\n    type modul = item list\n\n    let make_hax_error_item (span : span) (ident : Concrete_ident.t)\n        (s : string) : item =\n      { v = HaxError s; span; ident; attrs = [] }\n\n    (* module F = F *)\n  end\n\nmodule type T = sig\n  type expr [@@deriving show, compare, yojson]\n  type item' [@@deriving show, compare, yojson]\n\n  type item = {\n    v : item';\n    span : span;\n    ident : Concrete_ident.t;\n    attrs : attrs;\n  }\n  [@@deriving show, compare, yojson]\n\n  val make_hax_error_item : span -> Concrete_ident.t -> string -> item\nend\n\nmodule Rust = Make (Features.Rust)\nmodule Full = Make (Features.Full)\n"
  },
  {
    "path": "engine/lib/ast_builder.ml",
    "content": "open! Prelude\nopen! Ast\n\nmodule Make (F : Features.T) = struct\n  module AST = Ast.Make (F)\n  open AST\n\n  open struct\n    module Gen = Ast_builder_generated.Make (F)\n  end\n\n  module type SPAN = Gen.SPAN\n\n  include Gen.Explicit\n\n  module NoSpan = struct\n    let ty_tuple (types : ty list) : ty =\n      let ident = `TupleType (List.length types) in\n      let args = List.map ~f:(fun typ -> GType typ) types in\n      TApp { ident; args }\n\n    let ty_tuple_or_id : ty list -> ty = function\n      | [ ty ] -> ty\n      | types -> ty_tuple types\n\n    (** This gives the type of a value in the `ControlFlow` enum *)\n    let ty_cf ~(continue_type : ty) ~(break_type : ty) : ty =\n      TApp\n        {\n          ident =\n            Global_ident.of_name ~value:false\n              Core__ops__control_flow__ControlFlow;\n          args = [ GType break_type; GType continue_type ];\n        }\n\n    (** This gives the type of a value encoded in the `ControlFlow` enum. 
In\n        case a `return_type` is provided the encoding is nested: `return v` is\n        `Break (Break v)` `break v` is `Break (Continue (v, acc))` *)\n    let ty_cf_return ~(acc_type : ty) ~(break_type : ty)\n        ~(return_type : ty option) : ty =\n      let break_type = ty_tuple [ break_type; acc_type ] in\n      match return_type with\n      | Some ret_ty ->\n          let break_type = ty_cf ~break_type:ret_ty ~continue_type:break_type in\n          ty_cf ~break_type ~continue_type:acc_type\n      | None -> ty_cf ~break_type ~continue_type:acc_type\n  end\n\n  include NoSpan\n\n  module Explicit = struct\n    let ty_unit : ty = TApp { ident = `TupleType 0; args = [] }\n    let expr_unit = expr_GlobalVar (`TupleCons 0) ~typ:ty_unit\n\n    let expr_tuple ~(span : span) (tuple : expr list) =\n      let len = List.length tuple in\n      let fields = List.mapi ~f:(fun i x -> (`TupleField (i, len), x)) tuple in\n      let typ = NoSpan.ty_tuple @@ List.map ~f:(fun { typ; _ } -> typ) tuple in\n      expr_Construct ~span ~typ ~constructor:(`TupleCons len) ~is_record:false\n        ~is_struct:true ~fields ~base:None\n\n    let pat_PBinding ~typ = pat_PBinding ~inner_typ:typ ~typ\n\n    let arm ~span arm_pat ?(guard = None) body =\n      { arm = { arm_pat; body; guard }; span }\n\n    let pat_Constructor_CF ~(span : span) ~(typ : ty)\n        (cf : [ `Break | `Continue ]) (pat : pat) =\n      match cf with\n      | `Break ->\n          {\n            p =\n              PConstruct\n                {\n                  constructor =\n                    Global_ident.of_name ~value:true\n                      Core__ops__control_flow__ControlFlow__Break;\n                  fields =\n                    [\n                      {\n                        field =\n                          Global_ident.of_name ~value:true\n                            Core__ops__control_flow__ControlFlow__Break__0;\n                        pat;\n                      };\n                    
];\n                  is_record = false;\n                  is_struct = false;\n                };\n            typ;\n            span;\n          }\n      | `Continue ->\n          {\n            p =\n              PConstruct\n                {\n                  constructor =\n                    Global_ident.of_name ~value:true\n                      Core__ops__control_flow__ControlFlow__Continue;\n                  fields =\n                    [\n                      {\n                        field =\n                          Global_ident.of_name ~value:true\n                            Core__ops__control_flow__ControlFlow__Continue__0;\n                        pat;\n                      };\n                    ];\n                  is_record = false;\n                  is_struct = false;\n                };\n            typ;\n            span;\n          }\n\n    let call_Constructor' (constructor : global_ident) is_struct\n        (args : expr list) span ret_typ =\n      let mk_field =\n        let len = List.length args in\n        fun n -> `TupleField (len, n)\n      in\n      let fields = List.mapi ~f:(fun i arg -> (mk_field i, arg)) args in\n      {\n        e =\n          Construct\n            { constructor; is_record = false; is_struct; fields; base = None };\n        typ = ret_typ;\n        span;\n      }\n\n    let call_Constructor (constructor_name : Concrete_ident.name)\n        (is_struct : bool) (args : expr list) span ret_typ =\n      call_Constructor'\n        (`Concrete (Concrete_ident.of_name ~value:true constructor_name))\n        is_struct args span ret_typ\n\n    let expr'_Constructor_CF ~(span : span) ~(break_type : ty)\n        ?(continue_type : ty = ty_unit) (cf : [ `Break | `Continue ]) (e : expr)\n        =\n      let typ = NoSpan.ty_cf ~continue_type ~break_type in\n      match cf with\n      | `Break ->\n          call_Constructor Core__ops__control_flow__ControlFlow__Break false\n            [ e ] span typ\n      | `Continue 
->\n          call_Constructor Core__ops__control_flow__ControlFlow__Continue false\n            [ e ] span typ\n\n    (** We use the following encoding of return, break and continue in the\n        `ControlFlow` enum: Return e -> Break (Break e) Break e -> Break\n        ((Continue(e, acc))) Continue -> Continue(acc)\n\n        In case there is no return we simplify to: Break e -> (Break (e, acc))\n        Continue -> (continue (acc)) *)\n    let expr_Constructor_CF ~(span : span) ~(break_type : ty option)\n        ~(return_type : ty option) ~(acc : expr) ?(e : expr = expr_unit ~span)\n        (cf : [ `Return | `Break | `Continue ]) =\n      let break_type = Option.value ~default:ty_unit break_type in\n      match cf with\n      | `Return ->\n          let continue_type = NoSpan.ty_tuple [ break_type; acc.typ ] in\n          let inner =\n            expr'_Constructor_CF ~break_type:e.typ ~continue_type ~span `Break e\n          in\n          expr'_Constructor_CF ~span ~break_type:inner.typ\n            ~continue_type:acc.typ `Break inner\n      | `Break ->\n          let tuple = expr_tuple ~span [ e; acc ] in\n          let inner =\n            match return_type with\n            | Some ret_typ ->\n                expr'_Constructor_CF ~span ~break_type:ret_typ\n                  ~continue_type:tuple.typ `Continue tuple\n            | None -> tuple\n          in\n          expr'_Constructor_CF ~span ~break_type:inner.typ\n            ~continue_type:acc.typ `Break inner\n      | `Continue ->\n          let break_type =\n            let tuple_type = NoSpan.ty_tuple [ break_type; acc.typ ] in\n            match return_type with\n            | Some ret_typ ->\n                NoSpan.ty_cf ~break_type:ret_typ ~continue_type:tuple_type\n            | None -> tuple_type\n          in\n          expr'_Constructor_CF ~span ~break_type ~continue_type:acc.typ\n            `Continue acc\n  end\n\n  include Explicit\n\n  module Make0 (Span : Gen.SPAN) = struct\n    open! 
Span\n    include Gen.Make (Span)\n    include NoSpan\n\n    let pat_PBinding = Explicit.pat_PBinding ~span\n    let expr_unit = expr_unit ~span\n    let expr_tuple = expr_tuple ~span\n    let pat_Constructor_CF = pat_Constructor_CF ~span\n    let expr'_Constructor_CF = expr'_Constructor_CF ~span\n    let expr_Constructor_CF = expr_Constructor_CF ~span\n    let arm ?(guard = None) = arm ~span ?guard\n  end\n\n  module type S = module type of Make0 (struct\n    (* This [failwith] is OK: this module is never actually used for computation. It is useful only for typing. *)\n    let span = failwith \"type only module: this will never be computed\"\n  end)\n\n  module Make (Span : sig\n    val span : span\n  end) : S =\n    Make0 (Span)\n\n  let make : span -> (module S) =\n   fun span : (module S) ->\n    (module Make0 (struct\n      let span = span\n    end))\nend\n"
  },
  {
    "path": "engine/lib/ast_destruct.ml",
    "content": "open! Prelude\nopen! Ast\n\nmodule Make (F : Features.T) = struct\n  include Ast_destruct_generated.Make (F)\n\n  let list_0 = function [] -> Some () | _ -> None\n  let list_1 = function [ a ] -> Some a | _ -> None\n  let list_2 = function [ a; b ] -> Some (a, b) | _ -> None\n  let list_3 = function [ a; b; c ] -> Some (a, b, c) | _ -> None\n  let list_4 = function [ a; b; c; d ] -> Some (a, b, c, d) | _ -> None\n  let list_5 = function [ a; b; c; d; e ] -> Some (a, b, c, d, e) | _ -> None\nend\n"
  },
  {
    "path": "engine/lib/ast_utils.ml",
    "content": "open! Prelude\nopen Ast\n\ntype visit_level = ExprLevel | TypeLevel\n\nmodule TypedLocalIdent (Ty : sig\n  type ty [@@deriving show, yojson]\nend) =\nstruct\n  module T = struct\n    type t = Local_ident.t * Ty.ty [@@deriving show, yojson]\n\n    let sexp_of_t : t -> _ = fst >> Local_ident.sexp_of_t\n    let compare (a : t) (b : t) = [%compare: Local_ident.t] (fst a) (fst b)\n    let equal (a : t) (b : t) = [%eq: Local_ident.t] (fst a) (fst b)\n  end\n\n  include Base.Comparator.Make (T)\n  include T\nend\n\nmodule UniqueList (T : sig\n  type t [@@deriving eq, show, yojson]\n  type comparator_witness\nend) : sig\n  type t [@@deriving eq, show, yojson]\n\n  val without : T.t -> t -> t\n  val cons : T.t -> t -> t\n  val to_list : t -> T.t list\n  val from_set : (T.t, T.comparator_witness) Set.t -> t\n  val empty : t\n  val is_empty : t -> bool\n  val singleton : T.t -> t\nend = struct\n  type t = T.t list [@@deriving eq, show, yojson]\n\n  let without x = List.filter ~f:([%eq: T.t] x >> not)\n  let cons hd tl = hd :: tl\n  let to_list = Fn.id\n  let from_set s = Set.to_list s\n  let empty = []\n  let is_empty = List.is_empty\n  let singleton x = [ x ]\nend\n\nmodule Make (F : Features.T) = struct\n  module AST = Ast.Make (F)\n  open AST\n  module TypedLocalIdent = TypedLocalIdent (AST)\n  module Visitors = Ast_visitors.Make (F)\n  module M = Ast_builder.Make (F)\n  module D = Ast_destruct.Make (F)\n\n  module Expect = struct\n    let mut_borrow (e : expr) : expr option =\n      match e.e with Borrow { kind = Mut _; e; _ } -> Some e | _ -> None\n\n    let borrow (e : expr) : expr option =\n      match e.e with Borrow { e; _ } -> Some e | _ -> None\n\n    let block (e : expr) : expr option =\n      match e.e with Block { e; _ } -> Some e | _ -> None\n\n    let deref (e : expr) : expr option =\n      match e.e with\n      | App { f = { e = GlobalVar (`Primitive Deref); _ }; args = [ e ]; _ } ->\n          Some e\n      | _ -> None\n\n    let closure (e : 
expr) : (pat list * expr) option =\n      match e.e with\n      | Closure { params; body; _ } -> Some (params, body)\n      | _ -> None\n\n    let app (e : expr) :\n        (expr\n        * expr list\n        * generic_value list\n        * impl_expr option\n        * impl_expr list)\n        option =\n      match e.e with\n      | App { f; args; generic_args; trait; bounds_impls } ->\n          (* TODO: propagate full trait *)\n          Some (f, args, generic_args, Option.map ~f:fst trait, bounds_impls)\n      | _ -> None\n\n    let pbinding_simple (p : pat) : (local_ident * ty) option =\n      match p.p with\n      | PBinding { mut = Immutable; mode = _; var; typ; subpat = None } ->\n          Some (var, typ)\n      | _ -> None\n\n    let concrete_app1 (f : Concrete_ident.name) (e : expr) : expr option =\n      match e.e with\n      | App\n          {\n            f = { e = GlobalVar (`Concrete f'); _ };\n            args = [ e ];\n            generic_args = _;\n            trait = _;\n            _ (* TODO: see issue #328 *);\n          }\n        when Concrete_ident.eq_name f f' ->\n          Some e\n      | _ -> None\n\n    let deref_mut_app = concrete_app1 Core__ops__deref__DerefMut__deref_mut\n\n    let local_var (e : expr) : local_ident option =\n      match e.e with LocalVar v -> Some v | _ -> None\n\n    let arrow (typ : ty) : (ty list * ty) option =\n      match typ with\n      | TArrow (inputs, output) -> Some (inputs, output)\n      | _ -> None\n\n    let mut_ref (typ : ty) : ty option =\n      match typ with TRef { mut = Mutable _; typ; _ } -> Some typ | _ -> None\n\n    let concrete_app' : expr' -> concrete_ident option = function\n      | App { f = { e = GlobalVar (`Concrete c); _ }; _ } -> Some c\n      | _ -> None\n  end\n\n  module Sets = struct\n    module Global_ident = struct\n      include Set.M (Global_ident)\n\n      class ['s] monoid =\n        object\n          method private zero = Set.empty (module Global_ident)\n          method 
private plus = Set.union\n        end\n    end\n\n    module Concrete_ident = struct\n      include Set.M (Concrete_ident)\n\n      class ['s] monoid =\n        object\n          method private zero = Set.empty (module Concrete_ident)\n          method private plus = Set.union\n        end\n    end\n\n    module Local_ident = struct\n      include Set.M (Local_ident)\n\n      class ['s] monoid =\n        object\n          method private zero = Set.empty (module Local_ident)\n          method private plus = Set.union\n        end\n    end\n\n    module TypedLocalIdent = struct\n      include Set.M (TypedLocalIdent)\n\n      let show (x : t) : string =\n        [%show: TypedLocalIdent.t list] @@ Set.to_list x\n\n      let pp (fmt : Stdlib.Format.formatter) (s : t) : unit =\n        Stdlib.Format.pp_print_string fmt @@ show s\n\n      class ['s] monoid =\n        object\n          method private zero = Set.empty (module TypedLocalIdent)\n          method private plus = Set.union\n        end\n    end\n  end\n\n  let functions_of_item (x : item) : (concrete_ident * expr) list =\n    match x.v with\n    | Fn { name; generics = _; body; params = _; safety = _ } ->\n        [ (name, body) ]\n    | Impl { items; _ } ->\n        List.filter_map\n          ~f:(fun w ->\n            match w.ii_v with\n            | IIFn { body; params = _ } -> Some (w.ii_ident, body)\n            | _ -> None)\n          items\n    | _ -> []\n\n  module Mappers = struct\n    let regenerate_span_ids =\n      object\n        inherit [_] Visitors.map\n        method! visit_span () = Span.refresh_id\n      end\n\n    let normalize_borrow_mut =\n      object\n        inherit [_] Visitors.map as super\n\n        method! 
visit_expr () e =\n          let rec expr e =\n            match e.e with\n            | App\n                {\n                  f = { e = GlobalVar (`Primitive Deref); _ };\n                  args = [ { e = Borrow { e = sub; _ }; _ } ];\n                  generic_args = _;\n                  trait = _;\n                  _ (* TODO: see issue #328 *);\n                } ->\n                expr sub\n            | _ -> super#visit_expr () e\n          in\n          expr e\n      end\n\n    let drop_bodies =\n      object\n        inherit [_] Visitors.map as super\n\n        method! visit_item' () item' =\n          match item' with\n          | Fn { name; generics; body; params; safety } ->\n              Fn\n                {\n                  name;\n                  generics;\n                  body = { body with e = GlobalVar (`TupleCons 0) };\n                  params;\n                  safety;\n                }\n          | _ -> super#visit_item' () item'\n      end\n\n    let replace_local_variables (map : (local_ident, expr, _) Map.t) =\n      object\n        inherit [_] Visitors.map as super\n\n        method! visit_expr () e =\n          match e.e with\n          | LocalVar var -> Map.find map var |> Option.value ~default:e\n          | _ -> super#visit_expr () e\n      end\n\n    (** [replace_local_variable var replacement] returns a visitor that maps any\n        type of the AST replacing every occurence of the expression\n        [LocalVar var] by [replacement]. *)\n    let replace_local_variable (var : local_ident) (replacement : expr) =\n      replace_local_variables\n        (Map.of_alist_exn (module Local_ident) [ (var, replacement) ])\n\n    let rename_local_idents (f : local_ident -> local_ident) =\n      object\n        inherit [_] Visitors.map as _super\n        method! 
visit_local_ident () ident = f ident\n      end\n\n    let rename_global_idents (f : visit_level -> global_ident -> global_ident) =\n      object\n        inherit [_] Visitors.map as super\n        method! visit_global_ident (lvl : visit_level) ident = f lvl ident\n        method! visit_ty _ t = super#visit_ty TypeLevel t\n      end\n\n    let rename_concrete_idents\n        (f : visit_level -> Concrete_ident.t -> Concrete_ident.t) =\n      object\n        inherit [_] Visitors.map as super\n        method! visit_concrete_ident (lvl : visit_level) ident = f lvl ident\n\n        method! visit_global_ident lvl (x : Global_ident.t) =\n          match x with\n          | `Concrete x -> `Concrete (f lvl x)\n          | `Projector (`Concrete x) -> `Projector (`Concrete (f lvl x))\n          | _ -> super#visit_global_ident lvl x\n\n        method! visit_ty _ t = super#visit_ty TypeLevel t\n      end\n\n    let rename_global_idents_item\n        (f : visit_level -> global_ident -> global_ident) : item -> item =\n      (rename_global_idents f)#visit_item ExprLevel\n\n    (** Add type ascription nodes in nested function calls. This helps type\n        inference in the presence of associated types in backends that don't\n        support them well (F* for instance). *)\n    let add_typ_ascription =\n      let is_app = Expect.concrete_app' >> Option.is_some in\n      let o =\n        object\n          inherit [_] Visitors.map as super\n\n          method! visit_expr' (ascribe_app : bool) e =\n            (* Enable type ascription of underlying function\n               application. In the F* backend, we're annotating every\n               [Let] bindings, thus if we're facing a [Let], we turn\n               off application ascription. Similarly, if we're facing\n               an Ascription, we turn off application ascription. *)\n            let ascribe_app =\n              (ascribe_app || is_app e)\n              && not ([%matches? 
Let _ | Ascription _] e)\n            in\n            super#visit_expr' ascribe_app e\n\n          method! visit_expr (ascribe_app : bool) e =\n            let ascribe_app =\n              ascribe_app\n              && not\n                   (match e.typ with\n                   | TApp { ident; _ } ->\n                       Global_ident.eq_name Hax_lib__prop__Prop ident\n                   | _ -> false)\n            in\n            let e = super#visit_expr ascribe_app e in\n            let ascribe (e : expr) =\n              if [%matches? Ascription _] e.e then e\n              else { e with e = Ascription { e; typ = e.typ } }\n            in\n            match e.e with\n            | App\n                {\n                  f = { e = GlobalVar (`Primitive Cast); _ } as f;\n                  args = [ arg ];\n                  generic_args;\n                  trait;\n                  bounds_impls;\n                } ->\n                ascribe\n                  {\n                    e with\n                    e =\n                      App\n                        {\n                          f;\n                          args = [ ascribe arg ];\n                          generic_args;\n                          trait;\n                          bounds_impls;\n                        };\n                  }\n            (* Match scrutinees need to be ascribed as well\n               (see https://github.com/hacspec/hax/issues/1207).*)\n            | Match { scrutinee; arms } ->\n                { e with e = Match { scrutinee = ascribe scrutinee; arms } }\n            | _ ->\n                (* Ascribe the return type of a function application & constructors *)\n                if (ascribe_app && is_app e.e) || [%matches? 
Construct _] e.e\n                then ascribe e\n                else e\n        end\n      in\n      o#visit_item false\n  end\n\n  module Reducers = struct\n    let collect_local_idents =\n      object\n        inherit [_] Visitors.reduce as _super\n        inherit [_] Sets.Local_ident.monoid as _m\n        method! visit_local_ident () x = Set.singleton (module Local_ident) x\n      end\n\n    include struct\n      open struct\n        type env = Local_ident.t list\n\n        let id_shadows ~(env : env) (id : Local_ident.t) =\n          List.find env ~f:(fun x -> String.equal x.name id.name)\n          |> Option.value ~default:id\n          |> [%equal: Local_ident.t] id\n          |> not\n\n        let ( ++ ) = Set.union\n\n        let shadows' (type a) ~env vars (x : a) next =\n          (* account for shadowing within `vars` *)\n          List.filter ~f:(id_shadows ~env:vars) (List.rev vars)\n          |> Set.of_list (module Local_ident)\n          |> Set.union (next (vars @ env) x)\n\n        let shadows (type a) ~(env : env) (pats : pat list) (x : a)\n            (next : env -> a -> Sets.Local_ident.t) =\n          let vars =\n            List.map pats ~f:(collect_local_idents#visit_pat ())\n            |> Set.(union_list (module Local_ident) >> to_list)\n          in\n          shadows' ~env vars x next\n      end\n\n      (** Rust macros are hygienic: even if a macro introduces a name that\n          already exists in scope, the compiler will not shadow it. Instead, it\n          will track and differentiate the two, even if those have the same\n          name. `collect_ambiguous_local_idents` is a visitor that collects such\n          \"fake\" shadowings. *)\n      let collect_ambiguous_local_idents =\n        object (self)\n          inherit [_] Visitors.reduce as super\n          inherit [_] Sets.Local_ident.monoid as _m\n\n          method! 
visit_arm' env { arm_pat; body; guard } =\n            match guard with\n            | None -> shadows ~env [ arm_pat ] body super#visit_expr\n            | Some { guard = IfLet { lhs; rhs; _ }; _ } ->\n                shadows ~env [ arm_pat ] rhs super#visit_expr\n                ++ shadows ~env [ arm_pat; lhs ] body super#visit_expr\n\n          method! visit_expr' env e =\n            match e with\n            | Let { monadic = _; lhs; rhs; body } ->\n                super#visit_expr env rhs\n                ++ shadows ~env [ lhs ] body super#visit_expr\n            | Loop { kind; state; body; _ } ->\n                let empty = Set.empty (module Local_ident) |> Fn.(id &&& id) in\n                let ikind, ukind =\n                  match kind with\n                  | UnconditionalLoop -> empty\n                  | WhileLoop { condition; _ } ->\n                      ( collect_local_idents#visit_expr () condition,\n                        super#visit_expr env condition )\n                  | ForLoop { pat; it; _ } ->\n                      ( collect_local_idents#visit_pat () pat,\n                        super#visit_expr env it )\n                  | ForIndexLoop { start; end_; var; _ } ->\n                      ( Set.singleton (module Local_ident) var,\n                        super#visit_expr (var :: env) start\n                        ++ super#visit_expr (var :: env) end_ )\n                in\n                let istate, ustate =\n                  match state with\n                  | Some { init; bpat; _ } ->\n                      ( collect_local_idents#visit_pat () bpat,\n                        super#visit_expr (Set.to_list ikind @ env) init )\n                  | _ -> empty\n                in\n                let intro = ikind ++ istate |> Set.to_list in\n                ukind ++ ustate ++ shadows' ~env intro body super#visit_expr\n            | Closure { params; body; _ } ->\n                shadows ~env params body super#visit_expr\n            | 
_ -> super#visit_expr' env e\n\n          method! visit_impl_item' env ii =\n            match ii with\n            | IIFn { body; params } -> self#visit_function_like env body params\n            | _ -> super#visit_impl_item' env ii\n\n          method! visit_item' env i =\n            match i with\n            | Fn { body; params; _ } -> self#visit_function_like env body params\n            | _ -> super#visit_item' env i\n\n          method visit_function_like env body params =\n            let f p = p.pat in\n            shadows ~env (List.map ~f params) body super#visit_expr\n\n          method! visit_local_ident env id =\n            Set.(if id_shadows ~env id then Fn.flip singleton id else empty)\n              (module Local_ident)\n        end\n\n      (** Rust macros are hygienic: even if a macro introduces a name that\n          already exists in scope, the compiler will not shadow it. Instead, it\n          will track and differentiate the two, even if those have the same\n          name. `disambiguate_local_idents item` renames every instance of such\n          a \"fake\" shadowing in `item`. See PR #368 for an example. 
*)\n      let disambiguate_local_idents (item : item) =\n        let ambiguous = collect_ambiguous_local_idents#visit_item [] item in\n        let local_vars = collect_local_idents#visit_item () item |> ref in\n        let refresh env (id : Local_ident.t) : string =\n          let extract_suffix (id' : Local_ident.t) =\n            String.chop_prefix ~prefix:(id.name ^ \"_\") id'.name\n            |> Option.bind ~f:string_to_int\n          in\n          let suffix =\n            Set.filter_map (module Int) env ~f:extract_suffix\n            |> Set.max_elt |> Option.value ~default:0 |> ( + ) 1\n          in\n          id.name ^ \"_\" ^ Int.to_string suffix\n        in\n        let new_names =\n          ambiguous |> Set.to_list\n          |> List.map ~f:(fun (var : Local_ident.t) ->\n                 let var' = { var with name = refresh !local_vars var } in\n                 local_vars := Set.add !local_vars var';\n                 (var, var'))\n          |> Map.of_alist_exn (module Local_ident)\n        in\n        let rename var = Map.find new_names var |> Option.value ~default:var in\n        (Mappers.rename_local_idents rename)#visit_item () item\n    end\n\n    let collect_global_idents =\n      object\n        inherit [_] Visitors.reduce as _super\n        inherit [_] Sets.Global_ident.monoid as _m\n\n        method! visit_global_ident (_env : unit) (x : Global_ident.t) =\n          Set.singleton (module Global_ident) x\n      end\n\n    let collect_concrete_idents =\n      object\n        inherit [_] Visitors.reduce as super\n        inherit [_] Sets.Concrete_ident.monoid as _m\n\n        method! visit_global_ident (_env : unit) (x : Global_ident.t) =\n          match x with\n          | `Concrete x -> Set.singleton (module Concrete_ident) x\n          | _ -> super#visit_global_ident () x\n\n        method! 
visit_concrete_ident (_env : unit) (x : Concrete_ident.t) =\n          Set.singleton (module Concrete_ident) x\n      end\n\n    let variables_of_pat (p : pat) : Sets.Local_ident.t =\n      (object\n         inherit [_] Visitors.reduce as super\n         inherit [_] Sets.Local_ident.monoid as m\n\n         method! visit_pat' env pat' =\n           match pat' with\n           | PBinding { var; subpat; _ } ->\n               m#plus\n                 (Set.singleton (module Local_ident) var)\n                 (Option.value_map subpat ~default:m#zero\n                    ~f:(fst >> super#visit_pat env))\n           | _ -> super#visit_pat' env pat'\n      end)\n        #visit_pat\n        () p\n\n    let variables_of_param (p : param) : Local_ident.t list =\n      variables_of_pat p.pat |> Set.to_list\n\n    let variables_of_pats : pat list -> Sets.Local_ident.t =\n      List.map ~f:variables_of_pat >> Set.union_list (module Local_ident)\n\n    let without_vars (mut_vars : Sets.TypedLocalIdent.t)\n        (vars : Sets.Local_ident.t) =\n      Set.filter mut_vars ~f:(fst >> Set.mem vars >> not)\n\n    let without_pats_vars (mut_vars : Sets.TypedLocalIdent.t) :\n        pat list -> Sets.TypedLocalIdent.t =\n      variables_of_pats >> without_vars mut_vars\n\n    let without_pat_vars (mut_vars : Sets.TypedLocalIdent.t) (pat : pat) :\n        Sets.TypedLocalIdent.t =\n      without_pats_vars mut_vars [ pat ]\n\n    let free_assigned_variables\n        (fv_of_arbitrary_lhs :\n          F.arbitrary_lhs -> expr -> Sets.TypedLocalIdent.t) =\n      object (self)\n        inherit [_] Visitors.reduce as super\n        inherit [_] Sets.TypedLocalIdent.monoid as m\n\n        (* TODO: loop state *)\n\n        method! 
visit_expr' () e =\n          match e with\n          | Assign { lhs; e; _ } ->\n              let rec visit_lhs lhs =\n                match lhs with\n                | LhsLocalVar { var; _ } ->\n                    Set.singleton (module TypedLocalIdent) (var, e.typ)\n                | LhsFieldAccessor { e; _ } | LhsVecRef { e; _ } -> visit_lhs e\n                | LhsArrayAccessor { e; index; _ } ->\n                    Set.union (self#visit_expr () index) (visit_lhs e)\n                | LhsArbitraryExpr { witness; e } ->\n                    fv_of_arbitrary_lhs witness e\n              in\n              visit_lhs lhs\n          | Match { scrutinee; arms } ->\n              List.fold_left ~init:(self#visit_expr () scrutinee) ~f:Set.union\n              @@ List.map ~f:(fun arm -> self#visit_arm () arm) arms\n          | Let { lhs = pat; rhs = expr; body; _ } ->\n              Set.union (self#visit_expr () expr)\n              @@ without_pat_vars (self#visit_expr () body) pat\n          | Closure { params; body; _ } ->\n              without_pats_vars (self#visit_expr () body) params\n          | Loop { body; kind; state; _ } ->\n              let vars =\n                (match kind with\n                | UnconditionalLoop -> []\n                | WhileLoop _ -> []\n                | ForLoop { pat = _not_mutable; _ } -> []\n                | ForIndexLoop { var = _not_mutable; _ } -> [])\n                @ (state\n                  |> Option.map ~f:(fun { bpat; _ } -> variables_of_pat bpat)\n                  |> Option.to_list)\n                |> Set.union_list (module Local_ident)\n              in\n              m#plus\n                (self#visit_loop_kind () kind)\n                (m#plus\n                   (Option.map ~f:(self#visit_loop_state ()) state\n                   |> Option.value ~default:m#zero)\n                   (without_vars (self#visit_expr () body) vars))\n          | _ -> super#visit_expr' () e\n\n        method! 
visit_arm' () { arm_pat; body; guard } =\n          match guard with\n          | Some { guard = IfLet { lhs; rhs; _ }; _ } ->\n              let rhs_vars =\n                without_pat_vars (self#visit_expr () rhs) arm_pat\n              in\n              let body_vars =\n                without_pats_vars (self#visit_expr () body) [ arm_pat; lhs ]\n              in\n              Set.union rhs_vars body_vars\n          | None -> without_pat_vars (self#visit_expr () body) arm_pat\n      end\n\n    class ['s] expr_list_monoid =\n      object\n        method private zero = []\n        method private plus = List.append\n      end\n\n    let collect_break_payloads =\n      object (self)\n        inherit [_] Visitors.reduce as super\n        inherit [_] expr_list_monoid as _m\n\n        method! visit_expr' () e =\n          match e with\n          | Break { e; _ } -> self#plus (self#visit_expr () e) [ e ]\n          | Loop _ ->\n              (* Do *NOT* visit sub nodes *)\n              self#zero\n          | _ -> super#visit_expr' () e\n      end\n\n    let collect_attrs =\n      object (_self)\n        inherit [_] Visitors.reduce\n        inherit [_] expr_list_monoid\n        method! visit_attrs () attrs = attrs\n      end\n  end\n\n  (** Produces a local identifier which is locally fresh with respect to\n      variables [{vars}]. *)\n  let fresh_local_ident_in (vars : local_ident list) (prefix : string) :\n      Local_ident.t =\n    let free_suffix =\n      vars\n      |> List.filter_map ~f:(fun ({ name; _ } : local_ident) ->\n             String.chop_prefix ~prefix name)\n      |> List.map ~f:(function \"\" -> \"0\" | s -> s)\n      |> List.filter_map ~f:Stdlib.int_of_string_opt\n      |> List.fold ~init:(-1) ~f:Int.max\n      |> ( + ) 1\n      |> function\n      | 0 -> \"\"\n      | n -> Int.to_string n\n    in\n    {\n      name = prefix ^ free_suffix;\n      id =\n        (* TODO: freshness is local and name-only here... 
*)\n        Local_ident.mk_id Expr (-1);\n    }\n\n  (** Produces a local identifier which is locally fresh with respect to\n      expressions [{exprs}]. *)\n  let fresh_local_ident_in_expr (exprs : expr list) (prefix : string) :\n      Local_ident.t =\n    fresh_local_ident_in\n      (List.map ~f:(Reducers.collect_local_idents#visit_expr ()) exprs\n      |> Set.union_list (module Local_ident)\n      |> Set.to_list)\n      prefix\n\n  let never_typ : ty =\n    let ident =\n      `Concrete\n        (Concrete_ident.of_name ~value:false Rust_primitives__hax__Never)\n    in\n    TApp { ident; args = [] }\n\n  let is_never_typ : ty -> bool = function\n    | TApp { ident; _ } ->\n        Global_ident.eq_name Rust_primitives__hax__Never ident\n    | _ -> false\n\n  let unit_typ : ty = TApp { ident = `TupleType 0; args = [] }\n\n  let unit_expr span : expr =\n    { typ = unit_typ; span; e = GlobalVar (`TupleCons 0) }\n\n  (* TODO: Those tuple1 things are wrong! Tuples of size one exists in Rust! e.g. `(123,)` *)\n  let rec remove_tuple1_pat (p : pat) : pat =\n    match p.p with\n    | PConstruct { constructor = `TupleType 1; fields = [ { pat; _ } ]; _ } ->\n        remove_tuple1_pat pat\n    | _ -> p\n\n  let rec remove_tuple1 (t : ty) : ty =\n    match t with\n    | TApp { ident = `TupleType 1; args = [ GType t ] } -> remove_tuple1 t\n    | _ -> t\n\n  let remove_unsize (e : expr) : expr =\n    match e.e with\n    | App { f = { e = GlobalVar f; _ }; args = [ e ]; _ }\n      when Global_ident.eq_name Rust_primitives__unsize f ->\n        e\n    | _ -> e\n\n  (** See [beta_reduce_closure]'s documentation. 
*)\n  let beta_reduce_closure_opt (e : expr) : expr option =\n    let* f, args, _, _, _ = Expect.app e in\n    let* pats, body = Expect.closure f in\n    let* vars = List.map ~f:Expect.pbinding_simple pats |> sequence in\n    let vars = List.map ~f:fst vars in\n    let replacements =\n      List.zip_exn vars args |> Map.of_alist_exn (module Local_ident)\n    in\n    Some ((Mappers.replace_local_variables replacements)#visit_expr () body)\n\n  (** Reduces a [(|x1, ..., xN| body)(e1, ..., eN)] to\n      [body[x1/e1, ..., xN/eN]]. This assumes the arities are right:\n      [(|x, y| ...)(e1)]. *)\n  let beta_reduce_closure (e : expr) : expr =\n    beta_reduce_closure_opt e |> Option.value ~default:e\n\n  let is_unit_typ : ty -> bool =\n    remove_tuple1 >> [%matches? TApp { ident = `TupleType 0; _ }]\n\n  let rec pat_is_expr (p : pat) (e : expr) =\n    match (p.p, e.e) with\n    | _, Construct { constructor = `TupleCons 1; fields = [ (_, e) ]; _ } ->\n        pat_is_expr p e\n    | PBinding { subpat = None; var = pv; _ }, LocalVar ev ->\n        [%eq: local_ident] pv ev\n    | ( PConstruct { constructor = pn; fields = pargs; _ },\n        Construct { constructor = en; fields = eargs; base = None; _ } )\n      when [%eq: global_ident] pn en -> (\n        match List.zip pargs eargs with\n        | Ok zip ->\n            List.for_all\n              ~f:(fun (x, y) ->\n                [%eq: global_ident] x.field (fst y) && pat_is_expr x.pat (snd y))\n              zip\n        | Unequal_lengths -> false)\n    | _ -> false\n\n  let make_let (lhs : pat) (rhs : expr) (body : expr) =\n    if pat_is_expr lhs body then rhs\n    else { body with e = Let { monadic = None; lhs; rhs; body } }\n\n  let make_lets (lbs : (pat * expr) list) (body : expr) =\n    List.fold_right ~init:body\n      ~f:(fun (pat, expr) body -> make_let pat expr body)\n      lbs\n\n  let make_var_pat (var : local_ident) (typ : ty) (span : span) : pat =\n    {\n      p = PBinding { mut = Immutable; mode = 
ByValue; var; typ; subpat = None };\n      span;\n      typ;\n    }\n\n  let ty_equality (a : ty) (b : ty) : bool =\n    let replace_spans =\n      object\n        inherit [_] Visitors.map\n        method! visit_span _ = function _ -> Span.default\n      end\n    in\n    let a = replace_spans#visit_ty () a in\n    let b = replace_spans#visit_ty () b in\n    [%eq: ty] a b\n\n  let let_of_binding ((var, rhs) : local_ident * expr) (body : expr) : expr =\n    make_let (make_var_pat var rhs.typ rhs.span) rhs body\n\n  let lets_of_bindings (bindings : (local_ident * expr) list) (body : expr) :\n      expr =\n    List.fold_right ~init:body ~f:let_of_binding bindings\n\n  let make_tuple_typ' (tuple : ty list) : ty =\n    TApp\n      {\n        ident = `TupleType (List.length tuple);\n        args = List.map ~f:(fun typ -> GType typ) tuple;\n      }\n\n  let make_tuple_typ (tuple : ty list) : ty =\n    match tuple with [ ty ] -> ty | _ -> make_tuple_typ' tuple\n\n  let make_unit_param (span : span) : param =\n    let typ = unit_typ in\n    let pat = M.pat_PWild ~typ ~span in\n    { pat; typ; typ_span = None; attrs = [] }\n\n  let make_seq (e1 : expr) (e2 : expr) : expr =\n    make_let (M.pat_PWild ~typ:e1.typ ~span:e1.span) e1 e2\n\n  let make_tuple_field_pat (len : int) (nth : int) (pat : pat) : field_pat =\n    { field = `TupleField (nth + 1, len); pat }\n\n  let make_tuple_pat'' span (tuple : field_pat list) : pat =\n    match tuple with\n    | [ { pat; _ } ] -> pat\n    | _ ->\n        let len = List.length tuple in\n        {\n          p =\n            PConstruct\n              {\n                constructor = `TupleCons len;\n                is_record = false;\n                is_struct = true;\n                fields = tuple;\n              };\n          typ = make_tuple_typ @@ List.map ~f:(fun { pat; _ } -> pat.typ) tuple;\n          span;\n        }\n\n  let make_tuple_pat' (pats : pat list) : pat =\n    let len = List.length pats in\n    let span = 
Span.union_list @@ List.map ~f:(fun p -> p.span) pats in\n    List.mapi ~f:(fun i pat -> { field = `TupleField (i, len); pat }) pats\n    |> make_tuple_pat'' span\n\n  let make_tuple_pat : pat list -> pat = function\n    | [ pat ] -> pat\n    | pats -> make_tuple_pat' pats\n\n  let make_tuple_expr' ~(span : span) (tuple : expr list) : expr =\n    let len = List.length tuple in\n    {\n      e =\n        Construct\n          {\n            constructor = `TupleCons len;\n            is_record = false;\n            is_struct = true;\n            fields =\n              List.mapi ~f:(fun i x -> (`TupleField (i, len), x)) @@ tuple;\n            base = None;\n          };\n      typ = make_tuple_typ @@ List.map ~f:(fun { typ; _ } -> typ) tuple;\n      span;\n    }\n\n  let make_tuple_expr ~(span : span) : expr list -> expr = function\n    | [ e ] -> e\n    | es -> make_tuple_expr' ~span es\n\n  (* maybe we should just drop Construct in favor of a\n     [Record] thing, and put everything which is not a Record\n       into an App. This would simplify stuff quite much. Maybe not\n       for LHS things. 
*)\n  let call_Constructor' (constructor : global_ident) is_struct\n      (args : expr list) span ret_typ =\n    let mk_field =\n      let len = List.length args in\n      fun n -> `TupleField (len, n)\n    in\n    let fields = List.mapi ~f:(fun i arg -> (mk_field i, arg)) args in\n    {\n      e =\n        Construct\n          { constructor; is_record = false; is_struct; fields; base = None };\n      typ = ret_typ;\n      span;\n    }\n\n  let call_Constructor (constructor_name : Concrete_ident.name)\n      (is_struct : bool) (args : expr list) span ret_typ =\n    call_Constructor'\n      (`Concrete (Concrete_ident.of_name ~value:true constructor_name))\n      is_struct args span ret_typ\n\n  let call' ?impl f ?(generic_args = []) ?(impl_generic_args = [])\n      (args : expr list) span ret_typ =\n    let typ = TArrow (List.map ~f:(fun arg -> arg.typ) args, ret_typ) in\n    let e = GlobalVar f in\n    {\n      e =\n        App\n          {\n            f = { e; typ; span };\n            args;\n            generic_args;\n            bounds_impls = [];\n            trait = Option.map ~f:(fun impl -> (impl, impl_generic_args)) impl;\n          };\n      typ = ret_typ;\n      span;\n    }\n\n  let call ?(generic_args = []) ?(impl_generic_args = []) ?impl\n      (f_name : Concrete_ident.name) (args : expr list) span ret_typ =\n    call' ?impl ~generic_args ~impl_generic_args\n      (`Concrete (Concrete_ident.of_name ~value:true f_name))\n      args span ret_typ\n\n  let make_closure (params : pat list) (body : expr) (span : span) : expr =\n    let params =\n      match params with\n      | [] -> [ M.pat_PWild ~typ:unit_typ ~span ]\n      | _ -> params\n    in\n    let e = Closure { params; body; captures = [] } in\n    { e; typ = TArrow (List.map ~f:(fun p -> p.typ) params, body.typ); span }\n\n  let string_lit span (s : string) : expr =\n    { span; typ = TStr; e = Literal (String s) }\n\n  module HaxFailure = struct\n    module Build = struct\n      let pat span (typ 
: ty) (msg : string) : pat =\n        let (module M) = M.make span in\n        let constructor =\n          Global_ident.of_name ~value:true Rust_primitives__hax__Failure__Ctor\n        in\n        let pat = M.pat_PConstant ~typ ~lit:(String msg) in\n        let fields = [ { field = constructor; pat } ] in\n        M.pat_PConstruct ~typ ~is_record:false ~is_struct:true ~constructor\n          ~fields\n\n      let expr span (typ : ty) (error : string) (ast : string) =\n        let args = List.map ~f:(string_lit span) [ error; ast ] in\n        call Rust_primitives__hax__failure args span typ\n\n      let ty (payload : string) =\n        let ident =\n          `Concrete\n            (Concrete_ident.of_name ~value:false Rust_primitives__hax__Failure)\n        in\n        let (module M) = M.make (Span.dummy ()) in\n        let payload = M.expr_Literal ~typ:TBool (String payload) in\n        TApp { ident; args = [ GConst payload ] }\n    end\n\n    open struct\n      let destruct_str_lit e =\n        let* l = D.expr_Literal e in\n        match l with String s -> Some s | _ -> None\n    end\n\n    module Destruct = struct\n      let pat (p : pat) : string option =\n        let* p = D.pat_PConstruct p in\n        let*? () =\n          Global_ident.eq_name Rust_primitives__hax__Failure__Ctor p.constructor\n        in\n        let* { pat; _ } = D.list_1 p.fields in\n        let* s = D.pat_PConstant pat in\n        match s.lit with String s -> Some s | _ -> None\n\n      let expr (e : expr) : (string * string) option =\n        let* app = D.expr_App e in\n        let* id = D.expr_GlobalVar app.f in\n        let*? 
_ = Global_ident.eq_name Rust_primitives__hax__failure id in\n        let* x, y = D.list_2 app.args in\n\n        let* x = destruct_str_lit x in\n        let* y = destruct_str_lit y in\n        Some (x, y)\n\n      let ty (t : ty) : string option =\n        match t with\n        | TApp { ident; args = [ GConst payload ] }\n          when Global_ident.eq_name Rust_primitives__hax__Failure ident ->\n            destruct_str_lit payload\n        | _ -> None\n    end\n  end\n\n  let hax_failure_expr' span (typ : ty) (context, kind) (ast : string) =\n    let ast =\n      (* Remove consecutive withe spaces *)\n      String.split ~on:' ' ast\n      |> List.filter ~f:(String.is_empty >> not)\n      |> String.concat ~sep:\" \"\n    in\n    let ast =\n      if String.length ast > 200 then String.sub ~pos:0 ~len:200 ast ^ \"...\"\n      else ast\n    in\n    let error = Diagnostics.pretty_print_context_kind context kind in\n    HaxFailure.Build.expr span typ error ast\n\n  let hax_failure_expr span (typ : ty) (context, kind) (expr0 : Ast.Full.expr) =\n    hax_failure_expr' span typ (context, kind) (Print_rust.pexpr_str expr0)\n\n  module LiftToFullAst = struct\n    let expr : AST.expr -> Ast.Full.expr = Stdlib.Obj.magic\n    let ty : AST.ty -> Ast.Full.ty = Stdlib.Obj.magic\n    let item : AST.item -> Ast.Full.item = Stdlib.Obj.magic\n  end\n\n  module Debug : sig\n    val expr : ?label:string -> AST.expr -> unit\n    (** Prints an expression pretty-printed as Rust, with its full AST encoded\n        as JSON, available as a file, so that one can `jless` or `jq` into it.\n    *)\n\n    val item' : ?label:string -> AST.item -> string\n    val item : ?label:string -> AST.item -> unit\n  end = struct\n    let expr ?(label = \"\") (e : AST.expr) : unit =\n      let path = tempfile_path ~suffix:\".json\" in\n      Core.Out_channel.write_all path\n        ~data:([%yojson_of: AST.expr] e |> Yojson.Safe.pretty_to_string);\n      let e = LiftToFullAst.expr e in\n      \"```rust \" ^ 
label ^ \"\\n\" ^ Print_rust.pexpr_str e\n      ^ \"\\n```\\x1b[34m JSON-encoded AST available at \\x1b[1m\" ^ path\n      ^ \"\\x1b[0m (hint: use `jless \" ^ path ^ \"`)\"\n      |> Stdio.prerr_endline\n\n    let item' ?(label = \"\") (e : AST.item) : string =\n      let path = tempfile_path ~suffix:\".json\" in\n      Core.Out_channel.write_all path\n        ~data:([%yojson_of: AST.item] e |> Yojson.Safe.pretty_to_string);\n      let e = LiftToFullAst.item e in\n      \"```rust \" ^ label ^ \"\\n\" ^ Print_rust.pitem_str e\n      ^ \"\\n```\\x1b[34m JSON-encoded AST available at \\x1b[1m\" ^ path\n      ^ \"\\x1b[0m (hint: use `jless \" ^ path ^ \"`)\"\n\n    let item ?(label = \"\") (e : AST.item) =\n      item' ~label e |> Stdio.prerr_endline\n  end\n\n  let unbox_expr' (next : expr -> expr) (e : expr) : expr =\n    match e.e with\n    | App { f = { e = GlobalVar f; _ }; args = [ e ]; _ }\n      when Global_ident.eq_name Alloc__boxed__Impl__new f\n           || Global_ident.eq_name Rust_primitives__hax__box_new f ->\n        next e\n    | _ -> e\n\n  let underef_expr' (next : expr -> expr) (e : expr) : expr =\n    match e.e with\n    | App\n        {\n          f = { e = GlobalVar (`Primitive Ast.Deref); _ };\n          args = [ e ];\n          generic_args = _;\n          bounds_impls = _;\n          trait = _;\n        } ->\n        next e\n    | _ -> e\n\n  let rec unref_ty (t : ty) : ty =\n    match t with TRef { typ; _ } -> unref_ty typ | t -> t\n\n  let rec unbox_expr e = unbox_expr' unbox_expr e\n  let underef_expr e = underef_expr' unbox_expr e\n\n  let rec unbox_underef_expr e =\n    (unbox_expr' unbox_underef_expr >> underef_expr' unbox_underef_expr) e\n\n  (* extracts a `param` out of a `generic_param` if it's a const\n     generic, otherwise returns `None`` *)\n  let param_of_generic_const_param (g : generic_param) : param option =\n    let* typ = match g.kind with GPConst { typ } -> Some typ | _ -> None in\n    let ({ span; ident = var; _ } : 
generic_param) = g in\n    let pat =\n      let mode, mut, subpat = (ByValue, Immutable, None) in\n      { p = PBinding { mut; mode; var; typ; subpat }; span; typ }\n    in\n    Some { pat; typ; typ_span = Some span; attrs = [] }\n\n  let kind_of_item (item : item) : item_kind =\n    match item.v with\n    | Fn _ -> `Fn\n    | TyAlias _ -> `TyAlias\n    | Type _ -> `Type\n    | IMacroInvokation _ -> `IMacroInvokation\n    | Trait _ -> `Trait\n    | Impl _ -> `Impl\n    | Alias _ -> `Alias\n    | Use _ -> `Use\n    | Quote _ -> `Quote\n    | HaxError _ -> `HaxError\n    | NotImplementedYet -> `NotImplementedYet\n\n  let rec expr_of_lhs (span : span) (lhs : lhs) : expr =\n    match lhs with\n    | LhsLocalVar { var; typ } -> { e = LocalVar var; typ; span }\n    | LhsVecRef { e; _ } -> expr_of_lhs span e\n    | LhsFieldAccessor { e; typ; field; _ } ->\n        let e = expr_of_lhs span e in\n        let f = { e = GlobalVar field; typ = TArrow ([ e.typ ], typ); span } in\n        {\n          e =\n            App\n              {\n                f;\n                args = [ e ];\n                generic_args = [];\n                bounds_impls = [];\n                trait = None (* TODO: see issue #328 *);\n              };\n          typ;\n          span;\n        }\n    | LhsArrayAccessor { e; typ; index; _ } ->\n        let args = [ expr_of_lhs span e; index ] in\n        call Core__ops__index__Index__index args span typ\n    | LhsArbitraryExpr { e; _ } -> e\n\n  (* module Box = struct *)\n  (*   module Ty = struct *)\n  (*     let destruct (t : ty) : ty option = *)\n  (*       match t with *)\n  (*       | TApp { ident = `Concrete box; args = [ GType sub; _alloc ] } *)\n  (*         when Concrete_ident.eq_name Alloc__boxed__Box box -> *)\n  (*           Some sub *)\n  (*       | _ -> None *)\n\n  (*     let alloc_ty = *)\n  (*       TApp *)\n  (*         { *)\n  (*           ident = `Concrete (Concrete_ident.of_name Type Alloc__alloc__Global); *)\n  (*           
args = []; *)\n  (*         } *)\n\n  (*     let make (t : ty) : ty = *)\n  (*       let ident = `Concrete (Concrete_ident.of_name Type Alloc__boxed__Box) in *)\n  (*       TApp { ident; args = [ GType t; GType alloc_ty ] } *)\n  (*   end *)\n\n  (*   module Expr = struct *)\n  (*     let destruct (e : expr) : expr option = *)\n  (*       match e.e with *)\n  (*       | App { f = { e = GlobalVar (`Primitive Box); _ }; args = [ arg ] } -> *)\n  (*           Some arg *)\n  (*       | _ -> None *)\n\n  (*     let make (e : expr) : expr = *)\n  (*       let boxed_ty = Ty.make e.typ in *)\n  (*       let f_ty = TArrow ([ e.typ ], boxed_ty) in *)\n  (*       let f = { e with typ = f_ty; e = GlobalVar (`Primitive Box) } in *)\n  (*       { e with typ = boxed_ty; e = App { f; args = [ e ] } } *)\n  (*   end *)\n  (* end *)\n\n  let rec collect_let_bindings' (e : expr) : (pat * expr * ty) list * expr =\n    match e.e with\n    | Let { monadic = _; lhs; rhs; body } ->\n        let bindings, body = collect_let_bindings' body in\n        ((lhs, rhs, e.typ) :: bindings, body)\n    | _ -> ([], e)\n\n  let collect_let_bindings (e : expr) : (pat * expr) list * expr =\n    let bindings, body = collect_let_bindings' e in\n    let types = List.map ~f:thd3 bindings in\n    assert (\n      match (List.drop_last types, types) with\n      | Some init, _ :: tl ->\n          List.zip_exn init tl |> List.for_all ~f:(uncurry [%eq: ty])\n      | _ -> true);\n    (* TODO: injecting the type of the lets in the body is bad.\n       We should stay closer to Rust's inference.\n       Here, we lose a bit of information.\n    *)\n    let body =\n      { body with typ = List.hd types |> Option.value ~default:body.typ }\n    in\n    (List.map ~f:(fun (p, e, _) -> (p, e)) bindings, body)\n\n  let rec map_body_of_nested_lets (f : expr -> expr) (e : expr) : expr =\n    match e.e with\n    | Let { monadic; lhs; rhs; body } ->\n        {\n          e with\n          e = Let { monadic; lhs; rhs; body = 
map_body_of_nested_lets f body };\n        }\n    | _ -> f e\n\n  let tuple_projector span (tuple_typ : ty) (len : int) (nth : int)\n      (type_at_nth : ty) : expr =\n    {\n      span;\n      (* TODO: require a span here *)\n      typ = TArrow ([ tuple_typ ], type_at_nth);\n      e = GlobalVar (`Projector (`TupleField (nth, len)));\n    }\n\n  let project_tuple (tuple : expr) (len : int) (nth : int) (type_at_nth : ty) :\n      expr =\n    {\n      span = tuple.span;\n      typ = type_at_nth;\n      e =\n        App\n          {\n            f = tuple_projector tuple.span tuple.typ len nth type_at_nth;\n            args = [ tuple ];\n            generic_args = [] (* TODO: see issue #328 *);\n            bounds_impls = [];\n            trait = None (* TODO: see issue #328 *);\n          };\n    }\n\n  (** Concatenates the generics [g1] and [g2], making sure lifetimes appear\n      first *)\n  let concat_generics (g1 : generics) (g2 : generics) : generics =\n    let params = g1.params @ g2.params in\n    let constraints = g1.constraints @ g2.constraints in\n    let lifetimes, others =\n      List.partition_tf ~f:(fun p -> [%matches? 
GPLifetime _] p.kind) params\n    in\n    let params = lifetimes @ others in\n    { params; constraints }\n\n  module Place = struct\n    type t = { place : place'; span : span; typ : ty }\n\n    and place' =\n      | LocalVar of Local_ident.t\n      | Deref of expr\n      | VecRef of t\n      | IndexProjection of { place : t; index : expr }\n      | FieldProjection of { place : t; projector : global_ident }\n    [@@deriving show]\n\n    let deref_mut_allowed (t : ty) : bool =\n      match t with\n      | TApp { ident; _ } -> Global_ident.eq_name Alloc__vec__Vec ident\n      | _ -> false\n\n    let rec of_expr (e : expr) : t option =\n      let wrap place = Some { place; span = e.span; typ = e.typ } in\n      match e.e with\n      | App { f = { e = GlobalVar (`Primitive Deref); _ }; args = [ e ]; _ }\n        -> (\n          match of_expr e with\n          | Some { place = IndexProjection _; _ } as value -> value\n          | _ -> wrap @@ Deref e)\n      | LocalVar i -> wrap @@ LocalVar i\n      | App\n          {\n            f = { e = GlobalVar (`Projector _ as projector); _ };\n            args = [ place ];\n            generic_args = _;\n            bounds_impls = _;\n            trait = _;\n          (* TODO: see issue #328 *)\n          } ->\n          let* place = of_expr place in\n          wrap @@ FieldProjection { place; projector }\n      | App\n          {\n            f = { e = GlobalVar f; _ };\n            args = [ place; index ];\n            generic_args = _;\n            bounds_impls = _;\n            trait = _;\n          (* TODO: see issue #328 *)\n          }\n        when Global_ident.eq_name Core__ops__index__Index__index f ->\n          let* place = of_expr place in\n          let place = IndexProjection { place; index } in\n          Some { place; span = e.span; typ = e.typ }\n      | App\n          {\n            f = { e = GlobalVar f; _ };\n            args = [ place; index ];\n            generic_args = _;\n            bounds_impls = 
_;\n            trait = _;\n          (* TODO: see issue #328 *)\n          }\n        when Global_ident.eq_name Core__ops__index__IndexMut__index_mut f ->\n          (* Note that here, we allow any type to be `index_mut`ed:\n             Hax translates that to `Rust_primitives.Hax.update_at`.\n             This will typecheck IFF there is an implementation.\n          *)\n          let* typ = Expect.mut_ref e.typ in\n          let* place = Expect.mut_borrow place in\n          let* place = of_expr place in\n          let place = IndexProjection { place; index } in\n          Some { place; span = e.span; typ }\n      | _ -> None\n\n    let rec to_expr (p : t) : expr =\n      match p.place with\n      | LocalVar v ->\n          let e : expr' = LocalVar v in\n          { e; typ = p.typ; span = p.span }\n      | VecRef inner ->\n          let e = to_expr inner in\n          call Alloc__vec__Impl_1__as_slice [ e ] p.span p.typ\n      | Deref e -> call' (`Primitive Deref) [ e ] p.span p.typ\n      | FieldProjection { place; projector } ->\n          let e = to_expr place in\n          call' projector [ e ] p.span p.typ\n      | IndexProjection { place; index } ->\n          let e = to_expr place in\n          call Core__ops__index__Index__index [ e; index ] p.span p.typ\n\n    let expect_deref_mut (p : t) : t option =\n      match p.place with\n      | Deref e ->\n          let visible_ty = e.typ in\n          let* e = Expect.deref_mut_app e in\n          let* e = Expect.mut_borrow e in\n          let res = of_expr e in\n          let f : t -> t =\n           fun p ->\n            match (unref_ty visible_ty, unref_ty p.typ) with\n            | (TSlice _ as typ), TApp { ident; _ }\n              when Global_ident.eq_name Alloc__vec__Vec ident ->\n                { p with place = VecRef p; typ }\n            | _ -> p\n          in\n          Option.map res ~f\n      | _ -> None\n\n    let expect_allowed_deref_mut (p : t) : t option =\n      let* p = expect_deref_mut p 
in\n      if deref_mut_allowed p.typ then Some p else None\n\n    let skip_allowed_deref_mut (p : t) : t =\n      Option.value ~default:p (expect_deref_mut p)\n  end\n\n  let group_items_by_namespace (items : item list) :\n      item list Concrete_ident.View.ModPath.Map.t =\n    let h = Hashtbl.create (module Concrete_ident.View.ModPath) in\n    List.iter items ~f:(fun item ->\n        let ns = (Concrete_ident.to_view item.ident).mod_path in\n        let items = Hashtbl.find_or_add h ns ~default:(fun _ -> ref []) in\n        items := !items @ [ item ]);\n    Map.of_iteri_exn\n      (module Concrete_ident.View.ModPath)\n      ~iteri:(Hashtbl.map h ~f:( ! ) |> Hashtbl.iteri)\nend\n"
  },
  {
    "path": "engine/lib/attr_payloads.ml",
    "content": "open! Prelude\nopen Ast\n\nlet payload (attr : attr) : (Types.ha_payload * span) option =\n  match attr.kind with\n  | Tool { path; tokens } when [%eq: string] path \"_hax::json\" -> (\n      match Yojson.Safe.from_string tokens with\n      | `String s -> (\n          match\n            Yojson.Safe.from_string s |> Types.safe_ha_payload_of_yojson\n          with\n          | Error _ ->\n              Stdlib.prerr_endline\n                [%string\n                  {|\nThe hax engine could not parse a hax attribute.\nThis means that the crate being extracted and the version of hax engine are incompatible.\nPlease make sure the `hax-lib` dependency of the extracted crate matches hax-engine's version (%{Types.hax_version}).\n|}];\n              Stdlib.exit 1\n          | Ok value -> Some (value, attr.span))\n      | x ->\n          Stdlib.failwith\n          @@ \"Attr_payloads: payloads: expected a string while parsing JSON, \\\n              got \"\n          ^ Yojson.Safe.pretty_to_string x\n          ^ \"instead\")\n  | _ -> None\n\n(** Parse [_hax::json] attributes *)\nlet payloads : attrs -> (Types.ha_payload * span) list =\n  List.filter_map ~f:payload\n\n(** Create a attribute out of a [payload] *)\nlet to_attr (payload : Types.ha_payload) (span : span) : attr =\n  let json =\n    `String (Yojson.Safe.to_string ([%yojson_of: Types.ha_payload] payload))\n  in\n  let kind : attr_kind =\n    Tool { path = \"_hax::json\"; tokens = Yojson.Safe.to_string json }\n  in\n  { kind; span }\n\nmodule UId = struct\n  module T = struct\n    type t = UId of string [@@deriving show, yojson, compare, sexp, eq]\n  end\n\n  module M = struct\n    include Base.Comparator.Make (T)\n    include T\n  end\n\n  include M\n  module Map = Map.M (M)\n\n  let of_raw (uid : Types.ha_uid) : t = UId uid.uid\nend\n\nmodule AssocRole = struct\n  module T = struct\n    type t =\n      | Requires\n      | Ensures\n      | Decreases\n      | SMTPat\n      | Refine\n      | 
ProcessRead\n      | ProcessWrite\n      | ProcessInit\n      | ProtocolMessages\n      | ItemQuote\n    [@@deriving show, yojson, compare, sexp, eq]\n  end\n\n  module M = struct\n    include Base.Comparator.Make (T)\n    include T\n  end\n\n  include M\n  module Map = Map.M (M)\n\n  let of_raw : Types.ha_assoc_role -> t = function\n    | Requires -> Requires\n    | Ensures -> Ensures\n    | Decreases -> Decreases\n    | SMTPat -> SMTPat\n    | Refine -> Refine\n    | ItemQuote -> ItemQuote\n    | ProcessRead -> ProcessRead\n    | ProcessWrite -> ProcessWrite\n    | ProcessInit -> ProcessInit\n    | ProtocolMessages -> ProtocolMessages\nend\n\nmodule MakeBase (Error : Phase_utils.ERROR) = struct\n  (* Given a predicate, finds an attribute that is not supposed to occur\n     more than once. Returns `None` if no such attribute was found. *)\n  let find_unique_attr (attrs : attrs) ~(f : Types.ha_payload -> 'a option) :\n      'a option =\n    match\n      payloads attrs\n      |> List.filter_map ~f:(fun (x, span) ->\n             Option.map ~f:(fun x -> (x, span)) (f x))\n    with\n    | [ (attr, _) ] -> Some attr\n    | [] -> None\n    | (attr, _first) :: (_, _second) :: _ -> Some attr\n  (* TODO: when parent attributes are handled correctly (see issue #288) revive the error below *)\n  (* Error.assertion_failure (Span.union first second) *)\n  (*   \"This attribute is supposed to be unique\" *)\n\n  (* we should have multi span errors, basically make somethings really close to Rustc diagnostics! *)\n\n  let status : attrs -> Types.ha_item_status =\n    let f = function Types.ItemStatus is -> Some is | _ -> None in\n    let default : Types.ha_item_status = Types.Included { late_skip = false } in\n    find_unique_attr ~f >> Option.value ~default\n\n  (** Extracts an `Order` attribute if it exists. 
*)\n  let order : attrs -> int option =\n    let f = function Types.Order n -> Some n | _ -> None in\n    find_unique_attr ~f\n\n  let late_skip : attrs -> bool =\n    status >> [%matches? Types.Included { late_skip = true }]\n\n  let is_erased : attrs -> bool =\n    find_unique_attr\n      ~f:([%eq: Types.ha_payload] Erased >> Fn.flip Option.some_if ())\n    >> Option.is_some\n\n  let uid : attrs -> UId.t option =\n    let f = function Types.Uid uid -> Some (UId.of_raw uid) | _ -> None in\n    find_unique_attr ~f\n\n  let lemma : attrs -> bool =\n    payloads >> List.exists ~f:(fst >> [%matches? Types.Lemma])\n\n  (* User code can be *decorated* (e.g. attributes `ensures` or\n     `refine`). A decoration is attached to a user code via an\n     `AssociatedItem` attribute, that specifies an unique identifier\n     (uid) and a role (Ensure, Decreases, Refine...) *)\n  let raw_associated_item : attrs -> (AssocRole.t * UId.t) list =\n    payloads >> List.map ~f:fst\n    >> List.filter_map ~f:(function\n         | Types.AssociatedItem { role; item } ->\n             Some (AssocRole.of_raw role, UId.of_raw item)\n         | _ -> None)\nend\n\nmodule Make (F : Features.T) (Error : Phase_utils.ERROR) = struct\n  module AST = Ast.Make (F)\n  module U = Ast_utils.Make (F)\n  open AST\n  include MakeBase (Error)\n\n  let attrs_field (i : item) = i.attrs\n\n  (* TODO: Maybe rename me `graph` or something? 
*)\n  module type WITH_ITEMS = sig\n    val item_uid_map : item UId.Map.t\n    val try_item_of_uid : UId.t -> item option\n    val item_of_uid : UId.t -> item\n    val associated_items_per_roles : attrs -> item list AssocRole.Map.t\n    val associated_item : AssocRole.t -> attrs -> item option\n\n    val associated_fn :\n      AssocRole.t -> attrs -> (generics * param list * expr) option\n\n    val associated_expr :\n      ?keep_last_args:int -> AssocRole.t -> attrs -> expr option\n\n    val associated_items : AssocRole.t -> attrs -> item list\n\n    val associated_fns :\n      AssocRole.t -> attrs -> (generics * param list * expr) list\n\n    val associated_exprs :\n      ?keep_last_args:int -> AssocRole.t -> attrs -> expr list\n\n    val expect_fn : item -> generics * param list * expr\n\n    val expect_expr :\n      ?keep_last_args:int -> generics * param list * expr -> expr\n\n    val associated_refinement_in_type :\n      span -> string list -> attrs -> expr option\n    (** For type, there is a special treatment. The name of fields are global\n        identifiers, and thus are subject to rewriting by [Concrete_ident] at\n        the moment of printing. In contrast, in the refinement `fn` item\n        generated by the proc-macros, the arguments are local identifiers, and\n        thus are rewrited in a different manner.\n\n        Thus, [associated_refinement_in_type] takes a list of [free_variables]:\n        those are already formatted strings as printed by the backend. 
Then, we\n        rewrite identities in the refinement formula to match exactly this print\n        policy, using *final* local identifiers (see `Local_ident.make_final`).\n    *)\n\n    include module type of MakeBase (Error)\n  end\n\n  module WithItems (I : sig\n    val items : item list\n  end) : WITH_ITEMS = struct\n    include MakeBase (Error)\n\n    let map_of_alist (type a b cmp) (m : (a, cmp) Comparator.Module.t)\n        (l : (a * b) list) ~(dup : a -> b list -> (a, b, cmp) Map.t) :\n        (a, b, cmp) Map.t =\n      let (module M) = m in\n      let equal x y = Int.equal (M.comparator.compare x y) 0 in\n      match Map.of_alist m l with\n      | `Ok map -> map\n      | `Duplicate_key key ->\n          List.filter ~f:(fst >> equal key) l |> List.map ~f:snd |> dup key\n\n    (* Useful for looking up decorations *)\n    let item_uid_map : item UId.Map.t =\n      let f item = uid item.attrs |> Option.map ~f:(fun id -> (id, item)) in\n      let l = List.filter_map ~f I.items in\n      let dup uid items =\n        let span = List.map ~f:(fun i -> i.span) items |> Span.union_list in\n        Error.assertion_failure span\n        @@ \"Two or more items share the same UID \"\n        ^ [%show: UId.t] uid\n      in\n      map_of_alist (module UId) l ~dup\n\n    let try_item_of_uid (uid : UId.t) : item option = Map.find item_uid_map uid\n\n    let item_of_uid (uid : UId.t) : item =\n      try_item_of_uid uid\n      |> Option.value_or_thunk ~default:(fun () ->\n             Error.assertion_failure (Span.dummy ())\n             @@ \"Could not find item with UID \"\n             ^ [%show: UId.t] uid)\n\n    let associated_items_per_roles : attrs -> item list AssocRole.Map.t =\n      raw_associated_item\n      >> List.map ~f:(map_snd item_of_uid)\n      >> Map.of_alist_multi (module AssocRole)\n\n    let expect_singleton failure = function\n      | [] -> None\n      | [ v ] -> Some v\n      | _ -> failure ()\n    (* Error.assertion_failure span message *)\n\n    let 
span_of_attrs =\n      List.map ~f:(fun (i : attr) -> i.span) >> Span.union_list\n\n    let find_or_empty role list = Map.find list role |> Option.value ~default:[]\n\n    let associated_items (role : AssocRole.t) (attrs : attrs) : item list =\n      associated_items_per_roles attrs |> find_or_empty role\n\n    let associated_item (role : AssocRole.t) (attrs : attrs) : item option =\n      associated_items role attrs\n      |> expect_singleton (fun _ ->\n             let span = span_of_attrs attrs in\n             Error.assertion_failure span\n             @@ \"Found more than one \"\n             ^ [%show: AssocRole.t] role\n             ^ \" for this item. Only one is allowed.\")\n\n    let expect_fn = function\n      | { v = Fn { generics; params; body; _ }; _ } -> (generics, params, body)\n      | { span; _ } ->\n          Error.assertion_failure span\n            \"this associated item was expected to be a `fn` item\"\n\n    let expect_expr ?(keep_last_args = 0) (_generics, params, body) =\n      let n =\n        if keep_last_args < 0 then 0 else List.length params - keep_last_args\n      in\n      let params = List.drop params n |> List.map ~f:(fun p -> p.pat) in\n      match params with\n      | [] -> body\n      | _ -> { body with e = Closure { params; body; captures = [] } }\n\n    let associated_fn (role : AssocRole.t) :\n        attrs -> (generics * param list * expr) option =\n      associated_item role >> Option.map ~f:expect_fn\n\n    let associated_fns (role : AssocRole.t) :\n        attrs -> (generics * param list * expr) list =\n      associated_items role >> List.map ~f:expect_fn\n\n    (** Looks up an associated expression, optionally keeping `keep_last_args`\n        last arguments. If keep_last_args is negative, then all arguments are\n        kept. 
*)\n    let associated_expr ?(keep_last_args = 0) (role : AssocRole.t) :\n        attrs -> expr option =\n      associated_fn role >> Option.map ~f:(expect_expr ~keep_last_args)\n\n    let associated_exprs ?(keep_last_args = 0) (role : AssocRole.t) :\n        attrs -> expr list =\n      associated_fns role >> List.map ~f:(expect_expr ~keep_last_args)\n\n    let associated_refinement_in_type span (free_variables : string list) :\n        attrs -> expr option =\n      associated_fn Refine\n      >> Option.map ~f:(fun (_, params, body) ->\n             let substs =\n               let x =\n                 List.concat_map ~f:U.Reducers.variables_of_param params\n               in\n               let y = List.map ~f:Local_ident.make_final free_variables in\n               List.zip_opt x y\n               |> Option.value_or_thunk ~default:(fun _ ->\n                      let details =\n                        \"associated_refinement_in_type: zip two lists of \\\n                         different lenghts\\n\" ^ \"\\n - params: \"\n                        ^ [%show: param list] params\n                        ^ \"\\n - free_variables: \"\n                        ^ [%show: string list] free_variables\n                      in\n                      Error.assertion_failure span details)\n             in\n             let v =\n               U.Mappers.rename_local_idents (fun i ->\n                   match List.find ~f:(fst >> [%eq: local_ident] i) substs with\n                   | None -> i\n                   | Some (_, i) -> i)\n             in\n             v#visit_expr () body)\n  end\n\n  let with_items (items : item list) : (module WITH_ITEMS) =\n    (module WithItems (struct\n      let items = items\n    end))\nend\n"
  },
  {
    "path": "engine/lib/backend.ml",
    "content": "open! Prelude\nopen Ast\n\nmodule type BACKEND_OPTIONS = sig\n  type t\nend\n\nmodule UnitBackendOptions = struct\n  type t = unit\nend\n\nmodule type T = sig\n  module InputLanguage : Features.T\n  module AST : module type of Ast.Make (InputLanguage)\n\n  module U : sig\n    module Mappers : sig\n      val rename_global_idents_item :\n        (Ast_utils.visit_level -> global_ident -> global_ident) ->\n        AST.item ->\n        AST.item\n    end\n  end\n\n  module Error : Phase_utils.ERROR\n  module BackendOptions : BACKEND_OPTIONS\n  module Attrs : module type of Attr_payloads.Make (InputLanguage) (Error)\n\n  val apply_phases : BackendOptions.t -> Ast.Rust.item list -> AST.item list\n\n  val translate :\n    (module Attrs.WITH_ITEMS) ->\n    BackendOptions.t ->\n    bundles:AST.item list list ->\n    AST.item list ->\n    Types.file list\n\n  val backend : Diagnostics.Backend.t\nend\n\nmodule type BackendMetadata = sig\n  val backend : Diagnostics.Backend.t\nend\n\nmodule Make (InputLanguage : Features.T) (M : BackendMetadata) = struct\n  module InputLanguage = InputLanguage\n  module AST = Ast.Make (InputLanguage)\n  module U = Ast_utils.Make (InputLanguage)\n  include M\n\n  module Error = struct\n    type t = { kind : Diagnostics.kind; span : Ast.span } [@@deriving show, eq]\n\n    let raise err =\n      let context = Diagnostics.Context.Backend M.backend in\n      let kind = err.kind in\n      let span = Span.to_thir err.span in\n      Diagnostics.SpanFreeError.raise ~span (Span.owner_hint err.span) context\n        kind\n\n    let unimplemented ?issue_id ?details span =\n      raise\n        {\n          kind =\n            Unimplemented\n              { issue_id = Option.map ~f:MyInt64.of_int issue_id; details };\n          span;\n        }\n\n    let assertion_failure span details =\n      raise { kind = AssertionFailure { details }; span }\n  end\n\n  module Attrs = Attr_payloads.Make (InputLanguage) (Error)\n  [@@ocaml.deprecated\n    
\"Use more precise errors: Error.unimplemented, Error.assertion_failure or \\\n     a raw Error.t (with Error.raise)\"]\n\n  let failwith ?(span = Span.default) msg =\n    Error.unimplemented\n      ~details:\n        (\"[TODO: this error uses failwith, and thus leads to bad error \\\n          messages, please update it using [Diagnostics.*] helpers] \" ^ msg)\n      span\nend\n"
  },
  {
    "path": "engine/lib/concrete_ident/concrete_ident.ml",
    "content": "open! Prelude\nmodule View = Concrete_ident_view\n\nmodule Fresh_module : sig\n  (** This module provides a way of generating fresh modules paths. This can be\n      used to reorganize locally definitions; the main motivation for this is\n      recursive bundles, where we move definitions from multiple modules to one\n      fresh module. This is fine because we re-expose all the original\n      definitions. *)\n\n  type t [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\n  val fresh : label:string -> Explicit_def_id.t list -> t\n  (** [fresh ~label hints] creates a fresh module out of the non-empty list of\n      explicit definition identifiers hints [hints] and out of a label [label].\n\n      The new module will have a unique path, close to [hints], and containing\n      the label [label]. *)\n\n  val register : fresh_module:t -> Explicit_def_id.t -> unit\n  (** [register ~fresh_module id] declares that [id] belongs to [fresh_module].\n  *)\n\n  val get_path_hints : t -> Explicit_def_id.t list\n  (** List path hints for a fresh module. *)\n\n  val to_mod_path : t -> View.ModPath.t\n  (** Compute a module path for a fresh module. 
*)\n\n  val to_rust_ast : t -> Rust_engine_types.fresh_module\n  val from_rust_ast : Rust_engine_types.fresh_module -> t\nend = struct\n  open View\n\n  type t = { id : int; hints : Explicit_def_id.t list; label : string }\n  [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\n  let id_state = ref 0\n  let map_state : _ Hashtbl.t = Hashtbl.create (module Int)\n\n  let fresh ~label hints =\n    id_state := !id_state + 1;\n    assert (List.is_empty hints |> not);\n    { id = !id_state; hints; label }\n\n  let register ~(fresh_module : t) (did : Explicit_def_id.t) =\n    let default = (Set.empty (module ModPath), None) in\n    let f (set, opt) = (Set.add set (View.of_def_id did).mod_path, opt) in\n    Hashtbl.update map_state fresh_module.id ~f:(Option.value ~default >> f)\n\n  (** [compute_path_chunks fresh_module] returns\n      [(mod_path, mod_name, suffixes)]. [suffixes] are optional suffixes to add\n      to [mod_name] so that the resulting path is unique. *)\n  let compute_path_chunks (m : t) =\n    let mod_paths = List.map ~f:(fun d -> (of_def_id d).mod_path) m.hints in\n    let base = List.longest_prefix ~eq:DisambiguatedString.equal mod_paths in\n    assert (List.is_empty base |> not);\n    let module_names =\n      List.filter ~f:(List.length >> ( < ) (List.length base)) mod_paths\n      |> List.filter_map ~f:List.last\n      |> List.dedup_and_sort ~compare:[%compare: DisambiguatedString.t]\n    in\n    let hash =\n      List.dedup_and_sort ~compare:[%compare: Explicit_def_id.t] m.hints\n      |> [%hash: Explicit_def_id.t list] |> Int.to_string\n      |> DisambiguatedString.pure\n    in\n    let label = DisambiguatedString.pure m.label in\n    (base, label, module_names @ [ hash ])\n\n  let all_paths () =\n    let rust_ones =\n      Explicit_def_id.State.list_all ()\n      |> List.map ~f:(fun x -> (of_def_id x).mod_path)\n    in\n    let fresh_ones : ModPath.t list =\n      Hashtbl.data map_state |> List.filter_map ~f:snd\n    in\n    rust_ones @ 
fresh_ones\n\n  let compute_path (m : t) =\n    let mod_path, mod_name, suffixes = compute_path_chunks m in\n    let existing_names =\n      all_paths ()\n      |> List.filter_map ~f:last_init\n      |> List.filter ~f:(fst >> [%eq: ModPath.t] mod_path)\n      |> List.map ~f:snd\n      |> List.map ~f:(fun m -> m.DisambiguatedString.data)\n      |> Set.of_list (module String)\n    in\n    let mod_name =\n      List.mapi ~f:(fun n _ -> mod_name :: List.take suffixes n) suffixes\n      |> List.map ~f:(List.map ~f:(fun m -> m.DisambiguatedString.data))\n      |> List.map ~f:(String.concat ~sep:\"_\")\n      |> List.find ~f:(Set.mem existing_names >> not)\n      |> Option.value_exn\n           ~message:\n             \"Broken invariant: in fresh modules the suffix is supposed to be \\\n              crafted so that it is unique.\"\n      |> DisambiguatedString.pure\n    in\n    mod_path @ [ mod_name ]\n\n  let to_mod_path m =\n    Hashtbl.update_and_return map_state m.id\n      ~f:\n        ( Option.value ~default:(Set.empty (module ModPath), None)\n        >> fun (paths, alloc) ->\n          ( paths,\n            alloc\n            |> Option.value_or_thunk ~default:(fun () -> compute_path m)\n            |> Option.some ) )\n    |> snd |> Option.value_exn\n\n  let get_path_hints { hints; _ } = hints\n\n  let to_rust_ast ({ id; hints; label } : t) : Rust_engine_types.fresh_module =\n    {\n      id = Int.to_string id;\n      hints = List.map ~f:Explicit_def_id.to_rust_ast hints;\n      label;\n    }\n\n  let from_rust_ast ({ id; hints; label } : Rust_engine_types.fresh_module) : t\n      =\n    {\n      id = Int.of_string id;\n      hints = List.map ~f:Explicit_def_id.from_rust_ast hints;\n      label;\n    }\nend\n\ntype reserved_suffix = [ `Cast | `Pre | `Post ]\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n(** A concrete identifier can have a reserved suffix: this is useful to derive\n    new identifiers from existing identifiers. 
*)\n\nmodule T = struct\n  type t = {\n    def_id : Explicit_def_id.t;\n    moved : Fresh_module.t option;\n    suffix : reserved_suffix option;\n  }\n  [@@deriving show, yojson, hash, compare, sexp, hash, eq]\nend\n\ninclude T\ninclude Comparator.Make (T)\n\nlet to_debug_string = T.show\n\nlet fresh_module ~label =\n  List.concat_map ~f:(fun { def_id; moved; _ } ->\n      def_id\n      :: (Option.to_list moved |> List.concat_map ~f:Fresh_module.get_path_hints))\n  >> Fresh_module.fresh ~label\n\nmodule Cache = struct\n  let state = Hash_set.create (module T)\n  let cached = Fn.id &&& Hash_set.add state >> fst\nend\n\nlet make (def_id : Explicit_def_id.t) (moved : Fresh_module.t option)\n    (suffix : reserved_suffix option) =\n  { def_id; moved; suffix }\n\nlet of_def_id ?(suffix : reserved_suffix option = None) ~(value : bool)\n    (def_id : Types.def_id) =\n  let constructor =\n    (* A DefId is a constructor when it's a value and points to a variant, a union or a struct. *)\n    value\n    && [%matches? 
(Variant | Union | Struct : Types.def_kind)]\n         def_id.contents.value.kind\n  in\n  make (Explicit_def_id.of_def_id_exn ~constructor def_id) None suffix\n  |> Cache.cached\n\nlet move_to_fresh_module (fresh_module : Fresh_module.t) (i : t) =\n  Fresh_module.register ~fresh_module i.def_id;\n  Cache.cached { i with moved = Some fresh_module }\n\nlet with_suffix (suffix : reserved_suffix) (i : t) : t =\n  { i with suffix = Some suffix }\n\nmodule type VIEW_RENDERER = sig\n  val render_module : View.DisambiguatedString.t -> string\n\n  val render_name :\n    namespace:View.ModPath.t -> View.RelPath.Chunk.t list -> string\n\n  val finalize : Concrete_ident_render_sig.rendered -> string\nend\n\nlet to_view (ident : t) : Concrete_ident_view.t =\n  let Concrete_ident_view.{ mod_path; rel_path } =\n    View.of_def_id ident.def_id\n  in\n  let mod_path =\n    Option.map ~f:Fresh_module.to_mod_path ident.moved\n    |> Option.value ~default:mod_path\n  in\n  { mod_path; rel_path }\n\n(** Stateful store that maps [def_id]s to implementation information (which\n    trait is implemented? for which type? under which constraints?) 
*)\nmodule ImplInfoStore = struct\n  include Explicit_def_id.ImplInfoStore\n\n  let lookup_raw (impl : t) : Types.impl_infos option = lookup_raw impl.def_id\nend\n\nmodule MakeToString (R : VIEW_RENDERER) = struct\n  open Concrete_ident_render_sig\n\n  (** For each module namespace, we store two different pieces of data:\n      - a set of rendered names in this namespace\n      - a memoization map from full identifiers to rendered names\n\n      If an identifier was already rendered, we just use this already rendered\n      name.\n\n      Otherwise, when we print a name under a fresh module, we take a look at\n      the set: if there is already an identifier in the fresh module with the\n      exact same rendered name, then we have a collision, and we need to\n      generate a fresh name.\n\n      To generate a fresh name, we use the set of rendered names. *)\n  let per_module :\n      (string list, string Hash_set.t * (t, string) Hashtbl.t) Hashtbl.t =\n    Hashtbl.create\n      (module struct\n        type t = string list [@@deriving hash, compare, sexp, eq]\n      end)\n\n  let render (i : t) : rendered =\n    let Concrete_ident_view.{ mod_path; rel_path } = to_view i in\n    let mod_path =\n      Concrete_ident_view.ModPath.rename_crate \"core\" \"core_models\" mod_path\n    in\n\n    let path = List.map ~f:R.render_module mod_path in\n    (* Retrieve the various maps. *)\n    let name_set, memo =\n      Hashtbl.find_or_add per_module\n        ~default:(fun _ ->\n          (Hash_set.create (module String), Hashtbl.create (module T)))\n        path\n    in\n    (* If we rendered [i] already in the past, just use that. 
*)\n    let name =\n      match Hashtbl.find memo i with\n      | Some name -> name\n      | None ->\n          let name = R.render_name ~namespace:mod_path rel_path in\n          let name =\n            match i.suffix with\n            | Some suffix -> (\n                name ^ \"_\"\n                ^\n                match suffix with\n                | `Pre -> \"pre\"\n                | `Post -> \"post\"\n                | `Cast -> \"cast_to_repr\")\n            | _ -> name\n          in\n          let moved_into_fresh_ns = Option.is_some i.moved in\n          let name =\n            if moved_into_fresh_ns then\n              let escape_sep =\n                let re = Re.Pcre.regexp \"__(e*)from__\" in\n                let f group = \"__e\" ^ Re.Group.get group 1 ^ \"from__\" in\n                Re.replace ~all:true re ~f\n              in\n              escape_sep name\n            else name\n          in\n          let is_assoc_or_field (rel_path : View.RelPath.t) : bool =\n            match List.last rel_path with\n            | Some (`AssociatedItem (_, (`Trait _ | `Impl (_, `Trait, _))))\n            | Some (`Field _) ->\n                true\n            | _ -> false\n          in\n          let name =\n            if\n              Hash_set.mem name_set name && moved_into_fresh_ns\n              && (not << is_assoc_or_field) rel_path\n              (* If this rel_path already exists in a fresh namespace,\n                 then we have a duplicate and we should disambiguate.\n                 Unless for associated items which correspond to trait\n                 methods which may be repeated (with their implementations),\n                 and for fields (which are repeated by accessors). 
*)\n            then\n              let path : View.ModPath.t = (View.of_def_id i.def_id).mod_path in\n              let path = List.map ~f:R.render_module path in\n              (* Generates the list of all prefixes of reversed `path` *)\n              List.folding_map ~init:[] (List.rev path) ~f:(fun acc chunk ->\n                  let acc = chunk :: acc in\n                  (acc, acc))\n              (* We want to try small prefixes first *)\n              |> List.map ~f:List.rev\n              (* We generate a fake path with module ancestors *)\n              |> List.map ~f:(fun path ->\n                     name ^ \"__from__\"\n                     ^ String.concat ~sep:\"__\"\n                         path (* This might shadow, we should escape *))\n                 (* Find the shortest name that doesn't exist already *)\n              |> List.find ~f:(Hash_set.mem name_set >> not)\n              |> Option.value ~default:(name ^ ([%hash: t] i |> Int.to_string))\n            else name\n          in\n          (* Update the maps and hashtables *)\n          let _ = Hash_set.add name_set name in\n          let _ = Hashtbl.add memo ~key:i ~data:name in\n          name\n    in\n    { path; name }\n\n  let show (i : t) : string =\n    let { path; name } = render i in\n    R.finalize { path; name }\nend\n\nmodule RenderSig = Concrete_ident_render_sig.Make (T)\ninclude RenderSig\n\nmodule type NAME_POLICY = Concrete_ident_render_sig.NAME_POLICY\n\nmodule MakeRenderAPI (NP : NAME_POLICY) : RENDER_API = struct\n  open Concrete_ident_render_sig\n\n  let is_reserved_word : string -> bool = Hash_set.mem NP.reserved_words\n\n  module R : VIEW_RENDERER = struct\n    let disambiguator_escape s =\n      match split_str ~on:\"_\" s |> List.rev with\n      | hd :: _ :: _ when Int.of_string_opt hd |> Option.is_some -> s ^ \"_\"\n      | _ -> s\n\n    let render_disambiguated View.DisambiguatedString.{ disambiguator; data } =\n      if Int64.equal Int64.zero disambiguator then 
disambiguator_escape data\n      else data ^ \"_\" ^ Int64.to_string disambiguator\n\n    let render_module = render_disambiguated\n\n    module NameAst = struct\n      module Separator = struct\n        let separator = \"__\"\n        let concat x y = x ^ separator ^ y\n\n        let escape =\n          let re = Re.Pcre.regexp \"_(e*)_\" in\n          let f group = \"_e\" ^ Re.Group.get group 1 ^ \"_\" in\n          Re.replace ~all:true re ~f\n      end\n\n      module Prefixes : sig\n        type t = private string [@@deriving eq, show]\n\n        val allowed : t list\n        (** List of allowed reserved prefixes. *)\n\n        val mk : string -> t\n        (** Creates a prefix, if it is valid. *)\n\n        val escape : string -> string\n        (** Escapes reserved prefixes in a string *)\n      end = struct\n        type t = string [@@deriving eq, show]\n\n        let allowed =\n          [\n            \"impl\";\n            \"anon_const\";\n            \"inline_const\";\n            \"foreign\";\n            \"use\";\n            \"opaque\";\n            \"closure\";\n            \"t\";\n            \"v\";\n            \"f\";\n            \"i\";\n            \"discriminant\";\n          ]\n          @ (List.filter_map ~f:Fn.id\n               [\n                 NP.struct_constructor_prefix;\n                 NP.enum_constructor_prefix;\n                 NP.union_constructor_prefix;\n               ]\n            |> List.dedup_and_sort ~compare:String.compare)\n\n        let mem = List.mem ~equal:[%eq: string] allowed\n\n        let mk s =\n          if mem s then s\n          else\n            failwith (\"broken invariant: [\" ^ s ^ \"] is not an allowed prefix\")\n\n        let escape_char = \"e\"\n\n        let () =\n          assert (\n            (* Make sure there is no prefix `Cs` such that `C ^ \"s\"` is a prefix as well. 
*)\n            List.for_all allowed ~f:(fun s -> not (mem (first_letter s ^ s))))\n\n        let () = assert (mem \"e\" |> not)\n\n        let rec escape (s : string) : string =\n          match String.lsplit2 ~on:'_' s with\n          | Some (\"\", rest) -> \"e_\" ^ escape rest\n          | Some (prefix, rest)\n            when List.mem ~equal:[%equal: string] allowed prefix ->\n              first_letter prefix ^ prefix ^ \"_\" ^ escape rest\n          | _ -> s\n      end\n\n      type policy = {\n        prefix : Prefixes.t;\n        disable_when : [ `SameCase ] list;\n        mode : [ `Global | `Local | `Both ];\n      }\n      [@@deriving eq, show]\n\n      type t =\n        | Concat of (t * t)  (** Concatenate two names *)\n        | Policy of (policy * t)\n        | TrustedString of string  (** A string that is already escaped *)\n        | UnsafeString of string  (** A string that needs escaping *)\n        | Empty\n      [@@deriving eq, show]\n\n      let rec global_policy ast : _ =\n        let filter =\n          Option.filter ~f:(fun p -> [%matches? 
`Global | `Both] p.mode)\n        in\n        let ( <|> ) v f = match v with Some v -> Some v | None -> f () in\n        match ast with\n        | Policy (policy, contents) ->\n            global_policy contents |> filter <|> fun _ ->\n            policy |> Option.some |> filter\n        | Concat (l, r) ->\n            global_policy r |> filter <|> fun _ -> global_policy l |> filter\n        | _ -> None\n\n      let escape_unsafe_string = Prefixes.escape >> Separator.escape\n\n      let apply_policy (leftmost : bool) (policy : policy) (escaped : string) =\n        let prefix = (policy.prefix :> string) in\n        let disable =\n          List.exists policy.disable_when ~f:(function `SameCase ->\n              let first_upper = first_letter >> is_uppercase in\n              Bool.equal (first_upper prefix) (first_upper escaped))\n        in\n        if (not disable) || (leftmost && is_reserved_word escaped) then\n          prefix ^ \"_\" ^ escaped\n        else escaped\n\n      let rec norm' = function\n        | Concat (Empty, x) | Concat (x, Empty) -> x\n        | Policy (_, Empty) -> Empty\n        | Policy (p, x) -> Policy (p, norm' x)\n        | Concat (x, y) -> Concat (norm' x, norm' y)\n        | x -> x\n\n      let rec norm x =\n        let x' = norm' x in\n        if [%eq: t] x x' then x else norm x'\n\n      let concat_list =\n        List.fold ~f:(fun l r -> Concat (l, r)) ~init:Empty >> norm\n\n      let rec render' leftmost ast =\n        match ast with\n        | Concat (a, b) ->\n            Separator.concat (render' leftmost a) (render' false b)\n        | Policy (policy, a) when [%matches? 
`Global] policy.mode ->\n            render' leftmost a\n        | Policy (policy, a) ->\n            render' leftmost a |> apply_policy leftmost policy\n        | TrustedString s -> s\n        | UnsafeString s -> escape_unsafe_string s\n        | Empty -> \"\"\n\n      let render ast =\n        let policy = global_policy ast in\n        let policy =\n          Option.map ~f:(apply_policy true) policy\n          |> Option.value ~default:Fn.id\n        in\n        let rendered = norm ast |> render' true |> policy in\n        if is_reserved_word rendered then rendered ^ \"_escape_reserved_word\"\n        else rendered\n    end\n\n    (** [pretty_impl_name ~namespace impl_infos] computes a pretty impl name\n        given impl information and a namespace. A pretty name can be computed\n        when:\n        - (1) the impl, (2) the type and (3) the trait implemented all live in\n          the same namespace\n        - the impl block has no generics\n        - the type implemented is simple enough to be represented as a string\n          (see module {!Thir_simple_types}) *)\n    let pretty_impl_name ~namespace (impl_infos : Types.impl_infos) =\n      let* ty = Thir_simple_types.to_string ~namespace impl_infos.typ in\n      let*? _no_generics = List.is_empty impl_infos.generics.params in\n      match impl_infos.trait_ref with\n      | None -> Some ty\n      | Some { value = { def_id = trait; generic_args = [ _self ]; _ }; _ } ->\n          let* trait = Explicit_def_id.of_def_id trait in\n          let trait = View.of_def_id trait in\n          let*? 
_same_ns = [%eq: View.ModPath.t] namespace trait.mod_path in\n          let* trait =\n            match trait.rel_path with\n            | [ `Trait (n, _) ] when Int64.equal Int64.zero n.disambiguator ->\n                Some n.data\n            | _ -> None\n          in\n          let trait =\n            let re = Re.Pcre.regexp \"_((?:e_)*)for_\" in\n            let f group = \"_e_\" ^ Re.Group.get group 1 ^ \"for_\" in\n            Re.replace ~all:true re ~f trait\n          in\n          Some (trait ^ \"_for_\" ^ ty)\n      | _ -> None\n\n    (** Produces a name for an impl block, only if it is necessary (e.g. the\n        disambiguator is non-null) *)\n    let impl_name ~namespace ?(always = false) disambiguator\n        (impl_infos : Types.impl_infos option) =\n      let pretty = impl_infos |> Option.bind ~f:(pretty_impl_name ~namespace) in\n      let*? _ = always || Int64.equal Int64.zero disambiguator |> not in\n      match pretty with\n      | Some pretty -> Some pretty\n      | None ->\n          if Int64.equal Int64.zero disambiguator then None\n          else Some (Int64.to_string disambiguator)\n\n    (** Renders one chunk *)\n    let render_chunk ~namespace ~final (chunk : View.RelPath.Chunk.t) :\n        NameAst.t =\n      let prefix ?(global = false) ?(disable_when = []) s contents =\n        NameAst.Policy\n          ( {\n              prefix = NameAst.Prefixes.mk s;\n              mode = (if global then `Both else `Local);\n              disable_when;\n            },\n            contents )\n      in\n      let prefix_d s d = prefix s (NameAst.UnsafeString (Int64.to_string d)) in\n      let dstr s = NameAst.UnsafeString (render_disambiguated s) in\n      let render_impl_name ?(always = false) disambiguator impl_infos =\n        match impl_name ~namespace ~always disambiguator impl_infos with\n        | Some name -> prefix \"impl\" (UnsafeString name)\n        | None -> TrustedString \"impl\"\n      in\n      match chunk with\n      | `AnonConst d 
->\n          prefix ~global:true ~disable_when:[ `SameCase ] \"anon_const\"\n            (NameAst.UnsafeString (Int64.to_string d))\n      | `InlineConst d ->\n          prefix ~global:true ~disable_when:[ `SameCase ] \"inline_const\"\n            (NameAst.UnsafeString (Int64.to_string d))\n      | `Use d -> prefix_d \"use\" d\n      | `Foreign d -> prefix_d \"foreign\" d\n      | `GlobalAsm d -> prefix_d \"global_asm\" d\n      | `Closure d -> prefix_d \"closure\" d\n      | `Opaque d -> prefix_d \"opaque\" d\n      (* The name of a trait impl *)\n      | `Impl (d, _, impl_infos) -> render_impl_name d impl_infos\n      (* Print the name of an associated item in a inherent impl *)\n      | `AssociatedItem\n          ((`Type n | `Const n | `Fn n), `Impl (d, `Inherent, impl_infos)) ->\n          let impl = render_impl_name ~always:true d impl_infos in\n          Concat (impl, dstr n)\n      (* Print the name of an item defined inside an associated item of a trait impl *)\n      (* `Impl of\n         'disambiguator\n         * [ `Inherent | `Trait ]\n         * Types.impl_infos option*)\n      | `AssociatedItem\n          ((`Type n | `Const n | `Fn n), `Impl (d, `Trait, impl_infos))\n        when not final ->\n          Concat\n            (prefix \"f\" (dstr n), render_impl_name ~always:true d impl_infos)\n      (* Print the name of an associated item in a trait impl *)\n      | `AssociatedItem\n          ((`Type n | `Const n | `Fn n), `Impl (d, `Trait, impl_infos)) ->\n          if NP.prefix_associated_item_with_trait_name then\n            Concat\n              (render_impl_name ~always:true d impl_infos, prefix \"f\" (dstr n))\n          else prefix \"f\" (dstr n)\n      | `AssociatedItem ((`Type n | `Const n | `Fn n), `Trait (trait_name, _))\n        ->\n          if NP.prefix_associated_item_with_trait_name then\n            Concat (dstr trait_name, prefix \"f\" (dstr n))\n          else prefix \"f\" (dstr n)\n      (* The constructor of a struct *)\n      | 
`Constructor (cons, parent) -> (\n          let cons = render_disambiguated cons in\n          let include_type, prefix_s, type_name =\n            match parent with\n            | `Struct n ->\n                ( NP.prefix_struct_constructors_with_type,\n                  NP.struct_constructor_prefix,\n                  n )\n            | `Enum n ->\n                ( NP.prefix_enum_constructors_with_type,\n                  NP.enum_constructor_prefix,\n                  n )\n            | `Union n ->\n                ( NP.prefix_union_constructors_with_type,\n                  NP.union_constructor_prefix,\n                  n )\n          in\n          let cons =\n            if include_type then render_disambiguated type_name ^ \"_\" ^ cons\n            else cons\n          in\n          match prefix_s with\n          | Some prefix_s ->\n              prefix ~global:true ~disable_when:[ `SameCase ] prefix_s\n                (UnsafeString cons)\n          | _ -> UnsafeString cons)\n      (* Anonymous fields *)\n      | `Field ({ data; disambiguator }, _)\n        when Option.is_some (Int.of_string_opt data)\n             && Int64.equal disambiguator Int64.zero ->\n          TrustedString (NP.anonymous_field_transform data)\n      (* Named fields *)\n      | `Field (n, `Constructor (cons, (`Struct typ | `Union typ | `Enum typ)))\n        ->\n          let n = render_disambiguated n in\n          let n =\n            match NP.named_field_prefix with\n            | Some `ConstructorName -> render_disambiguated cons ^ \"_\" ^ n\n            | Some `TypeName -> render_disambiguated typ ^ \"_\" ^ n\n            | _ -> n\n          in\n          prefix \"f\" (UnsafeString n)\n      (* Anything function-like *)\n      | `Macro n | `Static n | `Fn n | `Const n ->\n          prefix \"v\" ~disable_when:[ `SameCase ] (dstr n)\n      (* Anything type-like *)\n      | `ExternCrate n\n      | `Trait (n, _)\n      | `ForeignTy n\n      | `TraitAlias n\n      | `Mod n\n      | 
`Struct n\n      | `Union n\n      | `TyAlias n\n      | `Enum n ->\n          prefix \"t\" (dstr n)\n\n    let render_name ~namespace (rel_path : View.RelPath.t) =\n      let l = List.length rel_path in\n      let rel_path =\n        List.mapi\n          ~f:(fun i -> render_chunk ~final:(i = l - 1) ~namespace)\n          rel_path\n        |> NameAst.concat_list\n      in\n      NameAst.render rel_path\n\n    let finalize { path; name } =\n      let path = List.map ~f:(map_first_letter String.uppercase) path in\n      String.concat ~sep:\".\"\n        (path @ if String.is_empty name then [] else [ name ])\n  end\n\n  include MakeToString (R)\n\n  let pp fmt = T.show >> Stdlib.Format.pp_print_string fmt\n\n  let show id =\n    let { path; name } = render id in\n    (path @ if String.is_empty name then [] else [ name ])\n    |> String.concat ~sep:\"::\"\n\n  let local_ident (li : Local_ident.t) : string =\n    if Local_ident.is_final li then li.name\n    else\n      R.render_name ~namespace:[]\n        [\n          `Fn\n            View.DisambiguatedString.\n              { disambiguator = Int64.zero; data = li.name };\n        ]\nend\n\ntype name = Concrete_ident_generated.t\n[@@deriving show, yojson, compare, sexp, eq, hash]\n\nlet of_name ~value = Concrete_ident_generated.def_id_of >> of_def_id ~value\n\nlet eq_name name id =\n  let of_name = Concrete_ident_generated.def_id_of name in\n  let a = of_name.contents.value in\n  let b = Explicit_def_id.to_def_id id.def_id in\n  String.equal a.krate b.krate\n  && [%eq: Types.disambiguated_def_path_item list] a.path b.path\n\nmodule DefaultNamePolicy : NAME_POLICY = struct\n  let reserved_words = Hash_set.create (module String)\n  let anonymous_field_transform = Fn.id\n  let prefix__constructors_with_type = false\n  let prefix_struct_constructors_with_type = false\n  let prefix_enum_constructors_with_type = true\n  let prefix_union_constructors_with_type = false\n  let struct_constructor_prefix = Some \"C\"\n  let 
enum_constructor_prefix = Some \"C\"\n  let union_constructor_prefix = Some \"C\"\n  let named_field_prefix = None\n  let prefix_associated_item_with_trait_name = false\nend\n\nmodule DefaultViewAPI = MakeRenderAPI (DefaultNamePolicy)\n\nlet map_path_strings ~(f : string -> string) (did : t) : t =\n  let constructor = did.def_id |> Explicit_def_id.is_constructor in\n  let did : Types.def_id_contents = did.def_id |> Explicit_def_id.to_def_id in\n  let path =\n    did.path\n    |> List.map ~f:(fun (chunk : Types.disambiguated_def_path_item) ->\n           let data =\n             match chunk.data with\n             | TypeNs s -> Types.TypeNs (f s)\n             | ValueNs s -> ValueNs (f s)\n             | MacroNs s -> MacroNs (f s)\n             | LifetimeNs s -> LifetimeNs (f s)\n             | data -> data\n           in\n           { chunk with data })\n  in\n  let did = { did with path } in\n  let def_id =\n    Explicit_def_id.of_def_id_exn ~constructor\n      { contents = { value = did; id = Base.Int64.zero } }\n  in\n  { def_id; moved = None; suffix = None }\n\nlet is_constructor (did : t) : bool = Explicit_def_id.is_constructor did.def_id\n\nlet is_anon_assoc_ty (did : t) : bool =\n  Explicit_def_id.is_anon_assoc_ty did.def_id\n\nlet matches_namespace (ns : Types.namespace) (did : t) : bool =\n  let did = Explicit_def_id.to_def_id did.def_id in\n  let path : string option list =\n    [ Some did.krate ]\n    @ List.map\n        ~f:(fun (chunk : Types.disambiguated_def_path_item) ->\n          match chunk.data with\n          | TypeNs s | ValueNs s | MacroNs s | LifetimeNs s -> Some s\n          | _ -> None)\n        did.path\n  in\n  let rec aux (pattern : Types.namespace_chunk list) (path : string option list)\n      =\n    match (pattern, path) with\n    | [], [] -> true\n    | Exact x :: pattern, Some y :: path ->\n        [%equal: string] x y && aux pattern path\n    | Glob One :: pattern, _ :: path -> aux pattern path\n    | Glob Many :: pattern, [] -> aux 
pattern []\n    | Glob Many :: pattern', _ :: path' ->\n        aux pattern' path || aux pattern path'\n    | _ -> false\n  in\n  aux ns.chunks path\n\nlet to_rust_ast ({ def_id; moved; suffix } : t) : Rust_engine_types.concrete_id\n    =\n  let moved = Option.map ~f:Fresh_module.to_rust_ast moved in\n  let suffix =\n    Option.map\n      ~f:(fun s ->\n        match s with\n        | `Cast -> Rust_engine_types.Cast\n        | `Pre -> Rust_engine_types.Pre\n        | `Post -> Rust_engine_types.Post)\n      suffix\n  in\n  { def_id = Explicit_def_id.to_rust_ast def_id; moved; suffix }\n\nlet from_rust_ast ({ def_id; moved; suffix } : Rust_engine_types.concrete_id) :\n    t =\n  let moved = Option.map ~f:Fresh_module.from_rust_ast moved in\n  let suffix =\n    Option.map\n      ~f:(fun s -> match s with Cast -> `Cast | Pre -> `Pre | Post -> `Post)\n      suffix\n  in\n  { def_id = Explicit_def_id.from_rust_ast def_id; moved; suffix }\n"
  },
  {
    "path": "engine/lib/concrete_ident/concrete_ident.mli",
    "content": "(** This module provides the global concrete identifiers. *)\n\nmodule Fresh_module : sig\n  type t [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n  (** A type representing a fresh module. Below, we define two functions:\n      - [fresh] creates a new fresh module\n      - [move_to_fresh_module] creates a new and always fresh identifier by\n        \"moving\" an existing identifier under the given fresh module *)\nend\n\nmodule View : module type of Concrete_ident_view\n\nmodule T : sig\n  type t [@@deriving show, yojson, compare, sexp, eq, hash]\n  (** A concrete identifier. *)\nend\n\ninclude module type of T with type t = T.t\n\ntype reserved_suffix = [ `Cast | `Pre | `Post ]\n[@@deriving show, yojson, hash, compare, sexp, hash, eq]\n(** A concrete identifier can have a reserved suffix: this is useful to derive\n    new identifiers from existing identifiers. *)\n\nval of_def_id :\n  ?suffix:reserved_suffix option -> value:bool -> Types.def_id -> t\n(** [of_def_id ?suffix ~value def_id] a concrete identifier out of a Rust\n    identifier [def_id]. [value] is a flag that decides whether [def_id] refers\n    to a value or not.\n\n    [value] is important only for constructors: i.e. the identifier for the type\n    of a struct should be created with [value] set to false while the identifier\n    for the constructor of a struct should be create with [value] set to true.\n    For more information, please read the documentation of module\n    {!Explicit_def_id}. *)\n\ntype name = Concrete_ident_generated.t\n[@@deriving show, yojson, compare, sexp, eq, hash]\n(** A enumeration of static concrete identifiers useful inside the engine. *)\n\nval of_name : value:bool -> name -> t\n(** Creates an identifier given a name. [value] has the same meaning as in\n    function {!of_def_id}. *)\n\nval eq_name : name -> t -> bool\n(** [eq_name name identifier] is true whenever [identifier] is [name]. 
*)\n\nval to_debug_string : t -> string\n(** Format an identifier as a (ppx) debug string. The default debug pretty\n    prints the identifier. *)\n\nval fresh_module : label:string -> t list -> Fresh_module.t\n(** [fresh_module ~label hints] creates a fresh module given a non-empty list of\n    existing identifiers and a label. The generated module name will be unique,\n    will be close to the identifiers found in [hints], and will include the\n    label. *)\n\nval move_to_fresh_module : Fresh_module.t -> t -> t\n(** Creates a fresh identifier under a given fresh module and given an existing\n    identifier. *)\n\nval with_suffix : reserved_suffix -> t -> t\n(** Creates an identifier out of an existing one, adding a suffix. *)\n\nval to_view : t -> Concrete_ident_view.t\n(** Compute a view for a given identifier. *)\n\nval map_path_strings : f:(string -> string) -> t -> t\n[@@alert unsafe \"This function should be only used in Import_thir!\"]\n(** This function maps any string found in the inner representation of hax. This\n    is a hack for Import_thir so that we can generically produce identifiers for\n    any integer type, please do not use it elsewhere. *)\n\nval is_constructor : t -> bool\n(** Returns true if the ident represents a constructor. *)\n\nval is_anon_assoc_ty : t -> bool\n(** Returns true if the ident represents an anonymous associated type. 
*)\n\ntype comparator_witness\n\nval comparator : (t, comparator_witness) Base.Comparator.comparator\n\nmodule RenderSig : module type of Concrete_ident_render_sig.Make (T)\n\nmodule type RENDER_API = RenderSig.RENDER_API\nmodule type NAME_POLICY = Concrete_ident_render_sig.NAME_POLICY\n\nmodule DefaultNamePolicy : NAME_POLICY\nmodule MakeRenderAPI (NP : NAME_POLICY) : RenderSig.RENDER_API\nmodule DefaultViewAPI : RenderSig.RENDER_API\n\nmodule ImplInfoStore : sig\n  val init : (Types.def_id * Types.impl_infos) list -> unit\n\n  val lookup_raw : t -> Types.impl_infos option\n  (** Lookup the (raw[1]) implementation information given a concrete ident.\n      Returns `Some _` if and only if the supplied identifier points to an\n      `Impl`.\n\n      [1]: those are raw THIR types.\n\n      {b WARNING}: due to\n      {{:https://github.com/hacspec/hax/issues/363} issue 363}, when looking up\n      certain identifiers generated by the engine, this function may return\n      [None] even though the supplied identifier points to an [Impl] block. *)\nend\n\nval matches_namespace : Types.namespace -> t -> bool\nval to_rust_ast : t -> Rust_engine_types.concrete_id\nval from_rust_ast : Rust_engine_types.concrete_id -> t\n"
  },
  {
    "path": "engine/lib/concrete_ident/concrete_ident_render_sig.ml",
    "content": "open! Prelude\n\ntype rendered = { path : string list; name : string }\n\nmodule type NAME_POLICY = sig\n  val reserved_words : string Hash_set.t\n  (** List of all words that have a special meaning in the target language, and\n      that should thus be escaped. *)\n\n  val anonymous_field_transform : string -> string\n  (** Transformation applied to anonymous tuple fields (i.e. [x.1]) *)\n\n  val named_field_prefix : [ `ConstructorName | `TypeName ] option\n  (** Should fields be prefixed? *)\n\n  val prefix_struct_constructors_with_type : bool\n  val prefix_enum_constructors_with_type : bool\n  val prefix_union_constructors_with_type : bool\n  val struct_constructor_prefix : string option\n  val enum_constructor_prefix : string option\n  val union_constructor_prefix : string option\n  val prefix_associated_item_with_trait_name : bool\nend\n\nmodule Make (T : sig\n  type t\nend) =\nstruct\n  open T\n\n  module type RENDER_API = sig\n    val show : t -> string\n    val pp : Formatter.t -> t -> unit\n    val render : t -> rendered\n    val local_ident : Local_ident.t -> string\n  end\nend\n"
  },
  {
    "path": "engine/lib/concrete_ident/concrete_ident_types.ml",
    "content": "open Prelude\n\n(** An [ExplicitDefId.t] is a Rust [Types.def_id] tagged with some diambiguation metadata.\n    \n    Rust raw [Types.def_id] can be ambiguous: consider the following Rust code:\n    ```rust\n    struct S;\n    fn f() -> S { S }\n    ```\n    Here, the return type of `f` (that is, `S`) and the constructor `S` in the body of `f` refers to the exact same identifier `mycrate::S`.\n    Yet, they denotes two very different objects: a type versus a constructor.\n\n    [ExplicitDefId.t] clears up this ambiguity, making constructors and types two separate things.\n\n    Also, an [ExplicitDefId.t] always points to an item: an [ExplicitDefId.t] is never pointing to a crate alone.\n*)\nmodule type ExplicitDefId = sig\n  type t [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n  (** Representation of explicit definition identifiers. *)\n\n  val of_def_id : ?constructor:bool -> Types.def_id -> t option\n  (** Smart constructor for [t]. Creates an explicit def id out of a raw Rust\n      definition identifier [Types.def_id].\n\n      When [of_def_id] is called with [id] a [Types.def_id], if the [kind] of\n      [id] is either [Struct] or [Union], then [constructor] is mandatory.\n      Otherwise, the argument [constructor] should be [true] only if [id] is a\n      variant.\n\n      This function returns [Some] only when those condition are met. *)\n\n  val make_exn : ?constructor:bool -> Types.def_id -> t\n  (** Exception-throwing variant of [make]. This should be used when we know\n      statically that the conditions described in the documentation of [make]\n      are met.\n\n      For instance, with static [Types.def_id]s or in [Import_thir]. 
*)\n\n  val is_constructor : t -> bool\n  (** Checks wether a definition identifier [id] points to a constructor.\n\n      [is_constructor id] returns [true] when:\n      - the kind of [id] is [Struct] or [Union] and the identifier was tagged as\n        a constructor;\n      - the kind of [id] is [Variant]. Otherwise, [is_constructor id] returns\n        [false]. *)\n\n  val parent : t -> t option\n  (** Looks up the parent of a definition identifier. Note that the parent of\n      the identifier of a field is always a constructor.\n\n      Also, a top-level item (e.g. `my_crate::some_item`) has no parent: recall\n      that [t] represent only items, not crates. *)\n\n  val parents : t -> t list\n  (** Ordered list of parents for an identifier [id], starting with [id], up to\n      the top-most parent identifier. *)\n\n  val to_def_id : t -> Types.def_id_contents\n  (** Destructor for [t]. *)\n\n  module State : sig\n    val list_all : unit -> t list\n    (** List all identifiers the engine dealt with so far. Beware, this function\n        is stateful. *)\n  end\nend\n\nmodule ViewTypes = struct\n  type disambiguator = Int64.t\n  [@@deriving show, hash, compare, sexp, hash, eq, map]\n\n  module DisambiguatedString = struct\n    type t = { disambiguator : disambiguator; data : string }\n    [@@deriving show, hash, compare, sexp, hash, eq, map]\n  end\nend\n"
  },
  {
    "path": "engine/lib/concrete_ident/concrete_ident_view.ml",
    "content": "open! Prelude\ninclude Concrete_ident_view_types\n\n(** Rust paths come with invariants (e.g. a function is always a `ValueNs _`),\n    this function raises an error if a path doesn't respect those. *)\nlet broken_invariant (type t) msg (did : Explicit_def_id.t) : t =\n  let msg =\n    \"Explicit_def_id: an invariant has been broken. Expected \" ^ msg\n    ^ \".\\n\\ndid=\"\n    ^ [%show: Explicit_def_id.t] did\n  in\n  Stdio.prerr_endline msg;\n  failwith msg\n\n(** Helper module to assert various properties about a DefId. *)\nmodule Assert = struct\n  let parent did =\n    Explicit_def_id.parent did\n    |> Option.value_or_thunk ~default:(fun _ ->\n           broken_invariant \"the Explicit_def_id to have a parent\" did)\n\n  let type_ns (did : Explicit_def_id.t) =\n    match List.last (Explicit_def_id.to_def_id did).path with\n    | Some { data = TypeNs data; disambiguator } ->\n        DisambiguatedString.{ data; disambiguator }\n    | _ -> broken_invariant \"last path chunk to exist and be of type TypeNs\" did\n\n  let macro_ns (did : Explicit_def_id.t) =\n    match List.last (Explicit_def_id.to_def_id did).path with\n    | Some { data = MacroNs data; disambiguator } ->\n        DisambiguatedString.{ data; disambiguator }\n    | _ ->\n        broken_invariant \"last path chunk to exist and be of type MacroNs\" did\n\n  let value_ns (did : Explicit_def_id.t) =\n    match List.last (Explicit_def_id.to_def_id did).path with\n    | Some { data = ValueNs data; disambiguator } ->\n        DisambiguatedString.{ data; disambiguator }\n    | _ ->\n        broken_invariant \"last path chunk to exist and be of type ValueNs\" did\nend\n\nlet rec poly :\n    'n 'd.\n    into_n:(Explicit_def_id.t -> DisambiguatedString.t -> 'n) ->\n    into_d:(Explicit_def_id.t -> Int64.t -> 'd) ->\n    Explicit_def_id.t ->\n    ('n, 'd) RelPath.Chunk.poly =\n fun ~into_n ~into_d did ->\n  let poly = poly ~into_n ~into_d in\n  let mk_associated_item kind : ('n, 'd) 
RelPath.Chunk.poly =\n    `AssociatedItem\n      ( kind,\n        match Assert.parent did |> poly with\n        | (`Impl _ | `Trait _) as p -> p\n        | _ -> broken_invariant \"Impl or Trait\" (Assert.parent did) )\n  in\n  let assert_type_ns did = Assert.type_ns did |> into_n did in\n  let assert_value_ns did = Assert.value_ns did |> into_n did in\n  let assert_macro_ns did = Assert.macro_ns did |> into_n did in\n  let result =\n    match (Explicit_def_id.to_def_id did).kind with\n    | (Ctor (Struct, _) | Struct) when Explicit_def_id.is_constructor did ->\n        let name = assert_type_ns did in\n        `Constructor (name, `Struct name)\n    | Variant | Ctor _ ->\n        let parent = Assert.parent did in\n        let name = assert_type_ns did in\n        `Constructor\n          ( name,\n            match poly parent with\n            | (`Enum _ | `Struct _ | `Union _) as p -> p\n            | _ -> broken_invariant \"Enum, Struct or Union\" parent )\n    | Fn -> `Fn (assert_value_ns did)\n    | Const -> `Const (assert_value_ns did)\n    | AssocFn -> `Fn (assert_value_ns did) |> mk_associated_item\n    | AssocConst -> `Const (assert_value_ns did) |> mk_associated_item\n    | AssocTy -> `Type (assert_type_ns did) |> mk_associated_item\n    | TyAlias -> `TyAlias (assert_type_ns did)\n    | Field ->\n        let constructor =\n          let parent = Assert.parent did in\n          match parent |> poly with\n          | `Constructor _ as p -> p\n          | _ -> broken_invariant \"Constructor\" parent\n        in\n        `Field (assert_value_ns did, constructor)\n    | Trait -> `Trait (assert_type_ns did, None)\n    | TraitAlias -> `Trait (assert_type_ns did, Some `Alias)\n    | Macro _ -> `Macro (assert_macro_ns did)\n    | Union -> `Union (assert_type_ns did)\n    | Enum -> `Enum (assert_type_ns did)\n    | Struct -> `Struct (assert_type_ns did)\n    | AnonConst ->\n        `AnonConst\n          (match List.last_exn (Explicit_def_id.to_def_id did).path with\n  
        | { data = AnonConst; disambiguator } -> into_d did disambiguator\n          | _ -> broken_invariant \"last path chunk to be AnonConst\" did)\n    | Closure ->\n        `AnonConst\n          (match List.last_exn (Explicit_def_id.to_def_id did).path with\n          | { data = Closure; disambiguator } -> into_d did disambiguator\n          | _ -> broken_invariant \"last path chunk to be Closure\" did)\n    | Impl { of_trait } ->\n        `Impl\n          (match List.last_exn (Explicit_def_id.to_def_id did).path with\n          | { data = Impl; disambiguator } ->\n              ( into_d did disambiguator,\n                (if of_trait then `Trait else `Inherent),\n                Explicit_def_id.ImplInfoStore.lookup_raw did )\n          | _ -> broken_invariant \"last path chunk to be Impl\" did)\n    | OpaqueTy ->\n        `Opaque\n          (match List.last_exn (Explicit_def_id.to_def_id did).path with\n          | { data = OpaqueTy; disambiguator } -> into_d did disambiguator\n          | _ -> broken_invariant \"last path chunk to be Opaque\" did)\n    | Use ->\n        `Use\n          (match List.last_exn (Explicit_def_id.to_def_id did).path with\n          | { data = Use; disambiguator } -> into_d did disambiguator\n          | _ -> broken_invariant \"last path chunk to be Use\" did)\n    | ForeignMod ->\n        `Foreign\n          (match List.last_exn (Explicit_def_id.to_def_id did).path with\n          | { data = ForeignMod; disambiguator } -> into_d did disambiguator\n          | _ -> broken_invariant \"last path chunk to be ForeignMod\" did)\n    | ForeignTy -> `ForeignTy (assert_type_ns did)\n    | ExternCrate -> `ExternCrate (assert_type_ns did)\n    | Static _ -> `Static (assert_value_ns did)\n    | Mod -> `Mod (assert_type_ns did)\n    | GlobalAsm ->\n        `GlobalAsm\n          (match List.last_exn (Explicit_def_id.to_def_id did).path with\n          | { data = GlobalAsm; disambiguator } -> into_d did disambiguator\n          | _ -> 
broken_invariant \"last path chunk to be GlobalAsm\" did)\n    | InlineConst ->\n        `InlineConst\n          (match List.last_exn (Explicit_def_id.to_def_id did).path with\n          | { data = AnonConst; disambiguator } -> into_d did disambiguator\n          | _ -> broken_invariant \"last path chunk to be AnonConst\" did)\n    | TyParam | ConstParam | PromotedConst | LifetimeParam\n    | SyntheticCoroutineBody ->\n        (* It should be impossible for such items to ever be referenced by anyting in hax. *)\n        broken_invariant\n          \"non (TyParam | ConstParam | InlineConst | PromotedConst | \\\n           LifetimeParam | SyntheticCoroutineBody) identifier\"\n          did\n  in\n  result\n\nlet view_name : Explicit_def_id.t -> RelPath.Chunk.t =\n  poly ~into_n:(fun _ n -> n) ~into_d:(fun _ d -> d)\n\nlet view_name_did : Explicit_def_id.t -> _ RelPath.Chunk.poly =\n  let mk x y = (x, y) in\n  poly ~into_n:mk ~into_d:mk\n\nlet of_def_id (did : Explicit_def_id.t) : t =\n  (* We distinguish between:\n     - a chain of identifiers that have a relation with each other (e.g. if `k::E::C` is a constructor and `k::E` a enum)\n     - a chain of identifiers that have no relation (e.g. `k::f` and `k::f::g` are both functions).\n  *)\n  (* This distinguishing is implemented by `poly` (or `view_name_did` and `view_name`) *)\n  (* From `poly`, we can inspect the root of the chain of identifiers, e.g. `k::E` is the root of `k::E::C`. *)\n  let ns_chunks, rel_path =\n    let rec find name_chunks (did : Explicit_def_id.t) =\n      let is_mod did =\n        [%matches? (Types.Mod : Types.def_kind)]\n          (Explicit_def_id.to_def_id did).kind\n      in\n      (let*? _did_is_a_mod = is_mod did in\n       let parents = Explicit_def_id.parents did in\n       let*? 
_parents_all_mods = List.for_all ~f:is_mod parents in\n       Some (List.rev parents, name_chunks))\n      |> Option.value_or_thunk ~default:(fun _ ->\n             let view = view_name_did did in\n             let did =\n               view |> RelPath.Chunk.map_poly fst fst |> RelPath.Chunk.root\n             in\n             let name_chunks =\n               RelPath.Chunk.map_poly snd snd view :: name_chunks\n             in\n             match Explicit_def_id.parent did with\n             | None -> ([], name_chunks)\n             | Some did -> find name_chunks did)\n    in\n    find [] did\n  in\n  let mod_path : DisambiguatedString.t list =\n    { data = (Explicit_def_id.to_def_id did).krate; disambiguator = Int64.zero }\n    :: List.map\n         ~f:(fun (m : Explicit_def_id.t) ->\n           match (Explicit_def_id.to_def_id m).path |> List.last_exn with\n           | Types.{ disambiguator; data = TypeNs data } ->\n               DisambiguatedString.{ data; disambiguator }\n           | _ ->\n               broken_invariant\n                 \"A `Mod` identifier must a `TypeNs` as its last path\" m)\n         ns_chunks\n  in\n  (* This is a hack: we remove a prefix that we add in\n     https://github.com/cryspen/hax/blob/02d67770f2626e4bb27fc2a1ba9cfe612819d4a8/hax-lib/macros/src/implementation.rs#L897 *)\n  let mod_path =\n    List.filter mod_path ~f:(fun ds ->\n        String.is_prefix ds.data ~prefix:\"hax__autogenerated_refinement_\" |> not)\n  in\n  { rel_path; mod_path }\n"
  },
  {
    "path": "engine/lib/concrete_ident/concrete_ident_view.mli",
    "content": "include module type of Concrete_ident_view_types\n\nval of_def_id : Explicit_def_id.t -> t\n(** Computes a view for an explicit definition identifier. *)\n"
  },
  {
    "path": "engine/lib/concrete_ident/concrete_ident_view_types.ml",
    "content": "open! Prelude\n\n(** This module defines the view over concrete identifiers.\n\n    Hax manipulates concrete identifiers (that is global identifiers referring\n    to concrete Rust items -- not built-in operators) as raw Rust identifiers\n    augmented with some metadata.\n\n    Rust represents identifiers as a crate and a path. Each chunk of the path is\n    roughly a level of nesting in Rust. The path lacks information about definition\n    kinds.\n\n    There are two kinds of nesting for items.\n    - Comfort: e.g. the user decides to embed a struct within a function to work\n      with it locally.\n    - Relational: e.g. an associated method has to be under a trait, or a field\n      has to be under a constructor.\n\n    This module provides a view to those paths: a path in the view is a list of\n    smaller relational paths. For instance, consider the following piece of\n    code:\n\n    {@rust[\n      mod a {\n          impl MyTrait for MyType {\n              fn assoc_fn() {\n                  struct LocalStruct {\n                      field: u8,\n                  };\n              }\n          }\n      }\n    ]}\n\n    Here, the Rust raw definition identifier of [LocalStruct] is roughly\n    [my_crate::a::<Impl 0>::assoc_fn::LocalStruct::field].\n\n    The view for [LocalStruct] looks like:\n    [{ { path: [\"mycrate\"; \"a\"], name_path: [ `AssociatedItem (\"assoc_fn\", `Impl\n     0); `Field (\"field\", `Constructor (\"LocalStruct\", `Struct \"LocalStruct\")) ]\n     } }] *)\n\ntype disambiguator = Int64.t\n[@@deriving show, hash, compare, sexp, hash, eq, map]\n(** An [Int64.t] disambiguator: this is given by Rust. *)\n\n(** A string with a disambiguator. 
*)\nmodule DisambiguatedString = struct\n  module T = struct\n    type t = { disambiguator : disambiguator; data : string }\n    [@@deriving show, hash, compare, sexp, hash, eq, map]\n  end\n\n  include T\n  include Base.Comparator.Make (T)\n\n  let pure data = { disambiguator = Int64.zero; data }\nend\n\n(** A \"module and crate\"-only path. This is the longest `mod` suffix of a\n    definition identifier path. This is a list of disambiguated strings. *)\nmodule ModPath = struct\n  module T = struct\n    open struct\n      module T = struct\n        type t = DisambiguatedString.t list\n        [@@deriving show, hash, compare, sexp, hash, eq]\n      end\n    end\n\n    include T\n    include Base.Comparator.Make (T)\n  end\n\n  include T\n  module Map = Map.M (T)\n\n  let rename_crate (original_name : string) (new_name : string) (mod_path : t) :\n      t =\n    match mod_path with\n    | krate :: path when String.equal krate.data original_name ->\n        { krate with data = new_name } :: path\n    | _ -> mod_path\nend\n\n(** A relational path is a path composed of relational chunks. *)\nmodule RelPath = struct\n  (** A relational chunk is a short path describing \"mandatory\" nestings between\n      items: e.g. a field below a struct, an enum below an enum variants, etc.\n\n      The types defined by this module are indexed by two other types: ['name]\n      and ['disambiguator]. This helps for instrumenting the view to perform\n      additional operations: see [collect_either], [collect] and [root]. *)\n  module Chunk = struct\n    type 'name type_definition =\n      [ `Enum of 'name | `Struct of 'name | `Union of 'name ]\n    (** A type can be an enum, a struct or a union. A type is standalone: it has\n        no mandatory parent item. *)\n\n    and 'name constructor = [ `Constructor of 'name * 'name type_definition ]\n    (** A constructor always has a parent type definition. 
*)\n\n    and 'name maybe_associated = [ `Fn of 'name | `Const of 'name ]\n    [@@deriving show, hash, compare, sexp, hash, eq, map]\n    (** Helper type for function and constants: those exist both as associated\n        in an impl block or a trait, and as standalone. *)\n\n    type 'name associated = [ 'name maybe_associated | `Type of 'name ]\n    (** An associated item. This is pulled out of [`AssociatedItem] below:\n        otherwise, some PPX is broken... *)\n\n    and ('name, 'disambiguator) assoc_parent =\n      [ `Impl of\n        'disambiguator * [ `Inherent | `Trait ] * Types.impl_infos option\n      | `Trait of 'name * [ `Alias ] option ]\n    [@@deriving show, hash, compare, sexp, hash, eq, map]\n    (** The parent of an associated item can be an impl or a trait. *)\n\n    type ('name, 'disambiguator) poly =\n      [ 'name type_definition\n      | 'name constructor\n      | 'name maybe_associated\n      | ('name, 'disambiguator) assoc_parent\n      | `Use of 'disambiguator\n      | `AnonConst of 'disambiguator\n      | `InlineConst of 'disambiguator\n        (** This is e.g.: {[\n            const {\n                fn f() {}\n            }\n          ]} \n          Here, `f` is under an `InlineConst`.\n          *)\n      | `TraitAlias of 'name\n      | `Foreign of 'disambiguator\n      | `ForeignTy of 'name\n      | `TyAlias of 'name\n      | `ExternCrate of 'name\n      | `Opaque of 'disambiguator\n        (** This is e.g.: {[\n          fn f() -> impl Clone {}\n          fn g() {\n            f();\n          }\n        ]} \n        Here, the type of `f()` is `<f::OpaqueTy>`.\n        *)\n      | `Static of 'name\n      | `Macro of 'name\n      | `AssociatedItem of\n        'name associated * ('name, 'disambiguator) assoc_parent\n      | `Mod of 'name\n      | `GlobalAsm of 'disambiguator\n      | `Field of 'name * 'name constructor\n      | `Closure of 'disambiguator\n        (** We usually never refer to closure: in THIR, we inline closures.\n     
       However, items can be placed under closures, thus it is present\n            here. See #1450 for more details. *) ]\n    [@@deriving show, hash, compare, sexp, hash, eq, map]\n    (** [poly] is the (polymorphic) type for a relational chunk: it defines what\n        is a chunk. *)\n\n    type t = (DisambiguatedString.t, disambiguator) poly\n    [@@deriving show, hash, compare, sexp, hash, eq]\n    (** [t] is the natural instantiation of [poly]. *)\n\n    (** Transforms a [t] into a [poly] with annotated strings instead of just\n        disambiguators. This adds names to the disambiguator-only constructs\n        defined in [poly]. *)\n    let add_strings ?(impl = \"impl\") ?(anon_const = \"anon_const\")\n        ?(foreign = \"foregin\") ?(global_asm = \"global_asm\") (n : t) :\n        (DisambiguatedString.t, DisambiguatedString.t) poly =\n      let f disambiguator =\n        DisambiguatedString.{ disambiguator; data = impl }\n      in\n      match map_poly Fn.id f n with\n      | `AnonConst o -> `AnonConst { o with data = anon_const }\n      | `Foreign o -> `Foreign { o with data = foreign }\n      | `GlobalAsm o -> `GlobalAsm { o with data = global_asm }\n      | n -> n\n\n    (** Erases names from a [t]. *)\n    let only_disambiguators : t -> (disambiguator, disambiguator) poly =\n      map_poly DisambiguatedString.(fun ds -> ds.disambiguator) Fn.id\n\n    (** Collects all the data of a [t], from the child to the parent. *)\n    let rec collect_either :\n        'n 'd. 
('n, 'd) poly -> [ `N of 'n | `D of 'd ] list = function\n      | `Opaque n\n      | `GlobalAsm n\n      | `AnonConst n\n      | `InlineConst n\n      | `Impl (n, _, _)\n      | `Use n\n      | `Closure n\n      | `Foreign n ->\n          [ `D n ]\n      | `Static n\n      | `Macro n\n      | `Enum n\n      | `Struct n\n      | `Union n\n      | `TyAlias n\n      | `TraitAlias n\n      | `Fn n\n      | `Const n\n      | `Trait (n, _)\n      | `ExternCrate n\n      | `Mod n\n      | `ForeignTy n ->\n          [ `N n ]\n      | `AssociatedItem ((`Fn a | `Const a | `Type a), b) ->\n          `N a :: collect_either (b :> _ poly)\n      | `Constructor (a, b) -> `N a :: collect_either (b :> _ poly)\n      | `Field (a, b) -> `N a :: collect_either (b :> _ poly)\n\n    (** Same as [collect_either], but works on a [poly] whose ['name] and\n        ['disambiguator] happen to be the same type. *)\n    let collect : 'a. ('a, 'a) poly -> 'a list =\n     fun n -> collect_either n |> List.map ~f:(function `D v | `N v -> v)\n\n    (** Find the root of a [poly]. *)\n    let root : 'a. ('a, 'a) poly -> 'a = fun x -> collect x |> List.last_exn\n  end\n\n  type t = Chunk.t list [@@deriving show, hash, compare, sexp, hash, eq]\nend\n\ntype t = { mod_path : ModPath.t; rel_path : RelPath.t }\n[@@deriving show, hash, compare, sexp, hash, eq]\n(** Invariant: [name_path] is non-empty *)\n"
  },
  {
    "path": "engine/lib/concrete_ident/explicit_def_id.ml",
    "content": "open! Prelude\n\nmodule T = struct\n  type t = { is_constructor : bool; def_id : Types.def_id_contents }\n  [@@deriving show, yojson, sexp]\n\n  type repr = bool * string * Types.disambiguated_def_path_item list\n  [@@deriving hash, compare, eq]\n\n  let to_repr { is_constructor; def_id } =\n    (is_constructor, def_id.krate, def_id.path)\n\n  let hash = to_repr >> hash_repr\n  let hash_fold_t s = to_repr >> hash_fold_repr s\n  let equal x y = equal_repr (to_repr x) (to_repr y)\n  let compare x y = compare_repr (to_repr x) (to_repr y)\nend\n\ninclude T\n\n(** Helpers for dealing with Rust raw [Types.def_id]s *)\nmodule H = struct\n  let contents (did : Types.def_id) = did.contents.value\n\n  (** Helper to get the parent of a [Types.def_id_contents] *)\n  let parent (did : Types.def_id_contents) : Types.def_id_contents option =\n    Option.map ~f:contents did.parent\nend\n\n(** A pure, def_id_contents version of [of_def_id]. This is not exposed\n    publicly. *)\nlet pure_of_def_id ?constructor (def_id : Types.def_id_contents) : t option =\n  let* _not_crate_root = def_id.path |> List.last in\n  let path_without_ctor =\n    (* Get rid of extra [Ctor] *)\n    let* init, last = last_init def_id.path in\n    let*? _ = [%matches? (Types.Ctor : Types.def_path_item)] last.data in\n    Some init\n  in\n  let parent = def_id.parent in\n  let parent =\n    if Option.is_some path_without_ctor then\n      let* parent = parent in\n      (H.contents parent).parent\n    else parent\n  in\n  let path = Option.value path_without_ctor ~default:def_id.path in\n  let def_id = { def_id with parent; path } in\n  let constructor =\n    if Option.is_some path_without_ctor then Some true else constructor\n  in\n  let*? _constructor_provided_if_union_or_struct =\n    not\n      (Option.is_none constructor\n      && [%matches? (Union | Struct : Types.def_kind)] def_id.kind)\n  in\n  let is_constructor =\n    [%matches? 
(Variant : Types.def_kind)] def_id.kind\n    || [%matches? Some true] constructor\n  in\n  Some { is_constructor; def_id }\n\nmodule State = struct\n  let state = Hash_set.create (module T)\n\n  let of_def_id' ?constructor def_id_contents =\n    let* did = pure_of_def_id ?constructor def_id_contents in\n    Hash_set.add state did;\n    Some did\n\n  let of_def_id ?constructor def_id =\n    of_def_id' ?constructor (H.contents def_id)\n\n  let list_all () = Hash_set.to_list state\nend\n\nlet of_def_id = State.of_def_id\n\nlet of_def_id_exn ?constructor def_id =\n  of_def_id ?constructor def_id |> Option.value_exn\n\nlet parent (did : t) : t option =\n  let* parent = H.parent did.def_id in\n  let*? _not_crate_root = List.is_empty parent.path |> not in\n  let constructor = [%matches? (Field : Types.def_kind)] did.def_id.kind in\n  State.of_def_id' ~constructor parent\n\nlet rec parents (did : t) =\n  did :: (parent did |> Option.map ~f:parents |> Option.value ~default:[])\n\nlet to_def_id { def_id; _ } = def_id\nlet is_constructor { is_constructor; _ } = is_constructor\n\nlet is_anon_assoc_ty did =\n  [%matches?\n    Some ({ data = AnonAssocTy _; _ } : Types.disambiguated_def_path_item)]\n    (List.last (to_def_id did).path)\n\n(** Stateful store that maps [def_id]s to implementation information (which\n    trait is implemented? for which type? under which constraints?) 
*)\nmodule ImplInfoStore = struct\n  let state : (Types.def_id_contents, Types.impl_infos) Hashtbl.t option ref =\n    ref None\n\n  module T = struct\n    type t = Types.def_id_contents [@@deriving show, compare, sexp, eq, hash]\n  end\n\n  let init (impl_infos : (Types.def_id * Types.impl_infos) list) =\n    state :=\n      impl_infos\n      |> List.map ~f:(fun ((id : Types.def_id), impl_infos) ->\n             (id.contents.value, impl_infos))\n      |> Hashtbl.of_alist_multi (module T)\n      |> Hashtbl.map ~f:List.hd_exn |> Option.some\n\n  let get_state () =\n    match !state with\n    | None -> failwith \"ImplInfoStore was not initialized\"\n    | Some state -> state\n\n  (** Given a [id] of type [def_id], [find id] will return [Some impl_info] when\n      [id] is an (non-inherent[1]) impl. [impl_info] contains information about\n      the trait being implemented and for which type.\n\n      [1]:\n      https://doc.rust-lang.org/reference/items/implementations.html#inherent-implementations\n  *)\n  let find k = Hashtbl.find (get_state ()) k\n\n  let lookup_raw (impl_def_id : t) : Types.impl_infos option =\n    find (to_def_id impl_def_id)\nend\n\nmodule ToRustAST = struct\n  module A = Types\n  module B = Rust_engine_types\n\n  let rec def_id_contents_to_rust_ast\n      ({ krate; path; parent; kind; _ } : A.def_id_contents) : B.def_id =\n    let f (o : A.def_id) = def_id_contents_to_rust_ast o.contents.value in\n    let parent = Option.map ~f parent in\n    { krate; path; parent; kind }\n\n  let to_rust_ast ({ is_constructor; def_id } : t) : B.explicit_def_id =\n    { is_constructor; def_id = def_id_contents_to_rust_ast def_id }\nend\n\nmodule FromRustAST = struct\n  module A = Rust_engine_types\n  module B = Types\n\n  let rec def_id_contents_to_rust_ast\n      ({ krate; path; parent; kind; _ } : A.def_id) : B.def_id_contents =\n    let f (o : A.def_id) : B.def_id =\n      let contents : B.node_for__def_id_contents =\n        { value = 
def_id_contents_to_rust_ast o; id = Int64.zero }\n      in\n      { contents }\n    in\n    let parent = Option.map ~f parent in\n    {\n      krate;\n      path;\n      parent;\n      kind;\n      index = (Int64.zero, Int64.zero, None);\n      is_local = false;\n    }\n\n  let to_rust_ast ({ is_constructor; def_id } : A.explicit_def_id) : t =\n    { is_constructor; def_id = def_id_contents_to_rust_ast def_id }\nend\n\nlet def_id_to_rust_ast = ToRustAST.def_id_contents_to_rust_ast\nlet def_id_from_rust_ast = FromRustAST.def_id_contents_to_rust_ast\nlet to_rust_ast = ToRustAST.to_rust_ast\nlet from_rust_ast = FromRustAST.to_rust_ast\n"
  },
  {
    "path": "engine/lib/concrete_ident/explicit_def_id.mli",
    "content": "open! Prelude\n\n(** An [ExplicitDefId.t] is a Rust [Types.def_id] tagged with some disambiguation metadata.\n    Explicit definition identifiers are used internally by the concrete names of hax.\n    \n    Rust raw [Types.def_id] can be ambiguous: consider the following Rust code:\n    ```rust\n    struct S;\n    fn f() -> S { S }\n    ```\n    Here, the return type of `f` (that is, `S`) and the constructor `S` in the body of `f` refer to the exact same identifier `mycrate::S`.\n    Yet, they denote two very different objects: a type versus a constructor.\n\n    [ExplicitDefId.t] clears up this ambiguity, making constructors and types two separate things.\n\n    Also, an [ExplicitDefId.t] always points to an item: an [ExplicitDefId.t] is never pointing to a crate alone.\n*)\n\ntype t [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n(** Representation of explicit definition identifiers. *)\n\nval of_def_id : ?constructor:bool -> Types.def_id -> t option\n(** Smart constructor for [t]. Creates an explicit def id out of a raw Rust\n    definition identifier [Types.def_id].\n\n    When [of_def_id] is called with [id] a [Types.def_id], if the [kind] of [id]\n    is either [Struct] or [Union], then [constructor] is mandatory. Otherwise,\n    the argument [constructor] should be [true] only if [id] is a variant.\n\n    [of_def_id] shall not be called on a Rust identifier pointing to a crate\n    root.\n\n    This function returns [Some] only when those conditions are met. *)\n\nval of_def_id_exn : ?constructor:bool -> Types.def_id -> t\n(** Exception-throwing variant of [of_def_id]. This should be used when we know\n    statically that the conditions described in the documentation of [of_def_id]\n    are met.\n\n    For instance, with static [Types.def_id]s or in [Import_thir]. 
*)\n\nval is_constructor : t -> bool\n(** Checks whether a definition identifier [id] points to a constructor.\n\n    [is_constructor id] returns [true] when:\n    - the kind of [id] is [Struct] or [Union] and the identifier was tagged as a\n      constructor;\n    - the kind of [id] is [Variant]. Otherwise, [is_constructor id] returns\n      [false]. *)\n\nval is_anon_assoc_ty : t -> bool\n(** Returns true if the ident represents an anonymous associated type. *)\n\nval parent : t -> t option\n(** Looks up the parent of a definition identifier. Note that the parent of the\n    identifier of a field is always a constructor.\n\n    Also, a top-level item (e.g. `my_crate::some_item`) has no parent: recall\n    that [t] represents only items, not crates. *)\n\nval parents : t -> t list\n(** Ordered list of parents for an identifier [id], starting with [id], up to\n    the top-most parent identifier. *)\n\nval to_def_id : t -> Types.def_id_contents\n(** Destructor for [t]. *)\n\nmodule State : sig\n  val list_all : unit -> t list\n  (** List all identifiers the engine dealt with so far. Beware, this function\n      is stateful. *)\nend\n\nmodule ImplInfoStore : sig\n  val init : (Types.def_id * Types.impl_infos) list -> unit\n\n  val lookup_raw : t -> Types.impl_infos option\n  (** Lookup the (raw[1]) implementation information given a concrete ident.\n      Returns `Some _` if and only if the supplied identifier points to an\n      `Impl`.\n\n      [1]: those are raw THIR types.\n\n      {b WARNING}: due to\n      {{:https://github.com/hacspec/hax/issues/363} issue 363}, when looking up\n      certain identifiers generated by the engine, this function may return\n      [None] even though the supplied identifier points to an [Impl] block. 
*)\nend\n\nval def_id_to_rust_ast : Types.def_id_contents -> Types.def_id_inner\nval def_id_from_rust_ast : Types.def_id_inner -> Types.def_id_contents\nval to_rust_ast : t -> Rust_engine_types.explicit_def_id\nval from_rust_ast : Rust_engine_types.explicit_def_id -> t\n"
  },
  {
    "path": "engine/lib/concrete_ident/impl_infos.ml",
    "content": "open! Prelude\n\ntype t = {\n  trait_goal : Ast.Rust.trait_goal option;\n      (** The trait implemented by the [impl] block or [None] if the [impl]\n          block is an\n          {{:https://doc.rust-lang.org/reference/items/implementations.html#inherent-implementations}\n           inherent [impl]}. *)\n  typ : Ast.Rust.ty;  (** The type implemented by the [impl] block. *)\n  clauses : Ast.Rust.trait_goal list;\n      (** The clauses that constraint this [impl] block. *)\n}\n(** metadata of an [impl] block *)\n\n(** Lookup the implementation information given a concrete ident. Returns\n    [Some _] if and only if the supplied identifier points to an [Impl].\n\n    {b WARNING}: due to\n    {{:https://github.com/hacspec/hax/issues/363} issue 363}, when looking up\n    certain identifiers generated by the engine, this function may return [None]\n    even though the supplied identifier points to an [Impl] block. *)\nlet lookup span (impl : Concrete_ident.t) : t option =\n  let* Types.{ generics = _; clauses; typ; trait_ref } =\n    Concrete_ident.ImplInfoStore.lookup_raw impl\n  in\n  let trait_goal =\n    Option.map ~f:(Import_thir.import_trait_ref span) trait_ref\n  in\n  let typ = Import_thir.import_ty span typ in\n  let clauses =\n    let f i ((binder : Types.clause), span) =\n      Import_thir.import_clause span i binder\n    in\n    List.filter_mapi ~f clauses\n    |> List.filter_map ~f:(fun (c : Ast.Rust.generic_constraint) ->\n           match c with GCType i -> Some i.goal | _ -> None)\n  in\n  Some { trait_goal; typ; clauses }\n"
  },
  {
    "path": "engine/lib/concrete_ident/thir_simple_types.ml",
    "content": "open! Prelude\nmodule View = Concrete_ident_view\n\n(** Interprets a type as a \"simple type\". A simple type is a type for which, in\n    a given scope, we can give a non-ambiguous string identifier.\n\n    This is useful for naming local impls.\n\n    Examples of \"simple\" types:\n    - primitive types (e.g. u8, u16)\n    - enums/structs/unions defined in [namespace], when:\n\n    + all their generic arguments are instantiated to a simple type\n\n    - a reference to a simple type\n    - a slice to a simple type\n    - a tuple of simple types of arity zero (e.g. no ADTs of non-zero arity) *)\nlet to_string ~(namespace : View.ModPath.t) :\n    Types.node_for__ty_kind -> string option =\n  let escape =\n    let re = Re.Pcre.regexp \"_((?:e_)*)of_\" in\n    let f group = \"_e_\" ^ Re.Group.get group 1 ^ \"of_\" in\n    Re.replace ~all:true re ~f\n  in\n  let adt def_id =\n    let* def_id = Explicit_def_id.of_def_id ~constructor:false def_id in\n    let view = View.of_def_id def_id in\n    let* () =\n      [%equal: View.ModPath.t] view.mod_path namespace |> some_if_true\n    in\n    let* last = expect_singleton view.rel_path in\n    let* name =\n      match last with\n      | (`Struct d | `Union d | `Enum d)\n        when Int64.(equal (of_int 0) d.disambiguator) ->\n          Some d.data\n      | _ -> None\n    in\n    escape name |> Option.some\n  in\n  let arity0 (ty : Types.node_for__ty_kind) =\n    match ty.Types.value with\n    | Bool -> Some \"bool\"\n    | Char -> Some \"char\"\n    | Str -> Some \"str\"\n    | Never -> Some \"never\"\n    | Int Isize -> Some \"isize\"\n    | Int I8 -> Some \"i8\"\n    | Int I16 -> Some \"i16\"\n    | Int I32 -> Some \"i32\"\n    | Int I64 -> Some \"i64\"\n    | Int I128 -> Some \"i128\"\n    | Uint Usize -> Some \"usize\"\n    | Uint U8 -> Some \"u8\"\n    | Uint U16 -> Some \"u16\"\n    | Uint U32 -> Some \"u32\"\n    | Uint U64 -> Some \"u64\"\n    | Uint U128 -> Some \"u128\"\n    | Float F32 -> Some 
\"f32\"\n    | Float F64 -> Some \"f64\"\n    | Tuple { value = { generic_args = []; _ }; _ } -> Some \"unit\"\n    | Adt { value = { def_id; generic_args = []; _ }; _ } ->\n        Option.map ~f:escape (adt def_id)\n    | _ -> None\n  in\n  let apply left right = left ^ \"_of_\" ^ right in\n  let rec arity1 (ty : Types.node_for__ty_kind) =\n    match ty.value with\n    | Slice { value = { generic_args = [ Type sub ]; _ }; _ } ->\n        arity1 sub |> Option.map ~f:(apply \"slice\")\n    | Ref (_, sub, _) -> arity1 sub |> Option.map ~f:(apply \"ref\")\n    | Adt { value = { def_id; generic_args = [ Type arg ]; _ }; _ } ->\n        let* adt = adt def_id in\n        let* arg = arity1 arg in\n        Some (apply adt arg)\n    | Tuple { value = { generic_args; _ }; _ } ->\n        let* l =\n          List.map\n            ~f:(fun (arg : Types.generic_arg) ->\n              match arg with Type ty -> arity0 ty | _ -> None)\n            generic_args\n          |> Option.all\n        in\n        Some (\"tuple_\" ^ String.concat ~sep:\"_\" l)\n    | _ -> arity0 ty\n  in\n  arity1\n"
  },
  {
    "path": "engine/lib/dependencies.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) = struct\n  module AST = Ast.Make (F)\n  module U = Ast_utils.Make (F)\n  open Ast\n  open AST\n\n  (** Get the identifier of an item *)\n  let ident_of (item : item) : Concrete_ident.t = item.ident\n\n  (** Get all the identifiers declared under an item. This includes the\n      identifier of the item itself, but also of any sub-item: for instance,\n      associated items within an impl. *)\n  let idents_of (item : item) : Concrete_ident.t list =\n    let is_field_anonymous ident =\n      match List.last (Concrete_ident.to_view ident).mod_path with\n      | Some { data = n; _ } -> Option.is_some (Int.of_string_opt n)\n      | _ -> false\n    in\n    ident_of item\n    ::\n    (match item.v with\n    | Type { variants; _ } ->\n        List.concat_map\n          ~f:(fun variant ->\n            let fields =\n              List.map ~f:fst3 variant.arguments\n              |> List.filter ~f:(not << is_field_anonymous)\n            in\n\n            variant.name :: fields)\n          variants\n    | Trait { items; _ } -> List.map ~f:(fun item -> item.ti_ident) items\n    | Impl { items; _ } -> List.map ~f:(fun item -> item.ii_ident) items\n    | _ -> (* No sub items *) [])\n\n  module Namespace = struct\n    include Concrete_ident.View.ModPath\n    module Set = Set.M (Concrete_ident.View.ModPath)\n\n    let of_concrete_ident ci : t = (Concrete_ident.to_view ci).mod_path\n\n    let to_string ?(sep = \"::\") : t -> string =\n      List.map ~f:(fun (o : Concrete_ident_view.DisambiguatedString.t) ->\n          o.data)\n      >> String.concat ~sep\n  end\n\n  module Error : Phase_utils.ERROR = Phase_utils.MakeError (struct\n    let ctx = Diagnostics.Context.Dependencies\n  end)\n\n  module Attrs = Attr_payloads.Make (F) (Error)\n\n  let uid_associated_items (items : item list) : attrs -> item list =\n    let open Attrs.WithItems (struct\n      let items = items\n    end) in\n    raw_associated_item >> 
List.filter_map ~f:(snd >> try_item_of_uid)\n\n  module ItemGraph = struct\n    module G = Graph.Persistent.Digraph.Concrete (Concrete_ident)\n\n    module GInt = struct\n      include Graph.Persistent.Digraph.Concrete (Int)\n\n      let empty () = empty\n    end\n\n    module Topological = Graph.Topological.Make_stable (GInt)\n    module Map_G_GInt = Graph.Gmap.Edge (G) (GInt)\n    module Oper = Graph.Oper.P (G)\n\n    let vertices_of_item (i : item) : G.V.t list =\n      let ( @ ) = Set.union in\n      let v = U.Reducers.collect_concrete_idents in\n      let concat_map f =\n        List.map ~f >> Set.union_list (module Concrete_ident)\n      in\n      let set =\n        match i.v with\n        | Fn { name = _; generics; body; params; _ } ->\n            v#visit_generics () generics\n            @ v#visit_expr () body\n            @ concat_map (v#visit_param ()) params\n        | TyAlias { name = _; generics; ty } ->\n            v#visit_generics () generics @ v#visit_ty () ty\n        | Type { name = _; generics; variants; is_struct = (_ : bool) } ->\n            v#visit_generics () generics\n            @ concat_map (v#visit_variant ()) variants\n        | IMacroInvokation { macro; argument = (_ : string); span; witness = _ }\n          ->\n            v#visit_concrete_ident () macro @ v#visit_span () span\n        | Trait { name = _; generics; items; safety = _ } ->\n            v#visit_generics () generics\n            @ concat_map (v#visit_trait_item ()) items\n        | Impl { generics; self_ty; of_trait; items; parent_bounds; safety = _ }\n          ->\n            v#visit_generics () generics\n            @ v#visit_ty () self_ty\n            @ v#visit_concrete_ident () (fst of_trait)\n            @ concat_map (v#visit_generic_value ()) (snd of_trait)\n            @ concat_map (v#visit_impl_item ()) items\n            @ concat_map\n                (fun (ie, ii) ->\n                  v#visit_impl_expr () ie @ v#visit_impl_ident () ii)\n                
parent_bounds\n        | Alias { name = _; item } -> v#visit_concrete_ident () item\n        | Use _ | Quote _ | HaxError _ | NotImplementedYet ->\n            Set.empty (module Concrete_ident)\n      in\n      set |> Set.to_list\n\n    let vertices_of_items ~uid_associated_items (items : item list) : G.E.t list\n        =\n      List.concat_map\n        ~f:(fun i ->\n          let attrs = U.Reducers.collect_attrs#visit_item () i in\n          let assoc =\n            uid_associated_items attrs |> List.map ~f:(fun i -> i.ident)\n          in\n          vertices_of_item i @ assoc |> List.map ~f:(Fn.const i.ident &&& Fn.id))\n        items\n\n    let of_items ~original_items (items : item list) : G.t =\n      let init =\n        List.fold ~init:G.empty ~f:(fun g -> ident_of >> G.add_vertex g) items\n      in\n      let uid_associated_items = uid_associated_items original_items in\n      vertices_of_items ~uid_associated_items items\n      |> List.fold ~init ~f:(G.add_edge >> uncurry)\n\n    let transitive_dependencies_of (g : G.t) (selection : Concrete_ident.t list)\n        : Concrete_ident.t Hash_set.t =\n      let set = Hash_set.create (module Concrete_ident) in\n      let rec visit vertex =\n        if Hash_set.mem set vertex |> not then (\n          Hash_set.add set vertex;\n          G.iter_succ visit g vertex)\n      in\n      List.filter ~f:(G.mem_vertex g) selection |> List.iter ~f:visit;\n      set\n\n    let transitive_dependencies_of_items ~original_items (items : item list)\n        ?(graph = of_items ~original_items items)\n        (selection : Concrete_ident.t list) : item list =\n      let set = transitive_dependencies_of graph selection in\n      items |> List.filter ~f:(ident_of >> Hash_set.mem set)\n\n    module MutRec = struct\n      module Bundle = struct\n        type t = concrete_ident list\n\n        let namespaces_of : t -> Namespace.Set.t =\n          List.map ~f:Namespace.of_concrete_ident\n          >> Set.of_list (module Namespace)\n\n    
    let homogeneous_namespace (ns : t) : bool =\n          Set.length (namespaces_of ns) <= 1\n      end\n\n      type t = {\n        mut_rec_bundles : Bundle.t list;\n        non_mut_rec : concrete_ident list;\n      }\n\n      module SCC = Graph.Components.Make (G)\n\n      let of_graph (g : G.t) : t =\n        let is_mut_rec_with_itself x = G.mem_edge g x x in\n        let mut_rec_bundles, non_mut_rec =\n          SCC.scc_list g\n          |> List.partition_map ~f:(function\n               | [] -> failwith \"scc_list returned empty cluster\"\n               | [ x ] when is_mut_rec_with_itself x |> not -> Second x\n               | bundle -> First bundle)\n        in\n        { mut_rec_bundles; non_mut_rec }\n\n      let all_homogeneous_namespace (g : G.t) =\n        List.for_all ~f:Bundle.homogeneous_namespace\n          (of_graph g).mut_rec_bundles\n    end\n\n    module CyclicDep = struct\n      module Bundle = struct\n        type t = Concrete_ident.t list\n\n        module G = Graph.Persistent.Graph.Concrete (Concrete_ident)\n        module CC = Graph.Components.Undirected (G)\n\n        let cycles g = CC.components_list g\n      end\n\n      (* This is a solution that bundles together everything that belongs to the same module SCC.\n         It results in bundles that are much bigger than they could be but is a simple solution\n         to the problem described in https://github.com/hacspec/hax/issues/995#issuecomment-2411114404 *)\n      let of_mod_sccs (items : item list)\n          (mod_graph_cycles : Namespace.Set.t list) : Bundle.t list =\n        let item_names = List.map items ~f:(fun x -> x.ident) in\n        let cycles =\n          List.filter mod_graph_cycles ~f:(fun set -> Set.length set > 1)\n        in\n        let bundles =\n          List.map cycles ~f:(fun set ->\n              List.filter item_names ~f:(fun item ->\n                  Set.mem set (Namespace.of_concrete_ident item)))\n        in\n        bundles\n    end\n\n    open 
Graph.Graphviz.Dot (struct\n      include G\n\n      let graph_attributes _ = []\n      let default_vertex_attributes _ = []\n      let vertex_name i = \"\\\"\" ^ Concrete_ident.show i ^ \"\\\"\"\n\n      let vertex_attributes i =\n        [ `Label (Concrete_ident.DefaultViewAPI.render i).name ]\n\n      let get_subgraph i =\n        let ns = Namespace.of_concrete_ident i in\n        let sg_name = Namespace.to_string ~sep:\"__\" ns in\n        let label = Namespace.to_string ~sep:\"::\" ns in\n        let open Graph.Graphviz.DotAttributes in\n        Some { sg_name; sg_attributes = [ `Label label ]; sg_parent = None }\n\n      let default_edge_attributes _ = []\n      let edge_attributes _ = []\n    end)\n\n    let print oc items = output_graph oc (of_items ~original_items:items items)\n  end\n\n  module ModGraph = struct\n    module G = Graph.Persistent.Digraph.Concrete (Namespace)\n\n    let of_items (items : item list) : G.t =\n      let ig = ItemGraph.of_items ~original_items:items items in\n      let vertices =\n        List.fold items ~init:G.empty ~f:(fun g item ->\n            G.add_vertex g (Namespace.of_concrete_ident item.ident))\n      in\n      List.map ~f:(ident_of >> (Namespace.of_concrete_ident &&& Fn.id)) items\n      |> Map.of_alist_multi (module Namespace)\n      |> Map.map\n           ~f:\n             (List.concat_map\n                ~f:\n                  (ItemGraph.G.succ ig\n                  >> List.map ~f:Namespace.of_concrete_ident)\n             >> Set.of_list (module Namespace)\n             >> Set.to_list)\n      |> Map.to_alist\n      |> List.concat_map ~f:(fun (x, ys) -> List.map ~f:(fun y -> (x, y)) ys)\n      |> List.fold ~init:vertices ~f:(G.add_edge >> uncurry)\n\n    module SCC = Graph.Components.Make (G)\n\n    let cycles g : Namespace.Set.t list =\n      SCC.scc_list g |> List.map ~f:(Set.of_list (module Namespace))\n\n    (** Returns the namespaces in topological order *)\n    let order g : Namespace.t list =\n      let 
module ModTopo = Graph.Topological.Make_stable (G) in\n      ModTopo.fold List.cons g []\n\n    open Graph.Graphviz.Dot (struct\n      include G\n\n      let graph_attributes _ = []\n      let default_vertex_attributes _ = []\n      let vertex_name ns = \"\\\"\" ^ Namespace.to_string ns ^ \"\\\"\"\n      let vertex_attributes _ = []\n      let get_subgraph _ = None\n      let default_edge_attributes _ = []\n      let edge_attributes _ = []\n    end)\n\n    let print oc items =\n      let g = of_items items in\n      let complicated_ones =\n        SCC.scc_list g\n        |> List.concat_map ~f:(function [] | [ _ ] -> [] | bundle -> bundle)\n      in\n      let g =\n        List.concat_map\n          ~f:(fun ns ->\n            List.map\n              ~f:(fun y -> (ns, y))\n              (G.succ g ns\n              |> List.filter\n                   ~f:(List.mem ~equal:[%equal: Namespace.t] complicated_ones)))\n          complicated_ones\n        |> List.fold ~init:G.empty ~f:(G.add_edge >> uncurry)\n      in\n      output_graph oc g\n  end\n\n  let ident_list_to_string =\n    List.map ~f:Concrete_ident.DefaultViewAPI.show >> String.concat ~sep:\", \"\n\n  let sort (items : item list) : item list =\n    let g =\n      ItemGraph.of_items ~original_items:items items |> ItemGraph.Oper.mirror\n    in\n    let stable_g =\n      let to_index =\n        items\n        |> List.mapi ~f:(fun i item -> (item.ident, i))\n        |> Map.of_alist_exn (module Concrete_ident)\n        |> Map.find\n      in\n      ItemGraph.Map_G_GInt.filter_map\n        (to_index *** to_index >> uncurry Option.both)\n        g\n    in\n    let stable_g =\n      List.foldi items ~init:stable_g ~f:(fun i g _ ->\n          ItemGraph.GInt.add_vertex g i)\n    in\n    let items' =\n      let items_array = Array.of_list items in\n      let lookup (index : int) = items_array.(index) in\n      ItemGraph.Topological.fold List.cons stable_g [] |> List.map ~f:lookup\n    in\n    (* Stable topological sort 
doesn't guarantee to group cycles together.\n       We make this correction to ensure mutually recursive items are grouped. *)\n    let items' =\n      let cycles =\n        ItemGraph.MutRec.SCC.scc_list g\n        |> List.filter ~f:(fun cycle -> List.length cycle > 1)\n      in\n      (* TODO: This can be optimized by using a set or a map\n         to avoid traversing all cycles at each iteration. *)\n      List.fold items' ~init:[] ~f:(fun acc item ->\n          match\n            List.find cycles ~f:(fun cycle ->\n                List.mem cycle item.ident ~equal:[%eq: concrete_ident])\n          with\n          | Some _\n            when List.exists acc ~f:(fun els ->\n                     List.mem els item ~equal:[%eq: item]) ->\n              [] :: acc\n          | Some cycle ->\n              List.map cycle ~f:(fun ident ->\n                  List.find_exn items ~f:(fun item ->\n                      [%eq: concrete_ident] item.ident ident))\n              :: acc\n          | None -> [ item ] :: acc)\n      |> List.concat\n    in\n    (* Quote items must be placed right before or after their origin *)\n    let items' =\n      let before_quotes, after_quotes, _ =\n        List.partition3_map items' ~f:(fun item ->\n            match item.v with\n            | Quote { origin; _ } -> (\n                match origin.position with\n                | `Before -> `Fst (origin, item)\n                | `After -> `Snd (origin, item)\n                | `Replace -> `Trd ())\n            | _ -> `Trd ())\n      in\n      let move_quote before origin quote_item =\n        List.concat_map ~f:(fun item ->\n            if [%eq: concrete_ident] origin.item_ident item.ident then\n              if before then [ quote_item; item ] else [ item; quote_item ]\n            else if [%eq: concrete_ident] quote_item.ident item.ident then []\n            else [ item ])\n      in\n      let before_quotes = List.rev before_quotes in\n      let items' =\n        List.fold before_quotes 
~init:items'\n          ~f:(fun items' (origin, quote_item) ->\n            move_quote true origin quote_item items')\n      in\n      List.fold after_quotes ~init:items' ~f:(fun items' (origin, quote_item) ->\n          move_quote false origin quote_item items')\n    in\n\n    assert (\n      let of_list =\n        List.map ~f:ident_of >> Set.of_list (module Concrete_ident)\n      in\n      let items = of_list items in\n      let items' = of_list items' in\n      Set.equal items items');\n    items'\n\n  (** Sort within each namespaces: items are first grouped by namespace, then\n      sorted topologically. *)\n  let sort_namespace_wise (items : item list) : item list =\n    let sorted_by_namespace =\n      U.group_items_by_namespace items\n      |> Map.data\n      |> List.map ~f:(fun items -> sort items)\n    in\n    let sorted_namespaces = ModGraph.order (ModGraph.of_items items) in\n    List.concat_map sorted_namespaces ~f:(fun namespace ->\n        List.find sorted_by_namespace ~f:(fun items ->\n            List.exists items ~f:(fun item ->\n                Namespace.equal\n                  (Namespace.of_concrete_ident item.ident)\n                  namespace))\n        |> Option.value ~default:[])\n\n  let filter_by_inclusion_clauses' ~original_items\n      (clauses : Types.inclusion_clause list) (items : item list) :\n      item list * Concrete_ident.t Hash_set.t =\n    let graph = ItemGraph.of_items ~original_items items in\n    let of_list = Set.of_list (module Concrete_ident) in\n    let selection = List.map ~f:ident_of items |> of_list in\n    let deps_of =\n      let to_set = Hash_set.to_list >> of_list in\n      Set.to_list >> ItemGraph.transitive_dependencies_of graph >> to_set\n    in\n    let show_ident_set =\n      Set.to_list\n      >> List.map ~f:Concrete_ident.DefaultViewAPI.show\n      >> List.map ~f:(fun s -> \" - \" ^ s)\n      >> String.concat ~sep:\"\\n\"\n    in\n    let show_inclusion_clause Types.{ kind; namespace } =\n      (match kind 
with\n      | Excluded -> \"-\"\n      | SignatureOnly -> \"+:\"\n      | Included deps_kind -> (\n          match deps_kind with\n          | Transitive -> \"+\"\n          | Shallow -> \"+~\"\n          | None' -> \"+!\"))\n      ^ \"[\"\n      ^ (List.map\n           ~f:(function Glob One -> \"*\" | Glob Many -> \"**\" | Exact s -> s)\n           namespace.chunks\n        |> String.concat ~sep:\"::\")\n      ^ \"]\"\n    in\n    let hax_lib_include =\n      let id_to_include =\n        Hashtbl.of_alist_exn\n          (module Concrete_ident)\n          (List.map\n             ~f:(fun it ->\n               ( it.ident,\n                 Attrs.find_unique_attr\n                   ~f:(function Types.ItemStatus is -> Some is | _ -> None)\n                   it.attrs ))\n             items)\n      in\n      Hashtbl.find id_to_include >> Option.join\n    in\n\n    let items_drop_body = Hash_set.create (module Concrete_ident) in\n    let apply_clause selection' (clause : Types.inclusion_clause) =\n      let matches = Concrete_ident.matches_namespace clause.Types.namespace in\n      let matched0 = Set.filter ~f:matches selection in\n      let with_deps, drop_bodies =\n        match clause.kind with\n        | Included Transitive -> (true, false)\n        | Included Shallow -> (true, true)\n        | Included None' -> (false, false)\n        | SignatureOnly -> (false, true)\n        | Excluded -> (false, false)\n      in\n      let matched = matched0 |> if with_deps then deps_of else Fn.id in\n      if drop_bodies then (\n        Set.iter ~f:(Hash_set.add items_drop_body) matched;\n        Set.iter ~f:(Hash_set.remove items_drop_body) matched0);\n      Logs.info (fun m ->\n          m \"The clause [%s] will %s the following Rust items:\\n%s\"\n            (show_inclusion_clause clause)\n            (match clause.kind with Excluded -> \"remove\" | _ -> \"add\")\n          @@ show_ident_set matched);\n      let set_op =\n        match clause.kind with\n        | Included _ | 
SignatureOnly -> Set.union\n        | Excluded -> Set.diff\n      in\n      let result = set_op selection' matched in\n      let forced_include =\n        selection'\n        |> Set.filter\n             ~f:\n               (hax_lib_include\n               >> [%eq: Types.ha_item_status option]\n                    (Some (Included { late_skip = false })))\n      in\n      Set.union forced_include result\n    in\n    let selection = List.fold ~init:selection ~f:apply_clause clauses in\n    Logs.info (fun m ->\n        m \"The following Rust items are going to be extracted:\\n%s\"\n        @@ show_ident_set selection);\n    (List.filter ~f:(ident_of >> Set.mem selection) items, items_drop_body)\n\n  let filter_by_inclusion_clauses (clauses : Types.inclusion_clause list)\n      (items : item list) : item list =\n    let f = filter_by_inclusion_clauses' ~original_items:items clauses in\n    let selection =\n      let items', items_drop_body = f items in\n      let items', _ =\n        (* when one includes only shallow dependencies, we just remove bodies *)\n        List.map\n          ~f:(fun item ->\n            if Hash_set.mem items_drop_body (ident_of item) then\n              U.Mappers.drop_bodies#visit_item () item\n            else item)\n          items'\n        |> f\n      in\n      List.map ~f:ident_of items' |> Set.of_list (module Concrete_ident)\n    in\n    List.filter ~f:(ident_of >> Set.mem selection) items\n\n  let fresh_module_for (bundle : item list) =\n    let fresh_module =\n      Concrete_ident.fresh_module ~label:\"bundle\" (List.map ~f:ident_of bundle)\n    in\n    let renamings =\n      bundle\n      (* Exclude `Use` items: we exclude those from bundling since they are only\n         user hints. `Use` items don't have proper identifiers, and those\n         identifiers are never referenced by other Rust items. 
*)\n      |> List.filter ~f:(function { v = Use _; _ } -> false | _ -> true)\n      (* Exclude `NotImplementedYet` items *)\n      |> List.filter ~f:(function\n           | { v = NotImplementedYet; _ } -> false\n           | _ -> true)\n      |> List.concat_map ~f:(fun item ->\n             List.map\n               ~f:(fun id ->\n                 ( item,\n                   (id, Concrete_ident.move_to_fresh_module fresh_module id) ))\n               (idents_of item))\n    in\n    let aliases =\n      let inspect_view_last id =\n        List.last (Concrete_ident.to_view id).rel_path\n      in\n      List.filter_map renamings ~f:(fun (origin_item, (from_id, to_id)) ->\n          let attrs =\n            List.filter\n              ~f:(fun att -> Attrs.late_skip [ att ])\n              origin_item.attrs\n          in\n          let v = Alias { name = from_id; item = to_id } in\n          match origin_item.v with\n          (* We don't want to aliases for constructors of structs with named fields because\n             they can't be imported in F*. Ideally this should be handled by the backend. *)\n          | Type { variants; is_struct = true; _ }\n            when List.for_all variants ~f:(fun variant -> variant.is_record)\n                 && Concrete_ident.is_constructor from_id ->\n              None\n          (* We don't need aliases for fields of types. *)\n          | Type _ when [%matches? Some (`Field _)] (inspect_view_last from_id)\n            ->\n              None\n          (* We don't need aliases for methods of trait impls. *)\n          | Impl _\n            when [%matches? 
Some (`AssociatedItem _)]\n                   (inspect_view_last from_id) ->\n              None\n          | Quote _ -> None\n          | _ -> Some { attrs; span = origin_item.span; ident = from_id; v })\n    in\n    let rename =\n      let renamings = List.map ~f:snd renamings in\n      let renamings =\n        match Map.of_alist (module Concrete_ident) renamings with\n        | `Duplicate_key dup ->\n            failwith\n              [%string\n                \"Fatal error: in dependency analysis, we construct a renaming \\\n                 key-value list with a guarantee of unicity in keys. However, \\\n                 we found the following key twice:\\n\\\n                 %{[%show: concrete_ident] dup}\"]\n        | `Ok value -> value\n      in\n      let renamer _lvl i = Map.find renamings i |> Option.value ~default:i in\n      (U.Mappers.rename_concrete_idents renamer)#visit_item ExprLevel\n    in\n    List.map ~f:rename bundle @ aliases\n\n  let bundle_cyclic_modules (items : item list) : item list =\n    (* [module_level_scc] is a list of set of strongly connected modules. 
*)\n    let module_level_scc = ModGraph.(of_items >> cycles) items in\n    let items_per_ns =\n      List.map ~f:(fun i -> (Namespace.of_concrete_ident i.ident, i)) items\n      |> Map.of_alist_multi (module Namespace)\n    in\n    let items_of_ns = Map.find items_per_ns >> Option.value ~default:[] in\n    module_level_scc\n    |> List.concat_map ~f:(fun nss ->\n           let multiple_heterogeneous_modules = Set.length nss > 1 in\n           let items = Set.to_list nss |> List.concat_map ~f:items_of_ns in\n           if multiple_heterogeneous_modules then fresh_module_for items\n           else items)\n\n  let recursive_bundles (items : item list) : item list list * item list =\n    let g = ItemGraph.of_items ~original_items:items items in\n    let bundles = ItemGraph.MutRec.of_graph g in\n    let from_ident ident : item option =\n      List.find ~f:(fun i -> [%equal: Concrete_ident.t] i.ident ident) items\n    in\n    let f = List.filter_map ~f:from_ident in\n    (List.map ~f bundles.mut_rec_bundles, f bundles.non_mut_rec)\nend\n"
  },
  {
    "path": "engine/lib/dependencies.mli",
    "content": "module Make (F : Features.T) : sig\n  module AST : module type of Ast.Make (F)\n\n  val uid_associated_items : AST.item list -> Ast.attrs -> AST.item list\n  val bundle_cyclic_modules : AST.item list -> AST.item list\n\n  val sort_namespace_wise : AST.item list -> AST.item list\n  (** Sort within each namespaces: items are first grouped by namespace, then\n      sorted topologically. *)\n\n  val sort : AST.item list -> AST.item list\n  (** Sort items regardless of their namespaces. *)\n\n  val recursive_bundles : AST.item list -> AST.item list list * AST.item list\n\n  val filter_by_inclusion_clauses :\n    Types.inclusion_clause list -> AST.item list -> AST.item list\nend\n"
  },
  {
    "path": "engine/lib/deprecated_generic_printer/deprecated_generic_printer.ml",
    "content": "open! Prelude\nopen! Ast\n\nmodule Make (F : Features.T) (View : Concrete_ident.RENDER_API) = struct\n  open Deprecated_generic_printer_base\n  open Deprecated_generic_printer_base.Make (F)\n\n  module Class = struct\n    module U = Ast_utils.Make (F)\n    open! AST\n    open PPrint\n\n    let iblock f = group >> jump 2 0 >> terminate (break 0) >> f >> group\n\n    class print =\n      object (print)\n        inherit print_base as super\n        method printer_name = \"Generic\"\n\n        method par_state : ast_position -> par_state =\n          function\n          | Lhs_LhsArrayAccessor | Ty_Tuple | Ty_TSlice | Ty_TArray_length\n          | Expr_If_cond | Expr_If_then | Expr_If_else | Expr_Array\n          | Expr_Assign | Expr_Closure_param | Expr_Closure_body\n          | Expr_Ascription_e | Expr_Let_lhs | Expr_Let_rhs | Expr_Let_body\n          | Expr_App_arg | Expr_ConstructTuple | Pat_ConstructTuple | Pat_PArray\n          | Pat_Ascription_pat | Param_pat | Item_Fn_body | GenericParam_GPConst\n            ->\n              AlreadyPar\n          | _ -> NeedsPar\n\n        method namespace_of_concrete_ident : concrete_ident -> string list =\n          fun i ->\n            let rendered = View.render i in\n            rendered.path\n\n        method concrete_ident' ~(under_current_ns : bool) : concrete_ident fn =\n          fun id ->\n            let id = View.render id in\n            let chunks =\n              if under_current_ns then [ id.name ] else id.path @ [ id.name ]\n            in\n            separate_map (colon ^^ colon) utf8string chunks\n\n        method name_of_concrete_ident : concrete_ident fn =\n          fun id -> (View.render id).name |> utf8string\n\n        method mutability : 'a. 
'a mutability fn = fun _ -> empty\n\n        method primitive_ident : primitive_ident fn =\n          function\n          | Deref -> string \"deref\"\n          | Cast -> string \"cast\"\n          | LogicalOp And -> string \"and\"\n          | LogicalOp Or -> string \"or\"\n\n        method local_ident : local_ident fn = View.local_ident >> utf8string\n\n        method literal : literal_ctx -> literal fn =\n          (* TODO : escape *)\n          fun _ctx -> function\n            | String s -> utf8string s |> dquotes\n            | Char c -> char c |> bquotes\n            | Int { value; negative; _ } ->\n                string value |> precede (if negative then minus else empty)\n            | Float { value; kind; negative } ->\n                string value\n                |> precede (if negative then minus else empty)\n                |> terminate (string (show_float_kind kind))\n            | Bool b -> OCaml.bool b\n\n        method generic_value : generic_value fn =\n          function\n          | GLifetime _ -> string \"Lifetime\"\n          | GType ty -> print#ty_at GenericValue_GType ty\n          | GConst expr -> print#expr_at GenericValue_GConst expr\n\n        method lhs : lhs fn =\n          function\n          | LhsLocalVar { var; _ } -> print#local_ident var\n          | LhsVecRef { e; _ } -> print#lhs e\n          | LhsArbitraryExpr { e; _ } -> print#expr_at Lhs_LhsArbitraryExpr e\n          | LhsFieldAccessor { e; field; _ } ->\n              print#lhs e |> parens\n              |> terminate (dot ^^ print#global_ident_projector field)\n          | LhsArrayAccessor { e; index; _ } ->\n              print#lhs e |> parens\n              |> terminate (print#expr_at Lhs_LhsArrayAccessor index |> brackets)\n\n        method ty_bool : document = string \"bool\"\n        method ty_char : document = string \"char\"\n        method ty_str : document = string \"str\"\n\n        method ty_int : int_kind fn =\n          fun { size; signedness } ->\n            
let signedness = match signedness with Signed -> \"i\" | _ -> \"u\" in\n            let size =\n              match int_of_size size with\n              | Some n -> OCaml.int n\n              | None -> string \"size\"\n            in\n            string signedness ^^ size\n\n        method ty_float : float_kind fn = show_float_kind >> string\n\n        method generic_values : generic_value list fn =\n          function\n          | [] -> empty\n          | values -> separate_map comma print#generic_value values |> angles\n\n        method ty_app : concrete_ident -> generic_value list fn =\n          fun f args -> print#concrete_ident f ^^ print#generic_values args\n\n        method ty_tuple : int -> ty list fn =\n          fun _n ->\n            separate_map (comma ^^ break 1) (print#ty_at Ty_Tuple)\n            >> iblock parens\n\n        method! ty : par_state -> ty fn =\n          fun ctx ty ->\n            match ty with\n            | TBool -> string \"bool\"\n            | TChar -> string \"char\"\n            | TInt kind -> print#ty_int kind\n            | TFloat kind -> print#ty_float kind\n            | TStr -> string \"String\"\n            | TArrow (inputs, output) ->\n                separate_map (string \"->\") (print#ty_at Ty_TArrow)\n                  (inputs @ [ output ])\n                |> parens\n                |> precede (string \"arrow!\")\n            | TRef { typ; mut; _ } ->\n                ampersand ^^ print#mutability mut ^^ print#ty_at Ty_TRef typ\n            | TParam i -> print#local_ident i\n            | TSlice { ty; _ } -> print#ty_at Ty_TSlice ty |> brackets\n            | TRawPointer _ -> string \"raw_pointer!()\"\n            | TArray { typ; length } ->\n                print#ty_at Ty_TArray_length typ\n                ^/^ semi\n                ^/^ print#expr_at Ty_TArray_length length\n                |> brackets\n            | TAssociatedType _ -> string \"assoc_type!()\"\n            | TOpaque _ -> string \"opaque_type!()\"\n  
          | TApp _ -> super#ty ctx ty\n            | TDyn _ -> empty (* TODO *)\n\n        method! expr' : par_state -> expr' fn =\n          fun ctx e ->\n            let wrap_parens =\n              group\n              >>\n              match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock braces\n            in\n            match e with\n            | If { cond; then_; else_ } ->\n                let if_then =\n                  (string \"if\" ^//^ nest 2 (print#expr_at Expr_If_cond cond))\n                  ^/^ string \"then\"\n                  ^//^ (print#expr_at Expr_If_then then_ |> braces |> nest 1)\n                in\n                (match else_ with\n                | None -> if_then\n                | Some else_ ->\n                    if_then ^^ break 1 ^^ string \"else\" ^^ space\n                    ^^ (print#expr_at Expr_If_else else_ |> iblock braces))\n                |> wrap_parens\n            | Match { scrutinee; arms } ->\n                let header =\n                  string \"match\" ^^ space\n                  ^^ (print#expr_at Expr_Match_scrutinee scrutinee\n                     |> terminate space |> iblock Fn.id)\n                  |> group\n                in\n                let arms =\n                  separate_map hardline\n                    (print#arm >> group >> nest 2\n                    >> precede (bar ^^ space)\n                    >> group)\n                    arms\n                in\n                header ^^ iblock braces arms\n            | Let { monadic; lhs; rhs; body } ->\n                (Option.map\n                   ~f:(fun monad -> print#expr_monadic_let ~monad)\n                   monadic\n                |> Option.value ~default:print#expr_let)\n                  ~lhs ~rhs body\n                |> wrap_parens\n            | Literal l -> print#literal Expr l\n            | Block { e; safety_mode; _ } -> (\n                let e = lbrace ^/^ nest 2 (print#expr ctx e) ^/^ rbrace in\n                match 
safety_mode with\n                | Safe -> e\n                | Unsafe _ -> !^\"unsafe \" ^^ e)\n            | Array l ->\n                separate_map comma (print#expr_at Expr_Array) l\n                |> group |> brackets\n            | LocalVar i -> print#local_ident i\n            | GlobalVar (`Concrete i) -> print#concrete_ident i\n            | GlobalVar (`Primitive p) -> print#primitive_ident p\n            | GlobalVar (`TupleCons 0) -> print#expr_construct_tuple []\n            | GlobalVar\n                (`TupleType _ | `TupleField _ | `Projector _ | `TupleCons _) ->\n                print#assertion_failure \"GlobalVar\"\n            | Assign { lhs; e; _ } ->\n                group (print#lhs lhs)\n                ^^ space ^^ equals\n                ^/^ group (print#expr_at Expr_Assign e)\n                ^^ semi\n            | Loop _ -> string \"todo loop;\"\n            | Break _ -> string \"todo break;\"\n            | Return _ -> string \"todo return;\"\n            | Continue _ -> string \"todo continue;\"\n            | QuestionMark { e; _ } ->\n                print#expr_at Expr_QuestionMark e |> terminate qmark\n            | Borrow { kind; e; _ } ->\n                string (match kind with Mut _ -> \"&mut \" | _ -> \"&\")\n                ^^ print#expr_at Expr_Borrow e\n            | AddressOf _ -> string \"todo address of;\"\n            | Closure { params; body; _ } ->\n                separate_map comma (print#pat_at Expr_Closure_param) params\n                |> group |> enclose bar bar\n                |> terminate (print#expr_at Expr_Closure_body body |> group)\n                |> wrap_parens\n            | Ascription { e; typ } ->\n                print#expr_at Expr_Ascription_e e\n                ^^ string \"as\"\n                ^/^ print#ty_at Expr_Ascription_typ typ\n                |> wrap_parens\n            | MacroInvokation _ -> print#assertion_failure \"MacroInvokation\"\n            | EffectAction _ -> print#assertion_failure 
\"EffectAction\"\n            | Quote quote -> print#quote quote\n            | App _ | Construct _ -> super#expr' ctx e\n\n        method quote { contents; _ } =\n          List.map\n            ~f:(function\n              | Verbatim code -> string code\n              | Expr e -> print#expr_at Expr_Quote e\n              | Pattern p -> print#pat_at Expr_Quote p\n              | Typ p -> print#ty_at Expr_Quote p)\n            contents\n          |> concat\n\n        method expr_monadic_let :\n            monad:supported_monads * F.monadic_binding ->\n            lhs:pat ->\n            rhs:expr ->\n            expr fn =\n          fun ~monad:_ ~lhs ~rhs body -> print#expr_let ~lhs ~rhs body\n\n        method expr_let : lhs:pat -> rhs:expr -> expr fn =\n          fun ~lhs ~rhs body ->\n            string \"let\"\n            ^/^ iblock Fn.id (print#pat_at Expr_Let_lhs lhs)\n            ^/^ equals\n            ^/^ iblock Fn.id (print#expr_at Expr_Let_rhs rhs)\n            ^^ semi\n            ^/^ (print#expr_at Expr_Let_body body |> group)\n\n        method tuple_projection : size:int -> nth:int -> expr fn =\n          fun ~size:_ ~nth e ->\n            print#expr_at Expr_TupleProjection e\n            |> terminate (dot ^^ OCaml.int nth)\n\n        method field_projection : concrete_ident -> expr fn =\n          fun i e ->\n            print#expr_at Expr_FieldProjection e\n            |> terminate (dot ^^ print#name_of_concrete_ident i)\n\n        method expr_app : expr -> expr list -> generic_value list fn =\n          fun f args _generic_args ->\n            let args =\n              separate_map\n                (comma ^^ break 1)\n                (print#expr_at Expr_App_arg >> group)\n                args\n            in\n            let f = print#expr_at Expr_App_f f |> group in\n            f ^^ iblock parens args\n\n        method doc_construct_tuple : document list fn =\n          separate (comma ^^ break 1) >> iblock parens\n\n        method 
expr_construct_tuple : expr list fn =\n          List.map ~f:(print#expr_at Expr_ConstructTuple)\n          >> print#doc_construct_tuple\n\n        method pat_construct_tuple : pat list fn =\n          List.map ~f:(print#pat_at Pat_ConstructTuple)\n          >> print#doc_construct_tuple\n\n        method global_ident_projector : global_ident fn =\n          function\n          | `Projector (`Concrete i) | `Concrete i -> print#concrete_ident i\n          | _ ->\n              print#assertion_failure \"global_ident_projector: not a projector\"\n\n        method doc_construct_inductive :\n            is_record:bool ->\n            is_struct:bool ->\n            constructor:concrete_ident ->\n            base:document option ->\n            (global_ident * document) list fn =\n          fun ~is_record ~is_struct:_ ~constructor ~base:_ args ->\n            if is_record then\n              print#concrete_ident constructor\n              ^^ space\n              ^^ iblock parens\n                   (separate_map (break 0)\n                      (fun (field, body) ->\n                        (print#global_ident_projector field\n                        |> terminate comma |> group)\n                        ^^ colon ^^ space ^^ iblock Fn.id body)\n                      args)\n            else\n              print#concrete_ident constructor\n              ^^ space\n              ^^ iblock parens (separate_map (break 0) snd args)\n\n        method expr_construct_inductive :\n            is_record:bool ->\n            is_struct:bool ->\n            constructor:concrete_ident ->\n            base:(expr * F.construct_base) option ->\n            (global_ident * expr) list fn =\n          fun ~is_record ~is_struct ~constructor ~base ->\n            let base =\n              Option.map\n                ~f:(fst >> print#expr_at Expr_ConcreteInductive_base)\n                base\n            in\n            List.map ~f:(print#expr_at Expr_ConcreteInductive_field |> map_snd)\n           
 >> print#doc_construct_inductive ~is_record ~is_struct ~constructor\n                 ~base\n\n        method attr : attr fn = fun _ -> empty\n\n        method! pat' : par_state -> pat' fn =\n          fun ctx ->\n            let wrap_parens =\n              group\n              >>\n              match ctx with AlreadyPar -> Fn.id | NeedsPar -> iblock braces\n            in\n            function\n            | PWild -> underscore\n            | PAscription { typ; typ_span; pat } ->\n                print#pat_ascription ~typ ~typ_span pat |> wrap_parens\n            | PBinding { mut; mode; var; typ = _; subpat } -> (\n                let p =\n                  (match mode with ByRef _ -> string \"&\" | _ -> empty)\n                  ^^ (match mut with Mutable _ -> string \"mut \" | _ -> empty)\n                  ^^ print#local_ident var\n                in\n                match subpat with\n                | Some (subpat, _) ->\n                    p ^^ space ^^ at ^^ space\n                    ^^ print#pat_at Pat_PBinding_subpat subpat\n                    |> wrap_parens\n                | None -> p)\n            | PArray { args } ->\n                separate_map (break 0)\n                  (print#pat_at Pat_PArray >> terminate comma >> group)\n                  args\n                |> iblock brackets\n            | PDeref { subpat; _ } ->\n                ampersand ^^ print#pat_at Pat_PDeref subpat\n            | (PConstruct _ | PConstant _) as pat -> super#pat' ctx pat\n            | POr { subpats } ->\n                separate_map (bar ^^ break 1) (print#pat_at Pat_Or) subpats\n\n        method pat_ascription : typ:ty -> typ_span:span -> pat fn =\n          fun ~typ ~typ_span pat ->\n            print#pat_at Pat_Ascription_pat pat\n            ^^ colon\n            ^^ print#with_span ~span:typ_span (fun () ->\n                   print#ty_at Pat_Ascription_typ typ)\n\n        method expr_unwrapped : par_state -> expr fn =\n          fun ctx { e; _ } -> 
print#expr' ctx e\n\n        method param : param fn =\n          fun { pat; typ; typ_span; attrs } ->\n            let typ =\n              match typ_span with\n              | Some span ->\n                  print#with_span ~span (fun _ -> print#ty_at Param_typ typ)\n              | None -> print#ty_at Param_typ typ\n            in\n            print#attrs attrs ^^ print#pat_at Param_pat pat ^^ space ^^ colon\n            ^^ space ^^ typ\n\n        method item' : item' fn =\n          function\n          | Fn { name; generics; body; params; safety } ->\n              let params =\n                iblock parens\n                  (separate_map (comma ^^ break 1) print#param params)\n              in\n              let generics = print#generic_params generics.params in\n              let safety =\n                optional Base.Fn.id\n                  (match safety with\n                  | Safe -> None\n                  | Unsafe _ -> Some !^\"unsafe \")\n              in\n              safety ^^ !^\"fn\" ^^ space ^^ print#concrete_ident name ^^ generics\n              ^^ params\n              ^^ iblock braces (print#expr_at Item_Fn_body body)\n          | Quote { quote; _ } -> print#quote quote\n          | _ -> string \"item not implemented\"\n\n        method generic_param' : generic_param fn =\n          fun { ident; attrs; kind; _ } ->\n            let suffix =\n              match kind with\n              | GPLifetime _ -> space ^^ colon ^^ space ^^ string \"'unk\"\n              | GPType -> empty\n              | GPConst { typ } ->\n                  space ^^ colon ^^ space\n                  ^^ print#ty_at GenericParam_GPConst typ\n            in\n            let prefix =\n              match kind with\n              | GPConst _ -> string \"const\" ^^ space\n              | _ -> empty\n            in\n            let ident =\n              let name =\n                if String.(ident.name = \"_\") then \"Anonymous\" else ident.name\n              in\n      
        { ident with name }\n            in\n            prefix ^^ print#attrs attrs ^^ print#local_ident ident ^^ suffix\n\n        method generic_params : generic_param list fn =\n          separate_map comma print#generic_param >> group >> angles\n\n        (*Option.map ~f:(...) guard |> Option.value ~default:empty*)\n        method arm' : arm' fn =\n          fun { arm_pat; body; guard } ->\n            let pat = print#pat_at Arm_pat arm_pat |> group in\n            let body = print#expr_at Arm_body body in\n            let guard =\n              Option.map\n                ~f:(fun { guard = IfLet { lhs; rhs; _ }; _ } ->\n                  string \" if let \" ^^ print#pat_at Arm_pat lhs ^^ string \" = \"\n                  ^^ print#expr_at Arm_body rhs)\n                guard\n              |> Option.value ~default:empty\n            in\n            pat ^^ guard ^^ string \" => \" ^^ body ^^ comma\n      end\n  end\n\n  include Class\n\n  include Api (struct\n    type aux_info = unit\n\n    let new_print () = (new Class.print :> print_object)\n  end)\nend\n"
  },
  {
    "path": "engine/lib/deprecated_generic_printer/deprecated_generic_printer.mli",
    "content": "module Make (F : Features.T) (View : Concrete_ident.RENDER_API) : sig\n  open Deprecated_generic_printer_base.Make(F)\n  include API\n\n  class print : print_class\nend\n"
  },
  {
    "path": "engine/lib/deprecated_generic_printer/deprecated_generic_printer_base.ml",
    "content": "open! Prelude\nopen! Ast\nopen PPrint\n\n(** Generic printer for the {!module:Ast} ASTs. It uses the [PPrint] library,\n    and additionally computes {!Annotation.t}. *)\n\n(** Identifies a position in the AST. This is useful for figuring out wether we\n    should wrap a chunk of AST in parenthesis, or not, or for implementing\n    special treatment of some sub-trees if they occur in a certain context. *)\ntype ast_position =\n  | GenericValue_GType\n  | GenericValue_GConst\n  | Lhs_LhsArbitraryExpr\n  | Lhs_LhsArrayAccessor\n  | Ty_TArrow\n  | Ty_TRef\n  | Ty_Tuple\n  | Ty_TSlice\n  | Ty_TArray_typ\n  | Ty_TArray_length\n  | Expr_If_cond\n  | Expr_If_then\n  | Expr_If_else\n  | Expr_Array\n  | Expr_Assign\n  | Expr_Closure_param\n  | Expr_Closure_body\n  | Expr_Ascription_e\n  | Expr_Ascription_typ\n  | Expr_Let_lhs\n  | Expr_Let_rhs\n  | Expr_Let_body\n  | Expr_Quote\n  | Expr_Match_scrutinee\n  | Expr_QuestionMark\n  | Expr_Borrow\n  | Expr_TupleProjection\n  | Expr_ConstructTuple\n  | Expr_FieldProjection\n  | Expr_App_f\n  | Expr_App_arg\n  | Expr_ConcreteInductive_base\n  | Expr_ConcreteInductive_field\n  | Pat_PBinding_subpat\n  | Pat_PDeref\n  | Pat_PArray\n  | Pat_ConstructTuple\n  | Pat_ConcreteInductive\n  | Pat_Ascription_pat\n  | Pat_Ascription_typ\n  | Pat_Or\n  | Param_pat\n  | Param_typ\n  | GenericParam_GPType\n  | GenericParam_GPConst\n  | Arm_pat\n  | Arm_body\n  | Item_Fn_body\n[@@warning \"-37\"]\n\nmodule Annotation = struct\n  type loc = { line : int; col : int } [@@deriving show, yojson, eq]\n  type t = loc * span [@@deriving show, yojson, eq]\nend\n\ntype annot_str = string * Annotation.t list [@@deriving show, yojson, eq]\n\n(** When printing a chunk of AST, should we wrap parenthesis ({!NeedsPar}) or\n    not ({!AlreadyPar})? 
*)\ntype par_state = NeedsPar | AlreadyPar\n\n(** The context of a literal in the AST, does it appear in a pattern ({!Pat}) or\n    in an expression ({!Expr})?*)\ntype literal_ctx = Pat | Expr\n\nmodule Make (F : Features.T) = struct\n  module AST = Ast.Make (F)\n  module U = Ast_utils.Make (F)\n  open Ast.Make (F)\n\n  type 't fn = 't -> document\n\n  (** Raw generic printers base class. Those are useful for building a printer,\n      not for consuming printers. Consumers should use the {!module:Api}\n      functor. *)\n  class virtual print_base =\n    object (print)\n      val mutable current_span = Span.default\n      val mutable span_data : Annotation.t list = []\n      val mutable current_namespace : string list option = None\n      method get_span_data () = span_data\n\n      method with_span ~span f =\n        let prev_span = current_span in\n        current_span <- span;\n        let doc = f () |> print#spanned_doc |> custom in\n        current_span <- prev_span;\n        doc\n\n      method spanned_doc (doc : document) : custom =\n        let span = current_span in\n        object\n          method requirement : requirement = requirement doc\n\n          method pretty : output -> state -> int -> bool -> unit =\n            fun o s i b ->\n              span_data <-\n                ({ line = s.line; col = s.column }, span) :: span_data;\n              pretty o s i b doc\n\n          method compact : output -> unit = fun o -> compact o doc\n        end\n\n      method concrete_ident : concrete_ident fn =\n        fun id ->\n          let current_ns = print#get_current_namespace () in\n          let id_ns = print#namespace_of_concrete_ident id in\n          print#concrete_ident'\n            ~under_current_ns:\n              ([%equal: string list option] current_ns (Some id_ns))\n            id\n      (** Print a concrete identifier.\n\n          Differentiates between encounters of the identifier in its own\n          namespace or a foreign 
namespace.*)\n\n      method assertion_failure : 'any. string -> 'any =\n        fun details ->\n          let span = Span.to_thir current_span in\n          let kind = Types.AssertionFailure { details } in\n          let ctx = Diagnostics.Context.GenericPrinter print#printer_name in\n          Diagnostics.SpanFreeError.raise ~span\n            (Span.owner_hint current_span)\n            ctx kind\n\n      method set_current_namespace ns = current_namespace <- ns\n      method get_current_namespace () = current_namespace\n\n      (* `*_at` variants *)\n      method expr_at : ast_position -> expr fn = print#par_state >> print#expr\n      method ty_at : ast_position -> ty fn = print#par_state >> print#ty\n      method pat_at : ast_position -> pat fn = print#par_state >> print#pat\n\n      method pat : par_state -> pat fn =\n        fun ctx { p; span; _ } ->\n          print#with_span ~span (fun _ -> print#pat' ctx p)\n\n      method item_unwrapped : item fn = fun { v; _ } -> print#item' v\n\n      method generic_param : generic_param fn =\n        fun ({ span; _ } as p) ->\n          print#with_span ~span (fun _ -> print#generic_param' p)\n\n      method arm : arm fn =\n        fun { arm; span } -> print#with_span ~span (fun _ -> print#arm' arm)\n\n      method ty : par_state -> ty fn =\n        fun _ctx ty ->\n          match ty with\n          | TApp { ident = `Concrete ident; args } ->\n              print#ty_app ident args |> group\n          | TApp\n              {\n                ident =\n                  `Primitive _ | `TupleCons _ | `TupleField _ | `Projector _;\n                _;\n              } ->\n              print#assertion_failure \"TApp not concrete\"\n          | TApp { ident = `TupleType n; args } ->\n              let args =\n                List.filter_map\n                  ~f:(function GType t -> Some t | _ -> None)\n                  args\n              in\n              if [%equal: int] (List.length args) n |> not then\n                
print#assertion_failure \"malformed ty app tuple\";\n              print#ty_tuple n args\n          | TApp _ -> .\n          | _ ->\n              print#assertion_failure \"default ty is only implemented for TApp\"\n\n      method expr' : par_state -> expr' fn =\n        fun _ctx e ->\n          match e with\n          | App { f = { e = GlobalVar i; _ } as f; args; generic_args; _ } -> (\n              let expect_one_arg where =\n                match args with\n                | [ arg ] -> arg\n                | _ -> print#assertion_failure @@ \"Expected one arg at \" ^ where\n              in\n              match i with\n              | `Concrete _ | `Primitive _ -> print#expr_app f args generic_args\n              | `TupleType _ | `TupleCons _ | `TupleField _ ->\n                  print#assertion_failure \"App: unexpected tuple\"\n              | `Projector (`TupleField (nth, size)) ->\n                  let arg = expect_one_arg \"projector tuple field\" in\n                  print#tuple_projection ~size ~nth arg\n              | `Projector (`Concrete i) ->\n                  let arg = expect_one_arg \"projector concrete\" in\n                  print#field_projection i arg)\n          | App { f; args; generic_args; _ } ->\n              print#expr_app f args generic_args\n          | Construct { constructor; fields; base; is_record; is_struct } -> (\n              match constructor with\n              | `Concrete constructor ->\n                  print#expr_construct_inductive ~is_record ~is_struct\n                    ~constructor ~base fields\n              | `TupleCons _ ->\n                  List.map ~f:snd fields |> print#expr_construct_tuple\n              | `Primitive _ | `TupleType _ | `TupleField _ | `Projector _ ->\n                  print#assertion_failure \"Construct unexpected constructors\")\n          | App _ | Construct _ -> .\n          | _ ->\n              print#assertion_failure\n                \"default expr' is only implemented for App and 
Construct\"\n\n      method pat' : par_state -> pat' fn =\n        fun _ -> function\n          | PConstant { lit } -> print#literal Pat lit\n          | PConstruct { constructor; is_record; is_struct; fields } -> (\n              match constructor with\n              | `Concrete constructor ->\n                  print#doc_construct_inductive ~is_record ~is_struct\n                    ~constructor ~base:None\n                    (List.map\n                       ~f:(fun fp ->\n                         (fp.field, print#pat_at Pat_ConcreteInductive fp.pat))\n                       fields)\n              | `TupleCons _ ->\n                  List.map ~f:(fun fp -> fp.pat) fields\n                  |> print#pat_construct_tuple\n              | `Primitive _ | `TupleType _ | `TupleField _ | `Projector _ ->\n                  print#assertion_failure \"todo err\")\n          | _ ->\n              print#assertion_failure\n                \"default pat' is only implemented for PConstant and PConstruct\"\n\n      method expr : par_state -> expr fn =\n        fun ctx e ->\n          let span = e.span in\n          print#with_span ~span (fun _ ->\n              try print#expr_unwrapped ctx e\n              with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->\n                U.hax_failure_expr span e.typ (context, kind)\n                  (U.LiftToFullAst.expr e)\n                (* TODO: if the printer is extremely broken, this results in a stack overflow *)\n                |> print#expr ctx)\n\n      method item : item fn =\n        fun i ->\n          print#set_current_namespace\n            (print#namespace_of_concrete_ident i.ident |> Option.some);\n          try print#item_unwrapped i\n          with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->\n            let error = Diagnostics.pretty_print_context_kind context kind in\n            let cast_item : item -> Ast.Full.item = Stdlib.Obj.magic in\n            let ast = cast_item i |> Print_rust.pitem_str 
in\n            let msg = error ^ \"\\nLast available AST for this item:\\n\\n\" ^ ast in\n            (* TODO: if the printer is extremely broken, this results in a stack overflow *)\n            make_hax_error_item i.span i.ident msg |> print#item\n\n      method items : item list fn = separate_map (twice hardline) print#item\n      (** Print given list of items, separating them by two newlines each.*)\n\n      method attrs : attrs fn = separate_map hardline print#attr\n      (** Print given list of attributes, separating them by one newline each.*)\n    end\n\n  type print_object =\n    < printer_name : string\n    ; get_span_data : unit -> Annotation.t list\n    ; ty : par_state -> ty fn\n    ; pat : par_state -> pat fn\n    ; arm : arm fn\n    ; expr : par_state -> expr fn\n    ; item : item fn\n    ; items : item list fn >\n  (** In the end, an printer *object* should be of the type {!print_object}. *)\n\n  class type print_class = object\n    inherit print_base\n    method printer_name : string\n    method get_span_data : unit -> Annotation.t list\n\n    method namespace_of_concrete_ident : concrete_ident -> string list\n    (** The namespace a concrete identifier was defined in. *)\n\n    method par_state : ast_position -> par_state\n    method concrete_ident' : under_current_ns:bool -> concrete_ident fn\n    method concrete_ident : concrete_ident fn\n    method name_of_concrete_ident : concrete_ident fn\n    method mutability : 'a. 
'a mutability fn\n    method primitive_ident : primitive_ident fn\n    method local_ident : local_ident fn\n    method literal : literal_ctx -> literal fn\n    method generic_value : generic_value fn\n    method lhs : lhs fn\n    method ty_bool : document\n    method ty_char : document\n    method ty_str : document\n    method ty_int : int_kind fn\n    method ty_float : float_kind fn\n    method generic_values : generic_value list fn\n    method ty_app : concrete_ident -> generic_value list fn\n    method ty_tuple : int -> ty list fn\n    method ty : par_state -> ty fn\n    method expr' : par_state -> expr' fn\n\n    method expr_monadic_let :\n      monad:supported_monads * F.monadic_binding ->\n      lhs:pat ->\n      rhs:expr ->\n      expr fn\n\n    method expr_let : lhs:pat -> rhs:expr -> expr fn\n    method tuple_projection : size:int -> nth:int -> expr fn\n    method field_projection : concrete_ident -> expr fn\n    method expr_app : expr -> expr list -> generic_value list fn\n    method doc_construct_tuple : document list fn\n    method expr_construct_tuple : expr list fn\n    method pat_construct_tuple : pat list fn\n    method global_ident_projector : global_ident fn\n\n    method doc_construct_inductive :\n      is_record:bool ->\n      is_struct:bool ->\n      constructor:concrete_ident ->\n      base:document option ->\n      (global_ident * document) list fn\n\n    method expr_construct_inductive :\n      is_record:bool ->\n      is_struct:bool ->\n      constructor:concrete_ident ->\n      base:(expr * F.construct_base) option ->\n      (global_ident * expr) list fn\n\n    method attr : attr fn\n    method attrs : attrs fn\n    method pat' : par_state -> pat' fn\n    method pat_ascription : typ:ty -> typ_span:span -> pat fn\n    method pat : par_state -> pat fn\n    method expr_unwrapped : par_state -> expr fn\n    method param : param fn\n    method item' : item' fn\n    method item_unwrapped : item fn\n    method generic_param' : generic_param fn\n  
  method generic_param : generic_param fn\n    method generic_params : generic_param list fn\n    method arm' : arm' fn\n    method arm : arm fn\n    method expr : par_state -> expr fn\n    method item : item fn\n    method quote : quote fn\n    method items : item list fn\n  end\n\n  module type API = sig\n    type aux_info\n\n    val items : aux_info -> item list -> annot_str\n    val item : aux_info -> item -> annot_str\n    val expr : aux_info -> expr -> annot_str\n    val pat : aux_info -> pat -> annot_str\n    val ty : aux_info -> ty -> annot_str\n  end\n\n  module Api (NewPrint : sig\n    type aux_info\n\n    val new_print : aux_info -> print_object\n  end) =\n  struct\n    open NewPrint\n\n    let mk (f : print_object -> 'a -> PPrint.document) (aux : aux_info) (x : 'a)\n        : annot_str =\n      let printer = new_print aux in\n      let doc = f printer x in\n      let buf = Buffer.create 0 in\n      PPrint.ToBuffer.pretty 1.0 80 buf doc;\n      (Buffer.contents buf, printer#get_span_data ())\n\n    type aux_info = NewPrint.aux_info\n\n    let items : aux_info -> item list -> annot_str = mk (fun p -> p#items)\n    let item : aux_info -> item -> annot_str = mk (fun p -> p#item)\n    let expr : aux_info -> expr -> annot_str = mk (fun p -> p#expr AlreadyPar)\n    let pat : aux_info -> pat -> annot_str = mk (fun p -> p#pat AlreadyPar)\n    let ty : aux_info -> ty -> annot_str = mk (fun p -> p#ty AlreadyPar)\n  end\nend\n"
  },
  {
    "path": "engine/lib/diagnostics.ml",
    "content": "open! Prelude\nmodule T = Types\n\nmodule Backend = struct\n  type t = Coq | SSProve | FStar | EasyCrypt | ProVerif\n  [@@deriving show { with_path = false }, eq, yojson, compare, hash, sexp]\nend\n\nmodule Phase = struct\n  module Rejection = struct\n    type t =\n      | NotInBackendLang of Backend.t\n      | CoercionForUntypedPhase of string\n      | ArbitraryLhs\n      | Continue\n      | Break\n      | QuestionMark\n      | RawOrMutPointer\n      | EarlyExit\n      | AsPattern\n      | Dyn\n      | TraitItemDefault\n      | Unsafe\n    [@@deriving show { with_path = false }, eq, yojson, compare, hash, sexp]\n\n    let display = function\n      | NotInBackendLang backend -> \"not_in_\" ^ [%show: Backend.t] backend\n      | x -> [%show: t] x\n  end\n\n  (** All names for phases defined in `lib/phases_*` are generated automatically\n  *)\n  type%add_phase_names t = Identity | HoistSideEffects | Reject of Rejection.t\n  [@@deriving show { with_path = false }, eq, yojson, compare, hash, sexp]\n\n  let display = function\n    | Reject rejection -> \"reject_\" ^ Rejection.display rejection\n    | x -> [%show: t] x\nend\n\nmodule Context = struct\n  type t =\n    | Phase of Phase.t\n    | Backend of Backend.t\n    | ThirImport\n    | Dependencies\n    | DebugPrintRust\n    | GenericPrinter of string\n    | Other of string\n  [@@deriving show, eq, yojson, compare]\n\n  let display = function\n    | Phase p -> Phase.display p\n    | Backend backend -> [%show: Backend.t] backend ^ \" backend\"\n    | ThirImport -> \"AST import\"\n    | DebugPrintRust -> \"Rust debug printer\"\n    | Dependencies -> \"Dependenciy analysis\"\n    | GenericPrinter kind -> kind ^ \" generic printer\"\n    | Other s -> \"Other (\" ^ s ^ \")\"\nend\n\ntype kind = T.kind [@@deriving show, eq]\n\nlet compare_kind (a : kind) (b : kind) =\n  [%compare: string] ([%show: kind] a) ([%show: kind] b)\n\ntype thir_span = T.span [@@deriving show, eq]\n\nlet compare_thir_span (a : 
thir_span) (b : thir_span) =\n  [%compare: string] ([%show: thir_span] a) ([%show: thir_span] b)\n\ntype thir_def_id = T.def_id [@@deriving show, eq]\n\nlet compare_thir_def_id (a : thir_def_id) (b : thir_def_id) =\n  [%compare: string] ([%show: thir_def_id] a) ([%show: thir_def_id] b)\n\ntype t = {\n  context : Context.t;\n  kind : kind;\n  span : thir_span list;\n  owner_id : thir_def_id option;\n}\n[@@deriving show, eq, compare]\n\nlet to_thir_diagnostic (d : t) : Types.diagnostics =\n  {\n    kind = d.kind;\n    context = Context.display d.context;\n    span = d.span;\n    owner_id = d.owner_id;\n  }\n\n(** Ask `cargo-hax` to pretty print a diagnostic *)\nlet ask_diagnostic_pretty_print diag : string =\n  Hax_io.request (PrettyPrintDiagnostic diag)\n    ~expected:\"PrettyPrintedDiagnostic\" (function\n    | Types.PrettyPrintedDiagnostic s -> Some s\n    | _ -> None)\n\nlet pretty_print : t -> string =\n  to_thir_diagnostic >> ask_diagnostic_pretty_print\n\nlet pretty_print_context_kind : Context.t -> kind -> string =\n fun context kind ->\n  let span = Span.to_thir (Span.dummy ()) in\n  pretty_print { context; kind; span; owner_id = None }\n\nmodule Core : sig\n  val raise_fatal_error : 'never. t -> 'never\n  val report : t -> unit\n  val try_ : 'x. (unit -> 'x) -> t list * 'x option\n  val capture : 'a. 
(unit -> 'a) -> 'a * t list\nend = struct\n  (* a mutable state for collecting errors *)\n  let state = ref []\n  let report e = state := !state @ [ e ]\n\n  exception Error\n\n  let raise_fatal_error e =\n    report e;\n    raise Error\n\n  let try_ f =\n    let result = try Some (f ()) with Error -> None in\n    (!state, result)\n\n  let capture (type a) (f : unit -> a) : a * t list =\n    let previous_state = !state in\n    state := [];\n    let result =\n      let x = f () in\n      (x, !state)\n    in\n    state := previous_state;\n    result\nend\n\ninclude Core\n\nlet failure ~context ~span kind =\n  Core.raise_fatal_error\n    { context; kind; span = Span.to_thir span; owner_id = Span.owner_hint span }\n\nmodule SpanFreeError : sig\n  type t = private Data of Context.t * kind [@@deriving show]\n\n  exception Exn of t\n\n  val payload : t -> Context.t * kind\n\n  val raise :\n    ?span:T.span list -> Types.def_id option -> Context.t -> kind -> 'a\nend = struct\n  type t = Data of Context.t * kind [@@deriving show]\n\n  exception Exn of t\n\n  let payload (Data (ctx, kind)) = (ctx, kind)\n\n  let raise_without_reporting (ctx : Context.t) (kind : kind) =\n    raise (Exn (Data (ctx, kind)))\n\n  let raise ?(span = []) (owner_id : Types.def_id option) (ctx : Context.t)\n      (kind : kind) =\n    report { span; kind; context = ctx; owner_id };\n    raise_without_reporting ctx kind\nend\n"
  },
  {
    "path": "engine/lib/dune",
    "content": "(library\n (public_name hax-engine)\n (name hax_engine)\n ;  (modules\n ;   types\n ;   concrete_ident\n ;   concrete_ident_view\n ;   concrete_ident_defid\n ;   prelude\n ;   concrete_ident_view_types\n ;   concrete_ident_generated\n ;   concrete_ident_render_sig\n ;   local_ident\n ;   thir_simple_types\n ;   concrete_ident_fresh_ns\n ;   utils)\n (libraries\n  yojson\n  non_empty_list\n  pprint\n  ppx_deriving_yojson.runtime\n  cmdliner\n  fstar_surface_ast\n  base\n  core\n  logs\n  re\n  sourcemaps\n  ocamlgraph)\n (preprocessor_deps\n  ; `ppx_inline` is used on the `Subtype` module, thus we need it at PPX time\n  (file subtype.ml)\n  (source_tree phases))\n (preprocess\n  (pps\n   ppx_yojson_conv\n   ppx_sexp_conv\n   ppx_compare\n   ppx_hash\n   ppx_deriving.show\n   ppx_deriving.eq\n   ppx_string\n   ppx_inline\n   ppx_phases_index\n   ppx_generate_features\n   ppx_functor_application\n   ppx_enumerate\n   ppx_deriving.map\n   ppx_matches)))\n\n(include_subdirs unqualified)\n\n(rule\n (alias universe-hash)\n (target universe-hash)\n (deps\n  (:universe_hash ../utils/universe-hash.sh)\n  (universe))\n (action\n  (with-stdout-to\n   universe-hash\n   (run bash %{universe_hash}))))\n\n(rule\n (target ast_visitors.ml)\n (deps\n  (:ast ast.ml))\n (action\n  (with-stdout-to\n   ast_visitors.ml\n   (with-stdin-from\n    %{ast}\n    (run generate_from_ast visitors)))))\n\n(rule\n (target generated_generic_printer_base.ml)\n (deps\n  (:ast ast.ml))\n (action\n  (with-stdout-to\n   generated_generic_printer_base.ml\n   (with-stdin-from\n    %{ast}\n    (run generate_from_ast printer)))))\n\n(rule\n (target ast_destruct_generated.ml)\n (deps\n  (:ast ast.ml))\n (action\n  (with-stdout-to\n   ast_destruct_generated.ml\n   (with-stdin-from\n    %{ast}\n    (run generate_from_ast ast_destruct)))))\n\n(rule\n (target ast_builder_generated.ml)\n (deps\n  (:ast ast.ml))\n (action\n  (with-stdout-to\n   ast_builder_generated.ml\n   (with-stdin-from\n    
%{ast}\n    (run generate_from_ast ast_builder)))))\n\n(rule\n (target concrete_ident_generated.ml)\n (deps\n  (alias universe-hash)\n  (env_var HAX_ENGINE_NAMES_EXTRACT_BINARY))\n (action\n  (with-stdout-to\n   concrete_ident_generated.ml\n   (run %{env:HAX_ENGINE_NAMES_EXTRACT_BINARY=hax-engine-names-extract}))))\n\n(rule\n (target types.ml)\n (deps\n  (alias universe-hash)\n  (env_var HAX_JSON_SCHEMA_EXPORTER_BINARY)\n  (:ocaml_of_json_schema\n   ../utils/ocaml_of_json_schema/ocaml_of_json_schema.js))\n (action\n  (with-stdout-to\n   types.ml\n   (pipe-stdout\n    (run %{env:HAX_JSON_SCHEMA_EXPORTER_BINARY=hax-export-json-schemas})\n    (run node %{ocaml_of_json_schema} - -)))))\n\n(env\n (_\n  (flags\n   (:standard -g -warn-error \"+A\" -w \"-17-30-56-32\"))))\n"
  },
  {
    "path": "engine/lib/export_ast.ml",
    "content": "open! Prelude\n\nlet deprecated_node s = failwith (\"Deprecated AST node:\" ^ s)\n\ntype missing_type = unit\n\nmodule B = Rust_engine_types\n\nlet to_error_node (span : Ast.span) (payload : string) : Types.error_node =\n  try [%of_yojson: Types.error_node] (Yojson.Safe.from_string payload)\n  with _ ->\n    let diagnostic : Types.diagnostic =\n      let node : Types.fragment = Unknown \"OCamlEngineError\" in\n      let info : B.diagnostic_info =\n        {\n          context = Import;\n          kind = OcamlEngineErrorPayload payload;\n          span = Span.to_rust_ast_span span;\n        }\n      in\n      { node; info }\n    in\n\n    { fragment = Unknown \"OCamlEngineError\"; diagnostics = [ diagnostic ] }\n\nmodule Make (FA : Features.T) = struct\n  open Ast\n  module A = Ast.Make (FA)\n  module U = Ast_utils.Make (FA)\n\n  let dsafety_kind (safety : A.safety_kind) : B.safety_kind =\n    match safety with Safe -> B.Safe | Unsafe _ -> B.Unsafe\n\n  let rec dty_no_error (span : Ast.span) (ty : A.ty) : B.ty =\n    Newtypety\n      (match ty with\n      | TBool -> Primitive Bool\n      | TChar -> Primitive Char\n      | TInt k -> Primitive (Int (dint_kind k))\n      | TFloat k -> Primitive (Float (dfloat_kind k))\n      | TStr -> Primitive Str\n      | TApp { ident; args } ->\n          B.App\n            {\n              head = dglobal_ident ident;\n              args = List.map ~f:(dgeneric_value span) args;\n            }\n      | TArray { typ; length } ->\n          Array { ty = dty span typ; length = dexpr length }\n      | TSlice { witness = _; ty } -> Slice (dty span ty)\n      | TRef { witness = _; typ; mut; region = _ } ->\n          Ref\n            {\n              inner = dty span typ;\n              mutable' = (match mut with Mutable _ -> true | _ -> false);\n              region = B.EmptyStructregion2;\n            }\n      | TParam local_ident -> Param (dlocal_ident local_ident)\n      | TArrow (inputs, output) ->\n          Arrow\n  
          { inputs = List.map ~f:(dty span) inputs; output = dty span output }\n      | TAssociatedType { impl; item } ->\n          AssociatedType\n            { impl_ = dimpl_expr span impl; item = dconcrete_ident item }\n      | TOpaque ident -> Opaque (dconcrete_ident ident)\n      | TRawPointer { witness = _ } -> RawPointer\n      | TDyn { witness = _; goals } ->\n          Dyn (List.map ~f:(ddyn_trait_goal span) goals))\n\n  and dty (span : Ast.span) (ty : A.ty) : B.ty =\n    match U.HaxFailure.Destruct.ty ty with\n    | Some s -> Newtypety (Error (to_error_node span s))\n    | None -> dty_no_error span ty\n\n  and dint_kind (ik : int_kind) : B.int_kind =\n    let size : B.int_size =\n      match ik.size with\n      | S8 -> S8\n      | S16 -> S16\n      | S32 -> S32\n      | S64 -> S64\n      | S128 -> S128\n      | SSize -> SSize\n    in\n    {\n      size;\n      signedness =\n        (match ik.signedness with Signed -> Signed | Unsigned -> Unsigned);\n    }\n\n  and dfloat_kind (fk : float_kind) : B.float_kind =\n    match fk with F16 -> F16 | F32 -> F32 | F64 -> F64 | F128 -> F128\n\n  and dglobal_ident (gi : global_ident) : B.global_id =\n    let concrete c : B.global_id =\n      Types.Newtypeglobal_id (B.Concrete (Concrete_ident.to_rust_ast c))\n    in\n    let of_name n = concrete (Concrete_ident.of_name ~value:true n) in\n    match gi with\n    | `Concrete c | `Projector (`Concrete c) -> concrete c\n    | `TupleType length ->\n        Types.Newtypeglobal_id (Tuple (Type { length = Int.to_string length }))\n    | `TupleCons length ->\n        Types.Newtypeglobal_id\n          (Tuple (Constructor { length = Int.to_string length }))\n    | `Projector (`TupleField (field, length)) | `TupleField (field, length) ->\n        let field, length = (Int.to_string field, Int.to_string length) in\n        Types.Newtypeglobal_id (Tuple (Field { length; field }))\n    | `Primitive Deref -> of_name Rust_primitives__hax__deref_op\n    | `Primitive Cast -> of_name 
Rust_primitives__hax__cast_op\n    | `Primitive (LogicalOp And) -> of_name Rust_primitives__hax__logical_op_and\n    | `Primitive (LogicalOp Or) -> of_name Rust_primitives__hax__logical_op_or\n\n  and dlocal_ident (li : local_ident) : B.local_id =\n    Newtypelocal_id (Newtypesymbol li.name)\n\n  and dconcrete_ident (gi : concrete_ident) : B.global_id =\n    dglobal_ident (`Concrete gi)\n\n  and ddyn_trait_goal span (r : A.dyn_trait_goal) : B.dyn_trait_goal =\n    {\n      non_self_args = List.map ~f:(dgeneric_value span) r.non_self_args;\n      trait_ = dconcrete_ident r.trait;\n    }\n\n  and dtrait_goal span (r : A.trait_goal) : B.trait_goal =\n    {\n      args = List.map ~f:(dgeneric_value span) r.args;\n      trait_ = dconcrete_ident r.trait;\n    }\n\n  and dimpl_ident span (r : A.impl_ident) : B.impl_ident =\n    { goal = dtrait_goal span r.goal; name = Newtypesymbol r.name }\n\n  and dprojection_predicate span (r : A.projection_predicate) :\n      B.projection_predicate =\n    {\n      assoc_item = dconcrete_ident r.assoc_item;\n      impl_ = dimpl_expr span r.impl;\n      ty = dty span r.typ;\n    }\n\n  and dimpl_expr span (i : A.impl_expr) : B.impl_expr =\n    { goal = dtrait_goal span i.goal; kind = dimpl_expr_kind span i.kind }\n\n  and dimpl_expr_kind span (i : A.impl_expr_kind) : B.impl_expr_kind =\n    match i with\n    | A.Self -> B.Self_\n    | A.Concrete tr -> B.Concrete (dtrait_goal span tr)\n    | A.LocalBound { id } -> B.LocalBound { id = B.Newtypesymbol id }\n    | A.Parent { impl; ident } ->\n        B.Parent\n          { impl_ = dimpl_expr span impl; ident = dimpl_ident span ident }\n    | A.Projection { impl; item; ident } ->\n        B.Projection\n          {\n            impl_ = dimpl_expr span impl;\n            item = dconcrete_ident item;\n            ident = dimpl_ident span ident;\n          }\n    | A.ImplApp { impl; args } ->\n        B.ImplApp\n          {\n            impl_ = dimpl_expr span impl;\n            args = List.map 
~f:(dimpl_expr span) args;\n          }\n    | A.Dyn -> B.Dyn\n    | A.Builtin tr -> B.Builtin (dtrait_goal span tr)\n\n  and dgeneric_value span (generic_value : A.generic_value) : B.generic_value =\n    match generic_value with\n    | GLifetime _ -> B.Lifetime\n    | GType t -> B.Ty (dty span t)\n    | GConst e -> B.Expr (dexpr e)\n\n  and dborrow_kind (borrow_kind : A.borrow_kind) : B.borrow_kind =\n    match borrow_kind with\n    | Shared -> B.Shared\n    | Unique -> B.Unique\n    | Mut _witness -> B.Mut\n\n  and dmetadata ?(attrs = []) (span : span) : B.metadata =\n    { attributes = List.map ~f:dattr attrs; span = dspan span }\n\n  and dattr (a : attr) : B.attribute =\n    let kind : B.attribute_kind =\n      match Attr_payloads.payload a with\n      | Some (payload, _) -> B.Hax payload\n      | None -> (\n          match a.kind with\n          | Tool { path; tokens } -> B.Tool { path; tokens }\n          | DocComment { kind; body } ->\n              let kind =\n                match kind with DCKLine -> B.Line | DCKBlock -> Block\n              in\n              B.DocComment { kind; body })\n    in\n    { kind; span = dspan a.span }\n\n  and dpat (p : A.pat) : B.pat =\n    let kind : B.pat_kind =\n      match U.HaxFailure.Destruct.pat p with\n      | Some s -> Error (to_error_node p.span s)\n      | _ -> dpat' p.span p.p\n    in\n    { kind; meta = dmetadata p.span; ty = dty p.span p.typ }\n\n  and dpat' span (pat : A.pat') : B.pat_kind =\n    match pat with\n    | PWild -> Wild\n    | PAscription { typ; typ_span; pat } ->\n        Ascription\n          { pat = dpat pat; ty = { span = dspan typ_span; ty = dty span typ } }\n    | PConstruct { constructor; is_record; is_struct; fields } ->\n        Construct\n          {\n            constructor = dglobal_ident constructor;\n            is_record;\n            is_struct;\n            fields =\n              List.map\n                ~f:(fun { field; pat } -> (dglobal_ident field, dpat pat))\n                
fields;\n          }\n    | POr { subpats } -> Or { sub_pats = List.map ~f:dpat subpats }\n    | PArray { args } -> Array { args = List.map ~f:dpat args }\n    | PDeref { subpat; witness = _ } -> Deref { sub_pat = dpat subpat }\n    | PConstant { lit } -> Constant { lit = dliteral lit }\n    | PBinding { mut; mode; var; typ = _; subpat } ->\n        let mutable' : bool = match mut with Mutable _ -> true | _ -> false in\n        Binding\n          {\n            mutable';\n            mode = dbinding_mode mode;\n            var = dlocal_ident var;\n            sub_pat = Option.map ~f:(fun (p, _) -> dpat p) subpat;\n          }\n\n  and dspan : span -> B.span = Span.to_rust_ast_span\n\n  and dbinding_mode (binding_mode : A.binding_mode) : B.binding_mode =\n    match binding_mode with\n    | ByValue -> B.ByValue\n    | ByRef (kind, _witness) -> B.ByRef (dborrow_kind kind)\n\n  and dexpr (e : A.expr) : B.expr =\n    let kind : B.expr_kind =\n      match U.HaxFailure.Destruct.expr e with\n      | Some (s, _) -> Error (to_error_node e.span s)\n      | None -> dexpr' e.span e.e\n    in\n    { kind; ty = dty e.span e.typ; meta = dmetadata e.span }\n\n  and dexpr' span (expr : A.expr') : B.expr_kind =\n    match expr with\n    | If { cond; then_; else_ } ->\n        If\n          {\n            condition = dexpr cond;\n            then' = dexpr then_;\n            else_ = Option.map ~f:dexpr else_;\n          }\n    | App { f; args; generic_args; bounds_impls; trait } ->\n        App\n          {\n            head = dexpr f;\n            args = List.map ~f:dexpr args;\n            generic_args = List.map ~f:(dgeneric_value span) generic_args;\n            bounds_impls = List.map ~f:(dimpl_expr span) bounds_impls;\n            trait_ =\n              Option.map\n                ~f:(fun (impl, args) ->\n                  (dimpl_expr span impl, List.map ~f:(dgeneric_value span) args))\n                trait;\n          }\n    | Literal lit -> Literal (dliteral lit)\n    | 
Array exprs -> Array (List.map ~f:dexpr exprs)\n    | Construct { constructor; is_record; is_struct; fields; base } ->\n        Construct\n          {\n            constructor = dglobal_ident constructor;\n            fields =\n              List.map ~f:(fun (id, e) -> (dglobal_ident id, dexpr e)) fields;\n            base = Option.map ~f:(fun (e, _) -> dexpr e) base;\n            is_record;\n            is_struct;\n          }\n    | Match { scrutinee; arms } ->\n        Match { scrutinee = dexpr scrutinee; arms = List.map ~f:darm arms }\n    | Let { monadic = _; lhs; rhs; body } ->\n        Let { lhs = dpat lhs; rhs = dexpr rhs; body = dexpr body }\n    | Block { e; safety_mode; witness = _ } ->\n        Block { body = dexpr e; safety_mode = dsafety_kind safety_mode }\n    | LocalVar id -> LocalId (dlocal_ident id)\n    | GlobalVar id -> GlobalId (dglobal_ident id)\n    | Ascription { e; typ } -> Ascription { e = dexpr e; ty = dty span typ }\n    | MacroInvokation _ -> deprecated_node \"MacroInvokation\"\n    | Assign { lhs; e; witness = _ } ->\n        Assign { lhs = dlhs span lhs; value = dexpr e }\n    | Loop { body; kind; state; control_flow; label; witness = _ } ->\n        Loop\n          {\n            body = dexpr body;\n            kind = dloop_kind span kind;\n            state = Option.map ~f:dloop_state state;\n            control_flow =\n              Option.map ~f:(fun (k, _) -> dcontrol_flow_kind k) control_flow;\n            label = Option.map ~f:(fun s -> B.Newtypesymbol s) label;\n          }\n    | Break { e; acc; label; witness = _ } ->\n        Break\n          {\n            value = dexpr e;\n            label = Option.map ~f:(fun s -> B.Newtypesymbol s) label;\n            state = Option.map ~f:(fst >> dexpr) acc;\n          }\n    | Return { e; witness = _ } -> Return { value = dexpr e }\n    | QuestionMark _ -> deprecated_node \"QuestionMark\"\n    | Continue { acc; label; witness = _ } ->\n        Continue\n          {\n            label 
= Option.map ~f:(fun s -> B.Newtypesymbol s) label;\n            state = Option.map ~f:(fst >> dexpr) acc;\n          }\n    | Borrow { kind; e; witness = _ } ->\n        Borrow\n          {\n            inner = dexpr e;\n            mutable' = (match kind with Mut _ -> true | _ -> false);\n          }\n    | AddressOf { mut; e; witness = _ } ->\n        AddressOf\n          {\n            inner = dexpr e;\n            mutable' = (match mut with Mutable _ -> true | _ -> false);\n          }\n    | Closure { params; body; captures } ->\n        Closure\n          {\n            params = List.map ~f:dpat params;\n            body = dexpr body;\n            captures = List.map ~f:dexpr captures;\n          }\n    | EffectAction _ -> deprecated_node \"EffectAction\"\n    | Quote q -> Quote { contents = dquote span q }\n\n  and dcontrol_flow_kind (cfk : A.cf_kind) : B.control_flow_kind =\n    match cfk with BreakOnly -> B.BreakOnly | BreakOrReturn -> B.BreakOrReturn\n\n  and dliteral (l : Ast.literal) : B.literal =\n    match l with\n    | String s -> B.String (Newtypesymbol s)\n    | Char c -> B.Char c\n    | Int { value; negative; kind } ->\n        B.Int { value = Newtypesymbol value; negative; kind = dint_kind kind }\n    | Float { value; negative; kind } ->\n        B.Float\n          { value = Newtypesymbol value; negative; kind = dfloat_kind kind }\n    | Bool b -> B.Bool b\n\n  and dquote span ({ contents; _ } : A.quote) : B.quote =\n    let f = function\n      | A.Verbatim code -> B.Verbatim code\n      | A.Expr e -> B.Expr (dexpr e)\n      | A.Pattern p -> B.Pattern (dpat p)\n      | A.Typ t -> B.Ty (dty span t)\n    in\n    Newtypequote (List.map ~f contents)\n\n  and ditem_quote_origin (iqo : item_quote_origin) : B.item_quote_origin =\n    {\n      item_ident = dconcrete_ident iqo.item_ident;\n      item_kind =\n        (match iqo.item_kind with\n        | `Fn -> B.Fn\n        | `TyAlias -> B.TyAlias\n        | `Type -> B.Type\n        | `IMacroInvokation -> 
B.MacroInvocation\n        | `Trait -> B.Trait\n        | `Impl -> B.Impl\n        | `Alias -> B.Alias\n        | `Use -> B.Use\n        | `Quote -> B.Quote\n        | `HaxError -> B.HaxError\n        | `NotImplementedYet -> B.NotImplementedYet);\n      position =\n        (match iqo.position with\n        | `Before -> B.Before\n        | `After -> B.After\n        | `Replace -> B.Replace);\n    }\n\n  and dloop_kind span (k : A.loop_kind) : B.loop_kind =\n    match k with\n    | A.UnconditionalLoop -> B.UnconditionalLoop\n    | A.WhileLoop { condition; witness = _ } ->\n        B.WhileLoop { condition = dexpr condition }\n    | A.ForLoop { it; pat; witness = _ } ->\n        B.ForLoop { iterator = dexpr it; pat = dpat pat }\n    | A.ForIndexLoop { start; end_; var; var_typ; witness = _ } ->\n        B.ForIndexLoop\n          {\n            start = dexpr start;\n            end' = dexpr end_;\n            var = dlocal_ident var;\n            var_ty = dty span var_typ;\n          }\n\n  and dloop_state (s : A.loop_state) : B.loop_state =\n    { body_pat = dpat s.bpat; init = dexpr s.init }\n\n  and darm (a : A.arm) : B.arm =\n    {\n      body = dexpr a.arm.body;\n      guard = Option.map ~f:dguard a.arm.guard;\n      meta = dmetadata a.span;\n      pat = dpat a.arm.arm_pat;\n    }\n\n  and dguard (a : A.guard) : B.guard =\n    { kind = dguard' a.guard; meta = dmetadata a.span }\n\n  and dguard' (guard : A.guard') : B.guard_kind =\n    match guard with\n    | IfLet { lhs; rhs; witness = _ } ->\n        B.IfLet { lhs = dpat lhs; rhs = dexpr rhs }\n\n  and dlhs span (lhs : A.lhs) : B.lhs =\n    match lhs with\n    | A.LhsLocalVar { var; typ } ->\n        B.LocalVar { var = dlocal_ident var; ty = dty span typ }\n    | A.LhsVecRef { e; typ; _ } ->\n        B.VecRef { e = dlhs span e; ty = dty span typ }\n    | A.LhsArbitraryExpr { e; witness = _ } -> B.ArbitraryExpr (dexpr e)\n    | A.LhsFieldAccessor { e; field; typ; witness = _ } ->\n        B.FieldAccessor\n          
{ e = dlhs span e; field = dglobal_ident field; ty = dty span typ }\n    | A.LhsArrayAccessor { e; index; typ; witness = _ } ->\n        B.ArrayAccessor\n          { e = dlhs span e; index = dexpr index; ty = dty span typ }\n\n  let dgeneric_param ({ ident; span; attrs; kind } : A.generic_param) :\n      B.generic_param =\n    let kind : B.generic_param_kind =\n      match kind with\n      | GPLifetime { witness = _ } -> Lifetime\n      | GPType -> Type\n      | GPConst { typ } -> Const { ty = dty span typ }\n    in\n    { ident = dlocal_ident ident; meta = dmetadata ~attrs span; kind }\n\n  let dgeneric_constraint span (generic_constraint : A.generic_constraint) :\n      B.generic_constraint =\n    match generic_constraint with\n    | GCLifetime (lf, _witness) -> Lifetime lf\n    | GCType impl_ident -> TypeClass (dimpl_ident span impl_ident)\n    | GCProjection projection ->\n        Equality (dprojection_predicate span projection)\n\n  let dgenerics span (g : A.generics) : B.generics =\n    {\n      constraints = List.map ~f:(dgeneric_constraint span) g.constraints;\n      params = List.map ~f:dgeneric_param g.params;\n    }\n\n  let dparam span (p : A.param) : B.param =\n    {\n      attributes = List.map ~f:dattr p.attrs;\n      pat = dpat p.pat;\n      ty = dty span p.typ;\n      ty_span = Option.map ~f:dspan p.typ_span;\n    }\n\n  let dvariant span (v : A.variant) : B.variant =\n    let dattrs = List.map ~f:dattr in\n    {\n      arguments =\n        List.map\n          ~f:(fun (id, t, a) -> (dconcrete_ident id, dty span t, dattrs a))\n          v.arguments;\n      attributes = dattrs v.attrs;\n      is_record = v.is_record;\n      name = dconcrete_ident v.name;\n    }\n\n  let dtrait_item' span (ti : A.trait_item') : B.trait_item_kind =\n    match ti with\n    | TIType idents -> Type (List.map ~f:(dimpl_ident span) idents)\n    | TIFn t -> Fn (dty span t)\n    | TIDefault { params; body; witness = _ } ->\n        Default { params = List.map ~f:(dparam span) 
params; body = dexpr body }\n\n  let dtrait_item (ti : A.trait_item) : B.trait_item =\n    {\n      generics = dgenerics ti.ti_span ti.ti_generics;\n      ident = dconcrete_ident ti.ti_ident;\n      kind = dtrait_item' ti.ti_span ti.ti_v;\n      meta = dmetadata ~attrs:ti.ti_attrs ti.ti_span;\n    }\n\n  let dimpl_item' span (ii : A.impl_item') : B.impl_item_kind =\n    match ii with\n    | IIType { typ; parent_bounds } ->\n        Type\n          {\n            ty = dty span typ;\n            parent_bounds =\n              List.map ~f:(dimpl_expr span *** dimpl_ident span) parent_bounds;\n          }\n    | IIFn { body; params } ->\n        Fn { body = dexpr body; params = List.map ~f:(dparam span) params }\n\n  let dimpl_item (ii : A.impl_item) : B.impl_item =\n    {\n      generics = dgenerics ii.ii_span ii.ii_generics;\n      ident = dconcrete_ident ii.ii_ident;\n      kind = dimpl_item' ii.ii_span ii.ii_v;\n      meta = dmetadata ~attrs:ii.ii_attrs ii.ii_span;\n    }\n\n  let ditem' (span : Ast.span) (item : A.item') : B.item_kind =\n    match item with\n    | A.Fn { name; generics; body; params; safety } ->\n        B.Fn\n          {\n            name = dconcrete_ident name;\n            generics = dgenerics span generics;\n            body = dexpr body;\n            params = List.map ~f:(dparam span) params;\n            safety = dsafety_kind safety;\n          }\n    | A.Type { name; generics; variants; is_struct } ->\n        B.Type\n          {\n            name = dconcrete_ident name;\n            generics = dgenerics span generics;\n            variants = List.map ~f:(dvariant span) variants;\n            is_struct;\n          }\n    | A.TyAlias { name; generics; ty } ->\n        B.TyAlias\n          {\n            name = dconcrete_ident name;\n            generics = dgenerics span generics;\n            ty = dty span ty;\n          }\n    | A.IMacroInvokation _ -> deprecated_node \"IMacroInvokation\"\n    | A.Trait { name; generics; items; safety } 
->\n        B.Trait\n          {\n            name = dconcrete_ident name;\n            generics = dgenerics span generics;\n            items = List.map ~f:dtrait_item items;\n            safety = dsafety_kind safety;\n          }\n    | A.Impl\n        {\n          generics;\n          self_ty;\n          of_trait = trait_id, trait_generics;\n          items;\n          parent_bounds;\n          _;\n        } ->\n        B.Impl\n          {\n            generics = dgenerics span generics;\n            self_ty = dty span self_ty;\n            of_trait =\n              ( dconcrete_ident trait_id,\n                List.map ~f:(dgeneric_value span) trait_generics );\n            items = List.map ~f:dimpl_item items;\n            parent_bounds =\n              List.map\n                ~f:(fun (impl, ident) ->\n                  (dimpl_expr span impl, dimpl_ident span ident))\n                parent_bounds;\n          }\n    | A.Alias { name; item } ->\n        B.Alias { name = dconcrete_ident name; item = dconcrete_ident item }\n    | A.Use { path; is_external; rename } -> B.Use { path; is_external; rename }\n    | A.Quote { quote; origin } ->\n        B.Quote\n          { quote = dquote span quote; origin = ditem_quote_origin origin }\n    | A.NotImplementedYet -> B.NotImplementedYet\n    | A.HaxError s -> Error (to_error_node span s)\n\n  let ditem (i : A.item) : B.item list =\n    [\n      {\n        ident = dconcrete_ident i.ident;\n        kind = ditem' i.span i.v;\n        meta = dmetadata ~attrs:i.attrs i.span;\n      };\n    ]\nend\n"
  },
  {
    "path": "engine/lib/feature_gate.ml",
    "content": "open! Prelude\n\nmodule DefaultSubtype = struct\n  type error = Err of Span.t [@@deriving show, yojson, eq]\n\n  exception E of error\n\n  let reject (type a b) : Span.t -> a -> b = fun span _ -> raise @@ E (Err span)\n\n  include Features.SUBTYPE.Id\n\n  let explain : error -> Features.Enumeration.t -> string =\n   fun _ feat ->\n    \"a node of kind [\"\n    ^ [%show: Features.Enumeration.t] feat\n    ^ \"] have been found in the AST\"\nend\n\nmodule Make\n    (FA : Features.T)\n    (FB : Features.T)\n    (S0 : sig\n            include Features.SUBTYPE.T\n\n            type error [@@deriving show, yojson, eq]\n\n            exception E of error\n\n            val explain : error -> Features.Enumeration.t -> string\n            val metadata : Phase_utils.Metadata.t\n          end\n          with module A = FA\n           and module B = FB) =\nstruct\n  let metadata = S0.metadata\n\n  module S =\n    Features.SUBTYPE.Map\n      (S0)\n      (struct\n        let map (type a b) (f : Span.t -> a -> b)\n            (feature_kind : Features.Enumeration.t) (span : Span.t) (x : a) : b\n            =\n          try f span x\n          with S0.E err ->\n            let thir_span = Span.to_thir span in\n            let kind : Diagnostics.kind =\n              ExplicitRejection\n                { reason = S0.explain err feature_kind; issue_id = None }\n            in\n            let context : Diagnostics.Context.t =\n              Phase S0.metadata.current_phase\n            in\n            Diagnostics.SpanFreeError.raise ~span:thir_span\n              (Span.owner_hint span) context kind\n      end)\n\n  include Subtype.Make (FA) (FB) (S)\n  module FA = FA\nend\n"
  },
  {
    "path": "engine/lib/features.ml",
    "content": "[%%declare_features\nloop,\nfor_loop,\nfor_index_loop,\nwhile_loop,\nstate_passing_loop,\nfold_like_loop,\ncontinue,\nbreak,\nmutable_variable,\nmutable_reference,\nmutable_pointer,\nreference,\nslice,\nraw_pointer,\nearly_exit,\nquestion_mark,\nmacro,\nas_pattern,\nnontrivial_lhs,\narbitrary_lhs,\nlifetime,\nconstruct_base,\nmonadic_action,\nmonadic_binding,\nquote,\nblock,\ndyn,\nmatch_guard,\ntrait_item_default,\nunsafe]\n\nmodule Full = On\n\nmodule Rust = struct\n  include On\n  include Off.While_loop\n  include Off.For_loop\n  include Off.For_index_loop\n  include Off.Question_mark\n  include Off.Monadic_action\n  include Off.Monadic_binding\n  include Off.State_passing_loop\n  include Off.Fold_like_loop\n  include Off.Quote\nend\n\nmodule _ = struct\n  module _ : T = Full\n  module _ : T = Rust\nend\n"
  },
  {
    "path": "engine/lib/generic_printer/generic_printer.ml",
    "content": "open! Prelude\nopen! Ast\nopen! PPrint\nmodule LazyDoc = Generated_generic_printer_base.LazyDoc\nopen LazyDoc\n\nmodule Annotation = struct\n  type loc = { line : int; col : int } [@@deriving show, yojson, eq]\n  type t = loc * span [@@deriving show, yojson, eq]\n\n  let compare ((a, _) : t) ((b, _) : t) : int =\n    let line = Int.compare a.line b.line in\n    if Int.equal line 0 then Int.compare a.col b.col else line\n\n  (** Converts a list of annotation and a string to a list of annotated string\n  *)\n  let split_with_string (s : string) (annots : t list) =\n    let lines_position =\n      String.to_list s\n      |> List.filter_mapi ~f:(fun i ch ->\n             match ch with '\\n' -> Some i | _ -> None)\n      |> List.to_array |> Array.get\n    in\n    let annots = List.sort ~compare annots in\n    let init = ({ line = 0; col = 0 }, None) in\n    let slices =\n      List.folding_map\n        ~f:(fun (start, start_span) (end_, end_span) ->\n          let span = Option.value ~default:end_span start_span in\n          ((end_, Some end_span), (span, start, end_)))\n        ~init annots\n    in\n    List.map slices ~f:(fun (span, start, end_) ->\n        let pos = lines_position start.line + start.col in\n        let len = lines_position end_.line + end_.col - pos in\n        (span, String.sub s ~pos ~len))\n\n  let to_mapping ((loc, span) : t) : Sourcemaps.Source_maps.mapping option =\n    let real_path (x : Types.file_name) =\n      match x with\n      | Real (LocalPath p) | Real (Remapped { local_path = Some p; _ }) ->\n          Some p\n      | _ -> None\n    in\n    let loc_to_loc ({ line; col } : loc) : Sourcemaps.Location.t =\n      { line; col }\n    in\n    let to_loc ({ col; line } : Types.loc) : loc =\n      { col = Int.of_string col; line = Int.of_string line - 1 }\n    in\n    let* span =\n      Span.to_thir span\n      |> List.find ~f:(fun (s : Types.span) ->\n             real_path s.filename |> Option.is_some)\n    in\n    let* 
src_filename = real_path span.filename in\n    let src_start = to_loc span.lo |> loc_to_loc in\n    let src_end = to_loc span.hi |> loc_to_loc in\n    let dst_start = loc_to_loc loc in\n    Some\n      Sourcemaps.Source_maps.\n        {\n          src = { start = src_start; end_ = Some src_end };\n          gen = { start = dst_start; end_ = None };\n          source = src_filename;\n          name = None;\n        }\nend\n\nmodule AnnotatedString = struct\n  type t = string * Annotation.t list [@@deriving show, yojson, eq]\n\n  let to_string = fst\n\n  let to_spanned_strings ((s, annots) : t) : (Ast.span * string) list =\n    Annotation.split_with_string s annots\n\n  (** Lifts a string to an annotated list *)\n  let pure (s : string) : t = (s, [])\n\n  (** Concatenate two annotated strings *)\n  let concat (x : t) (y : t) : t =\n    let (xs, xl), (ys, yl) = (x, y) in\n    let last_x =\n      let lines = String.split ~on:'\\n' xs in\n      let last_line = List.last lines |> Option.value ~default:\"\" in\n      let col, line = (String.length last_line, List.length lines) in\n      Annotation.{ col; line }\n    in\n    let yl =\n      let f ({ line; col } : Annotation.loc) : Annotation.loc =\n        {\n          line = line + last_x.line;\n          col = (match col with 0 -> col + last_x.col | _ -> col);\n        }\n      in\n      List.map ~f:(f *** Fn.id) yl\n    in\n    (xs ^ ys, xl @ yl)\n\n  let to_sourcemap : t -> Types.source_map =\n    snd >> List.filter_map ~f:Annotation.to_mapping >> Sourcemaps.Source_maps.mk\n    >>\n    fun ({ mappings; sourceRoot; sources; sourcesContent; names; version; file } :\n          Sourcemaps.Source_maps.t)\n    ->\n    Types.\n      { mappings; sourceRoot; sources; sourcesContent; names; version; file }\nend\n\n(** Helper class that brings imperative span *)\nclass span_helper : object\n  method span_data : Annotation.t list\n  (** Get the span annotation accumulated while printing *)\n\n  method with_span : span:span -> 
(unit -> document) -> document\n  (** Runs the printer `f` under a node of span `span` *)\n\n  method current_span : span\n  (** Get the current span *)\nend =\n  object (self)\n    val mutable current_span = Span.default\n    val mutable span_data : Annotation.t list = []\n    method span_data = span_data\n    method current_span = current_span\n\n    method with_span ~(span : span) (f : unit -> document) : document =\n      let prev_span = current_span in\n      current_span <- span;\n      let doc = f () |> self#spanned_doc |> custom in\n      current_span <- prev_span;\n      doc\n\n    method private spanned_doc (doc : document) : custom =\n      let span = current_span in\n      object\n        method requirement : requirement = requirement doc\n\n        method pretty : output -> state -> int -> bool -> unit =\n          fun o s i b ->\n            span_data <- ({ line = s.line; col = s.column }, span) :: span_data;\n            pretty o s i b doc\n\n        method compact : output -> unit = fun o -> compact o doc\n      end\n  end\n\nmodule Make (F : Features.T) = struct\n  module AST = Ast.Make (F)\n  open Ast.Make (F)\n  module Gen = Generated_generic_printer_base.Make (F)\n\n  type printer = (Annotation.t list, PPrint.document) Gen.object_type\n  type finalized_printer = (unit, string * Annotation.t list) Gen.object_type\n\n  let finalize (new_printer : unit -> printer) : finalized_printer =\n    Gen.map (fun apply ->\n        let printer = new_printer () in\n        let doc = apply printer in\n        let buf = Buffer.create 0 in\n        PPrint.ToBuffer.pretty 1.0 80 buf doc;\n        (Buffer.contents buf, printer#span_data))\n\n  class virtual base =\n    object (self)\n      inherit Gen.base as super\n      inherit span_helper\n      val mutable current_namespace : string list option = None\n\n      method private catch_exn (handle : string -> document)\n          (f : unit -> document) : document =\n        self#catch_exn'\n          (fun context 
kind ->\n            Diagnostics.pretty_print_context_kind context kind |> handle)\n          f\n\n      method private catch_exn'\n          (handle : Diagnostics.Context.t -> Diagnostics.kind -> document)\n          (f : unit -> document) : document =\n        try f ()\n        with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->\n          handle context kind\n\n      (** {2:specialize-expr Printer settings} *)\n\n      method virtual printer_name : string\n      (** Mark a path as unreachable *)\n\n      val concrete_ident_view : (module Concrete_ident.RENDER_API) =\n        (module Concrete_ident.DefaultViewAPI)\n      (** The concrete ident view to be used *)\n\n      (** {2:specialize-expr Utility functions} *)\n\n      method assertion_failure : 'any. string -> 'any =\n        fun details ->\n          let span = Span.to_thir self#current_span in\n          let kind = Types.AssertionFailure { details } in\n          let ctx = Diagnostics.Context.GenericPrinter self#printer_name in\n          Diagnostics.SpanFreeError.raise ~span\n            (Span.owner_hint self#current_span)\n            ctx kind\n      (** An assertion failed *)\n\n      method unreachable : 'any. 
unit -> 'any =\n        self#assertion_failure \"Unreachable\"\n      (** Mark a path as unreachable *)\n\n      method local_ident (id : local_ident) : document =\n        let module View = (val concrete_ident_view) in\n        View.local_ident\n          (match String.chop_prefix ~prefix:\"impl \" id.name with\n          | Some _ ->\n              let name = \"impl_\" ^ Int.to_string ([%hash: string] id.name) in\n              { id with name }\n          | _ -> id)\n        |> string\n      (** {2:specialize-expr Printers for special types} *)\n\n      method concrete_ident ~local (id : Concrete_ident_render_sig.rendered) :\n          document =\n        string\n          (if local then id.name\n           else\n             String.concat ~sep:self#module_path_separator\n               (id.path @ [ id.name ]))\n      (** [concrete_ident ~local id] prints a name without path if [local] is\n          true, otherwise it prints the full path, separated by\n          `module_path_separator`. *)\n\n      method quote ~contents ~witness:_ : document =\n        List.map ~f:(fun doc -> doc#p) contents |> concat\n\n      method quote_content_Verbatim v = string v\n      method quote_content_Expr e = e#p\n      method quote_content_Pattern p = p#p\n      method quote_content_Typ t = t#p\n\n      (** {2:specialize-expr Specialized printers for [expr]} *)\n\n      method virtual expr'_App_constant\n          : super:expr ->\n            constant:concrete_ident lazy_doc ->\n            generics:generic_value lazy_doc list ->\n            document\n      (** [expr'_App_constant ~super ~constant ~generics] prints the constant\n          [e] with generics [generics]. [super] is the unspecialized [expr]. 
*)\n\n      method virtual expr'_App_application\n          : super:expr ->\n            f:expr lazy_doc ->\n            args:expr lazy_doc list ->\n            generics:generic_value lazy_doc list ->\n            document\n      (** [expr'_App_application ~super ~f ~args ~generics] prints the function\n          application [e<...generics>(...args)]. [super] is the unspecialized\n          [expr]. *)\n\n      method virtual expr'_App_tuple_projection\n          : super:expr -> size:int -> nth:int -> e:expr lazy_doc -> document\n      (** [expr'_App_tuple_projection ~super ~size ~nth ~e] prints the\n          projection of the [nth] component of the tuple [e] of size [size].\n          [super] is the unspecialized [expr]. *)\n\n      method virtual expr'_App_field_projection\n          : super:expr ->\n            field:concrete_ident lazy_doc ->\n            e:expr lazy_doc ->\n            document\n      (** [expr'_App_field_projection ~super ~field ~e] prints the projection of\n          the field [field] in the expression [e]. [super] is the unspecialized\n          [expr]. *)\n\n      method virtual expr'_Construct_inductive\n          : super:expr ->\n            constructor:concrete_ident lazy_doc ->\n            is_record:bool ->\n            is_struct:bool ->\n            fields:(global_ident lazy_doc * expr lazy_doc) list ->\n            base:(expr lazy_doc * F.construct_base) lazy_doc option ->\n            document\n      (** [expr'_Construct_inductive ~super ~is_record ~is_struct ~constructor\n           ~base ~fields] prints the construction of an inductive with base\n          [base] and fields [fields]. [super] is the unspecialized [expr]. 
TODO\n          doc is_record is_struct *)\n\n      method virtual expr'_Construct_tuple\n          : super:expr -> components:expr lazy_doc list -> document\n\n      method virtual expr'_GlobalVar_concrete\n          : super:expr -> concrete_ident lazy_doc -> document\n\n      method virtual expr'_GlobalVar_primitive\n          : super:expr -> primitive_ident -> document\n\n      (** {2:specialize-pat Specialized printers for [pat]} *)\n\n      method virtual pat'_PConstruct_inductive\n          : super:pat ->\n            constructor:concrete_ident lazy_doc ->\n            is_record:bool ->\n            is_struct:bool ->\n            fields:(global_ident lazy_doc * pat lazy_doc) list ->\n            document\n\n      method virtual pat'_PConstruct_tuple\n          : super:pat -> components:pat lazy_doc list -> document\n\n      (** {2:specialize-lhs Specialized printers for [lhs]} *)\n\n      method virtual lhs_LhsFieldAccessor_field\n          : e:lhs lazy_doc ->\n            typ:ty lazy_doc ->\n            field:concrete_ident lazy_doc ->\n            witness:F.nontrivial_lhs ->\n            document\n\n      method virtual lhs_LhsFieldAccessor_tuple\n          : e:lhs lazy_doc ->\n            typ:ty lazy_doc ->\n            nth:int ->\n            size:int ->\n            witness:F.nontrivial_lhs ->\n            document\n\n      (** {2:specialize-ty Specialized printers for [ty]} *)\n\n      method virtual ty_TApp_tuple : types:ty list -> document\n      (** [ty_TApp_tuple ~types] prints a tuple type with compounds types\n          [types]. *)\n\n      method virtual ty_TApp_application\n          : typ:concrete_ident lazy_doc ->\n            generics:generic_value lazy_doc list ->\n            document\n      (** [ty_TApp_application ~typ ~generics] prints the type\n          [typ<...generics>]. 
*)\n\n      (** {2:specialize-ty Specialized printers for [item]} *)\n\n      method virtual item'_Type_struct\n          : super:item ->\n            type_name:concrete_ident lazy_doc ->\n            constructor_name:concrete_ident lazy_doc ->\n            generics:generics lazy_doc ->\n            tuple_struct:bool ->\n            arguments:\n              (concrete_ident lazy_doc * ty lazy_doc * attr list lazy_doc) list ->\n            document\n      (** [item'_Type_struct ~super ~type_name ~constructor_name ~generics\n           ~tuple_struct ~arguments] prints the struct definition\n          [struct name<generics> arguments]. `tuple_struct` says whether we are\n          dealing with a tuple struct (e.g. [struct Foo(T1, T2)]) or a named\n          struct (e.g. [struct Foo {field: T1, other: T2}])?\n\n          `type_name` is the identifier of the type itself, while\n          `constructor_name` is the identifier of the constructor of the struct.\n          Depending on the naming policy, those can be rendered as the same name\n          or not. *)\n\n      method virtual item'_Type_enum\n          : super:item ->\n            name:concrete_ident lazy_doc ->\n            generics:generics lazy_doc ->\n            variants:variant lazy_doc list ->\n            document\n      (** [item'_Type_enum ~super ~name ~generics ~variants] prints the enum\n          type [enum name<generics> { ... }]. *)\n\n      method virtual item'_Enum_Variant\n          : name:concrete_ident lazy_doc ->\n            arguments:\n              (concrete_ident lazy_doc * ty lazy_doc * attrs lazy_doc) list ->\n            is_record:bool ->\n            attrs:attrs lazy_doc ->\n            document\n      (** [item'_Enum_Variant] prints a variant of an enum. 
*)\n\n      (** {2:common-nodes Printers for common nodes} *)\n\n      method virtual common_array : document list -> document\n      (** [common_array values] is a default for printing array-like nodes:\n          array patterns, array expressions. *)\n\n      (** {2:defaults Default printers} **)\n\n      method module_path_separator = \"::\"\n      (** [module_path_separator] is the default separator for paths. `::` by\n          default *)\n\n      method pat'_PArray ~super:_ ~args =\n        List.map ~f:(fun arg -> arg#p) args |> self#common_array\n\n      method expr'_Array ~super:_ args =\n        List.map ~f:(fun arg -> arg#p) args |> self#common_array\n\n      method pat'_POr ~super:_ ~subpats =\n        List.map ~f:(fun subpat -> subpat#p) subpats\n        |> separate (break 1 ^^ char '|' ^^ space)\n\n      (**/**)\n      (* This section is about defining or overriding\n         `_do_not_override_` methods. This is internal logic, whence this\n         is excluded from documentation (with the nice and user friendly\n         `(**/**)` ocamldoc syntax) *)\n\n      method _do_not_override_lhs_LhsFieldAccessor ~e ~typ ~field ~witness =\n        let field =\n          match field with\n          | `Projector field -> field\n          | _ ->\n              self#assertion_failure\n              @@ \"LhsFieldAccessor: field not a [`Projector] \"\n        in\n        match field with\n        | `TupleField (nth, size) ->\n            self#lhs_LhsFieldAccessor_tuple ~e ~typ ~nth ~size ~witness\n        | `Concrete field ->\n            let field : concrete_ident lazy_doc =\n              self#_do_not_override_lazy_of_concrete_ident\n                AstPos_lhs_LhsFieldAccessor_field field\n            in\n            self#lhs_LhsFieldAccessor_field ~e ~typ ~field ~witness\n\n      method _do_not_override_expr'_App ~super ~f ~args ~generic_args\n          ~bounds_impls ~trait =\n        let _ = (super, f, args, generic_args, bounds_impls, trait) in\n        match 
f#v with\n        | { e = GlobalVar i; _ } -> (\n            let expect_one_arg where =\n              match args with\n              | [ arg ] -> arg\n              | _ -> self#assertion_failure @@ \"Expected one arg at \" ^ where\n            in\n            match i with\n            | `Concrete _ | `Primitive _ -> (\n                match (args, i) with\n                | [], `Concrete i ->\n                    let constant =\n                      self#_do_not_override_lazy_of_concrete_ident\n                        AstPos_expr'_App_f i\n                    in\n                    self#expr'_App_constant ~super ~constant\n                      ~generics:generic_args\n                | [], _ -> self#assertion_failure \"Primitive app of arity 0\"\n                | _ ->\n                    self#expr'_App_application ~super ~f ~args\n                      ~generics:generic_args)\n            | `TupleType _ | `TupleCons _ | `TupleField _ ->\n                self#assertion_failure \"App: unexpected tuple\"\n            | `Projector (`TupleField (nth, size)) ->\n                let e = expect_one_arg \"projector tuple field\" in\n                self#expr'_App_tuple_projection ~super ~size ~nth ~e\n            | `Projector (`Concrete field) ->\n                let e = expect_one_arg \"projector concrete\" in\n                let field =\n                  self#_do_not_override_lazy_of_concrete_ident\n                    AstPos_expr'_App_f field\n                in\n                self#expr'_App_field_projection ~super ~field ~e)\n        | _ -> self#assertion_failure \"Primitive app of arity 0\"\n\n      method _do_not_override_expr'_Construct ~super ~constructor ~is_record\n          ~is_struct ~fields ~base =\n        match constructor with\n        | `Concrete constructor ->\n            let constructor =\n              self#_do_not_override_lazy_of_concrete_ident\n                AstPos_expr'_Construct_constructor constructor\n            in\n            let 
fields =\n              List.map\n                ~f:(fun field ->\n                  let name, expr = field#v in\n                  ( self#_do_not_override_lazy_of_global_ident\n                      Generated_generic_printer_base\n                      .AstPos_pat'_PConstruct_constructor name,\n                    expr ))\n                fields\n            in\n            self#expr'_Construct_inductive ~super ~constructor ~is_record\n              ~is_struct ~fields ~base\n        | `TupleCons _ ->\n            let components = List.map ~f:(fun field -> snd field#v) fields in\n            self#expr'_Construct_tuple ~super ~components\n        | `Primitive _ | `TupleType _ | `TupleField _ | `Projector _ ->\n            self#assertion_failure \"Construct unexpected constructors\"\n\n      method _do_not_override_expr'_GlobalVar ~super global_ident =\n        match global_ident with\n        | `Concrete concrete ->\n            let concrete =\n              self#_do_not_override_lazy_of_concrete_ident\n                AstPos_expr'_GlobalVar_x0 concrete\n            in\n            self#expr'_GlobalVar_concrete ~super concrete\n        | `Primitive primitive ->\n            self#expr'_GlobalVar_primitive ~super primitive\n        | `TupleCons 0 ->\n            self#_do_not_override_expr'_Construct ~super\n              ~constructor:global_ident ~is_record:false ~is_struct:false\n              ~fields:[] ~base:None\n        | _ ->\n            self#assertion_failure\n            @@ \"GlobalVar: expected a concrete or primitive global ident, got:\"\n            ^ [%show: global_ident] global_ident\n\n      method _do_not_override_pat'_PConstruct ~super ~constructor ~is_record\n          ~is_struct ~fields =\n        match constructor with\n        | `Concrete constructor ->\n            let constructor =\n              self#_do_not_override_lazy_of_concrete_ident\n                AstPos_pat'_PConstruct_constructor constructor\n            in\n            let fields 
=\n              List.map\n                ~f:(fun field ->\n                  let { field; pat } = field#v in\n                  let field =\n                    self#_do_not_override_lazy_of_global_ident\n                      Generated_generic_printer_base\n                      .AstPos_pat'_PConstruct_fields field\n                  in\n                  let pat =\n                    self#_do_not_override_lazy_of_pat\n                      Generated_generic_printer_base\n                      .AstPos_pat'_PConstruct_fields pat\n                  in\n                  (field, pat))\n                fields\n            in\n            self#pat'_PConstruct_inductive ~super ~constructor ~is_record\n              ~is_struct ~fields\n        | `TupleCons _ ->\n            let components =\n              List.map\n                ~f:(fun field ->\n                  self#_do_not_override_lazy_of_pat AstPos_field_pat__pat\n                    field#v.pat)\n                fields\n            in\n            self#pat'_PConstruct_tuple ~super ~components\n        | `Primitive _ | `TupleType _ | `TupleField _ | `Projector _ ->\n            self#assertion_failure \"Construct unexpected constructors\"\n\n      method _do_not_override_ty_TApp ~ident ~args =\n        match ident with\n        | `Concrete ident ->\n            let typ =\n              self#_do_not_override_lazy_of_concrete_ident AstPos_ty_TApp_args\n                ident\n            in\n            self#ty_TApp_application ~typ ~generics:args |> group\n        | `Primitive _ | `TupleCons _ | `TupleField _ | `Projector _ ->\n            self#assertion_failure \"TApp not concrete\"\n        | `TupleType size ->\n            let types =\n              List.filter_map\n                ~f:(fun garg ->\n                  match garg#v with GType t -> Some t | _ -> None)\n                args\n            in\n            if [%equal: int] (List.length args) size |> not then\n              self#assertion_failure 
\"malformed [ty.TApp] tuple\";\n            self#ty_TApp_tuple ~types\n\n      method _do_not_override_item'_Type ~super ~name ~generics ~variants\n          ~is_struct =\n        let generics, _, _ = generics#v in\n        if is_struct then\n          match variants with\n          | [ variant ] ->\n              let variant_arguments =\n                List.map\n                  ~f:(fun (ident, typ, attrs) ->\n                    ( self#_do_not_override_lazy_of_concrete_ident\n                        AstPos_variant__arguments ident,\n                      self#_do_not_override_lazy_of_ty AstPos_variant__arguments\n                        typ,\n                      self#_do_not_override_lazy_of_attrs AstPos_variant__attrs\n                        attrs ))\n                  variant#v.arguments\n              in\n              let constructor_name =\n                self#_do_not_override_lazy_of_concrete_ident\n                  AstPos_variant__name variant#v.name\n              in\n              self#item'_Type_struct ~super ~type_name:name ~constructor_name\n                ~generics ~tuple_struct:(not variant#v.is_record)\n                ~arguments:variant_arguments\n          | _ -> self#unreachable ()\n        else self#item'_Type_enum ~super ~name ~generics ~variants\n\n      method _do_not_override_variant :\n          name:concrete_ident lazy_doc ->\n          arguments:\n            (concrete_ident lazy_doc * ty lazy_doc * attrs lazy_doc) list ->\n          is_record:bool ->\n          attrs:attrs lazy_doc ->\n          document =\n        self#item'_Enum_Variant\n\n      method _do_not_override_lazy_of_local_ident ast_position\n          (id : local_ident) =\n        lazy_doc (fun (id : local_ident) -> self#local_ident id) ast_position id\n\n      method _do_not_override_lazy_of_concrete_ident ast_position\n          (id : concrete_ident) : concrete_ident lazy_doc =\n        lazy_doc\n          (fun (id : concrete_ident) ->\n            let module View 
= (val concrete_ident_view) in\n            let id = View.render id in\n            let ns_path = Option.value ~default:[] current_namespace in\n            let local = [%eq: string list] ns_path id.path in\n            self#concrete_ident ~local id)\n          ast_position id\n\n      method _do_not_override_lazy_of_global_ident ast_position\n          (id : global_ident) : global_ident lazy_doc =\n        lazy_doc\n          (fun (id : global_ident) ->\n            match id with\n            | `Concrete cid | `Projector (`Concrete cid) ->\n                (self#_do_not_override_lazy_of_concrete_ident ast_position cid)\n                  #p\n            | `TupleField (i, j) ->\n                (* self#lhs_LhsFieldAccessor_tuple ~e ~typ ~nth ~size ~witness *)\n                (* _do_not_override_lhs_LhsFieldAccessor *)\n                !^\"tuple_field\" ^^ space\n                ^^ !^(Int.to_string i)\n                ^^ space\n                ^^ !^(Int.to_string j)\n            | _ ->\n                self#assertion_failure\n                  (\"_do_not_override_lazy_of_global_ident: expected [`Concrete \\\n                    _] got [\"\n                  ^ [%show: global_ident] id\n                  ^ \"]\"))\n          ast_position id\n\n      method! 
_do_not_override_lazy_of_item ast_position (value : item) :\n          item lazy_doc =\n        let module View = (val concrete_ident_view) in\n        current_namespace <- Some (View.render value.ident).path;\n        super#_do_not_override_lazy_of_item ast_position value\n\n      method _do_not_override_lazy_of_generics ast_position (value : generics) :\n          (generics lazy_doc\n          * generic_param lazy_doc list\n          * generic_constraint lazy_doc list)\n          lazy_doc =\n        let params =\n          List.map\n            ~f:(fun x ->\n              self#_do_not_override_lazy_of_generic_param\n                AstPos_generics__params x)\n            value.params\n        in\n        let constraints =\n          List.map\n            ~f:(fun x ->\n              self#_do_not_override_lazy_of_generic_constraint\n                AstPos_generics__constraints x)\n            value.constraints\n        in\n        lazy_doc\n          (fun (lazy_doc, _, _) -> lazy_doc#p)\n          ast_position\n          ( lazy_doc\n              (fun (value : generics) ->\n                self#wrap_generics ast_position value\n                  (self#generics ~params ~constraints))\n              ast_position value,\n            params,\n            constraints )\n\n      (**/**)\n    end\nend\n"
  },
  {
    "path": "engine/lib/generic_printer/generic_printer_template.generate.js",
    "content": "#!/usr/bin/env node\n\n// This script regenerates `generic_printer_template.ml`\n\nconst {readFileSync, writeFileSync} = require('fs');\nconst {execSync} = require('child_process');\n\nconst GENERIC_PRINTER_DIR = `lib/generic_printer`;\nconst GENERIC_PRINTER = `${GENERIC_PRINTER_DIR}/generic_printer.ml`;\nconst TEMPLATE = `${GENERIC_PRINTER_DIR}/generic_printer_template.ml`;\n\n// Utility function to format an OCaml module\nlet fmt = path => execSync(`ocamlformat -i ${path}`);\n\n// Go to the root of the engine\nrequire('process').chdir(`${execSync('git rev-parse --show-toplevel').toString().trim()}/engine`);\n\n\n// Prints the signature of module `Generic_printer` (using `ocaml-print-intf`)\nlet mli = execSync(`dune exec -- ocaml-print-intf ${GENERIC_PRINTER}`).toString().split('class virtual base')[2];\n\nwriteFileSync('/tmp/exported.mli', mli);\n\n// Parses all \nlet virtual_methods = [...mli.matchAll(/^( +)method (private )?virtual +(?<name>.*) +:(?<sig>.*(\\n \\1.*)*)/gm)];\n\nlet output = [];\nfor(let v of virtual_methods) {\n    let {name, sig} = v.groups;\n    let out = sig.trim().split('->').slice(-1)[0].trim().split('.').slice(-1)[0];\n    let args = sig.trim().split('->').map((s, i) => {\n        let chunks = s.trim().split(':').reverse();\n        if(chunks.length > 2 || chunks.length == 0) {\n            throw \"Chunks: bad length\";\n        }\n        let [type, name] = chunks;\n        name = name ? '~'+name+':_' : '_x'+(i + 1);\n        return {type, name};\n    }).map(n => n.name).slice(0, -1).join(' ');\n    \n    output.push(`method ${name} ${args} = default_${out}_for \"${name}\"`);\n}\n\n{\n    let [before, _, after] = readFileSync(TEMPLATE).toString().split(/(?=\\(\\* (?:BEGIN|END) GENERATED \\*\\))/);\n    writeFileSync(TEMPLATE, before + '\\n(* BEGIN GENERATED *)\\n' + output.join('\\n') + '\\n' + after);\n}\n\nfmt(TEMPLATE);\n"
  },
  {
    "path": "engine/lib/generic_printer/generic_printer_template.ml",
    "content": "open! Prelude\nopen! Ast\nopen! PPrint\n\nmodule Make\n    (F : Features.T)\n    (Default : sig\n      val default : string -> string\n    end) =\nstruct\n  module AST = Ast.Make (F)\n  open Ast.Make (F)\n  module Base = Generic_printer.Make (F)\n  open PPrint\n\n  let default_string_for s = \"TODO: please implement the method `\" ^ s ^ \"`\"\n  let default_document_for = default_string_for >> string\n\n  class printer =\n    object\n      inherit Base.base\n\n      (* BEGIN GENERATED *)\n      method arm ~arm:_ ~span:_ = default_document_for \"arm\"\n\n      method arm' ~super:_ ~arm_pat:_ ~body:_ ~guard:_ =\n        default_document_for \"arm'\"\n\n      method attrs _x1 = default_document_for \"attrs\"\n\n      method binding_mode_ByRef _x1 _x2 =\n        default_document_for \"binding_mode_ByRef\"\n\n      method binding_mode_ByValue = default_document_for \"binding_mode_ByValue\"\n      method borrow_kind_Mut _x1 = default_document_for \"borrow_kind_Mut\"\n      method borrow_kind_Shared = default_document_for \"borrow_kind_Shared\"\n      method borrow_kind_Unique = default_document_for \"borrow_kind_Unique\"\n      method cf_kind_BreakOnly = default_document_for \"cf_kind_BreakOnly\"\n\n      method cf_kind_BreakOrReturn =\n        default_document_for \"cf_kind_BreakOrReturn\"\n\n      method common_array _x1 = default_document_for \"common_array\"\n\n      method dyn_trait_goal ~trait:_ ~non_self_args:_ =\n        default_document_for \"dyn_trait_goal\"\n\n      method error_expr _x1 = default_document_for \"error_expr\"\n      method error_item _x1 = default_document_for \"error_item\"\n      method error_pat _x1 = default_document_for \"error_pat\"\n      method expr ~e:_ ~span:_ ~typ:_ = default_document_for \"expr\"\n\n      method expr'_AddressOf ~super:_ ~mut:_ ~e:_ ~witness:_ =\n        default_document_for \"expr'_AddressOf\"\n\n      method expr'_App_application ~super:_ ~f:_ ~args:_ ~generics:_ =\n        default_document_for 
\"expr'_App_application\"\n\n      method expr'_App_constant ~super:_ ~constant:_ ~generics:_ =\n        default_document_for \"expr'_App_constant\"\n\n      method expr'_App_field_projection ~super:_ ~field:_ ~e:_ =\n        default_document_for \"expr'_App_field_projection\"\n\n      method expr'_App_tuple_projection ~super:_ ~size:_ ~nth:_ ~e:_ =\n        default_document_for \"expr'_App_tuple_projection\"\n\n      method expr'_Ascription ~super:_ ~e:_ ~typ:_ =\n        default_document_for \"expr'_Ascription\"\n\n      method expr'_Assign ~super:_ ~lhs:_ ~e:_ ~witness:_ =\n        default_document_for \"expr'_Assign\"\n\n      method expr'_Block ~super:_ ~e:_ ~safety_mode:_ ~witness:_ =\n        default_document_for \"expr'_Block\"\n\n      method expr'_Borrow ~super:_ ~kind:_ ~e:_ ~witness:_ =\n        default_document_for \"expr'_Borrow\"\n\n      method expr'_Break ~super:_ ~e:_ ~acc:_ ~label:_ ~witness:_ =\n        default_document_for \"expr'_Break\"\n\n      method expr'_Closure ~super:_ ~params:_ ~body:_ ~captures:_ =\n        default_document_for \"expr'_Closure\"\n\n      method expr'_Construct_inductive ~super:_ ~constructor:_ ~is_record:_\n          ~is_struct:_ ~fields:_ ~base:_ =\n        default_document_for \"expr'_Construct_inductive\"\n\n      method expr'_Construct_tuple ~super:_ ~components:_ =\n        default_document_for \"expr'_Construct_tuple\"\n\n      method expr'_Continue ~super:_ ~acc:_ ~label:_ ~witness:_ =\n        default_document_for \"expr'_Continue\"\n\n      method expr'_EffectAction ~super:_ ~action:_ ~argument:_ =\n        default_document_for \"expr'_EffectAction\"\n\n      method expr'_GlobalVar_concrete ~super:_ _x2 =\n        default_document_for \"expr'_GlobalVar_concrete\"\n\n      method expr'_GlobalVar_primitive ~super:_ _x2 =\n        default_document_for \"expr'_GlobalVar_primitive\"\n\n      method expr'_If ~super:_ ~cond:_ ~then_:_ ~else_:_ =\n        default_document_for \"expr'_If\"\n\n      method expr'_Let 
~super:_ ~monadic:_ ~lhs:_ ~rhs:_ ~body:_ =\n        default_document_for \"expr'_Let\"\n\n      method expr'_Literal ~super:_ _x2 = default_document_for \"expr'_Literal\"\n      method expr'_LocalVar ~super:_ _x2 = default_document_for \"expr'_LocalVar\"\n\n      method expr'_Loop ~super:_ ~body:_ ~kind:_ ~state:_ ~control_flow:_\n          ~label:_ ~witness:_ =\n        default_document_for \"expr'_Loop\"\n\n      method expr'_MacroInvokation ~super:_ ~macro:_ ~args:_ ~witness:_ =\n        default_document_for \"expr'_MacroInvokation\"\n\n      method expr'_Match ~super:_ ~scrutinee:_ ~arms:_ =\n        default_document_for \"expr'_Match\"\n\n      method expr'_QuestionMark ~super:_ ~e:_ ~return_typ:_ ~witness:_ =\n        default_document_for \"expr'_QuestionMark\"\n\n      method expr'_Quote ~super:_ _x2 = default_document_for \"expr'_Quote\"\n\n      method expr'_Return ~super:_ ~e:_ ~witness:_ =\n        default_document_for \"expr'_Return\"\n\n      method field_pat ~field:_ ~pat:_ = default_document_for \"field_pat\"\n\n      method generic_constraint_GCLifetime _x1 _x2 =\n        default_document_for \"generic_constraint_GCLifetime\"\n\n      method generic_constraint_GCProjection _x1 =\n        default_document_for \"generic_constraint_GCProjection\"\n\n      method generic_constraint_GCType _x1 =\n        default_document_for \"generic_constraint_GCType\"\n\n      method generic_param ~ident:_ ~span:_ ~attrs:_ ~kind:_ =\n        default_document_for \"generic_param\"\n\n      method generic_param_kind_GPConst ~typ:_ =\n        default_document_for \"generic_param_kind_GPConst\"\n\n      method generic_param_kind_GPLifetime ~witness:_ =\n        default_document_for \"generic_param_kind_GPLifetime\"\n\n      method generic_param_kind_GPType =\n        default_document_for \"generic_param_kind_GPType\"\n\n      method generic_value_GConst _x1 =\n        default_document_for \"generic_value_GConst\"\n\n      method generic_value_GLifetime ~lt:_ ~witness:_ 
=\n        default_document_for \"generic_value_GLifetime\"\n\n      method generic_value_GType _x1 =\n        default_document_for \"generic_value_GType\"\n\n      method generics ~params:_ ~constraints:_ = default_document_for \"generics\"\n      method guard ~guard:_ ~span:_ = default_document_for \"guard\"\n\n      method guard'_IfLet ~super:_ ~lhs:_ ~rhs:_ ~witness:_ =\n        default_document_for \"guard'_IfLet\"\n\n      method impl_expr ~kind:_ ~goal:_ = default_document_for \"impl_expr\"\n\n      method impl_expr_kind_Builtin _x1 =\n        default_document_for \"impl_expr_kind_Builtin\"\n\n      method impl_expr_kind_Concrete _x1 =\n        default_document_for \"impl_expr_kind_Concrete\"\n\n      method impl_expr_kind_Dyn = default_document_for \"impl_expr_kind_Dyn\"\n\n      method impl_expr_kind_ImplApp ~impl:_ ~args:_ =\n        default_document_for \"impl_expr_kind_ImplApp\"\n\n      method impl_expr_kind_LocalBound ~id:_ =\n        default_document_for \"impl_expr_kind_LocalBound\"\n\n      method impl_expr_kind_Parent ~impl:_ ~ident:_ =\n        default_document_for \"impl_expr_kind_Parent\"\n\n      method impl_expr_kind_Projection ~impl:_ ~item:_ ~ident:_ =\n        default_document_for \"impl_expr_kind_Projection\"\n\n      method impl_expr_kind_Self = default_document_for \"impl_expr_kind_Self\"\n      method impl_ident ~goal:_ ~name:_ = default_document_for \"impl_ident\"\n\n      method impl_item ~ii_span:_ ~ii_generics:_ ~ii_v:_ ~ii_ident:_ ~ii_attrs:_\n          =\n        default_document_for \"impl_item\"\n\n      method impl_item'_IIFn ~body:_ ~params:_ =\n        default_document_for \"impl_item'_IIFn\"\n\n      method impl_item'_IIType ~typ:_ ~parent_bounds:_ =\n        default_document_for \"impl_item'_IIType\"\n\n      method item ~v:_ ~span:_ ~ident:_ ~attrs:_ = default_document_for \"item\"\n\n      method item'_Alias ~super:_ ~name:_ ~item:_ =\n        default_document_for \"item'_Alias\"\n\n      method item'_Enum_Variant 
~name:_ ~arguments:_ ~is_record:_ ~attrs:_ =\n        default_document_for \"item'_Enum_Variant\"\n\n      method item'_Fn ~super:_ ~name:_ ~generics:_ ~body:_ ~params:_ ~safety:_ =\n        default_document_for \"item'_Fn\"\n\n      method item'_HaxError ~super:_ _x2 = default_document_for \"item'_HaxError\"\n\n      method item'_IMacroInvokation ~super:_ ~macro:_ ~argument:_ ~span:_\n          ~witness:_ =\n        default_document_for \"item'_IMacroInvokation\"\n\n      method item'_Impl ~super:_ ~generics:_ ~self_ty:_ ~of_trait:_ ~items:_\n          ~parent_bounds:_ ~safety:_ =\n        default_document_for \"item'_Impl\"\n\n      method item'_NotImplementedYet =\n        default_document_for \"item'_NotImplementedYet\"\n\n      method item'_Quote ~super:_ ~quote:_ ~origin:_ =\n        default_document_for \"item'_Quote\"\n\n      method item'_Trait ~super:_ ~name:_ ~generics:_ ~items:_ ~safety:_ =\n        default_document_for \"item'_Trait\"\n\n      method item'_TyAlias ~super:_ ~name:_ ~generics:_ ~ty:_ =\n        default_document_for \"item'_TyAlias\"\n\n      method item'_Type_enum ~super:_ ~name:_ ~generics:_ ~variants:_ =\n        default_document_for \"item'_Type_enum\"\n\n      method item'_Type_struct ~super:_ ~type_name:_ ~constructor_name:_\n          ~generics:_ ~tuple_struct:_ ~arguments:_ =\n        default_document_for \"item'_Type_struct\"\n\n      method item'_Use ~super:_ ~path:_ ~is_external:_ ~rename:_ =\n        default_document_for \"item'_Use\"\n\n      method item_quote_origin ~item_kind:_ ~item_ident:_ ~position:_ =\n        default_document_for \"item_quote_origin\"\n\n      method lhs_LhsArbitraryExpr ~e:_ ~witness:_ =\n        default_document_for \"lhs_LhsArbitraryExpr\"\n\n      method lhs_LhsArrayAccessor ~e:_ ~typ:_ ~index:_ ~witness:_ =\n        default_document_for \"lhs_LhsArrayAccessor\"\n\n      method lhs_LhsFieldAccessor_field ~e:_ ~typ:_ ~field:_ ~witness:_ =\n        default_document_for 
\"lhs_LhsFieldAccessor_field\"\n\n      method lhs_LhsFieldAccessor_tuple ~e:_ ~typ:_ ~nth:_ ~size:_ ~witness:_ =\n        default_document_for \"lhs_LhsFieldAccessor_tuple\"\n\n      method lhs_LhsLocalVar ~var:_ ~typ:_ =\n        default_document_for \"lhs_LhsLocalVar\"\n\n      method lhs_LhsVecRef ~e:_ ~typ:_ ~witness:_ =\n        default_document_for \"lhs_LhsVecRef\"\n\n      method literal_Bool _x1 = default_document_for \"literal_Bool\"\n      method literal_Char _x1 = default_document_for \"literal_Char\"\n\n      method literal_Float ~value:_ ~negative:_ ~kind:_ =\n        default_document_for \"literal_Float\"\n\n      method literal_Int ~value:_ ~negative:_ ~kind:_ =\n        default_document_for \"literal_Int\"\n\n      method literal_String _x1 = default_document_for \"literal_String\"\n\n      method loop_kind_ForIndexLoop ~start:_ ~end_:_ ~var:_ ~var_typ:_\n          ~witness:_ =\n        default_document_for \"loop_kind_ForIndexLoop\"\n\n      method loop_kind_ForLoop ~pat:_ ~it:_ ~witness:_ =\n        default_document_for \"loop_kind_ForLoop\"\n\n      method loop_kind_UnconditionalLoop =\n        default_document_for \"loop_kind_UnconditionalLoop\"\n\n      method loop_kind_WhileLoop ~condition:_ ~witness:_ =\n        default_document_for \"loop_kind_WhileLoop\"\n\n      method loop_state ~init:_ ~bpat:_ ~witness:_ =\n        default_document_for \"loop_state\"\n\n      method modul _x1 = default_document_for \"modul\"\n\n      method param ~pat:_ ~typ:_ ~typ_span:_ ~attrs:_ =\n        default_document_for \"param\"\n\n      method pat ~p:_ ~span:_ ~typ:_ = default_document_for \"pat\"\n\n      method pat'_PAscription ~super:_ ~typ:_ ~typ_span:_ ~pat:_ =\n        default_document_for \"pat'_PAscription\"\n\n      method pat'_PBinding ~super:_ ~mut:_ ~mode:_ ~var:_ ~typ:_ ~subpat:_ =\n        default_document_for \"pat'_PBinding\"\n\n      method pat'_PConstant ~super:_ ~lit:_ =\n        default_document_for \"pat'_PConstant\"\n\n      method 
pat'_PConstruct_inductive ~super:_ ~constructor:_ ~is_record:_\n          ~is_struct:_ ~fields:_ =\n        default_document_for \"pat'_PConstruct_inductive\"\n\n      method pat'_PConstruct_tuple ~super:_ ~components:_ =\n        default_document_for \"pat'_PConstruct_tuple\"\n\n      method pat'_PDeref ~super:_ ~subpat:_ ~witness:_ =\n        default_document_for \"pat'_PDeref\"\n\n      method pat'_PWild = default_document_for \"pat'_PWild\"\n      method printer_name = default_string_for \"printer_name\"\n\n      method projection_predicate ~impl:_ ~assoc_item:_ ~typ:_ =\n        default_document_for \"projection_predicate\"\n\n      method safety_kind_Safe = default_document_for \"safety_kind_Safe\"\n      method safety_kind_Unsafe _x1 = default_document_for \"safety_kind_Unsafe\"\n\n      method supported_monads_MException _x1 =\n        default_document_for \"supported_monads_MException\"\n\n      method supported_monads_MOption =\n        default_document_for \"supported_monads_MOption\"\n\n      method supported_monads_MResult _x1 =\n        default_document_for \"supported_monads_MResult\"\n\n      method trait_goal ~trait:_ ~args:_ = default_document_for \"trait_goal\"\n\n      method trait_item ~ti_span:_ ~ti_generics:_ ~ti_v:_ ~ti_ident:_\n          ~ti_attrs:_ =\n        default_document_for \"trait_item\"\n\n      method trait_item'_TIDefault ~params:_ ~body:_ ~witness:_ =\n        default_document_for \"trait_item'_TIDefault\"\n\n      method trait_item'_TIFn _x1 = default_document_for \"trait_item'_TIFn\"\n      method trait_item'_TIType _x1 = default_document_for \"trait_item'_TIType\"\n\n      method ty_TApp_application ~typ:_ ~generics:_ =\n        default_document_for \"ty_TApp_application\"\n\n      method ty_TApp_tuple ~types:_ = default_document_for \"ty_TApp_tuple\"\n      method ty_TArray ~typ:_ ~length:_ = default_document_for \"ty_TArray\"\n      method ty_TArrow _x1 _x2 = default_document_for \"ty_TArrow\"\n\n      method 
ty_TAssociatedType ~impl:_ ~item:_ =\n        default_document_for \"ty_TAssociatedType\"\n\n      method ty_TBool = default_document_for \"ty_TBool\"\n      method ty_TChar = default_document_for \"ty_TChar\"\n      method ty_TDyn ~witness:_ ~goals:_ = default_document_for \"ty_TDyn\"\n      method ty_TFloat _x1 = default_document_for \"ty_TFloat\"\n      method ty_TInt _x1 = default_document_for \"ty_TInt\"\n      method ty_TOpaque _x1 = default_document_for \"ty_TOpaque\"\n      method ty_TParam _x1 = default_document_for \"ty_TParam\"\n      method ty_TRawPointer ~witness:_ = default_document_for \"ty_TRawPointer\"\n\n      method ty_TRef ~witness:_ ~region:_ ~typ:_ ~mut:_ =\n        default_document_for \"ty_TRef\"\n\n      method ty_TSlice ~witness:_ ~ty:_ = default_document_for \"ty_TSlice\"\n      method ty_TStr = default_document_for \"ty_TStr\"\n      (* END GENERATED *)\n    end\nend\n"
  },
  {
    "path": "engine/lib/hax_io.ml",
    "content": "(** This module helps communicating with `cargo-hax`. *)\n\nopen Prelude\n\nmodule type S = sig\n  val read_json : unit -> Yojson.Safe.t option\n  val write_json : Yojson.Safe.t -> unit\nend\n\ninclude (\n  struct\n    (** Contains the module *)\n    let state = ref None\n\n    let init (module M : S) = state := Some (module M : S)\n\n    let get () : (module S) =\n      !state\n      |> Option.value_exn\n           ~message:\"Hax engine: internal error: Hax_io as not initialized\"\n\n    let read_json () =\n      let (module M) = get () in\n      M.read_json ()\n\n    let write_json json =\n      let (module M) = get () in\n      M.write_json json\n  end :\n    sig\n      include S\n\n      val init : (module S) -> unit\n    end)\n\nlet read () : Types.to_engine =\n  read_json () |> Option.value_exn |> [%of_yojson: Types.to_engine]\n\nlet write (msg : Types.from_engine) : unit =\n  [%yojson_of: Types.from_engine] msg |> write_json\n\nlet close () : unit =\n  write Exit;\n  (* Ensure no garbadge collect *)\n  let _ = read_json () in\n  ()\n\nlet request (type a) ~expected (msg : Types.from_engine)\n    (filter : Types.to_engine -> a option) : a =\n  write msg;\n  let response = read () in\n  match filter response with\n  | Some value -> value\n  | None ->\n      let error =\n        \"Internal error: communication protocol error between `hax-engine` and \\\n         `cargo-hax`. Expected `\" ^ expected ^ \"`, got `\"\n        ^ [%show: Types.to_engine] response\n        ^ \"` instead.\"\n      in\n      failwith error\n"
  },
  {
    "path": "engine/lib/import_ast.ml",
    "content": "open! Prelude\n\nlet refute_resugared s =\n  failwith\n    (\"Got a resugared node at \" ^ s\n   ^ \". The AST is never supposed to be sent to the OCaml engine with \\\n      resugared nodes.\")\n\nlet broken_invariant s = failwith s\n\ntype missing_type = unit\n\nmodule A = Rust_engine_types\nmodule F = Features.Full\n\nmodule B = struct\n  include Ast\n  include Ast.Make (F)\nend\n\nmodule U = Ast_utils.Make (F)\nmodule Build = Ast_builder.Make (F)\n\nexception Item_translation_failure of string\n\nlet from_error_node (error_node : Types.error_node) : string =\n  match (error_node.fragment, error_node.diagnostics) with\n  | ( Unknown \"OCamlEngineError\",\n      [\n        {\n          node = Unknown \"OCamlEngineError\";\n          info = { kind = OcamlEngineErrorPayload payload; _ };\n          _;\n        };\n      ] ) ->\n      payload\n  | _ -> [%yojson_of: Types.error_node] error_node |> Yojson.Safe.to_string\n\nlet dsafety_kind (safety : A.safety_kind) : B.safety_kind =\n  match safety with Safe -> B.Safe | Unsafe -> B.Unsafe F.unsafe\n\nlet rec dty (Newtypety ty : A.ty) : B.ty =\n  match ty with\n  | Primitive Bool -> TBool\n  | Primitive Char -> TChar\n  | Primitive (Int k) -> TInt (dint_kind k)\n  | Primitive (Float k) -> TFloat (dfloat_kind k)\n  | Primitive Str -> TStr\n  | App { head; args } ->\n      TApp\n        { ident = dglobal_ident head; args = List.map ~f:dgeneric_value args }\n  | Array { ty; length } -> TArray { typ = dty ty; length = dexpr length }\n  | Slice ty -> TSlice { ty = dty ty; witness = F.slice }\n  | Ref { inner; mutable'; region = _ } ->\n      TRef\n        {\n          witness = F.reference;\n          typ = dty inner;\n          mut = (if mutable' then Mutable F.mutable_reference else Immutable);\n          region = \"unknown\";\n        }\n  | Param local_ident -> TParam (dlocal_ident local_ident)\n  | Arrow { inputs; output } -> TArrow (List.map ~f:dty inputs, dty output)\n  | AssociatedType { impl_; item } 
->\n      TAssociatedType { impl = dimpl_expr impl_; item = dconcrete_ident item }\n  | Opaque ident -> TOpaque (dconcrete_ident ident)\n  | RawPointer -> TRawPointer { witness = F.raw_pointer }\n  | Dyn goals ->\n      TDyn { witness = F.dyn; goals = List.map ~f:ddyn_trait_goal goals }\n  | Resugared _ -> refute_resugared \"ty\"\n  | Error s -> U.HaxFailure.Build.ty (from_error_node s)\n\nand dint_kind (ik : A.int_kind) : B.int_kind =\n  let size : B.size =\n    match ik.size with\n    | S8 -> S8\n    | S16 -> S16\n    | S32 -> S32\n    | S64 -> S64\n    | S128 -> S128\n    | SSize -> SSize\n  in\n  {\n    size;\n    signedness =\n      (match ik.signedness with Signed -> Signed | Unsigned -> Unsigned);\n  }\n\nand dfloat_kind (fk : A.float_kind) : B.float_kind =\n  match fk with F16 -> F16 | F32 -> F32 | F64 -> F64 | F128 -> F128\n\nand dglobal_ident ?(skip_projector : bool = false)\n    (Newtypeglobal_id gi : A.global_id) : B.global_ident =\n  match gi with\n  | Types.Concrete c -> (\n      let ci = Concrete_ident.from_rust_ast c in\n      match c.def_id.def_id.kind with\n      | Field ->\n          let res = `Concrete ci in\n          if skip_projector then res else `Projector res\n      | _ ->\n          let is name = Concrete_ident.eq_name name ci in\n          if is Rust_primitives__hax__deref_op then `Primitive Deref\n          else if is Rust_primitives__hax__cast_op then `Primitive Cast\n          else if is Rust_primitives__hax__logical_op_and then\n            `Primitive (LogicalOp And)\n          else if is Rust_primitives__hax__logical_op_or then\n            `Primitive (LogicalOp Or)\n          else `Concrete ci)\n  | Types.Tuple t -> (\n      match t with\n      | Types.Type { length } -> `TupleType (Int.of_string length)\n      | Types.Constructor { length } -> `TupleCons (Int.of_string length)\n      | Types.Field { length; field } ->\n          `TupleField (Int.of_string field, Int.of_string length))\n  | Types.FreshModule _ ->\n      
broken_invariant\n        (\"dglobal_ident: got a [`FreshModule _]: \"\n        ^ [%show: A.global_id_inner] gi)\n\nand dlocal_ident (Newtypelocal_id (Newtypesymbol li) : A.local_id) :\n    B.local_ident =\n  { id = (Expr, 0); name = li }\n\nand dconcrete_ident (gi : A.global_id) : B.concrete_ident =\n  match dglobal_ident gi with\n  | `Concrete id -> id\n  (* For variant fields *)\n  | `Projector (`Concrete id) -> id\n  | _ ->\n      broken_invariant\n        (\"dconcrete_ident: got something else than a [`Concrete _]: \"\n        ^ [%show: A.global_id] gi)\n\nand ddyn_trait_goal (r : A.dyn_trait_goal) : B.dyn_trait_goal =\n  {\n    non_self_args = List.map ~f:dgeneric_value r.non_self_args;\n    trait = dconcrete_ident r.trait_;\n  }\n\nand dtrait_goal (r : A.trait_goal) : B.trait_goal =\n  { args = List.map ~f:dgeneric_value r.args; trait = dconcrete_ident r.trait_ }\n\nand dimpl_ident (r : A.impl_ident) : B.impl_ident =\n  {\n    goal = dtrait_goal r.goal;\n    name = (match r.name with Newtypesymbol name -> name);\n  }\n\nand dprojection_predicate (r : A.projection_predicate) : B.projection_predicate\n    =\n  {\n    assoc_item = dconcrete_ident r.assoc_item;\n    impl = dimpl_expr r.impl_;\n    typ = dty r.ty;\n  }\n\nand dimpl_expr (i : A.impl_expr) : B.impl_expr =\n  { goal = dtrait_goal i.goal; kind = dimpl_expr_kind i.kind }\n\nand dimpl_expr_kind (i : A.impl_expr_kind) : B.impl_expr_kind =\n  match i with\n  | A.Self_ -> B.Self\n  | A.Concrete tr -> B.Concrete (dtrait_goal tr)\n  | A.LocalBound { id = A.Newtypesymbol id } -> B.LocalBound { id }\n  | A.Parent { impl_; ident } ->\n      B.Parent { impl = dimpl_expr impl_; ident = dimpl_ident ident }\n  | A.Projection { impl_; item; ident } ->\n      B.Projection\n        {\n          impl = dimpl_expr impl_;\n          item = dconcrete_ident item;\n          ident = dimpl_ident ident;\n        }\n  | A.ImplApp { impl_; args } ->\n      B.ImplApp { impl = dimpl_expr impl_; args = List.map ~f:dimpl_expr args 
}\n  | A.Dyn -> B.Dyn\n  | A.Builtin tr -> B.Builtin (dtrait_goal tr)\n  | A.Error s -> raise (Item_translation_failure (from_error_node s))\n\nand dgeneric_value (generic_value : A.generic_value) : B.generic_value =\n  match generic_value with\n  | Lifetime -> B.GLifetime { lt = \"\"; witness = F.lifetime }\n  | Ty t -> B.GType (dty t)\n  | Expr e -> B.GConst (dexpr e)\n\nand dborrow_kind (borrow_kind : A.borrow_kind) : B.borrow_kind =\n  match borrow_kind with\n  | Shared -> B.Shared\n  | Unique -> B.Unique\n  | Mut -> B.Mut F.mutable_reference\n\nand dattributes (m : A.attribute2 list) : B.attrs = List.map ~f:dattr m\nand dspan = Span.from_rust_ast_span\n\nand dattr (a : A.attribute) : B.attr =\n  let span = dspan a.span in\n  match a.kind with\n  | Tool { path; tokens } -> { kind = B.Tool { path; tokens }; span }\n  | DocComment { kind; body } ->\n      let kind = match kind with Line -> B.DCKLine | Block -> DCKBlock in\n      { kind = B.DocComment { kind; body }; span }\n  | Hax payload -> Attr_payloads.to_attr payload span\n\nand dpat (p : A.pat) : B.pat =\n  let typ = dty p.ty in\n  let span = dspan p.meta.span in\n  { p = dpat' span typ p.kind; span; typ }\n\nand dpat' span parent_ty (pat : A.pat_kind) : B.pat' =\n  match pat with\n  | Wild -> PWild\n  | Ascription { pat; ty = { ty; span } } ->\n      PAscription { pat = dpat pat; typ_span = dspan span; typ = dty ty }\n  | Construct { constructor; is_record; is_struct; fields } ->\n      PConstruct\n        {\n          constructor = dglobal_ident constructor;\n          is_record;\n          is_struct;\n          fields =\n            List.map\n              ~f:(fun (field, pat) ->\n                B.\n                  {\n                    field = dglobal_ident ~skip_projector:true field;\n                    pat = dpat pat;\n                  })\n              fields;\n        }\n  | Or { sub_pats } -> POr { subpats = List.map ~f:dpat sub_pats }\n  | Array { args } -> PArray { args = List.map ~f:dpat 
args }\n  | Deref { sub_pat } -> PDeref { subpat = dpat sub_pat; witness = F.reference }\n  | Constant { lit } -> PConstant { lit = dliteral lit }\n  | Binding { mutable'; mode; var; sub_pat } ->\n      let mut = if mutable' then B.Mutable F.mutable_variable else Immutable in\n      PBinding\n        {\n          mut;\n          mode = dbinding_mode mode;\n          var = dlocal_ident var;\n          subpat = Option.map ~f:(fun p -> (dpat p, F.as_pattern)) sub_pat;\n          typ = parent_ty;\n        }\n  | Resugared _ -> refute_resugared \"pat\"\n  | Error diag ->\n      let s = from_error_node diag in\n      (U.HaxFailure.Build.pat span parent_ty s).p\n\nand dbinding_mode (binding_mode : A.binding_mode) : B.binding_mode =\n  match binding_mode with\n  | ByValue -> B.ByValue\n  | ByRef kind -> B.ByRef (dborrow_kind kind, F.reference)\n\nand dexpr (e : A.expr) : B.expr =\n  let typ = dty e.ty in\n  let span = dspan e.meta.span in\n  { e = dexpr' span typ e.kind; typ; span }\n\nand dexpr' span typ (expr : A.expr_kind) : B.expr' =\n  match expr with\n  | If { condition; then'; else_ } ->\n      If\n        {\n          cond = dexpr condition;\n          then_ = dexpr then';\n          else_ = Option.map ~f:dexpr else_;\n        }\n  | App { head; args; generic_args; bounds_impls; trait_ } ->\n      App\n        {\n          f = dexpr head;\n          args = List.map ~f:dexpr args;\n          generic_args = List.map ~f:dgeneric_value generic_args;\n          bounds_impls = List.map ~f:dimpl_expr bounds_impls;\n          trait =\n            Option.map\n              ~f:(fun (impl, args) ->\n                (dimpl_expr impl, List.map ~f:dgeneric_value args))\n              trait_;\n        }\n  | Literal lit -> Literal (dliteral lit)\n  | Array exprs -> Array (List.map ~f:dexpr exprs)\n  | Construct { constructor; is_record; is_struct; fields; base } ->\n      Construct\n        {\n          constructor = dglobal_ident constructor;\n          fields =\n            
List.map\n              ~f:(fun (id, e) ->\n                (dglobal_ident ~skip_projector:true id, dexpr e))\n              fields;\n          base = Option.map ~f:(fun e -> (dexpr e, F.construct_base)) base;\n          is_record;\n          is_struct;\n        }\n  | Match { scrutinee; arms } ->\n      Match { scrutinee = dexpr scrutinee; arms = List.map ~f:darm arms }\n  | Let { lhs; rhs; body } ->\n      Let { lhs = dpat lhs; rhs = dexpr rhs; body = dexpr body; monadic = None }\n  | Block { body; safety_mode } ->\n      Block\n        {\n          e = dexpr body;\n          safety_mode = dsafety_kind safety_mode;\n          witness = F.block;\n        }\n  | LocalId id -> LocalVar (dlocal_ident id)\n  | GlobalId id -> GlobalVar (dglobal_ident id)\n  | Ascription { e; ty } -> Ascription { e = dexpr e; typ = dty ty }\n  | Assign { lhs; value } ->\n      Assign { lhs = dlhs lhs; e = dexpr value; witness = F.mutable_variable }\n  | Loop { body; kind; state; control_flow; label } ->\n      Loop\n        {\n          body = dexpr body;\n          kind = dloop_kind kind;\n          state = Option.map ~f:dloop_state state;\n          control_flow =\n            Option.map\n              ~f:(fun k -> (dcontrol_flow_kind k, F.fold_like_loop))\n              control_flow;\n          label = Option.map ~f:(fun (A.Newtypesymbol s) -> s) label;\n          witness = F.loop;\n        }\n  | Break { value; label; state } ->\n      Break\n        {\n          e = dexpr value;\n          label = Option.map ~f:(fun (A.Newtypesymbol s) -> s) label;\n          acc = Option.map ~f:(fun e -> (dexpr e, F.state_passing_loop)) state;\n          witness = (F.break, F.loop);\n        }\n  | Return { value } -> Return { e = dexpr value; witness = F.early_exit }\n  | Continue { label; state } ->\n      Continue\n        {\n          label = Option.map ~f:(fun (Newtypesymbol s) -> s) label;\n          acc = Option.map ~f:(fun e -> (dexpr e, F.state_passing_loop)) state;\n          witness = 
(F.continue, F.loop);\n        }\n  | Borrow { mutable'; inner } ->\n      Borrow\n        {\n          e = dexpr inner;\n          kind = (if mutable' then Mut F.mutable_reference else B.Shared);\n          witness = F.reference;\n        }\n  | AddressOf { mutable'; inner } ->\n      AddressOf\n        {\n          e = dexpr inner;\n          mut = (if mutable' then Mutable F.mutable_pointer else Immutable);\n          witness = F.raw_pointer;\n        }\n  | Closure { params; body; captures } ->\n      Closure\n        {\n          params = List.map ~f:dpat params;\n          body = dexpr body;\n          captures = List.map ~f:dexpr captures;\n        }\n  | Quote { contents } -> Quote (dquote contents)\n  | Resugared _ -> refute_resugared \"expr\"\n  | Error diag -> (U.HaxFailure.Build.expr span typ (from_error_node diag) \"\").e\n\nand dcontrol_flow_kind (cfk : A.control_flow_kind) : B.cf_kind =\n  match cfk with BreakOnly -> B.BreakOnly | BreakOrReturn -> B.BreakOrReturn\n\nand dliteral (l : A.literal) : B.literal =\n  match l with\n  | String (Newtypesymbol s) -> B.String s\n  | Char c -> B.Char c\n  | Int { value = Newtypesymbol value; negative; kind } ->\n      B.Int { value; negative; kind = dint_kind kind }\n  | Float { value = Newtypesymbol value; negative; kind } ->\n      B.Float { value; negative; kind = dfloat_kind kind }\n  | Bool b -> B.Bool b\n\nand dquote (Newtypequote contents : A.quote) : B.quote =\n  let f = function\n    | A.Verbatim code -> B.Verbatim code\n    | A.Expr e -> B.Expr (dexpr e)\n    | A.Pattern p -> B.Pattern (dpat p)\n    | A.Ty t -> B.Typ (dty t)\n  in\n  { contents = List.map ~f contents; witness = F.quote }\n\nand ditem_quote_origin (iqo : A.item_quote_origin) : B.item_quote_origin =\n  {\n    item_ident = dconcrete_ident iqo.item_ident;\n    item_kind =\n      (match iqo.item_kind with\n      | A.Fn -> `Fn\n      | A.TyAlias -> `TyAlias\n      | A.Type -> `Type\n      | A.MacroInvocation -> `IMacroInvokation\n      | 
A.Trait -> `Trait\n      | A.Impl -> `Impl\n      | A.Alias -> `Alias\n      | A.Use -> `Use\n      | A.Quote -> `Quote\n      | A.HaxError -> `HaxError\n      | A.NotImplementedYet -> `NotImplementedYet);\n    position =\n      (match iqo.position with\n      | A.Before -> `Before\n      | A.After -> `After\n      | A.Replace -> `Replace);\n  }\n\nand dloop_kind (k : A.loop_kind) : B.loop_kind =\n  match k with\n  | A.UnconditionalLoop -> B.UnconditionalLoop\n  | A.WhileLoop { condition } ->\n      B.WhileLoop { condition = dexpr condition; witness = F.while_loop }\n  | A.ForLoop { iterator; pat } ->\n      B.ForLoop { it = dexpr iterator; pat = dpat pat; witness = F.for_loop }\n  | A.ForIndexLoop { start; end'; var; var_ty } ->\n      B.ForIndexLoop\n        {\n          start = dexpr start;\n          end_ = dexpr end';\n          var = dlocal_ident var;\n          var_typ = dty var_ty;\n          witness = F.for_index_loop;\n        }\n\nand dloop_state (s : A.loop_state) : B.loop_state =\n  {\n    bpat = dpat s.body_pat;\n    init = dexpr s.init;\n    witness = F.state_passing_loop;\n  }\n\nand darm (a : A.arm) : B.arm =\n  {\n    arm =\n      {\n        body = dexpr a.body;\n        guard = Option.map ~f:dguard a.guard;\n        arm_pat = dpat a.pat;\n      };\n    span = dspan a.meta.span;\n  }\n\nand dguard (a : A.guard) : B.guard =\n  { guard = dguard' a.kind; span = dspan a.meta.span }\n\nand dguard' (guard : A.guard_kind) : B.guard' =\n  match guard with\n  | IfLet { lhs; rhs } ->\n      B.IfLet { lhs = dpat lhs; rhs = dexpr rhs; witness = F.match_guard }\n\nand dlhs (lhs : A.lhs) : B.lhs =\n  match lhs with\n  | A.LocalVar { var; ty } ->\n      B.LhsLocalVar { var = dlocal_ident var; typ = dty ty }\n  | A.VecRef { e; ty } ->\n      B.LhsVecRef { e = dlhs e; typ = dty ty; witness = F.nontrivial_lhs }\n  | A.ArbitraryExpr e ->\n      B.LhsArbitraryExpr { e = dexpr e; witness = F.arbitrary_lhs }\n  | A.FieldAccessor { e; field; ty } ->\n      
B.LhsFieldAccessor\n        {\n          e = dlhs e;\n          field = dglobal_ident field;\n          typ = dty ty;\n          witness = F.nontrivial_lhs;\n        }\n  | A.ArrayAccessor { e; index; ty } ->\n      B.LhsArrayAccessor\n        {\n          e = dlhs e;\n          index = dexpr index;\n          typ = dty ty;\n          witness = F.nontrivial_lhs;\n        }\n\nlet dgeneric_param ({ ident; meta; kind } : A.generic_param) : B.generic_param =\n  let kind : B.generic_param_kind =\n    match kind with\n    | Lifetime -> GPLifetime { witness = F.lifetime }\n    | Type -> GPType\n    | Const { ty } -> GPConst { typ = dty ty }\n  in\n  {\n    ident = dlocal_ident ident;\n    span = dspan meta.span;\n    attrs = dattributes meta.attributes;\n    kind;\n  }\n\nlet dgeneric_constraint (generic_constraint : A.generic_constraint) :\n    B.generic_constraint =\n  match generic_constraint with\n  | Lifetime lf -> GCLifetime (lf, F.lifetime)\n  | TypeClass impl_ident -> GCType (dimpl_ident impl_ident)\n  | Equality projection -> GCProjection (dprojection_predicate projection)\n\nlet dgenerics (g : A.generics) : B.generics =\n  {\n    constraints = List.map ~f:dgeneric_constraint g.constraints;\n    params = List.map ~f:dgeneric_param g.params;\n  }\n\nlet dparam (p : A.param) : B.param =\n  {\n    attrs = dattributes p.attributes;\n    pat = dpat p.pat;\n    typ = dty p.ty;\n    typ_span = Option.map ~f:dspan p.ty_span;\n  }\n\nlet dvariant (v : A.variant) : B.variant =\n  {\n    arguments =\n      List.map\n        ~f:(fun (id, t, a) -> (dconcrete_ident id, dty t, dattributes a))\n        v.arguments;\n    attrs = dattributes v.attributes;\n    is_record = v.is_record;\n    name = dconcrete_ident v.name;\n  }\n\nlet dtrait_item' (ti : A.trait_item_kind) : B.trait_item' =\n  match ti with\n  | Type idents -> TIType (List.map ~f:dimpl_ident idents)\n  | Fn t -> TIFn (dty t)\n  | Default { params; body } ->\n      TIDefault\n        {\n          params = List.map 
~f:dparam params;\n          body = dexpr body;\n          witness = F.trait_item_default;\n        }\n  | Resugared _ -> refute_resugared \"trait_item\"\n  | Error _ -> failwith \"TraitItem error node\"\n\nlet dtrait_item (ti : A.trait_item) : B.trait_item =\n  {\n    ti_generics = dgenerics ti.generics;\n    ti_ident = dconcrete_ident ti.ident;\n    ti_v = dtrait_item' ti.kind;\n    ti_span = dspan ti.meta.span;\n    ti_attrs = dattributes ti.meta.attributes;\n  }\n\nlet dimpl_item' (ii : A.impl_item_kind) : B.impl_item' =\n  match ii with\n  | Type { ty; parent_bounds } ->\n      IIType\n        {\n          typ = dty ty;\n          parent_bounds = List.map ~f:(dimpl_expr *** dimpl_ident) parent_bounds;\n        }\n  | Fn { body; params } ->\n      IIFn { body = dexpr body; params = List.map ~f:dparam params }\n  | Resugared _ -> refute_resugared \"impl_item\"\n  | Error _ -> failwith \"Impl item error node\"\n\nlet dimpl_item (ii : A.impl_item) : B.impl_item =\n  {\n    ii_generics = dgenerics ii.generics;\n    ii_ident = dconcrete_ident ii.ident;\n    ii_v = dimpl_item' ii.kind;\n    ii_span = dspan ii.meta.span;\n    ii_attrs = dattributes ii.meta.attributes;\n  }\n\nlet ditem' (item : A.item_kind) : B.item' option =\n  match item with\n  | A.Fn { name; generics; body; params; safety } ->\n      B.Fn\n        {\n          name = dconcrete_ident name;\n          generics = dgenerics generics;\n          body = dexpr body;\n          params = List.map ~f:dparam params;\n          safety = dsafety_kind safety;\n        }\n      |> Option.some\n  | A.Type { name; generics; variants; is_struct } ->\n      B.Type\n        {\n          name = dconcrete_ident name;\n          generics = dgenerics generics;\n          variants = List.map ~f:dvariant variants;\n          is_struct;\n        }\n      |> Option.some\n  | A.TyAlias { name; generics; ty } ->\n      B.TyAlias\n        {\n          name = dconcrete_ident name;\n          generics = dgenerics generics;\n      
    ty = dty ty;\n        }\n      |> Option.some\n  | A.Trait { name; generics; items; safety } ->\n      B.Trait\n        {\n          name = dconcrete_ident name;\n          generics = dgenerics generics;\n          items = List.map ~f:dtrait_item items;\n          safety = dsafety_kind safety;\n        }\n      |> Option.some\n  | A.Impl\n      {\n        generics;\n        self_ty;\n        of_trait = trait_id, trait_generics;\n        items;\n        parent_bounds;\n      } ->\n      B.Impl\n        {\n          generics = dgenerics generics;\n          self_ty = dty self_ty;\n          of_trait =\n            (dconcrete_ident trait_id, List.map ~f:dgeneric_value trait_generics);\n          items = List.map ~f:dimpl_item items;\n          parent_bounds =\n            List.map\n              ~f:(fun (impl, ident) -> (dimpl_expr impl, dimpl_ident ident))\n              parent_bounds;\n          safety = Safe;\n        }\n      |> Option.some\n  | A.Alias { name; item } ->\n      B.Alias { name = dconcrete_ident name; item = dconcrete_ident item }\n      |> Option.some\n  | A.Use { path; is_external; rename } ->\n      B.Use { path; is_external; rename } |> Option.some\n  | A.Quote { quote; origin } ->\n      B.Quote { quote = dquote quote; origin = ditem_quote_origin origin }\n      |> Option.some\n  | A.Error diag -> B.HaxError (from_error_node diag) |> Option.some\n  | A.NotImplementedYet -> B.NotImplementedYet |> Option.some\n  | Resugared _ -> refute_resugared \"item_kind\" |> Option.some\n  | A.RustModule -> None\n\nlet ditem (i : A.item) : B.item list =\n  try\n    match ditem' i.kind with\n    | Some v ->\n        [\n          {\n            ident = dconcrete_ident i.ident;\n            v;\n            span = dspan i.meta.span;\n            attrs = dattributes i.meta.attributes;\n          };\n        ]\n    | _ -> []\n  with Item_translation_failure msg ->\n    [ B.make_hax_error_item (dspan i.meta.span) (dconcrete_ident i.ident) msg ]\n"
  },
  {
    "path": "engine/lib/import_thir.ml",
    "content": "module Thir = struct\n  include Types\n\n  type item = item_for__thir_body\n  type item_kind = item_kind_for__thir_body\n  type impl_item = impl_item_for__thir_body\n  type impl_item_kind = impl_item_kind_for__thir_body\n  type generics = generics_for__thir_body\n  type trait_item_kind = trait_item_kind_for__thir_body\n  type generic_param = generic_param_for__thir_body\n  type generic_param_kind = generic_param_kind_for__thir_body\n  type trait_item = trait_item_for__thir_body\n  type ty = node_for__ty_kind\n  type item_ref = node_for__item_ref_contents\n  type trait_ref = item_ref\nend\n\nopen! Prelude\nopen Diagnostics\n\nlet assertion_failure (span : Thir.span list) (details : string) =\n  let kind = T.AssertionFailure { details } in\n  Diagnostics.SpanFreeError.raise ~span\n    (Span.dummy () |> Span.owner_hint)\n    ThirImport kind\n\nlet unimplemented ~issue_id (span : Thir.span list) (details : string) =\n  let kind =\n    T.Unimplemented\n      {\n        issue_id = Some (MyInt64.of_int issue_id);\n        details = String.(if details = \"\" then None else Some details);\n      }\n  in\n  Diagnostics.SpanFreeError.raise ~span\n    (Span.dummy () |> Span.owner_hint)\n    ThirImport kind\n\nmodule Ast = struct\n  include Ast\n  include Rust\nend\n\nmodule U = Ast_utils.Make (Features.Rust)\nmodule W = Features.On\nmodule Ast_builder = Ast_builder.Make (Features.Rust)\nopen Ast\n\nlet def_id ~value (def_id : Thir.def_id) : global_ident =\n  `Concrete (Concrete_ident.of_def_id ~value def_id)\n\nlet local_ident kind (ident : Thir.local_ident) : local_ident =\n  {\n    name = ident.name;\n    id = Local_ident.mk_id kind (Int.of_string ident.id.local_id);\n  }\n\nlet int_ty_to_size : Thir.int_ty -> size = function\n  | Isize -> SSize\n  | I8 -> S8\n  | I16 -> S16\n  | I32 -> S32\n  | I64 -> S64\n  | I128 -> S128\n\nlet uint_ty_to_size : Thir.uint_ty -> size = function\n  | Usize -> SSize\n  | U8 -> S8\n  | U16 -> S16\n  | U32 -> S32\n  | U64 -> 
S64\n  | U128 -> S128\n\nlet c_int_ty (ty : Thir.int_ty) : int_kind =\n  { size = int_ty_to_size ty; signedness = Signed }\n\nlet c_uint_ty (ty : Thir.uint_ty) : int_kind =\n  { size = uint_ty_to_size ty; signedness = Unsigned }\n\nlet csafety (safety : Types.safety) : safety_kind =\n  match safety with Safe -> Safe | Unsafe -> Unsafe W.unsafe\n\nlet c_header_safety (safety : Types.header_safety) : safety_kind =\n  match safety with\n  | SafeTargetFeatures -> Safe\n  | Normal safety -> csafety safety\n\nlet c_mutability (witness : 'a) : bool -> 'a Ast.mutability = function\n  | true -> Mutable witness\n  | false -> Immutable\n\nlet c_borrow_kind span : Thir.borrow_kind -> borrow_kind = function\n  | Shared -> Shared\n  | Fake _ ->\n      assertion_failure [ span ]\n        \"Got a shallow borrow node (`BorrowKind::Fake`). Those are generated \\\n         by the borrow checker and should be discarded after borrow checking: \\\n         we should never see such borrows.\"\n  | Mut _ -> Mut W.mutable_reference\n\nlet c_binding_mode : Thir.by_ref -> binding_mode = function\n  | No -> ByValue\n  | Yes (_, true) -> ByRef (Mut W.mutable_reference, W.reference)\n  | Yes (_, false) -> ByRef (Shared, W.reference)\n\nlet unit_typ : ty = TApp { ident = `TupleType 0; args = [] }\n\nlet unit_expr span : expr =\n  { typ = unit_typ; span; e = Ast.GlobalVar (`TupleCons 0) }\n\nlet wild_pat span : ty -> pat = fun typ -> { typ; span; p = PWild }\n\nlet c_logical_op : Thir.logical_op -> logical_op = function\n  | And -> And\n  | Or -> Or\n\nlet c_attr (attr : Thir.attribute) : attr option =\n  match attr with\n  | Parsed (DocComment { kind; comment; span; _ }) ->\n      let kind =\n        match kind with Thir.Line -> DCKLine | Thir.Block -> DCKBlock\n      in\n      let kind = DocComment { kind; body = comment } in\n      Some { kind; span = Span.of_thir span }\n  | Parsed (AutomaticallyDerived span) ->\n      (* Restore behavior before PR #1534 *)\n      let kind = Tool { path = 
\"automatically_derived\"; tokens = \"\" } in\n      Some { kind; span = Span.of_thir span }\n  | Unparsed { args = Eq { expr = { symbol; _ }; _ }; path = \"doc\"; span; _ } ->\n      (* Looks for `#[doc = \"something\"]` *)\n      let kind = DocComment { kind = DCKLine; body = symbol } in\n      Some { kind; span = Span.of_thir span }\n  | Unparsed { args; path; span; _ } ->\n      let args_tokens =\n        match args with Delimited { tokens; _ } -> Some tokens | _ -> None\n      in\n      let tokens = Option.value ~default:\"\" args_tokens in\n      let kind = Tool { path; tokens } in\n      Some { kind; span = Span.of_thir span }\n  | _ -> None\n\nlet c_attrs : Thir.attribute list -> attrs = List.filter_map ~f:c_attr\n\nlet c_item_attrs (attrs : Thir.item_attributes) : attrs =\n  (* TODO: This is a quite coarse approximation, we need to reflect\n     that parent/self structure in our AST. See\n     https://github.com/hacspec/hax/issues/123. *)\n  let self = c_attrs attrs.attributes in\n  let parent =\n    c_attrs attrs.parent_attributes\n    |> List.filter ~f:([%matches? 
({ kind = DocComment _; _ } : attr)] >> not)\n    |>\n    (* Repeating associateditem or uid is harmful, same for comments *)\n    List.filter ~f:(fun payload ->\n        match Attr_payloads.payloads [ payload ] with\n        | [ ((Uid _ | AssociatedItem _), _) ] -> false\n        | _ -> true)\n  in\n  self @ parent\n\ntype extended_literal =\n  | EL_Lit of literal\n  | EL_U8Array of literal list (* EL_U8Array only encodes arrays of [u8]s *)\n\nlet c_lit' span negative (lit : Thir.lit_kind) (ty : ty) : extended_literal =\n  let mk l = EL_Lit l in\n  let mku8 (n : int) =\n    let kind = { size = S8; signedness = Unsigned } in\n    Int { value = Int.to_string n; kind; negative = false }\n  in\n  let error kind =\n    assertion_failure [ span ]\n      (\"[import_thir:literal] got a \" ^ kind ^ \" literal, expected \" ^ kind\n     ^ \" type, got type [\"\n      ^ [%show: ty] ty\n      ^ \"] instead.\")\n  in\n  match lit with\n  | Err _ ->\n      assertion_failure [ span ]\n        \"[import_thir:literal] got an error literal: this means the Rust \\\n         compiler or Hax's frontend probably reported errors above.\"\n  | Str (str, _) -> mk @@ String str\n  | CStr (l, _) | ByteStr (l, _) -> EL_U8Array (List.map ~f:mku8 l)\n  | Byte n -> mk @@ mku8 n\n  | Char s -> mk @@ Char s\n  | Int (value, _kind) ->\n      mk\n      @@ Int\n           {\n             value;\n             negative;\n             kind = (match ty with TInt k -> k | _ -> error \"integer\");\n           }\n  | Float (value, _kind) ->\n      mk\n      @@ Float\n           {\n             value;\n             negative;\n             kind = (match ty with TFloat k -> k | _ -> error \"float\");\n           }\n  | Bool b -> mk @@ Bool b\n\nlet c_lit span neg (lit : Thir.spanned_for__lit_kind) : ty -> extended_literal =\n  c_lit' span neg lit.node\n\nlet resugar_index_mut (e : expr) : (expr * expr) option =\n  match (U.unbox_underef_expr e).e with\n  | App\n      {\n        f = { e = GlobalVar (`Concrete 
meth); _ };\n        args = [ { e = Borrow { e = x; _ }; _ }; index ];\n        generic_args = _ (* TODO: see issue #328 *);\n        trait = _ (* TODO: see issue #328 *);\n        bounds_impls = _;\n      }\n    when Concrete_ident.eq_name Core__ops__index__IndexMut__index_mut meth ->\n      Some (x, index)\n  | App\n      {\n        f = { e = GlobalVar (`Concrete meth); _ };\n        args = [ x; index ];\n        generic_args = _ (* TODO: see issue #328 *);\n        trait = _ (* TODO: see issue #328 *);\n        bounds_impls = _;\n      }\n    when Concrete_ident.eq_name Core__ops__index__Index__index meth ->\n      Some (x, index)\n  | _ -> None\n\n(** Name for the cast function from an ADT to its discriminant *)\nlet cast_name_for_type = Concrete_ident.with_suffix `Cast\n\nmodule type EXPR = sig\n  val c_expr : Thir.decorated_for__expr_kind -> expr\n  val c_expr_drop_body : Thir.decorated_for__expr_kind -> expr\n  val c_ty : Thir.span -> Thir.ty -> ty\n  val c_generic_value : Thir.span -> Thir.generic_arg -> generic_value\n  val c_generics : ?offset:int -> Thir.generics -> generics\n  val c_param : Thir.span -> Thir.param -> param\n  val c_fn_params : Thir.span -> Thir.param list -> param list\n  val c_trait_item' : Thir.trait_item -> Thir.trait_item_kind -> trait_item'\n  val c_trait_ref : Thir.span -> Thir.trait_ref -> trait_goal\n  val c_impl_expr : Thir.span -> Thir.impl_expr -> impl_expr\n  val c_clause : Thir.span -> int -> Thir.clause -> generic_constraint option\nend\n\n(* BinOp to [core::ops::*] overloaded functions *)\n\nmodule Make (CTX : sig\n  val is_core_item : bool\nend) : EXPR = struct\n  let c_binop (op : Thir.bin_op) (lhs : expr) (rhs : expr) (span : span)\n      (typ : ty) =\n    let overloaded_names_of_binop : Thir.bin_op -> Concrete_ident.name =\n      function\n      | Add | AddUnchecked -> Core__ops__arith__Add__add\n      | Sub | SubUnchecked -> Core__ops__arith__Sub__sub\n      | Mul | MulUnchecked -> Core__ops__arith__Mul__mul\n      | 
Div -> Core__ops__arith__Div__div\n      | Rem -> Core__ops__arith__Rem__rem\n      | BitXor -> Core__ops__bit__BitXor__bitxor\n      | BitAnd -> Core__ops__bit__BitAnd__bitand\n      | BitOr -> Core__ops__bit__BitOr__bitor\n      | Shl | ShlUnchecked -> Core__ops__bit__Shl__shl\n      | Shr | ShrUnchecked -> Core__ops__bit__Shr__shr\n      | Lt -> Core__cmp__PartialOrd__lt\n      | Le -> Core__cmp__PartialOrd__le\n      | Ne -> Core__cmp__PartialEq__ne\n      | Ge -> Core__cmp__PartialOrd__ge\n      | Gt -> Core__cmp__PartialOrd__gt\n      | Eq -> Core__cmp__PartialEq__eq\n      | AddWithOverflow | SubWithOverflow | MulWithOverflow ->\n          assertion_failure (Span.to_thir span)\n            \"Overflowing binary operators are not suppored\"\n      | Cmp ->\n          assertion_failure (Span.to_thir span)\n            \"`Cmp` binary operator is not suppored\"\n      | Offset -> Core__ptr__const_ptr__Impl__offset\n    in\n    let primitive_names_of_binop : Thir.bin_op -> Concrete_ident.name = function\n      | Add | AddUnchecked -> Rust_primitives__u128__add\n      | Sub | SubUnchecked -> Rust_primitives__u128__sub\n      | Mul | MulUnchecked -> Rust_primitives__u128__mul\n      | Div -> Rust_primitives__u128__div\n      | Rem -> Rust_primitives__u128__rem\n      | BitXor -> Rust_primitives__u128__bit_xor\n      | BitAnd -> Rust_primitives__u128__bit_and\n      | BitOr -> Rust_primitives__u128__bit_or\n      | Shl | ShlUnchecked -> Rust_primitives__u128__shl\n      | Shr | ShrUnchecked -> Rust_primitives__u128__shr\n      | Lt -> Rust_primitives__u128__lt\n      | Le -> Rust_primitives__u128__le\n      | Ne -> Rust_primitives__u128__ne\n      | Ge -> Rust_primitives__u128__ge\n      | Gt -> Rust_primitives__u128__gt\n      | Eq -> Rust_primitives__u128__eq\n      | AddWithOverflow | SubWithOverflow | MulWithOverflow ->\n          assertion_failure (Span.to_thir span)\n            \"Overflowing binary operators are not suppored\"\n      | Cmp ->\n          
assertion_failure (Span.to_thir span)\n            \"`Cmp` binary operator is not suppored\"\n      | Offset -> Rust_primitives__offset\n    in\n    let name =\n      if CTX.is_core_item then\n        let assert_type_eq t1 t2 =\n          if not (U.ty_equality t1 t2) then\n            assertion_failure (Span.to_thir span)\n              (\"Binary operation: expected LHS and RHS to have the same type, \\\n                instead LHS has type [\"\n              ^ [%show: ty] t1\n              ^ \"] while RHS has type [\"\n              ^ [%show: ty] t2\n              ^ \"]\")\n        in\n        let int =\n          (\"int\", function TInt k -> Some (show_int_kind k) | _ -> None)\n        in\n        let float =\n          (\"float\", function TFloat k -> Some (show_float_kind k) | _ -> None)\n        in\n        let bool = (\"bool\", function TBool -> Some \"bool\" | _ -> None) in\n        let concat_tup sep (x, y) = x ^ sep ^ y in\n        let ( <*> ) (x, f) (y, g) =\n          ( x ^ \"*\" ^ y,\n            f *** g >> uncurry Option.both >> Option.map ~f:(concat_tup \"_\") )\n        in\n        let both (e, f) =\n          ( e ^ \"*\" ^ e,\n            fun (t1, t2) ->\n              assert_type_eq t1 t2;\n              f t1 )\n        in\n        let ( <|> ) (x, f) (y, g) =\n          (x ^ \" or\" ^ y, fun v -> match f v with None -> g v | v -> v)\n        in\n        let name = primitive_names_of_binop op in\n        let expected, f =\n          match op with\n          | Add | Sub | Mul | AddWithOverflow | SubWithOverflow\n          | MulWithOverflow | AddUnchecked | SubUnchecked | MulUnchecked | Div\n            ->\n              both int <|> both float\n          | Rem | Cmp -> both int\n          | BitXor | BitAnd | BitOr -> both int <|> both bool\n          | Shl | Shr | ShlUnchecked | ShrUnchecked -> int <*> int\n          | Lt | Le | Ne | Ge | Gt -> both int <|> both float\n          | Eq -> both int <|> both float <|> both bool\n          | Offset -> 
(\"\", fun _ -> Some \"\")\n        in\n        match f (lhs.typ, rhs.typ) with\n        | Some with_ ->\n            Concrete_ident.of_name ~value:true name\n            |> (Concrete_ident.map_path_strings [@alert \"-unsafe\"]) ~f:(function\n                 | \"u128\" -> with_\n                 | s -> s)\n        | None ->\n            assertion_failure (Span.to_thir span)\n              (\"Binary operation: expected \" ^ expected ^ \" type, got \"\n              ^ [%show: ty] lhs.typ)\n      else Concrete_ident.of_name ~value:true @@ overloaded_names_of_binop op\n    in\n    let needs_borrow =\n      match op with Lt | Le | Ne | Ge | Gt | Eq -> true | _ -> false\n    in\n    let borrow_if_needed (e : expr) =\n      if needs_borrow then\n        match e.typ with\n        | TRef _ -> e\n        | _ ->\n            {\n              span = e.span;\n              e = Borrow { e; kind = Shared; witness = W.reference };\n              typ =\n                TRef\n                  {\n                    witness = W.reference;\n                    region = \"unknown\";\n                    typ = e.typ;\n                    mut = Immutable;\n                  };\n            }\n      else e\n    in\n    let lhs = borrow_if_needed lhs in\n    let rhs = borrow_if_needed rhs in\n    U.call' (`Concrete name) [ lhs; rhs ] span typ\n\n  let binop_of_assignop : Thir.assign_op -> Thir.bin_op = function\n    | AddAssign -> Add\n    | SubAssign -> Sub\n    | MulAssign -> Mul\n    | DivAssign -> Div\n    | RemAssign -> Rem\n    | BitXorAssign -> BitXor\n    | BitAndAssign -> BitAnd\n    | BitOrAssign -> BitOr\n    | ShlAssign -> Shl\n    | ShrAssign -> Shr\n\n  let rec c_expr (e : Thir.decorated_for__expr_kind) : expr =\n    try c_expr_unwrapped e\n    with Diagnostics.SpanFreeError.Exn (Data (ctx, kind)) ->\n      let typ : ty =\n        try c_ty e.span e.ty\n        with Diagnostics.SpanFreeError.Exn _ -> U.HaxFailure.Build.ty \"\"\n      in\n      let span = Span.of_thir e.span 
in\n      U.hax_failure_expr' span typ (ctx, kind) \"\"\n\n  (** Extracts an expression as the global name `dropped_body`: this drops the\n      computational part of the expression, but keeps a correct type and span.\n  *)\n  and c_expr_drop_body (e : Thir.decorated_for__expr_kind) : expr =\n    let typ = c_ty e.span e.ty in\n    let span = Span.of_thir e.span in\n    let v =\n      Global_ident.of_name ~value:true Rust_primitives__hax__dropped_body\n    in\n    { span; typ; e = GlobalVar v }\n\n  and c_block ~expr ~span ~stmts ~ty ~(safety_mode : Types.block_safety) : expr\n      =\n    let full_span = Span.of_thir span in\n    let typ = c_ty span ty in\n    let safety_mode =\n      match safety_mode with\n      | Safe -> Safe\n      | BuiltinUnsafe | ExplicitUnsafe -> Unsafe W.unsafe\n    in\n    (* if there is no expression & the last expression is ⊥, just use that *)\n    let lift_last_statement_as_expr_if_possible expr stmts (ty : Thir.ty) =\n      match (ty.value, expr, List.drop_last stmts, List.last stmts) with\n      | ( Thir.Never,\n          None,\n          Some stmts,\n          Some ({ kind = Thir.Expr { expr; _ }; _ } : Thir.stmt) ) ->\n          (stmts, Some expr)\n      | _ -> (stmts, expr)\n    in\n    let o_stmts, o_expr =\n      lift_last_statement_as_expr_if_possible expr stmts ty\n    in\n    let init =\n      Option.map\n        ~f:(fun e ->\n          let e = c_expr e in\n          { e with e = Block { e; safety_mode; witness = W.block } })\n        o_expr\n      |> Option.value ~default:(unit_expr full_span)\n    in\n    List.fold_right o_stmts ~init ~f:(fun { kind; _ } body ->\n        match kind with\n        | Expr { expr = rhs; _ } ->\n            let rhs = c_expr rhs in\n            let e =\n              Let { monadic = None; lhs = wild_pat rhs.span rhs.typ; rhs; body }\n            in\n            { e; typ; span = Span.union rhs.span body.span }\n        | Let\n            {\n              else_block = Some { expr; span; stmts; 
safety_mode; _ };\n              pattern = lhs;\n              initializer' = Some rhs;\n              _;\n            } ->\n            let lhs = c_pat lhs in\n            let rhs = c_expr rhs in\n            let else_block = c_block ~expr ~span ~stmts ~ty ~safety_mode in\n            let lhs_body_span = Span.union lhs.span body.span in\n            let e =\n              Match\n                {\n                  arms =\n                    [\n                      U.M.arm lhs body ~span:lhs_body_span;\n                      U.M.arm\n                        { p = PWild; span = else_block.span; typ = lhs.typ }\n                        { else_block with typ = body.typ }\n                        ~span:else_block.span;\n                    ];\n                  scrutinee = rhs;\n                }\n            in\n            { e; typ; span = full_span }\n        | Let { initializer' = None; _ } ->\n            unimplemented ~issue_id:156 [ span ]\n              \"Sorry, Hax does not support declare-first let bindings (see \\\n               https://doc.rust-lang.org/rust-by-example/variable_bindings/declare.html) \\\n               for now.\"\n        | Let { pattern = lhs; initializer' = Some rhs; _ } ->\n            let lhs = c_pat lhs in\n            let rhs = c_expr rhs in\n            let e = Let { monadic = None; lhs; rhs; body } in\n            { e; typ; span = Span.union rhs.span body.span })\n\n  and c_expr_unwrapped (e : Thir.decorated_for__expr_kind) : expr =\n    (* TODO: eliminate that `call`, use the one from `ast_utils` *)\n    let call f args =\n      App\n        {\n          f;\n          args = List.map ~f:c_expr args;\n          generic_args = [];\n          trait = None;\n          bounds_impls = [];\n        }\n    in\n    let typ = c_ty e.span e.ty in\n    let span = Span.of_thir e.span in\n    let mk_global typ v : expr = { span; typ; e = GlobalVar v } in\n    let ( ->. 
) a b = TArrow (a, b) in\n    let (v : expr') =\n      match e.contents with\n      | If\n          {\n            cond = { contents = Let { expr = scrutinee; pat }; _ };\n            else_opt;\n            then';\n            _;\n          } ->\n          let scrutinee = c_expr scrutinee in\n          let arm_pat = c_pat pat in\n          let then_ = c_expr then' in\n          let else_ =\n            Option.value ~default:(U.unit_expr span)\n            @@ Option.map ~f:c_expr else_opt\n          in\n          let arm_then = U.M.arm arm_pat then_ ~span:then_.span in\n          let arm_else =\n            let arm_pat = { arm_pat with p = PWild } in\n            U.M.arm arm_pat else_ ~span:else_.span\n          in\n          Match { scrutinee; arms = [ arm_then; arm_else ] }\n      | If { cond; else_opt; then'; _ } ->\n          let cond = c_expr cond in\n          let then_ = c_expr then' in\n          let else_ = Option.map ~f:c_expr else_opt in\n          If { cond; else_; then_ }\n      | Call { args; fn_span = _; from_hir_call = _; fun'; ty = _ } -> (\n          let args =\n            if List.is_empty args then [ unit_expr span ]\n            else List.map ~f:c_expr args\n          in\n          let f = c_expr fun' in\n          match fun'.contents with\n          | GlobalName\n              {\n                item =\n                  {\n                    value =\n                      { def_id = id; generic_args; impl_exprs; in_trait; _ };\n                    _;\n                  };\n                _;\n              } ->\n              let f = { f with e = GlobalVar (def_id ~value:true id) } in\n              let bounds_impls = List.map ~f:(c_impl_expr e.span) impl_exprs in\n              let generic_args =\n                List.map ~f:(c_generic_value e.span) generic_args\n              in\n              let in_trait = Option.map ~f:(c_impl_expr e.span) in_trait in\n              let trait =\n                Option.map ~f:(fun ie -> (ie, 
ie.goal.args)) in_trait\n              in\n              App { f; args; generic_args; bounds_impls; trait }\n          | _ ->\n              App\n                { f; args; generic_args = []; bounds_impls = []; trait = None })\n      | Box { value } ->\n          (U.call Rust_primitives__hax__box_new [ c_expr value ] span typ).e\n      | Deref { arg } ->\n          let inner_typ = c_ty arg.span arg.ty in\n          call (mk_global ([ inner_typ ] ->. typ) @@ `Primitive Deref) [ arg ]\n      | Binary { lhs; rhs; op } ->\n          (c_binop op (c_expr lhs) (c_expr rhs) span typ).e\n      | LogicalOp { lhs; rhs; op } ->\n          let lhs_type = c_ty lhs.span lhs.ty in\n          let rhs_type = c_ty rhs.span rhs.ty in\n          call\n            (mk_global ([ lhs_type; rhs_type ] ->. typ)\n            @@ `Primitive (LogicalOp (c_logical_op op)))\n            [ lhs; rhs ]\n      | Unary { arg; op } ->\n          (U.call\n             (match op with\n             | Not -> Core__ops__bit__Not__not\n             | Neg -> Core__ops__arith__Neg__neg\n             | PtrMetadata ->\n                 assertion_failure (Span.to_thir span)\n                   \"Unsupported unary operator: `PtrMetadata`\")\n             [ c_expr arg ]\n             span typ)\n            .e\n      | Cast { source } -> (\n          let source_type = c_ty source.span source.ty in\n          match source_type with\n          (* Each inductive defines a cast function *)\n          | TApp { ident = `Concrete ident; _ } ->\n              (U.call'\n                 (`Concrete (cast_name_for_type ident))\n                 [ c_expr source ]\n                 span typ)\n                .e\n          | _ ->\n              call\n                (mk_global ([ source_type ] ->. 
typ) @@ `Primitive Cast)\n                [ source ])\n      | Use { source } -> (c_expr source).e\n      | NeverToAny { source } ->\n          (U.call Rust_primitives__hax__never_to_any [ c_expr source ] span typ)\n            .e\n      (* TODO: this is incorrect (NeverToAny) *)\n      | PointerCoercion { cast; source } -> c_pointer e typ span cast source\n      | Loop { body } ->\n          let body = c_expr body in\n          Loop\n            {\n              body;\n              kind = UnconditionalLoop;\n              state = None;\n              label = None;\n              witness = W.loop;\n              control_flow = None;\n            }\n      | Match { scrutinee; arms } ->\n          let scrutinee = c_expr scrutinee in\n          let arms = List.map ~f:c_arm arms in\n          Match { scrutinee; arms }\n      | Let _ ->\n          unimplemented ~issue_id:2018 [ e.span ]\n            \"Let-chains (e.g. `if let .. && let ..`) are not supported.\"\n      | Block { expr; span; stmts; safety_mode; _ } ->\n          let { e; _ } = c_block ~expr ~span ~stmts ~ty:e.ty ~safety_mode in\n          e\n      | Assign { lhs; rhs } ->\n          let lhs = c_expr lhs in\n          let rhs = c_expr rhs in\n          c_expr_assign lhs rhs\n      | AssignOp { lhs; op; rhs } ->\n          let lhs = c_expr lhs in\n          c_expr_assign lhs\n          @@ c_binop (binop_of_assignop op) lhs (c_expr rhs) span lhs.typ\n      | VarRef { id } -> LocalVar (local_ident Expr id)\n      | Field { lhs; field } ->\n          let lhs = c_expr lhs in\n          let projector =\n            GlobalVar\n              (`Projector\n                 (`Concrete (Concrete_ident.of_def_id ~value:true field)))\n          in\n          let span = Span.of_thir e.span in\n          App\n            {\n              f = { e = projector; typ = TArrow ([ lhs.typ ], typ); span };\n              args = [ lhs ];\n              generic_args = [];\n              trait = None;\n              bounds_impls = 
[];\n            }\n      | TupleField { lhs; field } ->\n          (* TODO: refactor *)\n          let lhs = c_expr lhs in\n          let tuple_len =\n            match lhs.typ with\n            | TApp { ident = `TupleType len; _ } -> len\n            | _ ->\n                assertion_failure [ e.span ]\n                  \"LHS of tuple field projection is not typed as a tuple.\"\n          in\n          let projector =\n            GlobalVar\n              (`Projector (`TupleField (Int.of_string field, tuple_len)))\n          in\n          let span = Span.of_thir e.span in\n          App\n            {\n              f = { e = projector; typ = TArrow ([ lhs.typ ], typ); span };\n              args = [ lhs ];\n              generic_args = [];\n              trait = None;\n              bounds_impls = [];\n            }\n      | GlobalName { item = { value = { def_id = id; _ }; _ }; constructor = _ }\n        ->\n          GlobalVar (def_id ~value:true id)\n      | UpvarRef { var_hir_id = id; _ } -> LocalVar (local_ident Expr id)\n      | Borrow { arg; borrow_kind = kind } ->\n          let e' = c_expr arg in\n          let kind = c_borrow_kind e.span kind in\n          Borrow { kind; e = e'; witness = W.reference }\n      | RawBorrow { arg; mutability = mut } ->\n          let e = c_expr arg in\n          AddressOf\n            {\n              e;\n              mut = c_mutability W.mutable_pointer mut;\n              witness = W.raw_pointer;\n            }\n      | Break { value; _ } ->\n          (* TODO: labels! 
*)\n          let e = Option.map ~f:c_expr value in\n          let e = Option.value ~default:(unit_expr span) e in\n          Break { e; acc = None; label = None; witness = (W.break, W.loop) }\n      | Continue _ ->\n          Continue { acc = None; label = None; witness = (W.continue, W.loop) }\n      | Return { value } ->\n          let e = Option.map ~f:c_expr value in\n          let e = Option.value ~default:(unit_expr span) e in\n          Return { e; witness = W.early_exit }\n      | ConstBlock _ -> unimplemented ~issue_id:923 [ e.span ] \"ConstBlock\"\n      | ConstParam { param = id; _ } (* TODO: shadowing? *) | ConstRef { id } ->\n          LocalVar\n            {\n              name = id.name;\n              id =\n                Local_ident.mk_id Cnst\n                  (MyInt64.to_int id.index\n                  |> Option.value_or_thunk ~default:(fun _ ->\n                         assertion_failure [ e.span ]\n                           \"Expected const id to fit in an OCaml native int\"));\n            }\n      | Repeat { value; count } ->\n          let value = c_expr value in\n          let count = c_constant_expr count in\n          let inner =\n            U.call Rust_primitives__hax__repeat [ value; count ] span typ\n          in\n          (U.call Alloc__boxed__Impl__new [ inner ] span typ).e\n      | Tuple { fields } ->\n          (U.make_tuple_expr' ~span @@ List.map ~f:c_expr fields).e\n      | Array { fields } -> Array (List.map ~f:c_expr fields)\n      | Adt { info; base; fields; _ } ->\n          let is_struct, is_record =\n            match info.kind with\n            | Struct { named } -> (true, named)\n            | Enum { named; _ } -> (false, named)\n            | Union ->\n                unimplemented ~issue_id:998 [ e.span ]\n                  \"Construct union types: not supported\"\n          in\n          let constructor = def_id ~value:true info.variant in\n          let base =\n            match base with\n            | None' 
-> None\n            | Base base -> Some (c_expr base.base, W.construct_base)\n            | DefaultFields _ ->\n                unimplemented ~issue_id:1386 [ e.span ]\n                  \"Default field values: not supported\"\n          in\n          let fields =\n            List.map\n              ~f:(fun f ->\n                let field = def_id ~value:true f.field in\n                let value = c_expr f.value in\n                (field, value))\n              fields\n          in\n          Construct { is_record; is_struct; constructor; fields; base }\n      | Literal { lit; neg; _ } -> (\n          match c_lit e.span neg lit typ with\n          | EL_Lit lit -> Literal lit\n          | EL_U8Array l ->\n              Array\n                (List.map\n                   ~f:(fun lit ->\n                     {\n                       e = Literal lit;\n                       span;\n                       typ = TInt { size = S8; signedness = Unsigned };\n                     })\n                   l))\n      | NamedConst\n          {\n            item =\n              { value = { def_id = id; generic_args; in_trait = impl; _ }; _ };\n            _;\n          } ->\n          let f = GlobalVar (def_id ~value:true id) in\n          let args = List.map ~f:(c_generic_value e.span) generic_args in\n          let const_args =\n            List.filter_map args ~f:(function GConst e -> Some e | _ -> None)\n          in\n          if List.is_empty const_args && Option.is_none impl then f\n          else\n            let f =\n              {\n                e = f;\n                span;\n                typ = TArrow (List.map const_args ~f:(fun e -> e.typ), typ);\n              }\n            in\n            let trait =\n              Option.map impl ~f:(c_impl_expr e.span &&& Fn.const args)\n            in\n            App\n              {\n                f;\n                trait;\n                args = const_args;\n                generic_args = [];\n                
bounds_impls = [];\n              }\n      | Closure { body; params; upvars; _ } ->\n          let params =\n            List.filter_map ~f:(fun p -> Option.map ~f:c_pat p.pat) params\n          in\n          let params =\n            if List.is_empty params then\n              [ U.M.pat_PWild ~typ:U.M.ty_unit ~span ]\n            else params\n          in\n          let body = c_expr body in\n          let upvars = List.map ~f:c_expr upvars in\n          Closure { body; params; captures = upvars }\n      | Index { index; lhs } ->\n          let index_type = c_ty index.span index.ty in\n          let lhs_type = c_ty lhs.span lhs.ty in\n          call\n            (mk_global ([ lhs_type; index_type ] ->. typ)\n            @@ Global_ident.of_name ~value:true Core__ops__index__Index__index)\n            [ lhs; index ]\n      | StaticRef { def_id = id; _ } -> GlobalVar (def_id ~value:true id)\n      | PlaceTypeAscription _ ->\n          assertion_failure [ e.span ]\n            \"Got a unexpected node `PlaceTypeAscription`. 
Please report, we \\\n             were not able to figure out an expression yielding that node: a \\\n             bug report would be very valuable here!\"\n      | ValueTypeAscription { source; _ } -> (c_expr source).e\n      | ZstLiteral _ ->\n          assertion_failure [ e.span ]\n            \"`ZstLiteral` are expected to be handled before-hand\"\n      | Yield _ ->\n          unimplemented ~issue_id:924 [ e.span ]\n            \"Got expression `Yield`: coroutines are not supported by hax\"\n      | Todo payload ->\n          assertion_failure [ e.span ] (\"expression Todo\\n\" ^ payload)\n    in\n    { e = v; span; typ }\n\n  and c_lhs lhs =\n    match lhs.e with\n    | LocalVar var -> LhsLocalVar { var; typ = lhs.typ }\n    | _ -> (\n        match resugar_index_mut lhs with\n        | Some (e, index) ->\n            LhsArrayAccessor\n              { e = c_lhs e; typ = lhs.typ; index; witness = W.nontrivial_lhs }\n        | None -> (\n            match (U.unbox_underef_expr lhs).e with\n            | App\n                {\n                  f =\n                    {\n                      e = GlobalVar (`Projector _ as field);\n                      typ = TArrow ([ _ ], _);\n                      span = _;\n                    };\n                  args = [ e ];\n                  generic_args = _;\n                  trait = _;\n                  bounds_impls = _;\n                (* TODO: see issue #328 *)\n                } ->\n                LhsFieldAccessor\n                  {\n                    e = c_lhs e;\n                    typ = lhs.typ;\n                    field;\n                    witness = W.nontrivial_lhs;\n                  }\n            | _ -> LhsArbitraryExpr { e = lhs; witness = W.arbitrary_lhs }))\n\n  and c_expr_assign lhs rhs =\n    Assign { lhs = c_lhs lhs; e = rhs; witness = W.mutable_variable }\n\n  and c_constant_expr (ce : Thir.decorated_for__constant_expr_kind) : expr =\n    let rec constant_expr_to_expr (ce : 
Thir.decorated_for__constant_expr_kind)\n        : Thir.decorated_for__expr_kind =\n      {\n        attributes = ce.attributes;\n        contents = constant_expr_kind_to_expr_kind ce.contents ce.span;\n        hir_id = ce.hir_id;\n        span = ce.span;\n        ty = ce.ty;\n      }\n    and constant_expr_kind_to_expr_kind (ce : Thir.constant_expr_kind) span :\n        Thir.expr_kind =\n      match ce with\n      | Literal lit ->\n          let lit, neg = constant_lit_to_lit lit span in\n          Literal { lit = { node = lit; span }; neg }\n      | Adt { fields; info } ->\n          let fields = List.map ~f:constant_field_expr fields in\n          Adt { fields; info; base = None'; user_ty = None }\n      | Array { fields } ->\n          Array { fields = List.map ~f:constant_expr_to_expr fields }\n      | Tuple { fields } ->\n          Tuple { fields = List.map ~f:constant_expr_to_expr fields }\n      | GlobalName item -> GlobalName { item; constructor = None }\n      | Borrow arg ->\n          Borrow { arg = constant_expr_to_expr arg; borrow_kind = Thir.Shared }\n      | ConstRef { id } -> ConstRef { id }\n      | Cast _ | RawBorrow _ | TraitConst _ | FnPtr _ | Memory _ ->\n          assertion_failure [ span ]\n            \"constant_lit_to_lit: TraitConst | FnPtr | RawBorrow | Cast | \\\n             Memory\"\n      | Todo _ -> assertion_failure [ span ] \"ConstantExpr::Todo\"\n    and constant_lit_to_lit (l : Thir.constant_literal) span :\n        Thir.lit_kind * bool =\n      match l with\n      | Bool v -> (Bool v, false)\n      | Char v -> (Char v, false)\n      | Int (Int (v, ty)) -> (\n          match String.chop_prefix v ~prefix:\"-\" with\n          | Some v -> (Int (v, Signed ty), true)\n          | None -> (Int (v, Signed ty), false))\n      | Int (Uint (v, ty)) -> (Int (v, Unsigned ty), false)\n      | Float (v, ty) -> (\n          match String.chop_prefix v ~prefix:\"-\" with\n          | Some v -> (Float (v, Suffixed ty), true)\n          | None -> 
(Float (v, Suffixed ty), false))\n      | Str v -> (Str (v, Cooked), false)\n      | ByteStr v -> (ByteStr (v, Cooked), false)\n      | PtrNoProvenance _ ->\n          assertion_failure [ span ] \"constant_lit_to_lit: PtrNoProvenance\"\n    and constant_field_expr ({ field; value } : Thir.constant_field_expr) :\n        Thir.field_expr =\n      { field; value = constant_expr_to_expr value }\n    in\n    c_expr (constant_expr_to_expr ce)\n\n  and c_pat (pat : Thir.decorated_for__pat_kind) : pat =\n    let span = Span.of_thir pat.span in\n    let typ = c_ty pat.span pat.ty in\n    let v =\n      match pat.contents with\n      | Wild | Missing -> PWild\n      | AscribeUserType { ascription = { annotation; _ }; subpattern } ->\n          let typ, typ_span = c_canonical_user_type_annotation annotation in\n          let pat = c_pat subpattern in\n          PAscription { typ; typ_span; pat }\n      | Binding { mode; subpattern; ty; var; _ } ->\n          let mut = c_mutability W.mutable_variable mode.mutability in\n          let subpat =\n            Option.map ~f:(c_pat &&& Fn.const W.as_pattern) subpattern\n          in\n          let typ = c_ty pat.span ty in\n          let mode = c_binding_mode mode.by_ref in\n          let var = local_ident Expr var in\n          PBinding { mut; mode; var; typ; subpat }\n      | Variant { info; subpatterns; _ } ->\n          let is_struct, is_record =\n            match info.kind with\n            | Struct { named } -> (true, named)\n            | Enum { named; _ } -> (false, named)\n            | Union ->\n                unimplemented ~issue_id:998 [ pat.span ]\n                  \"Pattern match on union types: not supported\"\n          in\n          let constructor = def_id ~value:true info.variant in\n          let fields = List.map ~f:(c_field_pat info) subpatterns in\n          PConstruct { constructor; fields; is_record; is_struct }\n      | Tuple { subpatterns } ->\n          (List.map ~f:c_pat subpatterns |> 
U.make_tuple_pat').p\n      | Deref { subpattern } ->\n          PDeref { subpat = c_pat subpattern; witness = W.reference }\n      | Constant { value } ->\n          let rec pat_of_expr (e : expr) =\n            { p = pat'_of_expr' e.e e.span; span = e.span; typ = e.typ }\n          and pat'_of_expr' (e : expr') span =\n            match e with\n            | Literal lit -> PConstant { lit }\n            | Array l -> PArray { args = List.map ~f:pat_of_expr l }\n            | Borrow { kind = _; e; witness } ->\n                PDeref { subpat = pat_of_expr e; witness }\n            | _ ->\n                assertion_failure (Span.to_thir span)\n                  (\"expected a pattern, got \" ^ [%show: expr'] e)\n          in\n          (c_constant_expr value |> pat_of_expr).p\n      | ExpandedConstant { subpattern; _ } -> (c_pat subpattern).p\n      | Array _ -> unimplemented ~issue_id:804 [ pat.span ] \"Pat:Array\"\n      | Or { pats } -> POr { subpats = List.map ~f:c_pat pats }\n      | Slice _ -> unimplemented ~issue_id:804 [ pat.span ] \"pat Slice\"\n      | Range _ -> unimplemented ~issue_id:925 [ pat.span ] \"pat Range\"\n      | DerefPattern _ ->\n          unimplemented ~issue_id:926 [ pat.span ] \"pat DerefPattern\"\n      | Never -> unimplemented ~issue_id:927 [ pat.span ] \"pat Never\"\n      | Error _ ->\n          assertion_failure [ pat.span ]\n            \"`Error` node: Rust compilation failed. 
If Rust compilation was \\\n             fine, please file an issue.\"\n    in\n    { p = v; span; typ }\n\n  and c_field_pat _info (field_pat : Thir.field_pat) : field_pat =\n    {\n      field = def_id ~value:true field_pat.field;\n      pat = c_pat field_pat.pattern;\n    }\n\n  and extended_literal_of_expr (e : expr) : extended_literal =\n    let not_a_literal () =\n      assertion_failure (Span.to_thir e.span)\n        (\"expected a literal, got \" ^ [%show: expr] e)\n    in\n    match e.e with\n    | Literal lit -> EL_Lit lit\n    | Array lits ->\n        EL_U8Array\n          (List.map\n             ~f:(function\n               | {\n                   e =\n                     Literal\n                       (Int { kind = { size = S8; signedness = Unsigned }; _ }\n                        as lit);\n                   _;\n                 } ->\n                   lit\n               | _ -> not_a_literal ())\n             lits)\n    | _ -> not_a_literal ()\n\n  and c_canonical_user_type_annotation\n      (annotation : Thir.canonical_user_type_annotation) : ty * span =\n    (c_ty annotation.span annotation.inferred_ty, Span.of_thir annotation.span)\n\n  and c_pointer e typ span cast source =\n    match cast with\n    | ClosureFnPointer Safe | ReifyFnPointer ->\n        (* we have arrow types, we do not distinguish between top-level functions and closures *)\n        (c_expr source).e\n    | Unsize _ ->\n        (* https://doc.rust-lang.org/std/marker/trait.Unsize.html *)\n        (U.call Rust_primitives__unsize [ c_expr source ] span typ).e\n        (* let source = c_expr source in *)\n        (* let from_typ = source.typ in *)\n        (* let to_typ = typ in *)\n        (* match (U.Box.Ty.destruct from_typ, U.Box.Ty.destruct to_typ) with *)\n        (* | Some _from_typ, Some to_typ -> ( *)\n        (*     match U.Box.Expr.destruct source with *)\n        (*     | Some source -> *)\n        (*         (U.Box.Expr.make *)\n        (*         @@ U.call \"dummy\" 
\"unsize_cast\" [] [ source ] span to_typ) *)\n        (*           .e *)\n        (*     | _ -> *)\n        (*         unimplemented e.span *)\n        (*           \"[Pointer(Unsize)] cast from not directly boxed expression\") *)\n        (* | _ -> *)\n        (*     unimplemented e.span *)\n        (*       (\"[Pointer(Unsize)] cast\\n • from type [\" *)\n        (*       ^ [%show: ty] from_typ *)\n        (*       ^ \"]\\n • to type [\" *)\n        (*       ^ [%show: ty] to_typ *)\n        (*       ^ \"]\\n\\nThe expression is: \" *)\n        (*       ^ [%show: expr] source)) *)\n    | _ ->\n        assertion_failure [ e.span ]\n          (\"Pointer, with [cast] being \" ^ [%show: Thir.pointer_coercion] cast)\n\n  and c_ty (span : Thir.span) (ty : Thir.ty) : ty =\n    match ty.value with\n    | Bool -> TBool\n    | Char -> TChar\n    | Int k -> TInt (c_int_ty k)\n    | Uint k -> TInt (c_uint_ty k)\n    | Float k ->\n        TFloat\n          (match k with F16 -> F16 | F32 -> F32 | F64 -> F64 | F128 -> F128)\n    | Arrow fn_sig | Closure { fn_sig; _ } | FnDef { fn_sig; _ } ->\n        let ({ inputs; output; _ } : Thir.ty_fn_sig) = fn_sig.value in\n        let inputs =\n          if List.is_empty inputs then [ U.unit_typ ]\n          else List.map ~f:(c_ty span) inputs\n        in\n        TArrow (inputs, c_ty span output)\n    | Adt { value = { def_id = id; generic_args; _ }; _ } ->\n        let ident = def_id ~value:false id in\n        let args = List.map ~f:(c_generic_value span) generic_args in\n        TApp { ident; args }\n    | Foreign _ -> unimplemented ~issue_id:928 [ span ] \"Foreign\"\n    | Str -> TStr\n    | Array item_ref ->\n        let ty, len =\n          match item_ref.value.generic_args with\n          | [ Type ty; Const len ] -> (ty, len)\n          | _ ->\n              assertion_failure [ span ]\n                \"Wrong generics for array: expected a type and a constant. 
See \\\n                 synthetic_items in hax frontend.\"\n        in\n        TArray { typ = c_ty span ty; length = c_constant_expr len }\n    | Slice item_ref ->\n        let ty =\n          match item_ref.value.generic_args with\n          | [ Type ty ] -> ty\n          | _ ->\n              assertion_failure [ span ]\n                \"Wrong generics for slice: expected a type. See \\\n                 synthetic_items in hax frontend.\"\n        in\n        let ty = c_ty span ty in\n        TSlice { ty; witness = W.slice }\n    | RawPtr _ -> TRawPointer { witness = W.raw_pointer }\n    | Ref (_region, ty, mut) ->\n        let typ = c_ty span ty in\n        let mut = c_mutability W.mutable_reference mut in\n        TRef { witness = W.reference; region = \"todo\"; typ; mut }\n    | Never -> U.never_typ\n    | Tuple item_ref ->\n        let types =\n          List.map\n            ~f:(function Types.Type ty -> Some ty | _ -> None)\n            item_ref.value.generic_args\n          |> Option.all\n          |> Option.value_or_thunk ~default:(fun _ ->\n                 assertion_failure [ span ]\n                   \"Wrong generics for slice: expected a type. 
See \\\n                    synthetic_items in hax frontend.\")\n        in\n        let types = List.map ~f:(fun ty -> GType (c_ty span ty)) types in\n        TApp { ident = `TupleType (List.length types); args = types }\n    | Alias { kind = Projection { assoc_item = _; impl_expr }; def_id; _ } ->\n        let impl = c_impl_expr span impl_expr in\n        let item = Concrete_ident.of_def_id ~value:false def_id in\n        TAssociatedType { impl; item }\n    | Alias { kind = Opaque _; def_id; _ } ->\n        TOpaque (Concrete_ident.of_def_id ~value:false def_id)\n    | Alias { kind = Inherent; _ } ->\n        assertion_failure [ span ] \"Ty::Alias with AliasTyKind::Inherent\"\n    | Alias { kind = Free; _ } ->\n        assertion_failure [ span ] \"Ty::Alias with AliasTyKind::Free\"\n    | Param { index; name } ->\n        (* TODO: [id] might not unique *)\n        TParam\n          {\n            name;\n            id =\n              Local_ident.mk_id Typ\n                (MyInt64.to_int index\n                |> Option.value_or_thunk ~default:(fun _ ->\n                       assertion_failure [ span ]\n                         \"Expected param id to fit in an OCaml native int\"));\n          }\n    | Error ->\n        assertion_failure [ span ]\n          \"got type `Error`: Rust compilation probably failed.\"\n    | Dynamic (_, predicates, _region) -> (\n        let goals, non_traits =\n          List.partition_map\n            ~f:(fun ((clause, _span) : Types.clause * _) ->\n              match clause.kind.value with\n              | Trait { trait_ref; _ } ->\n                  let goal : dyn_trait_goal =\n                    {\n                      trait =\n                        Concrete_ident.of_def_id ~value:false\n                          trait_ref.value.def_id;\n                      non_self_args =\n                        List.map ~f:(c_generic_value span)\n                          (List.tl_exn trait_ref.value.generic_args);\n                    
}\n                  in\n                  First goal\n              | _ -> Second ())\n            predicates.predicates\n        in\n        match non_traits with\n        | [] -> TDyn { witness = W.dyn; goals }\n        | _ -> assertion_failure [ span ] \"type Dyn with non trait predicate\")\n    | Coroutine _ ->\n        unimplemented ~issue_id:924 [ span ]\n          \"Got type `Coroutine`: coroutines are not supported by hax\"\n    | Placeholder _ ->\n        assertion_failure [ span ]\n          \"type Placeholder: should be gone after typechecking\"\n    | Bound _ ->\n        assertion_failure [ span ]\n          \"type Bound: should be gone after typechecking\"\n    | Infer _ ->\n        assertion_failure [ span ]\n          \"type Infer: should be gone after typechecking\"\n    | Todo _ -> assertion_failure [ span ] \"type Todo\"\n  (* fun _ -> Ok Bool *)\n\n  and c_impl_expr (span : Thir.span) (ie : Thir.impl_expr) : impl_expr =\n    let goal = c_trait_ref span ie.trait.value in\n    let impl = { kind = c_impl_expr_atom span ie.impl goal; goal } in\n    match ie.impl with\n    | Concrete { value = { impl_exprs = []; _ }; _ } -> impl\n    | Concrete { value = { impl_exprs; _ }; _ } ->\n        let args = List.map ~f:(c_impl_expr span) impl_exprs in\n        { kind = ImplApp { impl; args }; goal }\n    | _ -> impl\n\n  and c_trait_ref span (tr : Thir.trait_ref) : trait_goal =\n    let trait = Concrete_ident.of_def_id ~value:false tr.value.def_id in\n    let args = List.map ~f:(c_generic_value span) tr.value.generic_args in\n    { trait; args }\n\n  and c_impl_expr_atom (span : Thir.span) (ie : Thir.impl_expr_atom) goal :\n      impl_expr_kind =\n    let browse_path (item_kind : impl_expr_kind)\n        (chunk : Thir.impl_expr_path_chunk) =\n      match chunk with\n      | AssocItem\n          { item; predicate = { value = { trait_ref; _ }; _ }; index; _ } ->\n          let ident =\n            { goal = c_trait_ref span trait_ref; name = \"i\" ^ index }\n   
       in\n          let item = Concrete_ident.of_def_id ~value:false item.value.def_id in\n          let trait_ref = c_trait_ref span trait_ref in\n          Projection\n            { impl = { kind = item_kind; goal = trait_ref }; ident; item }\n      | Parent { predicate = { value = { trait_ref; _ }; _ }; index; _ } ->\n          let ident =\n            { goal = c_trait_ref span trait_ref; name = \"i\" ^ index }\n          in\n          let trait_ref = c_trait_ref span trait_ref in\n          Parent { impl = { kind = item_kind; goal = trait_ref }; ident }\n    in\n    match ie with\n    | Concrete { value = { def_id; generic_args; _ }; _ } ->\n        let trait = Concrete_ident.of_def_id ~value:false def_id in\n        let args = List.map ~f:(c_generic_value span) generic_args in\n        Concrete { trait; args }\n    | LocalBound { index; path; _ } ->\n        let init = LocalBound { id = \"i\" ^ index } in\n        List.fold ~init ~f:browse_path path\n    | Dyn -> Dyn\n    | SelfImpl { path; _ } -> List.fold ~init:Self ~f:browse_path path\n    | Builtin _ -> Builtin goal\n    | Error str ->\n        unimplemented ~issue_id:707 [ span ]\n          (\"Could not resolve trait reference: \" ^ str)\n\n  and c_generic_value (span : Thir.span) (ty : Thir.generic_arg) : generic_value\n      =\n    match ty with\n    | Type ty -> GType (c_ty span ty)\n    | Const e -> GConst (c_constant_expr e)\n    | _ -> GLifetime { lt = \"todo generics\"; witness = W.lifetime }\n\n  and c_arm (arm : Thir.arm) : arm =\n    let arm_pat = c_pat arm.pattern in\n    let body = c_expr arm.body in\n    let span = Span.of_thir arm.span in\n    let guard =\n      Option.map\n        ~f:(fun (e : Thir.decorated_for__expr_kind) ->\n          let guard =\n            match e.contents with\n            | Let { expr; pat } ->\n                IfLet\n                  {\n                    lhs = c_pat pat;\n                    rhs = c_expr expr;\n                    witness = W.match_guard;\n     
             }\n            | _ ->\n                IfLet\n                  {\n                    lhs =\n                      { p = PConstant { lit = Bool true }; span; typ = TBool };\n                    rhs = c_expr e;\n                    witness = W.match_guard;\n                  }\n          in\n          { guard; span = Span.of_thir e.span })\n        arm.guard\n    in\n    { arm = { arm_pat; body; guard }; span }\n\n  and c_param span (param : Thir.param) : param =\n    {\n      typ_span = Option.map ~f:Span.of_thir param.ty_span;\n      typ = c_ty (Option.value ~default:span param.ty_span) param.ty;\n      pat =\n        c_pat\n          (Option.value_or_thunk param.pat ~default:(fun _ ->\n               assertion_failure [ span ]\n                 \"c_param: expected param.pat to be non-empty\"));\n      attrs = c_attrs param.attributes;\n    }\n\n  let c_fn_params span (params : Thir.param list) : param list =\n    if List.is_empty params then [ U.make_unit_param (Span.of_thir span) ]\n    else List.map ~f:(c_param span) params\n\n  let c_generic_param (param : Thir.generic_param) : generic_param =\n    let ident =\n      let kind =\n        match (param.kind : Thir.generic_param_kind) with\n        | Lifetime _ -> Local_ident.LILifetime\n        | Type _ -> Local_ident.Typ\n        | Const _ -> Local_ident.Cnst\n      in\n      match param.name with\n      | Fresh ->\n          (* fail with (\"[Fresh] ident? \" ^ Thir.show_generic_param param) *)\n          (* TODO might be wrong to just have a wildcard here *)\n          ({ name = \"_\"; id = Local_ident.mk_id kind 123 } : local_ident)\n      | Error -> assertion_failure [ param.span ] \"[Error] ident\"\n      | Plain n -> local_ident kind n\n    in\n    let kind =\n      match (param.kind : Thir.generic_param_kind) with\n      | Lifetime _ -> GPLifetime { witness = W.lifetime }\n      | Type _ -> GPType\n      (* Rustc always fills in const generics on use. Thus we can drop this information. 
*)\n      | Const { default = _; ty } -> GPConst { typ = c_ty param.span ty }\n    in\n    let span = Span.of_thir param.span in\n    let attrs = c_attrs param.attributes in\n    { ident; span; attrs; kind }\n\n  let c_clause_kind span id (kind : Thir.clause_kind) :\n      generic_constraint option =\n    match kind with\n    | Trait { is_positive = true; trait_ref } ->\n        let args =\n          List.map ~f:(c_generic_value span) trait_ref.value.generic_args\n        in\n        let trait =\n          Concrete_ident.of_def_id ~value:false trait_ref.value.def_id\n        in\n        Some (GCType { goal = { trait; args }; name = \"i\" ^ Int.to_string id })\n    | Projection { impl_expr; assoc_item; ty } ->\n        let impl = c_impl_expr span impl_expr in\n        let assoc_item =\n          Concrete_ident.of_def_id ~value:false assoc_item.def_id\n        in\n        let typ = c_ty span ty in\n        Some (GCProjection { impl; assoc_item; typ })\n    | _ -> None\n\n  let c_clause span (index : int) (p : Thir.clause) : generic_constraint option\n      =\n    let ({ kind; _ } : Thir.clause) = p in\n    c_clause_kind span index kind.value\n\n  let list_dedup (equal : 'a -> 'a -> bool) : 'a list -> 'a list =\n    let rec aux (seen : 'a list) (todo : 'a list) : 'a list =\n      match todo with\n      | hd :: tl ->\n          if List.mem ~equal seen hd then aux seen tl\n          else hd :: aux (hd :: seen) tl\n      | _ -> todo\n    in\n    aux []\n\n  let c_bounds ?(offset : int = 0) span bounds =\n    List.fold_left ~init:(offset, [])\n      ~f:(fun (i, clauses) c ->\n        match c_clause span i c with\n        | Some (GCType _ as c) -> (i + 1, c :: clauses)\n        | Some c -> (i, c :: clauses)\n        | None -> (i, clauses))\n      bounds\n    |> snd |> List.rev\n\n  let c_generics ?(offset : int = 0) (generics : Thir.generics) : generics =\n    let bounds = c_bounds ~offset generics.span generics.bounds in\n    {\n      params = List.map ~f:c_generic_param 
generics.params;\n      constraints = bounds |> list_dedup equal_generic_constraint;\n    }\n\n  let c_trait_item' (super : Thir.trait_item) (item : Thir.trait_item_kind) :\n      trait_item' =\n    let span = super.span in\n    match item with\n    | Const (_, Some default) ->\n        TIDefault\n          {\n            params = [];\n            body = c_expr default.expr;\n            witness = W.trait_item_default;\n          }\n    | Const (ty, None) -> TIFn (c_ty span ty)\n    | RequiredFn (sg, _) ->\n        let (Thir.{ inputs; output; _ } : Thir.fn_decl) = sg.decl in\n        let output =\n          match output with\n          | DefaultReturn _span -> unit_typ\n          | Return ty -> c_ty span ty\n        in\n        let inputs =\n          if List.is_empty inputs then [ U.unit_typ ]\n          else List.map ~f:(c_ty span) inputs\n        in\n        TIFn (TArrow (inputs, output))\n    | ProvidedFn (_, { params; body; _ }) ->\n        TIDefault\n          {\n            params = c_fn_params span params;\n            body = c_expr body.expr;\n            witness = W.trait_item_default;\n          }\n    | Type (bounds, None) ->\n        let bounds =\n          c_bounds span bounds\n          |> List.filter_map ~f:(fun bound ->\n                 match bound with GCType impl -> Some impl | _ -> None)\n        in\n        TIType bounds\n    | Type (_, Some _) ->\n        unimplemented ~issue_id:929 [ span ]\n          \"Associated types defaults are not supported by hax yet (it is a \\\n           nightly feature)\"\nend\n\ninclude struct\n  open Make (struct\n    let is_core_item = false\n  end)\n\n  let import_ty : Types.span -> Types.node_for__ty_kind -> Ast.Rust.ty = c_ty\n\n  let import_trait_ref : Types.span -> Thir.trait_ref -> Ast.Rust.trait_goal =\n    c_trait_ref\n\n  let import_clause :\n      Types.span -> int -> Types.clause -> Ast.Rust.generic_constraint option =\n    c_clause\nend\n\n(** Instantiate the functor for translating expressions. 
The crate name can be\n    configured (there are special handling related to `core`) *)\nlet make ~krate : (module EXPR) =\n  let is_core_item = String.(krate = \"core\" || krate = \"core_hax_model\") in\n  let module M : EXPR = Make (struct\n    let is_core_item = is_core_item\n  end) in\n  (module M)\n\nlet c_trait_item (item : Thir.trait_item) : trait_item =\n  let open (val make ~krate:item.owner_id.contents.value.krate : EXPR) in\n  let { params; constraints } = c_generics ~offset:1 item.generics in\n  (* TODO: see TODO in impl items *)\n  let ti_ident = Concrete_ident.of_def_id ~value:false item.owner_id in\n  {\n    ti_span = Span.of_thir item.span;\n    ti_generics = { params; constraints };\n    ti_v = c_trait_item' item item.kind;\n    ti_ident;\n    ti_attrs = c_item_attrs item.attributes;\n  }\n\nlet is_automatically_derived (attrs : Thir.attribute list) =\n  List.exists (* We need something better here, see issue #108 *)\n    ~f:(function\n      (* This will break once these attributes get properly parsed. It will\n          then be very easy to parse them correctly *)\n      | Parsed (AutomaticallyDerived _) -> true\n      | _ -> false)\n    attrs\n\nlet should_skip (attrs : Thir.item_attributes) =\n  let attrs = attrs.attributes @ attrs.parent_attributes in\n  is_automatically_derived attrs\n\n(** Converts a generic parameter to a generic value. This assumes the parameter\n    is bound. *)\nlet generic_param_to_value ({ ident; kind; span; _ } : generic_param) :\n    generic_value =\n  match kind with\n  | GPLifetime { witness } ->\n      GLifetime { lt = [%show: local_ident] ident; witness }\n  | GPType -> GType (TParam ident)\n  | GPConst { typ } -> GConst { e = LocalVar ident; typ; span }\n\n(** Generate a cast function from an inductive to its represantant type. 
*)\nlet cast_of_enum typ_name generics typ thir_span\n    (variants : (variant * Types.variant_for__thir_body) list) : item =\n  let span = Span.of_thir thir_span in\n  let (module M) = Ast_builder.make span in\n  let self =\n    let args = List.map ~f:generic_param_to_value generics.params in\n    TApp { ident = `Concrete typ_name; args }\n  in\n  let expr_of_int (n : Int64.t) : expr =\n    let kind =\n      match typ with\n      | TInt kind -> kind\n      | typ ->\n          assertion_failure [ thir_span ]\n            (\"cast_of_enum: expected in type, got \" ^ [%show: ty] typ)\n    in\n    let value = Int64.to_string n in\n    M.expr_Literal ~typ (Int { value; negative = Int64.is_negative n; kind })\n  in\n  let arms =\n    (* Each variant comes with a [rustc_middle::ty::VariantDiscr]. Some variant have [Explicit] discr (i.e. an expression)\n       while other have [Relative] discr (the distance to the previous last explicit discr). *)\n    List.folding_map variants ~init:None\n      ~f:(fun previous_explicit_discriminator (variant, { discr; _ }) ->\n        let pat =\n          let mk_wild_field (cid, typ, _) =\n            { field = `Concrete cid; pat = M.pat_PWild ~typ }\n          in\n          M.pat_PConstruct ~constructor:(`Concrete variant.name)\n            ~is_struct:false ~typ ~is_record:variant.is_record\n            ~fields:(List.map ~f:mk_wild_field variant.arguments)\n        in\n        match (previous_explicit_discriminator, discr) with\n        | None, Relative m -> (None, (pat, expr_of_int m))\n        | _, Explicit { def_id = did; _ } ->\n            let e = M.expr_GlobalVar ~typ (def_id ~value:true did) in\n            (Some e, (pat, e))\n        | Some e, Relative n ->\n            let n = expr_of_int n in\n            let e = U.call Core__ops__arith__Add__add [ e; n ] span typ in\n            (previous_explicit_discriminator, (pat, e)))\n    |> List.map ~f:(fun (p, e) -> M.arm p e)\n  in\n  let scrutinee_var = Local_ident.{ name = \"x\"; 
id = mk_id Expr (-1) } in\n  let scrutinee = M.expr_LocalVar ~typ:self scrutinee_var in\n  let ident = cast_name_for_type typ_name in\n  let params =\n    let pat = U.make_var_pat scrutinee_var self span in\n    [ { pat; typ = self; typ_span = None; attrs = [] } ]\n  in\n  let body = M.expr_Match ~typ ~scrutinee ~arms in\n  M.item_Fn ~ident ~attrs:[] ~name:ident ~generics ~params ~safety:Safe ~body\n\nlet rec c_item ~ident ~type_only (item : Thir.item) : item list =\n  try\n    Span.with_owner_hint item.owner_id (fun _ ->\n        c_item_unwrapped ~ident ~type_only item)\n  with Diagnostics.SpanFreeError.Exn payload ->\n    let context, kind = Diagnostics.SpanFreeError.payload payload in\n    let error = Diagnostics.pretty_print_context_kind context kind in\n    let span = Span.of_thir item.span in\n    [ make_hax_error_item span ident error ]\n\nand c_item_unwrapped ~ident ~type_only (item : Thir.item) : item list =\n  let open (val make ~krate:item.owner_id.contents.value.krate : EXPR) in\n  let span = Span.of_thir item.span in\n  let attrs = c_item_attrs item.attributes in\n  (* this is true if the user explicilty requested to erase using the `opaque` macro *)\n  let erased_by_user attrs =\n    Attr_payloads.payloads attrs\n    |> List.exists ~f:(fst >> [%matches? (Erased : Types.ha_payload)])\n  in\n  let item_erased_by_user = erased_by_user attrs in\n  let type_only =\n    type_only\n    && Attr_payloads.payloads attrs\n       |> List.exists ~f:(fst >> [%matches? (NeverErased : Types.ha_payload)])\n       |> not\n  in\n  (* This is true if the item should be erased because we are in type-only mode\n     (Only certain kinds of items are erased in this case). 
*)\n  let erased_by_hax =\n    should_skip item.attributes\n    || type_only\n       &&\n       match item.kind with\n       | Fn _ | Static _ -> true\n       | Impl { of_trait = Some _; items; _ }\n         when List.exists items ~f:(fun item ->\n                  match item.kind with Type _ -> true | _ -> false)\n              |> not ->\n           true\n       | _ -> false\n  in\n  (* If the item is erased by hax we need to add the Erased attribute.\n     It is already present if the item is erased by user. *)\n  let attrs_with_erased erased_by_hax erased_by_user attrs =\n    if erased_by_hax && not erased_by_user then\n      Attr_payloads.to_attr Erased span :: attrs\n    else attrs\n  in\n  let attrs = attrs_with_erased erased_by_hax item_erased_by_user attrs in\n  let erased = item_erased_by_user || erased_by_hax in\n\n  let mk_one v = { span; v; ident; attrs } in\n  let mk v = [ mk_one v ] in\n  let drop_body =\n    erased\n    && Attr_payloads.payloads attrs\n       |> List.exists ~f:(fst >> [%matches? (NeverErased : Types.ha_payload)])\n       |> not\n  in\n  let c_body = if drop_body then c_expr_drop_body else c_expr in\n  let assert_item_def_id () =\n    Option.value_or_thunk item.def_id ~default:(fun _ ->\n        assertion_failure [ item.span ] \"Expected this item to have a `def_id`\")\n  in\n  (* TODO: things might be unnamed (e.g. 
constants) *)\n  match (item.kind : Thir.item_kind) with\n  | Const (_, generics, _, body) ->\n      mk\n      @@ Fn\n           {\n             name = Concrete_ident.of_def_id ~value:true (assert_item_def_id ());\n             generics = c_generics generics;\n             body = c_body body.expr;\n             params = [];\n             safety = Safe;\n           }\n  | Static (true, _, _, _) ->\n      unimplemented ~issue_id:1343 [ item.span ]\n        \"Mutable static items are not supported.\"\n  | Static (false, _, _ty, body) ->\n      let name = Concrete_ident.of_def_id ~value:true (assert_item_def_id ()) in\n      let generics = { params = []; constraints = [] } in\n      mk\n        (Fn\n           {\n             name;\n             generics;\n             body = c_body body.expr;\n             params = [];\n             safety = Safe;\n           })\n  | TyAlias (_, generics, ty) ->\n      mk\n      @@ TyAlias\n           {\n             name =\n               Concrete_ident.of_def_id ~value:false (assert_item_def_id ());\n             generics = c_generics generics;\n             ty = c_ty item.span ty;\n           }\n  | Fn { generics; def = { body; params; header = { safety; _ }; _ }; _ } ->\n      mk\n      @@ Fn\n           {\n             name = Concrete_ident.of_def_id ~value:true (assert_item_def_id ());\n             generics = c_generics generics;\n             body = c_body body.expr;\n             params = c_fn_params item.span params;\n             safety = c_header_safety safety;\n           }\n  | (Enum (_, generics, _, _) | Struct (_, generics, _)) when erased ->\n      let generics = c_generics generics in\n      let is_struct = match item.kind with Struct _ -> true | _ -> false in\n      let def_id = assert_item_def_id () in\n      let name = Concrete_ident.of_def_id ~value:false def_id in\n      mk @@ Type { name; generics; variants = []; is_struct }\n  | Enum (_, generics, variants, repr) ->\n      let def_id = assert_item_def_id () 
in\n      let generics = c_generics generics in\n      let is_struct = false in\n      let discs =\n        (* Each variant might introduce a anonymous constant defining its discriminant integer  *)\n        List.filter_map ~f:(fun v -> v.disr_expr) variants\n        |> List.map ~f:(fun Types.{ def_id; body; _ } ->\n               let name = Concrete_ident.of_def_id ~value:true def_id in\n               let generics = { params = []; constraints = [] } in\n               let body = c_expr body.expr in\n               {\n                 v = Fn { name; generics; body; params = []; safety = Safe };\n                 span;\n                 ident = name;\n                 attrs = [];\n               })\n      in\n      let is_primitive =\n        List.for_all\n          ~f:(fun { data; _ } ->\n            match data with\n            | Unit _ | Tuple ([], _, _) | Struct { fields = []; _ } -> true\n            | _ -> false)\n          variants\n      in\n      let variants =\n        List.map\n          ~f:(fun ({ data; def_id = variant_id; attributes; _ } as original) ->\n            let is_record =\n              [%matches? 
(Struct { fields = _ :: _; _ } : Types.variant_data)]\n                data\n            in\n            let name = Concrete_ident.of_def_id ~value:true variant_id in\n            let arguments =\n              match data with\n              | Tuple (fields, _, _) | Struct { fields; _ } ->\n                  List.map\n                    ~f:(fun { def_id = id; ty; span; attributes; _ } ->\n                      ( Concrete_ident.of_def_id ~value:true id,\n                        c_ty span ty,\n                        c_attrs attributes ))\n                    fields\n              | Unit _ -> []\n            in\n            let attrs = c_attrs attributes in\n            ({ name; arguments; is_record; attrs }, original))\n          variants\n      in\n      let name = Concrete_ident.of_def_id ~value:true def_id in\n      let cast_fun =\n        cast_of_enum name generics (c_ty item.span repr.typ) item.span variants\n      in\n      let variants, _ = List.unzip variants in\n      let result =\n        mk_one (Type { name; generics; variants; is_struct }) :: discs\n      in\n      if is_primitive then cast_fun :: result else result\n  | Struct (_, generics, v) ->\n      let generics = c_generics generics in\n      let def_id = assert_item_def_id () in\n      let is_struct = true in\n      (* repeating the attributes of the item in the variant: TODO is that ok? 
*)\n      let v =\n        let name = Concrete_ident.of_def_id ~value:true def_id in\n        (* let name = Concrete_ident.Create.move_under name ~new_parent:name in *)\n        let mk fields is_record =\n          let arguments =\n            List.map\n              ~f:(fun Thir.{ def_id = id; ty; span; attributes; _ } ->\n                ( Concrete_ident.of_def_id ~value:true id,\n                  c_ty span ty,\n                  c_attrs attributes ))\n              fields\n          in\n          { name; arguments; is_record; attrs }\n        in\n        match v with\n        | Tuple (fields, _, _) -> mk fields false\n        | Struct { fields = _ :: _ as fields; _ } -> mk fields true\n        | _ -> { name; arguments = []; is_record = false; attrs }\n      in\n      let variants = [ v ] in\n      let name = Concrete_ident.of_def_id ~value:false def_id in\n      mk @@ Type { name; generics; variants; is_struct }\n  | Trait (NotConst, No, safety, _, generics, _bounds, items) ->\n      let items =\n        List.filter\n          ~f:(fun { attributes; _ } -> not (should_skip attributes))\n          items\n      in\n      let name =\n        Concrete_ident.of_def_id ~value:false (assert_item_def_id ())\n      in\n      let { params; constraints } = c_generics generics in\n      let self =\n        let id =\n          Local_ident.mk_id Typ 0\n          (* todo *)\n        in\n        let ident = Local_ident.{ name = \"Self\"; id } in\n        { ident; span; attrs = []; kind = GPType }\n      in\n      let params = self :: params in\n      let generics = { params; constraints } in\n      let items = List.map ~f:c_trait_item items in\n      let safety = csafety safety in\n      mk @@ Trait { name; generics; items; safety }\n  | Trait (_, Yes, _, _, _, _, _) ->\n      unimplemented ~issue_id:930 [ item.span ] \"Auto trait\"\n  | Trait (Const, _, _, _, _, _, _) ->\n      unimplemented ~issue_id:930 [ item.span ] \"Const trait\"\n  | Impl { of_trait = None; generics; 
items; _ } ->\n      let items =\n        List.filter\n          ~f:(fun { attributes; _ } -> not (should_skip attributes))\n          items\n      in\n      List.map\n        ~f:(fun (item : Thir.impl_item) ->\n          let item_def_id =\n            Concrete_ident.of_def_id ~value:false item.owner_id\n          in\n          let attrs = c_item_attrs item.attributes in\n          let sub_item_erased_by_user = erased_by_user attrs in\n          let erased_by_type_only =\n            type_only && match item.kind with Fn _ -> true | _ -> false\n          in\n          let sub_item_erased =\n            sub_item_erased_by_user || erased_by_type_only\n          in\n          let attrs =\n            attrs_with_erased erased_by_type_only sub_item_erased_by_user attrs\n          in\n          let c_body = if sub_item_erased then c_expr_drop_body else c_body in\n\n          let generics = c_generics generics in\n          let offset =\n            List.count generics.constraints ~f:[%matches? GCType _]\n          in\n\n          let v =\n            match (item.kind : Thir.impl_item_kind) with\n            | Fn { body; params; header = { safety; _ }; _ } ->\n                let params =\n                  if List.is_empty params then [ U.make_unit_param span ]\n                  else List.map ~f:(c_param item.span) params\n                in\n                Fn\n                  {\n                    name = item_def_id;\n                    generics =\n                      U.concat_generics generics\n                        (c_generics ~offset item.generics);\n                    body = c_body body.expr;\n                    params;\n                    safety = c_header_safety safety;\n                  }\n            | Const (_ty, e) ->\n                Fn\n                  {\n                    name = item_def_id;\n                    generics;\n                    (* does that make sense? can we have `const<T>`? 
*)\n                    body = c_body e.expr;\n                    params = [];\n                    safety = Safe;\n                  }\n            | Type _ty ->\n                assertion_failure [ item.span ]\n                  \"Inherent implementations are not supposed to have \\\n                   associated types \\\n                   (https://doc.rust-lang.org/reference/items/implementations.html#inherent-implementations).\"\n          in\n          let ident = Concrete_ident.of_def_id ~value:false item.owner_id in\n          { span = Span.of_thir item.span; v; ident; attrs })\n        items\n  | Impl\n      {\n        of_trait = Some of_trait;\n        generics;\n        self_ty;\n        items;\n        safety;\n        parent_bounds;\n        _;\n      } ->\n      let items =\n        List.filter\n          ~f:(fun { attributes; _ } -> not (should_skip attributes))\n          items\n      in\n      let generics = c_generics generics in\n      let offset = List.count generics.constraints ~f:[%matches? GCType _] in\n      let items =\n        if erased then []\n        else\n          List.map\n            ~f:(fun (item : Thir.impl_item) ->\n              (* TODO: introduce a Kind.TraitImplItem or\n                 something. Otherwise we have to assume every\n                 backend will see traits and impls as\n                 records. See https://github.com/hacspec/hax/issues/271. 
*)\n              let ii_ident =\n                Concrete_ident.of_def_id ~value:false item.owner_id\n              in\n              {\n                ii_span = Span.of_thir item.span;\n                ii_generics = c_generics ~offset item.generics;\n                ii_v =\n                  (match (item.kind : Thir.impl_item_kind) with\n                  | Fn { body; params; _ } ->\n                      let params =\n                        if List.is_empty params then [ U.make_unit_param span ]\n                        else List.map ~f:(c_param item.span) params\n                      in\n                      IIFn { body = c_expr body.expr; params }\n                  | Const (_ty, e) -> IIFn { body = c_expr e.expr; params = [] }\n                  | Type { ty; parent_bounds } ->\n                      IIType\n                        {\n                          typ = c_ty item.span ty;\n                          parent_bounds =\n                            List.fold_left ~init:(0, [])\n                              ~f:(fun (i, clauses) (clause, impl_expr, span) ->\n                                match c_clause span i clause with\n                                | Some (GCType trait_goal) ->\n                                    ( i + 1,\n                                      (c_impl_expr span impl_expr, trait_goal)\n                                      :: clauses )\n                                | _ -> (i, clauses))\n                              parent_bounds\n                            |> snd |> List.rev;\n                        });\n                ii_ident;\n                ii_attrs = c_item_attrs item.attributes;\n              })\n            items\n      in\n      mk\n      @@ Impl\n           {\n             generics;\n             self_ty = c_ty item.span self_ty;\n             of_trait =\n               ( Concrete_ident.of_def_id ~value:false of_trait.value.def_id,\n                 List.map\n                   ~f:(c_generic_value 
item.span)\n                   of_trait.value.generic_args );\n             items;\n             parent_bounds =\n               List.filter_mapi\n                 ~f:(fun i (clause, impl_expr, span) ->\n                   let* bound = c_clause span i clause in\n                   match bound with\n                   | GCType trait_goal ->\n                       Some (c_impl_expr span impl_expr, trait_goal)\n                   | _ -> None)\n                 parent_bounds;\n             safety = csafety safety;\n           }\n  | Use ({ span = _; res; segments; rename }, _) ->\n      let v =\n        Use\n          {\n            path = List.map ~f:(fun x -> fst x.ident) segments;\n            is_external =\n              List.exists\n                ~f:(function None | Some Err -> true | _ -> false)\n                res;\n            (* TODO: this should represent local/external? *)\n            rename;\n          }\n      in\n      (* ident is supposed to always be an actual item, thus here we need to cheat a bit *)\n      (* TODO: is this DUMMY thing really needed? 
there's a `Use` segment (see #272) *)\n      let def_id = item.owner_id in\n      (* let def_id : Types.def_id =\n           let value =\n             {\n               def_id.contents.value with\n               path =\n                 def_id.contents.value.path\n                 @ [\n                     Types.\n                       { data = ValueNs \"DUMMY\"; disambiguator = MyInt64.of_int 0 };\n                   ];\n             }\n           in\n           { contents = { def_id.contents with value } }\n         in *)\n      [\n        { span; v; ident = Concrete_ident.of_def_id ~value:false def_id; attrs };\n      ]\n  | Union _ ->\n      unimplemented ~issue_id:998 [ item.span ] \"Union types: not supported\"\n  | GlobalAsm _ ->\n      unimplemented ~issue_id:1344 [ item.span ]\n        \"Inline assembly blocks are not supported\"\n  | ExternCrate _ | Macro _ | Mod _ | ForeignMod _ | TraitAlias _ ->\n      mk NotImplementedYet\n\nlet import_item ~type_only (item : Thir.item) :\n    concrete_ident * (item list * Diagnostics.t list) =\n  let ident = Concrete_ident.of_def_id ~value:false item.owner_id in\n  let r, reports =\n    let f = U.Reducers.disambiguate_local_idents in\n    Diagnostics.Core.capture (fun _ ->\n        c_item item ~ident ~type_only |> List.map ~f)\n  in\n  (ident, (r, reports))\n"
  },
  {
    "path": "engine/lib/import_thir.mli",
    "content": "val import_ty : Types.span -> Types.node_for__ty_kind -> Ast.Rust.ty\n\nval import_trait_ref :\n  Types.span -> Types.node_for__item_ref_contents -> Ast.Rust.trait_goal\n\nval import_clause :\n  Types.span -> int -> Types.clause -> Ast.Rust.generic_constraint option\n\nval import_item :\n  type_only:bool ->\n  Types.item_for__thir_body ->\n  Concrete_ident.t * (Ast.Rust.item list * Diagnostics.t list)\n"
  },
  {
    "path": "engine/lib/local_ident.ml",
    "content": "open! Prelude\n\nmodule T = struct\n  type kind = Typ | Cnst | Expr | LILifetime | Final | SideEffectHoistVar\n  [@@deriving show, yojson, hash, compare, sexp, eq]\n\n  type id = kind * int [@@deriving show, yojson, hash, compare, sexp, eq]\n\n  let mk_id kind id = (kind, id)\n\n  type t = { name : string; id : id }\n  [@@deriving show, yojson, hash, compare, sexp, eq]\n\n  let make_final name = { name; id = mk_id Final 0 }\n  let is_final { id; _ } = [%matches? Final] @@ fst id\n\n  let is_side_effect_hoist_var { id; _ } =\n    [%matches? SideEffectHoistVar] @@ fst id\nend\n\ninclude Base.Comparator.Make (T)\ninclude T\n"
  },
  {
    "path": "engine/lib/local_ident.mli",
    "content": "module T : sig\n  type kind =\n    | Typ  (** type namespace *)\n    | Cnst  (** Generic constant namespace *)\n    | Expr  (** Expression namespace *)\n    | LILifetime  (** Lifetime namespace *)\n    | Final\n        (** Frozen identifier: such an identifier will *not* be rewritten by the\n            name policy *)\n    | SideEffectHoistVar  (** A variable generated by `Side_effect_utils` *)\n  [@@deriving show, yojson, hash, compare, sexp, eq]\n\n  type id = kind * int [@@deriving show, yojson, hash, compare, sexp, eq]\n\n  val mk_id : kind -> int -> id\n\n  type t = { name : string; id : id }\n  [@@deriving show, yojson, hash, compare, sexp, eq]\n\n  val make_final : string -> t\n  (** Creates a frozen final local identifier: such an indentifier won't be\n      rewritten by a name policy *)\n\n  val is_final : t -> bool\n  val is_side_effect_hoist_var : t -> bool\nend\n\ninclude module type of struct\n  include Base.Comparator.Make (T)\n  include T\nend\n"
  },
  {
    "path": "engine/lib/phase_utils.ml",
    "content": "open! Prelude\n\nmodule Metadata : sig\n  type t = private {\n    current_phase : Diagnostics.Phase.t;\n    previous_phase : t option;\n  }\n\n  val make : Diagnostics.Phase.t -> t\n  val bind : t -> t -> t\n  val previous_phases : t -> Diagnostics.Phase.t list\nend = struct\n  type t = { current_phase : Diagnostics.Phase.t; previous_phase : t option }\n\n  let make name = { current_phase = name; previous_phase = None }\n  let bind (x : t) (y : t) : t = { y with previous_phase = Some x }\n\n  let rec previous_phases' (p : t) : Diagnostics.Phase.t list =\n    previous_phases p @ [ p.current_phase ]\n\n  and previous_phases (p : t) : Diagnostics.Phase.t list =\n    Option.map ~f:previous_phases' p.previous_phase |> Option.value ~default:[]\nend\n\nmodule type PHASE = sig\n  val metadata : Metadata.t\n\n  module FA : Features.T\n  module FB : Features.T\n  module A : Ast.T\n  module B : Ast.T\n\n  val ditems : A.item list -> B.item list\nend\n\nmodule MakePhaseImplemT (A : Ast.T) (B : Ast.T) = struct\n  module type T = sig\n    val metadata : Metadata.t\n    val ditems : A.item list -> B.item list\n  end\nend\n\n(** Functor that produces module types of monomorphic phases *)\nmodule MAKE_MONOMORPHIC_PHASE (F : Features.T) = struct\n  module type ARG = sig\n    val phase_id : Diagnostics.Phase.t\n    val ditems : Ast.Make(F).item list -> Ast.Make(F).item list\n  end\n\n  module type T = sig\n    include module type of struct\n      module FB = F\n      module A = Ast.Make (F)\n      module B = Ast.Make (FB)\n      module ImplemT = MakePhaseImplemT (A) (B)\n      module FA = F\n    end\n\n    include ImplemT.T\n  end\nend\n\n(** Make a monomorphic phase: a phase that transform an AST with feature set [F]\n    into an AST with the same feature set [F] *)\nmodule MakeMonomorphicPhase\n    (F : Features.T)\n    (M : MAKE_MONOMORPHIC_PHASE(F).ARG) : MAKE_MONOMORPHIC_PHASE(F).T = struct\n  module FA = F\n  module FB = F\n  module A = Ast.Make (F)\n  module B 
= Ast.Make (FB)\n  module ImplemT = MakePhaseImplemT (A) (B)\n\n  module Implem = struct\n    let metadata = Metadata.make M.phase_id\n\n    include M\n\n    let subtype (l : A.item list) : B.item list = Stdlib.Obj.magic l\n    let ditems (l : A.item list) : B.item list = ditems l |> subtype\n  end\n\n  include Implem\nend\n\n(** Type of an unconstrainted (forall feature sets) monomorphic phases *)\nmodule type UNCONSTRAINTED_MONOMORPHIC_PHASE = functor (F : Features.T) -> sig\n  include module type of struct\n    module FB = F\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = MakePhaseImplemT (A) (B)\n    module FA = F\n  end\n\n  include ImplemT.T\nend\n\nexception ReportError of Diagnostics.kind\n\nmodule type ERROR = sig\n  type t = { kind : Diagnostics.kind; span : Ast.span }\n\n  val raise : t -> 'never\n  val unimplemented : ?issue_id:int -> ?details:string -> Ast.span -> 'never\n  val assertion_failure : Ast.span -> string -> 'never\nend\n\nmodule MakeError (Ctx : sig\n  val ctx : Diagnostics.Context.t\nend) : ERROR = struct\n  type t = { kind : Diagnostics.kind; span : Ast.span } [@@deriving show, eq]\n\n  let raise err =\n    let span = Span.to_thir err.span in\n    Diagnostics.SpanFreeError.raise ~span (Span.owner_hint err.span) Ctx.ctx\n      err.kind\n\n  let unimplemented ?issue_id ?details span =\n    raise\n      {\n        kind =\n          Unimplemented\n            { issue_id = Option.map ~f:MyInt64.of_int issue_id; details };\n        span;\n      }\n\n  let assertion_failure span details =\n    raise { kind = AssertionFailure { details }; span }\nend\n\nmodule MakeBase\n    (FA : Features.T)\n    (FB : Features.T)\n    (M : sig\n      val phase_id : Diagnostics.Phase.t\n    end) =\nstruct\n  module A = Ast.Make (FA)\n  module B = Ast.Make (FB)\n  module UA = Ast_utils.Make (FA)\n  module UB = Ast_utils.Make (FB)\n  module ImplemT = MakePhaseImplemT (A) (B)\n  include M\n\n  let metadata = Metadata.make phase_id\n  
let failwith = ()\n\n  module Error : ERROR = MakeError (struct\n    let ctx = Diagnostics.Context.Phase M.phase_id\n  end)\nend\n\nmodule Identity (F : Features.T) = struct\n  module FA = F\n  module FB = F\n  module A = Ast.Make (F)\n  module B = Ast.Make (F)\n\n  let ditems (l : A.item list) : B.item list = l\n  let metadata = Metadata.make Diagnostics.Phase.Identity\nend\n\nmodule _ (F : Features.T) : PHASE = Identity (F)\n\nlet _DEBUG_SHOW_ITEM = false\nlet _DEBUG_SHOW_BACKTRACE = false\n\nmodule DebugPhaseInfo = struct\n  type t = Before | Phase of Diagnostics.Phase.t\n  [@@deriving eq, sexp, hash, compare, yojson]\n\n  let show (s : t) : string =\n    match s with\n    | Before -> \"initial_input\"\n    | Phase p -> Diagnostics.Phase.display p\n\n  let pp (fmt : Stdlib.Format.formatter) (s : t) : unit =\n    Stdlib.Format.pp_print_string fmt @@ show s\nend\n\nmodule DebugBindPhase : sig\n  val add : DebugPhaseInfo.t -> int -> (unit -> Ast.Full.item list) -> unit\n  val export : unit -> string list\n  val enable : unit -> unit\nend = struct\n  let enabled = ref false\n  let enable () = enabled := true\n\n  let cache : (DebugPhaseInfo.t, int * Ast.Full.item list ref) Hashtbl.t =\n    Hashtbl.create (module DebugPhaseInfo)\n\n  let add (phase_info : DebugPhaseInfo.t) (nth : int)\n      (mk_item : unit -> Ast.Full.item list) =\n    if !enabled (* `!` is not `not` *) then\n      let _, l =\n        Hashtbl.find_or_add cache phase_info ~default:(fun _ -> (nth, ref []))\n      in\n      l := !l @ mk_item ()\n    else ()\n\n  open struct\n    module Visitors = Ast_visitors.Make (Features.Full)\n  end\n\n  let export' () =\n    Logs.info (fun m -> m \"Exporting debug informations\");\n\n    Hashtbl.to_alist cache\n    |> List.sort ~compare:(fun (_, (a, _)) (_, (b, _)) -> Int.compare a b)\n    |> List.map ~f:(fun (k, (nth, l)) ->\n           let regenerate_span_ids =\n             (object\n                inherit [_] Visitors.map\n                method! 
visit_span = Fn.const Span.refresh_id\n             end)\n               #visit_item\n               ()\n           in\n           (* we regenerate spans IDs, so that we have more precise regions *)\n           let l = List.map ~f:regenerate_span_ids !l in\n           let rustish = Print_rust.pitems l in\n           let json =\n             `Assoc\n               [\n                 (\"name\", `String ([%show: DebugPhaseInfo.t] k));\n                 (\"nth\", `Int nth);\n                 (\"items\", [%yojson_of: Ast.Full.item list] l);\n                 ( \"rustish\",\n                   [%yojson_of: Print_rust.AnnotatedString.Output.t] rustish );\n               ]\n           in\n           json)\n    |> List.map ~f:Yojson.Safe.to_string\n\n  let export () =\n    if !enabled (* recall: ! is deref, not `not`, great op. choice..... *) then\n      export' ()\n    else []\nend\n\nmodule type S = sig\n  module A : Ast.T\n\n  val ditem : A.item -> Ast.Full.item list\nend\n\nmodule TracePhase (P : PHASE) = struct\n  include P\n\n  let name = [%show: Diagnostics.Phase.t] P.metadata.current_phase\n  (* We distinguish between composite phases (i.e. `BindPhase(_)(_)`) versus non-composite ones. *)\n\n  let composite_phase = Option.is_some P.metadata.previous_phase\n\n  let ditems =\n    if composite_phase then P.ditems\n    else fun items ->\n      Logs.info (fun m -> m \"Entering phase [%s]\" name);\n      let items = P.ditems items in\n      Logs.info (fun m -> m \"Exiting phase [%s]\" name);\n      items\nend\n\nmodule ProfilePhase (P : PHASE) = struct\n  include P\n\n  (* We distinguish between composite phases (i.e. `BindPhase(_)(_)`) versus non-composite ones. 
*)\n  let composite_phase = Option.is_some P.metadata.previous_phase\n\n  let ditems items =\n    if composite_phase then P.ditems items\n    else\n      let ctx = Diagnostics.Context.Phase P.metadata.current_phase in\n      Profiling.profile ctx (List.length items) (fun () -> P.ditems items)\nend\n\nmodule BindPhase\n    (D1 : PHASE)\n    (D2 : PHASE with module FA = D1.FB and module A = D1.B) =\nstruct\n  module D1' = ProfilePhase (TracePhase (D1))\n  module D2' = ProfilePhase (TracePhase (D2))\n  module FA = D1.FA\n  module FB = D2.FB\n  module A = D1.A\n  module B = D2.B\n\n  let metadata = Metadata.bind D1.metadata D2.metadata\n\n  let ditems (items : A.item list) : B.item list =\n    let nth = List.length @@ Metadata.previous_phases D1'.metadata in\n    (if Int.equal nth 0 then\n       let coerce_to_full_ast : D1'.A.item -> Ast.Full.item =\n         Stdlib.Obj.magic\n       in\n       DebugBindPhase.add Before 0 (fun _ ->\n           List.map ~f:coerce_to_full_ast items));\n    let items' = D1'.ditems items in\n    let coerce_to_full_ast : D2'.A.item list -> Ast.Full.item list =\n      Stdlib.Obj.magic\n    in\n    DebugBindPhase.add (Phase D1'.metadata.current_phase) (nth + 1) (fun _ ->\n        coerce_to_full_ast items');\n    D2'.ditems items'\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_and_mut_defsite.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make\n    (FA :\n      Features.T\n        with type mutable_variable = Features.On.mutable_variable\n         and type mutable_reference = Features.On.mutable_reference\n         and type nontrivial_lhs = Features.On.nontrivial_lhs\n         and type arbitrary_lhs = Features.On.arbitrary_lhs\n         and type reference = Features.On.reference) =\nstruct\n  open Ast\n  module FB = FA\n\n  include\n    Phase_utils.MakeBase (FA) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module A = Ast.Make (FA)\n  module B = Ast.Make (FB)\n  module BVisitors = Ast_visitors.Make (FB)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module S = struct\n      include Features.SUBTYPE.Id\n    end\n\n    module UB = Ast_utils.Make (FB)\n\n    module M = struct\n      open B\n      open UB\n\n      (* given `ty`, produces type `&mut ty` *)\n      let mut_ref (typ : ty) : ty =\n        let mut = Mutable Features.On.mutable_reference in\n        TRef { witness = Features.On.reference; region = \"\"; typ; mut }\n\n      (* given `e`, produces well-typed expr `&mut e` *)\n      let mut_borrow (e : expr) : expr =\n        let kind = Mut Features.On.mutable_reference in\n        let witness = Features.On.reference in\n        let e' = Borrow { kind; e; witness } in\n        { e with e = e'; typ = mut_ref e.typ }\n\n      let expect_mut_ref_param (all_vars : local_ident list) (i : int)\n          (param : param) : (local_ident * ty * span) option =\n        let* typ = Expect.mut_ref param.typ in\n        match param.pat.p with\n        | PBinding\n            { mut = Immutable; mode = ByValue; var; typ = _; subpat = None } ->\n            Some (var, typ, param.pat.span)\n        | PWild ->\n            let var =\n              fresh_local_ident_in all_vars (\"arg_\" ^ Int.to_string i ^ \"_wild\")\n            in\n            Some (var, typ, param.pat.span)\n        | _ 
->\n            Error.raise\n              { kind = NonTrivialAndMutFnInput; span = param.pat.span }\n\n      let rewrite_fn_sig (all_vars : local_ident list) (params : param list)\n          (output : ty) :\n          (param list * ty * (local_ident * ty * span) list) option =\n        let and_muts =\n          List.filter_mapi ~f:(expect_mut_ref_param all_vars) params\n        in\n        match and_muts with\n        | [] -> None\n        | _ ->\n            let params =\n              List.mapi\n                ~f:(fun i param ->\n                  match expect_mut_ref_param all_vars i param with\n                  | None -> param\n                  | Some (var, typ, span) ->\n                      let p : pat' =\n                        let mut = Mutable Features.On.mutable_variable in\n                        PBinding\n                          { mut; mode = ByValue; var; typ; subpat = None }\n                      in\n                      { param with pat = { p; span; typ }; typ })\n                params\n            in\n            let output_components =\n              List.map ~f:snd3 and_muts\n              @ if UB.is_unit_typ output then [] else [ output ]\n            in\n            let output = UB.make_tuple_typ output_components in\n            Some (params, output, and_muts)\n\n      (* visit an expression and replace all `Return e` nodes by `Return (f e)` *)\n      let map_returns ~(f : expr -> expr) : expr -> expr =\n        let visitor =\n          object (self)\n            inherit [_] Visitors.map as super\n\n            method! 
visit_expr' () e =\n              match e with\n              | Return { e; witness } ->\n                  let e = self#visit_expr () e in\n                  Return { e = f e; witness }\n              | _ -> super#visit_expr' () e\n          end\n        in\n        visitor#visit_expr ()\n\n      (* transforms\n          `(let … = … in)* expr`\n         into\n          `(let … = … in)* let output = expr in output` *)\n      let wrap_in_identity_let (e : expr) : expr =\n        let var = Local_ident.{ id = mk_id Expr 0; name = \"hax_temp_output\" } in\n        let f (e : expr) : expr =\n          match e.e with\n          | GlobalVar (`TupleCons 0) -> e\n          | _ ->\n              let rhs = e in\n              let lhs, body =\n                if [%eq: ty] e.typ UB.unit_typ then\n                  (* This case has been added to fix https://github.com/hacspec/hax/issues/720.\n                     It might need a better solution. *)\n                  ( UB.M.pat_PWild ~span:e.span ~typ:e.typ,\n                    UB.M.expr_unit ~span:e.span )\n                else\n                  (UB.make_var_pat var e.typ e.span, { e with e = LocalVar var })\n              in\n              { body with e = Let { monadic = None; lhs; rhs; body } }\n        in\n        UB.map_body_of_nested_lets f e\n\n      let mutref_to_mut_expr (vars : local_ident list) : expr -> expr =\n        let ( <|?> ) (type a) (x : a option) (f : unit -> a option) : a option =\n          x |> Option.map ~f:Option.some |> Option.value_or_thunk ~default:f\n        in\n        let in_vars = List.mem vars ~equal:[%equal: local_ident] in\n        let expect_in_vars_local_var (x : expr) : local_ident option =\n          match x.e with LocalVar v when in_vars v -> Some v | _ -> None\n        in\n        let retyped_local_var_in_vars e =\n          let* var = expect_in_vars_local_var e in\n          (* var is supposed to be typed `&mut _` *)\n          let typ =\n            Expect.mut_ref e.typ\n            
|> Option.value_or_thunk ~default:(fun () ->\n                   Error.assertion_failure e.span\n                   @@ \"Expect.mut_ref: got `None`\")\n          in\n          (* we reconstruct `e` to type it correctly *)\n          Some { e = LocalVar var; typ; span = e.span }\n        in\n        let visitor =\n          object\n            inherit [_] Visitors.map as super\n\n            method! visit_expr () e =\n              (let* e = Expect.deref e in\n               retyped_local_var_in_vars e)\n              <|?> (fun _ -> retyped_local_var_in_vars e)\n              |> Option.value_or_thunk ~default:(fun _ -> super#visit_expr () e)\n          end\n        in\n        visitor#visit_expr ()\n\n      let convert_lhs =\n        (* TODO: refactor (see #316) *)\n        let rec place_to_lhs (p : Place.t) : lhs =\n          let typ = p.typ in\n          match p.place with\n          | LocalVar var -> LhsLocalVar { var; typ }\n          | FieldProjection { place; projector } ->\n              let e = place_to_lhs place in\n              LhsFieldAccessor\n                {\n                  witness = Features.On.nontrivial_lhs;\n                  field = projector;\n                  typ;\n                  e;\n                }\n          | IndexProjection { place; index } ->\n              let e = place_to_lhs place in\n              LhsArrayAccessor\n                { e; typ; index; witness = Features.On.nontrivial_lhs }\n          | _ ->\n              let e = Place.to_expr p in\n              LhsArbitraryExpr { witness = Features.On.arbitrary_lhs; e }\n        in\n\n        let visitor =\n          object\n            inherit [_] Visitors.map as super\n\n            method! visit_expr () e =\n              try super#visit_expr () e\n              with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->\n                UB.hax_failure_expr e.span e.typ (context, kind)\n                  (UB.LiftToFullAst.expr e)\n\n            method! 
visit_expr' () e =\n              match e with\n              | Assign { lhs; e; witness } ->\n                  let span = e.span in\n                  let lhs = UB.expr_of_lhs span lhs in\n                  let lhs =\n                    lhs |> Place.of_expr\n                    |> Option.value_or_thunk ~default:(fun () ->\n                           Error.assertion_failure span\n                           @@ \"Place.of_expr: got `None` for: \"\n                           ^ Print_rust.pexpr_str (UB.LiftToFullAst.expr lhs))\n                    |> place_to_lhs\n                  in\n                  Assign { lhs; e; witness }\n              | _ -> super#visit_expr' () e\n          end\n        in\n        visitor#visit_expr ()\n\n      let rewrite_function (params : param list) (body : expr) :\n          (param list * expr) option =\n        let all_vars =\n          UB.Reducers.collect_local_idents#visit_expr () body\n          :: List.map ~f:(Reducers.collect_local_idents#visit_param ()) params\n          |> Set.union_list (module Local_ident)\n          |> Set.to_list\n        in\n        let* params, _, vars = rewrite_fn_sig all_vars params body.typ in\n        let idents = List.map ~f:fst3 vars in\n        let vars =\n          List.map\n            ~f:(fun (var, typ, span) -> B.{ span; typ; e = LocalVar var })\n            vars\n        in\n        let f (e : B.expr) : B.expr =\n          UB.make_tuple_expr ~span:e.span\n            (vars @ if UB.is_unit_typ e.typ then [] else [ e ])\n        in\n        let body =\n          body |> mutref_to_mut_expr idents |> convert_lhs |> map_returns ~f\n          |> wrap_in_identity_let\n          |> UB.map_body_of_nested_lets f\n        in\n        Some (params, body)\n    end\n\n    include M\n\n    let ditems (items : A.item list) : B.item list =\n      let items : B.item list = Stdlib.Obj.magic items in\n      let visitor =\n        object\n          inherit [_] BVisitors.map as super\n\n          method! 
visit_impl_item' () item' =\n            (match item' with\n            | IIFn { params; body } ->\n                let* params, body = rewrite_function params body in\n                Some (B.IIFn { body; params })\n            | _ -> None)\n            |> Option.value_or_thunk\n                 ~default:(Fn.flip super#visit_impl_item' item')\n\n          method! visit_trait_item () item =\n            let span = item.ti_span in\n            let ti_v =\n              (match item.ti_v with\n              | TIFn (TArrow (inputs, output)) ->\n                  (* Here, we craft a dummy function so that we can\n                     call `rewrite_function` *)\n                  let var = Local_ident.{ id = mk_id Expr 0; name = \"dummy\" } in\n                  let params =\n                    List.map\n                      ~f:(fun typ ->\n                        let pat = UB.make_var_pat var typ span in\n                        (* let pat : B.pat = { typ; p; span } in *)\n                        B.{ pat; typ; typ_span = None; attrs = [] })\n                      inputs\n                  in\n                  let body =\n                    B.\n                      {\n                        e =\n                          (* this is wrongly typed, though it's fine,\n                             we throw this away before returning *)\n                          (UB.unit_expr span).e;\n                        typ = output;\n                        span;\n                      }\n                  in\n                  let* params, body = rewrite_function params body in\n                  let inputs = List.map ~f:(fun p -> p.typ) params in\n                  let output = body.typ in\n                  let ty = B.TArrow (inputs, output) in\n                  Some (B.TIFn ty)\n              | TIDefault { params; body; witness } ->\n                  let* params, body = rewrite_function params body in\n                  let witness = S.trait_item_default span witness in\n  
                Some (B.TIDefault { params; body; witness })\n              | _ -> None)\n              |> Option.value_or_thunk\n                   ~default:(Fn.flip super#visit_trait_item' item.ti_v)\n            in\n            { item with ti_v }\n\n          method! visit_item () i =\n            try super#visit_item () i\n            with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->\n              let error = Diagnostics.pretty_print_context_kind context kind in\n              let cast_item : B.item -> Ast.Full.item = Stdlib.Obj.magic in\n              let ast = cast_item i |> Print_rust.pitem_str in\n              let msg =\n                error ^ \"\\nLast available AST for this item:\\n\\n\" ^ ast\n              in\n              B.make_hax_error_item i.span i.ident msg\n\n          method! visit_item' () item' =\n            (match item' with\n            | Fn { name; generics; body; params; safety } ->\n                let* params, body = rewrite_function params body in\n                Some (B.Fn { name; generics; body; params; safety })\n            | _ -> None)\n            |> Option.value_or_thunk ~default:(Fn.flip super#visit_item' item')\n        end\n      in\n      List.map ~f:(visitor#visit_item ()) items\n\n    let dexpr (_e : A.expr) : B.expr =\n      Stdlib.failwith \"Should not be called directly\"\n  end\n\n  include Implem\n  module FA = FA\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_and_mut_defsite.mli",
    "content": "module Make\n    (F :\n      Features.T\n        with type mutable_variable = Features.On.mutable_variable\n         and type mutable_reference = Features.On.mutable_reference\n         and type nontrivial_lhs = Features.On.nontrivial_lhs\n         and type arbitrary_lhs = Features.On.arbitrary_lhs\n         and type reference = Features.On.reference) : sig\n  include module type of struct\n    module FB = F\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n    module FA = F\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_bundle_cycles.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      module A = Ast.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      module Attrs = Attr_payloads.MakeBase (Error)\n\n      let ditems items =\n        let module DepGraph = Dependencies.Make (F) in\n        DepGraph.bundle_cyclic_modules items\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_bundle_cycles.mli",
    "content": "(** This phase makes sure the items don't yield any cycle, namespace-wise. It\n    does so by creating namespaces we call bundles, in which we regroup\n    definitions that would otherwise yield cycles. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_cf_into_monads.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make\n    (F :\n      Features.T\n        with type monadic_action = Features.Off.monadic_action\n         and type monadic_binding = Features.Off.monadic_binding) =\nstruct\n  open Ast\n  module FA = F\n\n  module FB = struct\n    include F\n    include Features.Off.Continue\n    include Features.Off.Early_exit\n    include Features.Off.Question_mark\n    include Features.Off.Break\n    include Features.On.Monadic_binding\n  end\n\n  include\n    Phase_utils.MakeBase (F) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module UA = Ast_utils.Make (F)\n    module UB = Ast_utils.Make (FB)\n\n    module S = struct\n      module A = FA\n      module B = FB\n      include Features.SUBTYPE.Id\n\n      let monadic_binding _ = Features.On.monadic_binding\n    end\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    module KnownMonads = struct\n      type t = { monad : B.supported_monads option; typ : B.ty }\n      [@@deriving show, eq]\n      (** types of computations *)\n      (* | MId of { typ : B.ty } *)\n      (* | MReturn of { return : B.ty; continue : B.ty } *)\n\n      (** translate a computation type to a simple type *)\n      let to_typ (x : t) : B.ty =\n        match x.monad with\n        | None -> x.typ\n        | Some (MResult err) ->\n            let args = List.map ~f:(fun t -> B.GType t) [ x.typ; err ] in\n            let ident =\n              Global_ident.of_name ~value:false Core__result__Result\n            in\n            TApp { ident; args }\n        | Some MOption ->\n            let args = List.map ~f:(fun t -> B.GType t) [ x.typ ] in\n            let ident =\n              Global_ident.of_name ~value:false Core__option__Option\n            in\n            TApp { ident; args }\n        | Some (MException return) ->\n            let args = List.map ~f:(fun t -> B.GType t) [ return; 
x.typ ] in\n            let ident =\n              Global_ident.of_name ~value:false\n                Core__ops__control_flow__ControlFlow\n            in\n            TApp { ident; args }\n\n      let from_typ' : B.ty -> t = function\n        | TApp { ident; args = [ GType return; GType continue ] }\n          when Global_ident.eq_name Core__ops__control_flow__ControlFlow ident\n          ->\n            { monad = Some (MException return); typ = continue }\n        | TApp { ident; args = [ GType ok; GType err ] }\n          when Global_ident.eq_name Core__result__Result ident ->\n            { monad = Some (MResult err); typ = ok }\n        | TApp { ident; args = [ GType ok ] }\n          when Global_ident.eq_name Core__option__Option ident ->\n            { monad = Some MOption; typ = ok }\n        | typ -> { monad = None; typ }\n\n      (** the type of pure expression we can return in the monad *)\n      let pure_type (x : t) = x.typ\n\n      let lift details (e : B.expr) monad_of_e monad_destination : B.expr =\n        match (monad_of_e, monad_destination) with\n        | m1, m2 when [%equal: B.supported_monads option] m1 m2 -> e\n        | None, Some (B.MResult _) ->\n            UB.call_Constructor Core__result__Result__Ok false [ e ] e.span\n              (to_typ { monad = monad_destination; typ = e.typ })\n        | None, Some B.MOption ->\n            UB.call_Constructor Core__option__Option__Some false [ e ] e.span\n              (to_typ { monad = monad_destination; typ = e.typ })\n        | _, Some (B.MException _) ->\n            UB.call_Constructor Core__ops__control_flow__ControlFlow__Continue\n              false [ e ] e.span\n              (to_typ { monad = monad_destination; typ = e.typ })\n        | m1, m2 ->\n            Error.assertion_failure e.span\n            @@ \"Cannot lift from monad [\"\n            ^ [%show: B.supported_monads option] m1\n            ^ \"] to monad [\"\n            ^ [%show: B.supported_monads option] m2\n            ^ 
\"]\" ^ \"\\n Details: \" ^ details\n\n      let lub span m1 m2 =\n        match (m1, m2) with\n        | None, m | m, None -> m\n        | Some m1, Some m2 ->\n            let impossible () =\n              Error.assertion_failure span\n              @@ \"Trying to compute the lub of two incompatible monads:\"\n              ^ \"\\n • \"\n              ^ [%show: B.supported_monads] m1\n              ^ \"\\n • \"\n              ^ [%show: B.supported_monads] m2\n            in\n            Option.some\n              (match (m1, m2) with\n              | (B.MResult _ | B.MOption), (B.MException _ as m)\n              | (B.MException _ as m), (B.MResult _ | B.MOption) ->\n                  m\n              | B.MResult _, B.MResult _\n              | B.MOption, B.MOption\n              | B.MException _, B.MException _ ->\n                  m1\n              | B.MResult _, B.MOption | B.MOption, B.MResult _ -> impossible ())\n\n      (** after transformation, are we **getting** inside a monad? 
*)\n      let from_typ dty (old : A.ty) (new_ : B.ty) : t =\n        let old = dty Span.default (* irrelevant *) old in\n        let monad = from_typ' new_ in\n        if B.equal_ty (pure_type monad) old then monad\n        else { monad = None; typ = new_ }\n    end\n\n    let rec dexpr_unwrapped (expr : A.expr) : B.expr =\n      let span = expr.span in\n      let typ = dty span expr.typ in\n      match expr.e with\n      | Let { monadic = Some _; _ } -> .\n      | Let { monadic = None; lhs; rhs; body } -> (\n          let body' = dexpr body in\n          let rhs' = dexpr rhs in\n          let mrhs = KnownMonads.from_typ dty rhs.typ rhs'.typ in\n          let lhs = { (dpat lhs) with typ = KnownMonads.pure_type mrhs } in\n          match mrhs with\n          | { monad = None; _ } ->\n              let monadic = None in\n              let rhs = rhs' in\n              let body = body' in\n              { e = Let { monadic; lhs; rhs; body }; span; typ = body.typ }\n          | _ ->\n              let mbody = KnownMonads.from_typ dty body.typ body'.typ in\n              let m = KnownMonads.lub span mbody.monad mrhs.monad in\n              let body = KnownMonads.lift \"Let:body\" body' mbody.monad m in\n              let rhs = KnownMonads.lift \"Let:rhs\" rhs' mrhs.monad m in\n              let monadic =\n                match m with\n                | None -> None\n                | Some m -> Some (m, Features.On.monadic_binding)\n              in\n              { e = Let { monadic; lhs; rhs; body }; span; typ = body.typ })\n      | Match { scrutinee; arms } ->\n          let arms =\n            List.map\n              ~f:(fun { arm = { arm_pat; body = a; guard }; span } ->\n                let b = dexpr a in\n                let m = KnownMonads.from_typ dty a.typ b.typ in\n                let g = Option.map ~f:dguard guard in\n                (m, (dpat arm_pat, span, b, g)))\n              arms\n          in\n          let arms =\n            let m =\n              
List.map ~f:(fun ({ monad; _ }, _) -> monad) arms\n              |> List.reduce ~f:(KnownMonads.lub span)\n            in\n            match m with\n            | None -> [] (* [arms] is empty *)\n            | Some m ->\n                List.map\n                  ~f:(fun (mself, (arm_pat, span, body, guard)) ->\n                    let body = KnownMonads.lift \"Match\" body mself.monad m in\n                    let arm_pat = { arm_pat with typ = body.typ } in\n                    ({ arm = { arm_pat; body; guard }; span } : B.arm))\n                  arms\n          in\n          let typ =\n            match arms with [] -> UB.never_typ | hd :: _ -> hd.arm.body.typ\n          in\n          { e = Match { scrutinee = dexpr scrutinee; arms }; span; typ }\n      | If { cond; then_; else_ } ->\n          let cond = dexpr cond in\n          let then' = dexpr then_ in\n          let else' = Option.map ~f:dexpr else_ in\n          let mthen = KnownMonads.from_typ dty then_.typ then'.typ in\n          let melse =\n            match (else_, else') with\n            | Some else_, Some else' ->\n                KnownMonads.from_typ dty else_.typ else'.typ\n            | _ -> mthen\n          in\n          let m = KnownMonads.lub span mthen.monad melse.monad in\n          let else_ =\n            Option.map\n              ~f:(fun else' ->\n                KnownMonads.lift \"If:else-branch\" else' melse.monad m)\n              else'\n          in\n          let then_ = KnownMonads.lift \"If:then-branch\" then' mthen.monad m in\n          { e = If { cond; then_; else_ }; span; typ = then_.typ }\n      | Continue _ ->\n          Error.unimplemented ~issue_id:15\n            ~details:\"TODO: Monad for loop-related control flow\" span\n      | Break _ ->\n          Error.unimplemented ~issue_id:15\n            ~details:\"TODO: Monad for loop-related control flow\" span\n      | QuestionMark { e; _ } -> dexpr e\n      | Return { e; _ } ->\n          let open KnownMonads in\n         
 let e = dexpr e in\n          UB.call_Constructor Core__ops__control_flow__ControlFlow__Break false\n            [ e ] span\n            (to_typ @@ { monad = Some (MException e.typ); typ })\n      | [%inline_arms\n          \"dexpr'.*\" - Let - Match - If - Continue - Break - QuestionMark\n          - Return] ->\n          map (fun e -> B.{ e; typ = dty expr.span expr.typ; span = expr.span })\n\n    and lift_if_necessary (e : B.expr) (target_type : B.ty) =\n      if B.equal_ty e.typ target_type then e\n      else\n        UB.call Rust_primitives__hax__control_flow_monad__ControlFlowMonad__lift\n          [ e ] e.span target_type\n    [@@inline_ands bindings_of dexpr - dexpr']\n\n    module Item = struct\n      module OverrideDExpr = struct\n        let dexpr (e : A.expr) : B.expr =\n          let e' = dexpr e in\n          match KnownMonads.from_typ dty e.typ e'.typ with\n          | { monad = Some m; typ } ->\n              UB.call\n                (match m with\n                | MException _ ->\n                    Rust_primitives__hax__control_flow_monad__mexception__run\n                | MResult _ ->\n                    Rust_primitives__hax__control_flow_monad__mresult__run\n                | MOption ->\n                    Rust_primitives__hax__control_flow_monad__moption__run)\n                [ e' ] e.span typ\n          | _ -> e'\n      end\n\n      open OverrideDExpr\n\n      [%%inline_defs \"Item.*\"]\n    end\n\n    include Item\n  end\n\n  include Implem\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_cf_into_monads.mli",
    "content": "open! Prelude\n\nmodule Make\n    (F :\n      Features.T\n        with type monadic_action = Features.Off.monadic_action\n         and type monadic_binding = Features.Off.monadic_binding) : sig\n  include module type of struct\n    module FA = F\n\n    module FB = struct\n      include F\n      include Features.Off.Continue\n      include Features.Off.Early_exit\n      include Features.Off.Question_mark\n      include Features.Off.Break\n      include Features.On.Monadic_binding\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_direct_and_mut.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make\n    (FA :\n      Features.T\n        with type raw_pointer = Features.Off.raw_pointer\n         and type mutable_pointer = Features.Off.mutable_pointer) =\nstruct\n  open Ast\n\n  module FB = struct\n    include FA\n    include Features.On.Mutable_variable\n    include Features.On.Arbitrary_lhs\n    include Features.On.Nontrivial_lhs\n    include Features.Off.Mutable_reference\n  end\n\n  include\n    Phase_utils.MakeBase (FA) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  (** Reference to a fresh local ident (item-wise) *)\n  let out_var = ref Local_ident.{ id = mk_id Expr 0; name = \"out\" }\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    let hax_core_extraction =\n      Sys.getenv \"HAX_CORE_EXTRACTION_MODE\"\n      |> [%equal: string option] (Some \"on\")\n\n    module S = struct\n      include Features.SUBTYPE.Id\n      include Features.SUBTYPE.On.Mutable_variable\n      include Features.SUBTYPE.On.Nontrivial_lhs\n      include Features.SUBTYPE.On.Arbitrary_lhs\n    end\n\n    module UA = Ast_utils.Make (FA)\n    module UB = Ast_utils.Make (FB)\n\n    let ( let* ) x f = Option.bind ~f x\n\n    module Place = UA.Place\n\n    let expect_mut_borrow_of_place_or_pure_expr (e : A.expr) :\n        (Place.t, A.expr) Either.t option =\n      let e = UA.Mappers.normalize_borrow_mut#visit_expr () e in\n      let e = UA.remove_unsize e in\n      let* e = UA.Expect.mut_borrow e in\n      Option.some\n      @@\n      match\n        let* p = Place.of_expr e in\n        Some (Place.skip_allowed_deref_mut p)\n      with\n      | Some place -> Either.First place\n      | None -> Second e\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    let rec dty (span : span) (ty : A.ty) : B.ty =\n      match ty with\n      | [%inline_arms \"dty.*\" - TRef] -> auto\n      | TRef { mut = Mutable _; typ; _ } ->\n          if hax_core_extraction then\n          
  TApp\n              {\n                ident =\n                  Global_ident.of_name ~value:false Rust_primitives__hax__MutRef;\n                args = [ GType (dty span typ) ];\n              }\n          else Error.raise { kind = UnallowedMutRef; span }\n      | TRef { witness; typ; mut = Immutable as mut; region } ->\n          TRef { witness; typ = dty span typ; mut; region }\n\n    and dborrow_kind (_span : span) (borrow_kind : A.borrow_kind) :\n        B.borrow_kind =\n      match borrow_kind with\n      | [%inline_arms \"dborrow_kind.*\" - Mut] -> auto\n      | Mut _ -> Shared\n\n    (* TODO: refactor (see #316) *)\n    and place_to_lhs (p : Place.t) : B.lhs =\n      let typ = dty p.span p.typ in\n      match p.place with\n      | LocalVar var -> LhsLocalVar { var; typ }\n      | VecRef inner ->\n          LhsVecRef\n            {\n              e = place_to_lhs inner;\n              typ;\n              witness = Features.On.nontrivial_lhs;\n            }\n      | FieldProjection { place; projector } ->\n          let e = place_to_lhs place in\n          LhsFieldAccessor\n            { witness = Features.On.nontrivial_lhs; field = projector; typ; e }\n      | IndexProjection { place; index } ->\n          let e = place_to_lhs place in\n          let index = dexpr index in\n          LhsArrayAccessor\n            { e; typ; index; witness = Features.On.nontrivial_lhs }\n      | _ ->\n          let e = Place.to_expr p |> dexpr in\n          LhsArbitraryExpr { witness = Features.On.arbitrary_lhs; e }\n\n    and translate_app (span : span) (otype : A.ty) (f : A.expr)\n        (raw_args : A.expr list) (generic_args : B.generic_value list)\n        (trait : (B.impl_expr * B.generic_value list) option) bounds_impls :\n        B.expr =\n      (* `otype` and `_otype` (below) are supposed to be the same\n         type, but sometimes `_otype` is less precise (i.e. 
an associated\n         type while a concrete type is available) *)\n      let arg_types, _otype =\n        UA.Expect.arrow f.typ\n        |> Option.value_or_thunk ~default:(fun _ ->\n               Error.assertion_failure span \"expected an arrow type here\")\n      in\n      (* each input of `f` is either:\n         - of type `&mut _` and then the value fed to f should either be a place or a \"pure\" expression;\n         - of another type, and then the value can be anything.\n      *)\n      let args : ((Place.t, A.expr) Either.t * bool) list =\n        (match List.zip arg_types raw_args with\n        | Ok inputs -> inputs\n        | _ -> Error.assertion_failure span \"application: bad arity\")\n        |> List.map ~f:(fun (typ, (arg : A.expr)) ->\n               if UA.Expect.mut_ref typ |> Option.is_some then\n                 (* the argument of the function is mutable *)\n                 let v =\n                   expect_mut_borrow_of_place_or_pure_expr arg\n                   |> Option.value_or_thunk ~default:(fun _ ->\n                          Error.raise { kind = ExpectedMutRef; span = arg.span })\n                 in\n                 (v, true)\n               else (Either.second arg, false))\n      in\n      (* `mutargs`: all mutable borrows fed to `f` *)\n      let mutargs : (Place.t, A.expr) Either.t list =\n        args |> List.filter ~f:snd |> List.map ~f:fst\n      in\n      match mutargs with\n      | [] ->\n          (* there is no mutation, we can reconstruct the expression right away *)\n          let f, typ = (dexpr f, dty span otype) in\n          let args = List.map ~f:dexpr raw_args in\n          B.\n            {\n              e = B.App { f; args; generic_args; trait; bounds_impls };\n              typ;\n              span;\n            }\n      | _ -> (\n          (* TODO: when LHS are better (issue #222), compress `p1 = tmp1; ...; pN = tmpN` in `(p1...pN) = ...` *)\n          (* we are generating:\n             ```\n             let 
(tmp1, …, tmpN, out) = ⟨f⟩(⟨…un-&mut args⟩);\n             p1 = tmp1;\n                 …\n             pN = tmpN;\n             out\n             ```\n          *)\n          let ty_of_either : (Place.t, A.expr) Either.t -> A.ty = function\n            | First p -> p.typ\n            | Second e -> e.typ\n          in\n          let span_of_either : (Place.t, A.expr) Either.t -> span = function\n            | First p -> p.span\n            | Second e -> e.span\n          in\n          let b_ty_of_either : (Place.t, A.expr) Either.t -> B.ty = function\n            | First p -> dty p.span p.typ\n            | Second e -> dty e.span e.typ\n          in\n\n          let mutargs : ((local_ident * B.lhs) option * (B.ty * span)) list =\n            let to_ident_lhs i = function\n              | Either.First (place : Place.t) ->\n                  let var =\n                    Local_ident.\n                      { id = mk_id Expr 0; name = \"tmp\" ^ Int.to_string i }\n                  in\n                  Some (var, place_to_lhs place)\n              | _ -> None\n            in\n            let to_ty_span x =\n              let span = span_of_either x in\n              (dty span (ty_of_either x), span)\n            in\n            List.mapi ~f:(fun i -> to_ident_lhs i &&& to_ty_span) mutargs\n          in\n\n          let out_var = !out_var in\n          let otype = dty f.span otype in\n          let pat =\n            let out =\n              if UB.is_unit_typ otype then []\n              else [ UB.make_var_pat out_var otype f.span ]\n            in\n            List.map\n              ~f:(function\n                | Some (var, _), (ty, span) -> UB.make_var_pat var ty span\n                | None, (typ, span) -> UB.M.pat_PWild ~typ ~span)\n              mutargs\n            @ out\n            |> UB.make_tuple_pat\n          in\n          let f_call =\n            let f : B.expr =\n              let typ =\n                B.TArrow (List.map ~f:(fst >> b_ty_of_either) 
args, pat.typ)\n              in\n              B.{ span = f.span; typ; e = dexpr' f.span f.e }\n            in\n            let unmut_args =\n              args\n              |> List.map\n                   ~f:\n                     ( fst >> function\n                       | Either.First p -> Place.to_expr p\n                       | Either.Second e -> e )\n              |> List.map ~f:dexpr\n            in\n            B.\n              {\n                e =\n                  App\n                    { f; args = unmut_args; generic_args; trait; bounds_impls };\n                typ = pat.typ;\n                span = pat.span;\n              }\n          in\n          (* when lhs type accepts tuple (issue #222), assigns will be an option instead of a list *)\n          let assigns =\n            let flatten (o, meta) = Option.map o ~f:Fn.(id &&& const meta) in\n            List.filter_map ~f:flatten mutargs\n            |> List.map ~f:(fun ((var, lhs), (typ, span)) ->\n                   let e = B.{ e = LocalVar var; span; typ } in\n                   let witness = Features.On.mutable_variable in\n                   B.{ e = Assign { lhs; e; witness }; span; typ = UB.unit_typ })\n          in\n          (* TODO: this should be greatly simplified when `lhs` type will accept tuples (issue #222) *)\n          match assigns with\n          | [ { e = Assign { lhs; witness; _ }; span; typ } ]\n            when UB.is_unit_typ otype ->\n              { e = Assign { lhs; e = f_call; witness }; span; typ }\n              |> extract_vec_ref\n          | _ ->\n              let body =\n                let init =\n                  if UB.is_unit_typ otype then UB.unit_expr f.span\n                  else B.{ typ = otype; span = f.span; e = LocalVar out_var }\n                in\n                assigns\n                |> List.map ~f:extract_vec_ref\n                |> List.fold_right ~init ~f:UB.make_seq\n              in\n              let r = UB.make_let pat f_call body 
in\n              r)\n\n    and extract_vec_ref : B.expr -> B.expr = function\n      | { e = Assign { lhs = LhsVecRef { e = lhs; _ }; witness; e }; span; typ }\n        ->\n          {\n            e =\n              Assign\n                {\n                  lhs;\n                  e = UB.call Alloc__slice__Impl__to_vec [ e ] e.span typ;\n                  witness;\n                };\n            span;\n            typ;\n          }\n      | e -> e\n\n    and dexpr' (span : span) (e : A.expr') : B.expr' =\n      match e with\n      | [%inline_arms \"dexpr'.*\" - Borrow - App] -> auto\n      | Borrow { kind; e; witness } ->\n          Borrow\n            {\n              kind =\n                (match kind with\n                | Mut _ -> Error.raise { kind = UnallowedMutRef; span }\n                | Shared -> B.Shared\n                | Unique -> B.Unique);\n              e = dexpr e;\n              witness;\n            }\n      | App _ ->\n          Error.assertion_failure span\n            \"should have been handled by dexpr_unwrapped\"\n\n    and dexpr_unwrapped (expr : A.expr) : B.expr =\n      let span = expr.span in\n      match expr.e with\n      | App { f; args; generic_args; trait; bounds_impls } ->\n          let dgeneric_args = List.map ~f:(dgeneric_value span) in\n          let generic_args = dgeneric_args generic_args in\n          let trait = Option.map ~f:(dimpl_expr span *** dgeneric_args) trait in\n          let bounds_impls = List.map ~f:(dimpl_expr span) bounds_impls in\n          translate_app span expr.typ f args generic_args trait bounds_impls\n      | _ ->\n          let e = dexpr' span expr.e in\n          B.{ e; typ = dty expr.span expr.typ; span = expr.span }\n    [@@inline_ands bindings_of dexpr]\n\n    [%%inline_defs\n    dgeneric_param + dgeneric_constraint + dgenerics + dparam + dvariant\n    + dtrait_item' + dimpl_item']\n\n    let rec ditem' span (item : A.item') : B.item' =\n      let vars = 
UA.Reducers.collect_local_idents#visit_item' () item in\n      out_var := UA.fresh_local_ident_in (Set.to_list vars) \"out\";\n      [%inline_body ditem'] span item\n    [@@inline_ands \"Item.*\"]\n  end\n\n  include Implem\n  module FA = FA\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_direct_and_mut.mli",
    "content": "open! Prelude\n\nmodule Make\n    (F :\n      Features.T\n        with type raw_pointer = Features.Off.raw_pointer\n         and type mutable_pointer = Features.Off.mutable_pointer) : sig\n  include module type of struct\n    module FB = struct\n      include F\n      include Features.On.Mutable_variable\n      include Features.On.Arbitrary_lhs\n      include Features.On.Nontrivial_lhs\n      include Features.Off.Mutable_reference\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n    module FA = F\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_drop_blocks.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make (F : Features.T) = struct\n  open Ast\n  module FA = F\n\n  module FB = struct\n    include F\n    include Features.Off.Block\n  end\n\n  include\n    Phase_utils.MakeBase (F) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module UA = Ast_utils.Make (F)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module S = struct\n      include Features.SUBTYPE.Id\n    end\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    let rec dexpr' (span : span) (e : A.expr') : B.expr' =\n      match (UA.unbox_underef_expr { e; span; typ = UA.never_typ }).e with\n      | [%inline_arms \"dexpr'.*\" - Block] -> auto\n      | Block { e; _ } -> (dexpr e).e\n    [@@inline_ands bindings_of dexpr - dexpr']\n\n    [%%inline_defs \"Item.*\"]\n  end\n\n  include Implem\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_drop_blocks.mli",
    "content": "open! Prelude\n\n(** Only use this phase if you are also rejecting [unsafe] *)\nmodule Make (F : Features.T) : sig\n  include module type of struct\n    module FA = F\n\n    module FB = struct\n      include F\n      include Features.Off.Block\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_drop_match_guards.ml",
    "content": "(* This phase removes guards from pattern matchings. It rewrites\n   them using only pattern matchings without guards.\n   See #806 and the example in tests/guards. *)\n\n(* Rewrite example: *)\n(*\n    match x {\n        None => 0,\n        Some(v) if let Ok(y) = v => y,\n        Some(Err(y)) => y,\n        _ => 1,\n    }\n*)\n(* Becomes *)\n(*\n    match x {\n        None => 0,\n        _ => match match x {\n            Some(v) => match v {\n                Ok(y) => Some(y),\n                _ => None,\n            },\n            _ => None,\n        } {\n            Some(y) => y,\n            None => match x {\n                Some(Err(y)) => y,\n                _ => 1,\n            },\n        },\n    }\n*)\n\nopen! Prelude\n\nmodule%inlined_contents Make (F : Features.T) = struct\n  open Ast\n  module FA = F\n\n  module FB = struct\n    include F\n    include Features.Off.Match_guard\n  end\n\n  include\n    Phase_utils.MakeBase (F) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module UA = Ast_utils.Make (F)\n  module UB = Ast_utils.Make (FB)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module S = struct\n      include Features.SUBTYPE.Id\n    end\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    let maybe_simplified_match scrutinee ?(original_arms : A.arm list = [])\n        (arms : B.arm list) : B.expr' =\n      match (original_arms, arms) with\n      (* If the one wildcard branch was not produced by this phase, keep it *)\n      | ( [ { arm = { arm_pat = { p = PWild; _ }; _ }; _ } ],\n          [ { arm = { arm_pat = { p = PWild; _ }; _ }; _ } ] ) ->\n          Match { scrutinee; arms }\n      (* If there is only one wildcard branch we can simplify *)\n      | _, [ { arm = { body; arm_pat = { p = PWild; _ }; _ }; _ } ] -> body.e\n      (* General case *)\n      | _ -> Match { scrutinee; arms }\n\n    let rec dexpr' (span : span) (expr : A.expr') : B.expr' =\n      
match expr with\n      | [%inline_arms \"dexpr'.*\" - Match] -> auto\n      | Match { scrutinee; arms } ->\n          let new_arms = transform_arms (dexpr scrutinee) (List.rev arms) [] in\n          maybe_simplified_match ~original_arms:arms (dexpr scrutinee) new_arms\n\n    and transform_arms (scrutinee : B.expr) (remaining : A.arm list)\n        (treated : B.arm list) : B.arm list =\n      match remaining with\n      | [] -> treated\n      | { arm = { arm_pat; body; guard = None }; span } :: remaining ->\n          let new_arm : B.arm = UB.M.arm (dpat arm_pat) (dexpr body) ~span in\n          transform_arms scrutinee remaining (new_arm :: treated)\n      (* Matches an arm `arm_pat if let lhs = rhs => body` *)\n      (* And rewrites to `_ => match <option_match> {Some(x) => x, None => match scrutinee {<treated>} }` *)\n      (* where `option_match` is `match scrutinee {arm_pat => <match_guard>, _ => None }` *)\n      (* and `match_guard` is `match rhs {lhs  => Some(body), _ => None}` *)\n      (* and `treated` is the other arms coming after this one (that have already been treated as the arms are reversed ) *)\n      | {\n          arm =\n            {\n              arm_pat;\n              body;\n              guard = Some { guard = IfLet { lhs; rhs; _ }; span = guard_span };\n            };\n          span;\n        }\n        :: remaining ->\n          let module MS = (val UB.M.make guard_span) in\n          let result_typ = dty span body.typ in\n          let opt_result_typ : B.ty =\n            TApp\n              {\n                ident = Global_ident.of_name ~value:false Core__option__Option;\n                args = [ GType result_typ ];\n              }\n          in\n          let mk_opt_expr (value : B.expr option) : B.expr =\n            let (name : Concrete_ident.name), args =\n              match value with\n              | Some v -> (Core__option__Option__Some, [ v ])\n              | None -> (Core__option__Option__None, [])\n            in\n        
    UB.call_Constructor name false args guard_span opt_result_typ\n          in\n\n          let mk_opt_pattern (binding : B.pat option) : B.pat =\n            let (name : Concrete_ident.name), (fields : B.field_pat list) =\n              match binding with\n              | Some b ->\n                  ( Core__option__Option__Some,\n                    [ { field = `TupleField (0, 1); pat = b } ] )\n              | None -> (Core__option__Option__None, [])\n            in\n            MS.pat_PConstruct\n              ~constructor:(Global_ident.of_name ~value:true name)\n              ~fields ~is_record:false ~is_struct:false ~typ:opt_result_typ\n          in\n\n          let expr_none = mk_opt_expr None in\n\n          (* This is the nested pattern matching equivalent to the guard *)\n          (* Example: .. if let pat = rhs => body *)\n          (* Rewrites with match rhs { pat => Some(body), _ => None }*)\n          let guard_match : B.expr =\n            MS.expr_Match ~scrutinee:(dexpr rhs)\n              ~arms:\n                [\n                  UB.M.arm (dpat lhs) (mk_opt_expr (Some (dexpr body))) ~span;\n                  MS.arm (MS.pat_PWild ~typ:(dty guard_span lhs.typ)) expr_none;\n                ]\n              ~typ:opt_result_typ\n          in\n\n          (* `r` corresponds to `option_match` in the example above *)\n          let r : B.expr =\n            MS.expr_Match ~scrutinee\n              ~arms:\n                [\n                  MS.arm (dpat arm_pat) guard_match;\n                  MS.arm\n                    (UB.M.pat_PWild\n                       ~typ:(dty guard_span arm_pat.typ)\n                       ~span:guard_span)\n                    expr_none;\n                ]\n              ~typ:opt_result_typ\n          in\n          let id = UB.fresh_local_ident_in [] \"x\" in\n          let new_body : B.expr =\n            MS.expr_Match ~scrutinee:r\n              ~arms:\n                [\n                  MS.arm\n                    
(mk_opt_pattern\n                       (Some\n                          (MS.pat_PBinding ~mut:Immutable ~mode:ByValue ~var:id\n                             ~typ:result_typ ~subpat:None)))\n                    { e = LocalVar id; span; typ = result_typ };\n                  MS.arm (mk_opt_pattern None)\n                    {\n                      e = maybe_simplified_match scrutinee treated;\n                      span = guard_span;\n                      typ = result_typ;\n                    };\n                ]\n              ~typ:result_typ\n          in\n          let new_arm : B.arm =\n            UB.M.arm\n              (UB.M.pat_PWild ~typ:(dty span arm_pat.typ) ~span)\n              new_body ~span\n          in\n          transform_arms scrutinee remaining [ new_arm ]\n    [@@inline_ands bindings_of dexpr - dexpr' - darm - darm' - dguard - dguard']\n\n    [%%inline_defs \"Item.*\"]\n  end\n\n  include Implem\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_drop_match_guards.mli",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) : sig\n  include module type of struct\n    module FA = F\n\n    module FB = struct\n      include F\n      include Features.Off.Match_guard\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_drop_references.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make\n    (F :\n      Features.T\n        with type raw_pointer = Features.Off.raw_pointer\n         and type mutable_reference = Features.Off.mutable_reference) =\nstruct\n  open Ast\n  module FA = F\n\n  module FB = struct\n    include F\n    include Features.Off.Mutable_pointer\n    include Features.Off.Lifetime\n    include Features.Off.Reference\n  end\n\n  include\n    Phase_utils.MakeBase (F) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module UA = Ast_utils.Make (F)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module S = struct\n      include Features.SUBTYPE.Id\n    end\n\n    [%%inline_defs dsafety_kind]\n\n    let rec dty (span : span) (t : A.ty) : B.ty =\n      match t with\n      | [%inline_arms \"dty.*\" - TApp - TRef] -> auto\n      | TApp { ident; args = [ GType boxed_ty; _ ] }\n        when Global_ident.eq_name Alloc__boxed__Box ident ->\n          dty span boxed_ty\n      | TApp { ident; args } ->\n          TApp { ident; args = List.filter_map ~f:(dgeneric_value span) args }\n      | TRef { typ; mut = Immutable; _ } -> dty span typ\n      | TRef _ -> .\n\n    and dgeneric_value (span : span) (g : A.generic_value) :\n        B.generic_value option =\n      match g with\n      | GLifetime _ -> None\n      | [%inline_arms \"dgeneric_value.*\" - GLifetime] ->\n          map (Option.some : B.generic_value -> _)\n\n    and dtrait_goal (span : span) (r : A.trait_goal) : B.trait_goal =\n      {\n        trait = r.trait;\n        args = List.filter_map ~f:(dgeneric_value span) r.args;\n      }\n\n    and ddyn_trait_goal (span : span) (r : A.dyn_trait_goal) : B.dyn_trait_goal\n        =\n      {\n        trait = r.trait;\n        non_self_args = List.filter_map ~f:(dgeneric_value span) r.non_self_args;\n      }\n\n    and dpat' (span : span) (p : A.pat') : B.pat' =\n      match p with\n      | [%inline_arms \"dpat'.*\" - PBinding 
- PDeref] -> auto\n      | PBinding { mut; var : Local_ident.t; typ; subpat; _ } ->\n          PBinding\n            {\n              mut;\n              mode = ByValue;\n              var;\n              typ = dty span typ;\n              subpat =\n                Option.map ~f:(fun (p, as_pat) -> (dpat p, as_pat)) subpat;\n            }\n      | PDeref { subpat; _ } -> (dpat subpat).p\n\n    and dexpr' (span : span) (e : A.expr') : B.expr' =\n      match (UA.unbox_underef_expr { e; span; typ = UA.never_typ }).e with\n      | [%inline_arms\n          If + Literal + Array + Block + QuestionMark + \"dexpr'.Quote\"] ->\n          auto\n      | Construct { constructor; is_record; is_struct; fields; base } ->\n          Construct\n            {\n              constructor;\n              is_record;\n              is_struct;\n              fields = List.map ~f:(fun (i, e) -> (i, dexpr e)) fields;\n              base = Option.map ~f:(dexpr *** S.construct_base span) base;\n            }\n      | Match { scrutinee; arms } ->\n          Match { scrutinee = dexpr scrutinee; arms = List.map ~f:darm arms }\n      | Let { monadic; lhs; rhs; body } ->\n          Let\n            {\n              monadic = Option.map ~f:(dsupported_monads span *** Fn.id) monadic;\n              lhs = dpat lhs;\n              rhs = dexpr rhs;\n              body = dexpr body;\n            }\n      | LocalVar local_ident -> LocalVar local_ident\n      | GlobalVar global_ident -> GlobalVar global_ident\n      | Ascription { e = e'; typ } ->\n          Ascription { e = dexpr e'; typ = dty span typ }\n      | MacroInvokation { macro; args; witness } ->\n          MacroInvokation { macro; args; witness }\n      | Assign { lhs; e; witness } ->\n          Assign { lhs = dlhs span lhs; e = dexpr e; witness }\n      | [%inline_arms Loop + Continue + Break] ->\n          auto (* TODO: inline more arms! 
*)\n      | Return { e; witness } -> Return { e = dexpr e; witness }\n      | Borrow { e; _ } -> (dexpr e).e\n      | EffectAction { action; argument } ->\n          EffectAction { action; argument = dexpr argument }\n      | Closure { params; body; captures } ->\n          Closure\n            {\n              params = List.map ~f:dpat params;\n              body = dexpr body;\n              captures = List.map ~f:dexpr captures;\n            }\n      | App { f; args; generic_args; trait; bounds_impls } ->\n          let f = dexpr f in\n          let args = List.map ~f:dexpr args in\n          let dgeneric_args = List.filter_map ~f:(dgeneric_value span) in\n          let trait = Option.map ~f:(dimpl_expr span *** dgeneric_args) trait in\n          let generic_args = dgeneric_args generic_args in\n          let bounds_impls = List.map ~f:(dimpl_expr span) bounds_impls in\n          App { f; args; generic_args; trait; bounds_impls }\n      | _ -> .\n    [@@inline_ands bindings_of dexpr - dbinding_mode]\n\n    let dgeneric_param (_span : span)\n        ({ ident; kind; attrs; span } : A.generic_param) :\n        B.generic_param option =\n      let ( let* ) x f = Option.bind ~f x in\n      let* kind =\n        match kind with\n        | GPLifetime _ -> None\n        | GPType -> Some B.GPType\n        | GPConst { typ } -> Some (B.GPConst { typ = dty span typ })\n      in\n      Some B.{ ident; kind; attrs; span }\n\n    and dprojection_predicate (span : span) (r : A.projection_predicate) :\n        B.projection_predicate =\n      {\n        impl = dimpl_expr span r.impl;\n        assoc_item = r.assoc_item;\n        typ = dty span r.typ;\n      }\n\n    let dgeneric_constraint (span : span) (p : A.generic_constraint) :\n        B.generic_constraint option =\n      match p with\n      | GCLifetime _ -> None\n      | GCType idents -> Some (B.GCType (dimpl_ident span idents))\n      | GCProjection projection ->\n          Some (B.GCProjection (dprojection_predicate span 
projection))\n\n    let dgenerics (span : span) (g : A.generics) : B.generics =\n      {\n        params = List.filter_map ~f:(dgeneric_param span) g.params;\n        constraints =\n          List.filter_map ~f:(dgeneric_constraint span) g.constraints;\n      }\n\n    [%%inline_defs dparam + dvariant + dtrait_item + dimpl_item]\n\n    let rec ditem = [%inline_body ditem]\n    and ditem_unwrapped = [%inline_body ditem_unwrapped]\n\n    and ditem' (span : span) (item : A.item') : B.item' =\n      match item with\n      | [%inline_arms \"ditem'.*\" - Impl] -> auto\n      | Impl\n          {\n            generics;\n            self_ty;\n            of_trait = of_trait_id, of_trait_generics;\n            items;\n            parent_bounds;\n            safety;\n          } ->\n          B.Impl\n            {\n              generics = dgenerics span generics;\n              self_ty = dty span self_ty;\n              of_trait =\n                ( of_trait_id,\n                  List.filter_map ~f:(dgeneric_value span) of_trait_generics );\n              items = List.map ~f:dimpl_item items;\n              parent_bounds =\n                List.map ~f:(dimpl_expr span *** dimpl_ident span) parent_bounds;\n              safety = dsafety_kind span safety;\n            }\n\n    [%%inline_defs ditems]\n  end\n\n  include Implem\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_drop_references.mli",
    "content": "open! Prelude\n\nmodule Make\n    (F :\n      Features.T\n        with type raw_pointer = Features.Off.raw_pointer\n         and type mutable_reference = Features.Off.mutable_reference) : sig\n  include module type of struct\n    module FA = F\n\n    module FB = struct\n      include F\n      include Features.Off.Mutable_pointer\n      include Features.Off.Lifetime\n      include Features.Off.Reference\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_drop_return_break_continue.ml",
    "content": "(** This phase removes `return`s in exit position. Inside loops, it replaces\n    `return`, `break` and `continue` (in exit position) by their encoding in the\n    `ControlFlow` enum. It replaces another expression in exit position by an\n    equivalent `continue`. This phase should comae after `RewriteControlFlow` to\n    ensure all control flow is in exit position. *)\n\nopen! Prelude\n\nmodule%inlined_contents Make (F : Features.T) = struct\n  open Ast\n  module FA = F\n\n  module FB = struct\n    include F\n    include Features.On.Fold_like_loop\n    include Features.Off.Early_exit\n    include Features.Off.Break\n    include Features.Off.Continue\n  end\n\n  include\n    Phase_utils.MakeBase (F) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module UA = Ast_utils.Make (F)\n    module UB = Ast_utils.Make (FB)\n\n    module S = struct\n      include Features.SUBTYPE.Id\n    end\n\n    (* break_type is \"by default\" unit since there always is a (possibly implicit) break type *)\n    type loop_info = { return_type : A.ty option; break_type : A.ty option }\n\n    let has_return =\n      let module Visitors = Ast_visitors.Make (F) in\n      object (self)\n        inherit [_] Visitors.reduce as super\n        method zero = { return_type = None; break_type = None }\n\n        method plus li1 li2 =\n          {\n            return_type = Option.first_some li1.return_type li2.return_type;\n            break_type = Option.first_some li1.break_type li2.break_type;\n          }\n\n        method! visit_expr' () e =\n          match e with\n          | Return { e; _ } -> { return_type = Some e.typ; break_type = None }\n          | Break { e; _ } -> { return_type = None; break_type = Some e.typ }\n          (* We should avoid catching breaks of a nested\n             loops as they could have different types. 
*)\n          | Loop { body; _ } ->\n              {\n                return_type = (self#visit_expr () body).return_type;\n                break_type = None;\n              }\n          | _ -> super#visit_expr' () e\n      end\n\n    let visitor =\n      let module Visitors = Ast_visitors.Make (F) in\n      object (self)\n        inherit [_] Visitors.map as _super\n\n        method! visit_expr (in_loop : (loop_info * A.ty) option) e =\n          let span = e.span in\n          match (e.e, in_loop) with\n          | Return { e; _ }, None -> e\n          (* we know [e] is on an exit position: the return is\n             thus useless, we can skip it *)\n          | Let { monadic = None; lhs; rhs; body }, _ ->\n              let body = self#visit_expr in_loop body in\n              {\n                e with\n                e = Let { monadic = None; lhs; rhs; body };\n                typ = body.typ;\n              }\n              (* If a let expression is an exit node, then it's body\n                 is as well *)\n          | Match { scrutinee; arms }, _ ->\n              let arms = List.map ~f:(self#visit_arm in_loop) arms in\n              let typ =\n                match arms with { arm; _ } :: _ -> arm.body.typ | [] -> e.typ\n              in\n              { e with e = Match { scrutinee; arms }; typ }\n          | If { cond; then_; else_ }, _ ->\n              let then_ = self#visit_expr in_loop then_ in\n              let else_ = Option.map ~f:(self#visit_expr in_loop) else_ in\n              { e with e = If { cond; then_; else_ }; typ = then_.typ }\n          | Return { e; _ }, Some ({ return_type; break_type }, acc_type) ->\n              UA.M.expr_Constructor_CF ~return_type ~span ~break_type ~e\n                ~acc:{ e with typ = acc_type } `Return\n          | ( Break { e; acc = Some (acc, _); _ },\n              Some ({ return_type; break_type }, _) ) ->\n              UA.M.expr_Constructor_CF ~return_type ~span ~break_type ~e ~acc\n                
`Break\n          | ( Continue { acc = Some (acc, _); _ },\n              Some ({ return_type = None; break_type = None }, _) ) ->\n              acc\n          | ( Continue { acc = Some (acc, _); _ },\n              Some ({ return_type; break_type }, _) ) ->\n              UA.M.expr_Constructor_CF ~return_type ~span ~break_type ~acc\n                `Continue\n          | _, Some ({ return_type; break_type }, _)\n            when Option.is_some return_type || Option.is_some break_type ->\n              UA.M.expr_Constructor_CF ~return_type ~span ~break_type ~acc:e\n                `Continue\n          | _ -> e\n        (** The invariant here is that [visit_expr] is called only on\n            expressions that are on exit positions. [visit_expr] is first called\n            on root expressions, which are (by definition) exit nodes. Then,\n            [visit_expr] itself makes recursive calls to sub expressions that\n            are themselves in exit nodes. **)\n      end\n\n    let closure_visitor =\n      let module Visitors = Ast_visitors.Make (F) in\n      object\n        inherit [_] Visitors.map as super\n\n        method! 
visit_expr' () e =\n          match e with\n          | Closure ({ body; _ } as closure) ->\n              Closure { closure with body = visitor#visit_expr None body }\n          | _ -> super#visit_expr' () e\n      end\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    let rec dexpr' (span : span) (expr : A.expr') : B.expr' =\n      match expr with\n      | [%inline_arms \"dexpr'.*\" - Return - Break - Continue - Loop] -> auto\n      | Return _ | Break _ | Continue _ ->\n          Error.assertion_failure span\n            \"Return/Break/Continue are expected to be gone as this point\"\n      | Loop { body; kind; state; label; witness; _ } ->\n          let control_flow_type = has_return#visit_expr () body in\n          let control_flow =\n            match control_flow_type with\n            | { return_type = Some _; _ } ->\n                Some (B.BreakOrReturn, Features.On.fold_like_loop)\n            | { break_type = Some _; _ } ->\n                Some (BreakOnly, Features.On.fold_like_loop)\n            | _ -> None\n          in\n          let acc_type =\n            match body.typ with\n            | TApp { ident; args = [ GType _; GType continue_type ] }\n              when Ast.Global_ident.equal ident\n                     (Ast.Global_ident.of_name ~value:false\n                        Core__ops__control_flow__ControlFlow) ->\n                continue_type\n            | _ -> body.typ\n          in\n          let body =\n            visitor#visit_expr (Some (control_flow_type, acc_type)) body\n            |> dexpr\n          in\n          let kind = dloop_kind span kind in\n          let state = Option.map ~f:(dloop_state span) state in\n          Loop { body; control_flow; kind; state; label; witness }\n    [@@inline_ands bindings_of dexpr - dexpr']\n\n    [%%inline_defs \"Item.*\" - ditems]\n\n    let ditems (items : A.item list) : B.item list =\n      List.concat_map items\n        ~f:(visitor#visit_item None >> closure_visitor#visit_item () >> 
ditem)\n  end\n\n  include Implem\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_drop_return_break_continue.mli",
    "content": "(** This phase transforms `return e` expressions into `e` when `return e` is on\n    an exit position. It should come after phase `RewriteControlFlow` and thus\n    eliminate all `return`s. Inside loops it rewrites `return`, `break` and\n    `continue` as their equivalent in terms of the `ControlFlow` wrapper that\n    will be handled by the specific fold operators introduced by phase\n    `FunctionalizeLoops`. *)\n\nmodule Make (F : Features.T) : sig\n  include module type of struct\n    module FA = F\n\n    module FB = struct\n      include F\n      include Features.On.Fold_like_loop\n      include Features.Off.Early_exit\n      include Features.Off.Break\n      include Features.Off.Continue\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_drop_sized_trait.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      open Ast.Make (F)\n      module U = Ast_utils.Make (F)\n      module Visitors = Ast_visitors.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      let ident_is_sized : Ast.concrete_ident -> bool =\n        Concrete_ident.eq_name Core__marker__Sized\n\n      let visitor =\n        let keep (ii : impl_ident) = ident_is_sized ii.goal.trait |> not in\n        object\n          inherit [_] Visitors.map as super\n\n          method! visit_generics () generics =\n            let generics = super#visit_generics () generics in\n            {\n              generics with\n              constraints =\n                List.filter\n                  ~f:(function GCType ii -> keep ii | _ -> true)\n                  generics.constraints;\n            }\n\n          method! visit_item' () item' =\n            let item' = super#visit_item' () item' in\n            match item' with\n            | Impl payload ->\n                Impl\n                  {\n                    payload with\n                    parent_bounds =\n                      List.filter ~f:(snd >> keep) payload.parent_bounds;\n                  }\n            | _ -> item'\n\n          method! visit_trait_item' () ti' =\n            let ti' = super#visit_trait_item' () ti' in\n            match ti' with\n            | TIType impl_idents -> TIType (List.filter ~f:keep impl_idents)\n            | _ -> ti'\n\n          method! 
visit_impl_item' () ii' =\n            let ii' = super#visit_impl_item' () ii' in\n            match ii' with\n            | IIType payload ->\n                IIType\n                  {\n                    payload with\n                    parent_bounds =\n                      List.filter ~f:(snd >> keep) payload.parent_bounds;\n                  }\n            | _ -> ii'\n        end\n\n      let ditems =\n        List.filter ~f:(fun item ->\n            match item.v with\n            (* Drop any implementation of the `Sized` trait. *)\n            | Impl { of_trait = tr, _; _ } when ident_is_sized tr -> false\n            | _ -> true)\n        >> List.map ~f:(visitor#visit_item ())\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_drop_sized_trait.mli",
    "content": "(** This phase removes any occurrence of the `core::marker::sized` trait. This\n    trait appears a lot, but is generally not very useful in our backends. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_explicit_conversions.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      module A = Ast.Make (F)\n      module UA = Ast_utils.Make (F)\n      module Visitors = Ast_visitors.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      module Attrs = Attr_payloads.MakeBase (Error)\n\n      let explicit_conversions =\n        object\n          inherit [_] Visitors.map as super\n\n          method! visit_expr () e =\n            match super#visit_expr () e with\n            | {\n             e =\n               App\n                 {\n                   f = { e = GlobalVar f; _ };\n                   args = [ ({ typ = TApp { ident; _ }; _ } as inner) ];\n                   _;\n                 };\n             typ = TSlice _ as t;\n             span;\n            }\n              when Ast.Global_ident.eq_name Core__ops__deref__Deref__deref f\n                   && Ast.Global_ident.eq_name Alloc__vec__Vec ident ->\n                UA.call Alloc__vec__Impl_1__as_slice [ inner ] span t\n            | e -> e\n\n          (* Option.value ~default:e \n            (\n              let* _ = e.typ  in\n              let* e = UA.Expect.concrete_app1 Core__ops__deref__Deref__deref e in \n              let e = UA.call Alloc__vec__Impl_1__as_slice [e] e.span e.typ in\n              Some e\n            ) *)\n          (* match e with\n            | {\n             e = Borrow { e = { typ = TApp { ident; _ }; _ } as inner; _ };\n             typ = TSlice _;\n             _;\n            }\n              when Ast.Global_ident.eq_name Alloc__vec__Vec ident ->\n                inner\n            | _ -> super#visit_expr () e *)\n        end\n\n      let ditems = List.map ~f:(explicit_conversions#visit_item ())\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_explicit_conversions.mli",
    "content": "(** This phase adds explicit conversions from Vec to slice, instead of\n    conversions by taking references, which are erased by the phase\n    DropReferences. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_functionalize_loops.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make\n    (F :\n      Features.T\n        with type continue = Features.Off.continue\n         and type early_exit = Features.Off.early_exit\n         and type break = Features.Off.break) =\nstruct\n  open Ast\n  module FA = F\n\n  module FB = struct\n    include F\n    include Features.Off.Loop\n    include Features.Off.For_loop\n    include Features.Off.While_loop\n    include Features.Off.For_index_loop\n    include Features.Off.State_passing_loop\n    include Features.Off.Fold_like_loop\n    include Features.Off.Continue\n    include Features.Off.Early_exit\n    include Features.Off.Break\n  end\n\n  include\n    Phase_utils.MakeBase (F) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module UA = Ast_utils.Make (F)\n    module UB = Ast_utils.Make (FB)\n    module Visitors = Ast_visitors.Make (F)\n\n    module S = struct\n      include Features.SUBTYPE.Id\n    end\n\n    type loop_annotation_kind =\n      | LoopInvariant of { index_pat : B.pat option; invariant : B.expr }\n      | LoopVariant of B.expr\n\n    type loop_annotation = {\n      body : B.expr;\n      annotation : loop_annotation_kind option;\n    }\n\n    let extract_loop_annotation (body : B.expr) : loop_annotation =\n      let rhs_body =\n        let* (e_let : UB.D.expr_Let) = UB.D.expr_Let body in\n        let*? 
_ = Option.is_none e_let.monadic in\n        let* _ = UB.D.pat_PWild e_let.lhs in\n        let* app = UB.D.expr_App e_let.rhs in\n        let* f = UB.D.expr_GlobalVar app.f in\n        Some (f, app.args, e_let.body)\n      in\n      match rhs_body with\n      | Some\n          ( f,\n            [ { e = Closure { params = [ pat ]; body = invariant; _ }; _ } ],\n            body )\n        when Global_ident.eq_name Hax_lib___internal_loop_invariant f ->\n          {\n            body;\n            annotation =\n              Some (LoopInvariant { index_pat = Some pat; invariant });\n          }\n      | Some (f, [ invariant ], body)\n        when Global_ident.eq_name Hax_lib___internal_while_loop_invariant f ->\n          {\n            body;\n            annotation = Some (LoopInvariant { index_pat = None; invariant });\n          }\n      | Some (f, [ invariant ], body)\n        when Global_ident.eq_name Hax_lib___internal_loop_decreases f ->\n          { body; annotation = Some (LoopVariant invariant) }\n      | _ -> { body; annotation = None }\n\n    let expect_invariant_variant (annotation1 : loop_annotation_kind option)\n        (annotation2 : loop_annotation_kind option) :\n        loop_annotation_kind option * loop_annotation_kind option =\n      match annotation1 with\n      | Some (LoopVariant _) -> (annotation2, annotation1)\n      | _ -> (annotation1, annotation2)\n\n    type iterator =\n      | Range of { start : B.expr; end_ : B.expr }\n      | Slice of B.expr\n      | ChunksExact of { size : B.expr; slice : B.expr }\n      | Enumerate of iterator\n      | StepBy of { n : B.expr; it : iterator }\n    [@@deriving show]\n\n    let rec as_iterator (e : B.expr) : iterator option =\n      match e.e with\n      | Construct\n          {\n            constructor = `Concrete range_ctor;\n            is_record = true;\n            is_struct = true;\n            fields =\n              [ (`Concrete start_field, start); (`Concrete end_field, end_) ];\n            
base = None;\n          }\n        when Concrete_ident.eq_name Core__ops__range__Range__start start_field\n             && Concrete_ident.eq_name Core__ops__range__Range range_ctor\n             && Concrete_ident.eq_name Core__ops__range__Range__end end_field ->\n          Some (Range { start; end_ })\n      | _ -> meth_as_iterator e\n\n    and meth_as_iterator (e : B.expr) : iterator option =\n      let* f, args =\n        match e.e with\n        | App { f = { e = GlobalVar f; _ }; args; _ } -> Some (f, args)\n        | _ -> None\n      in\n      let f_eq n = Global_ident.eq_name n f in\n      let one_arg () = match args with [ x ] -> Some x | _ -> None in\n      let two_args () = match args with [ x; y ] -> Some (x, y) | _ -> None in\n      if f_eq Core__iter__traits__iterator__Iterator__step_by then\n        let* it, n = two_args () in\n        let* it = as_iterator it in\n        Some (StepBy { n; it })\n      else if\n        f_eq Core__iter__traits__collect__IntoIterator__into_iter\n        || f_eq Core__slice__Impl__iter\n      then\n        let* iterable = one_arg () in\n        match iterable.typ with\n        | TSlice _ | TArray _ -> Some (Slice iterable)\n        | _ -> as_iterator iterable\n      else if f_eq Core__iter__traits__iterator__Iterator__enumerate then\n        let* iterable = one_arg () in\n        let* iterator = as_iterator iterable in\n        Some (Enumerate iterator)\n      else if f_eq Core__slice__Impl__chunks_exact then\n        let* slice, size = two_args () in\n        Some (ChunksExact { size; slice })\n      else None\n\n    let fn_args_of_iterator (cf : A.cf_kind option) (it : iterator) :\n        (Concrete_ident.name * B.expr list * B.ty) option =\n      let open Concrete_ident_generated in\n      let usize = B.TInt { size = SSize; signedness = Unsigned } in\n      match it with\n      | Enumerate (ChunksExact { size; slice }) ->\n          let fold_op =\n            match cf with\n            | Some BreakOrReturn ->\n          
      Rust_primitives__hax__folds__fold_enumerated_chunked_slice_return\n            | Some BreakOnly ->\n                Rust_primitives__hax__folds__fold_enumerated_chunked_slice_cf\n            | None -> Rust_primitives__hax__folds__fold_enumerated_chunked_slice\n          in\n          Some (fold_op, [ size; slice ], usize)\n      | ChunksExact { size; slice } ->\n          let fold_op =\n            match cf with\n            | Some BreakOrReturn ->\n                Rust_primitives__hax__folds__fold_chunked_slice_return\n            | Some BreakOnly ->\n                Rust_primitives__hax__folds__fold_chunked_slice_cf\n            | None -> Rust_primitives__hax__folds__fold_chunked_slice\n          in\n          Some (fold_op, [ size; slice ], usize)\n      | Enumerate (Slice slice) ->\n          let fold_op =\n            match cf with\n            | Some BreakOrReturn ->\n                Rust_primitives__hax__folds__fold_enumerated_slice_return\n            | Some BreakOnly ->\n                Rust_primitives__hax__folds__fold_enumerated_slice_cf\n            | None -> Rust_primitives__hax__folds__fold_enumerated_slice\n          in\n          Some (fold_op, [ slice ], usize)\n      | StepBy { n; it = Range { start; end_ } } ->\n          let fold_op =\n            match cf with\n            | Some BreakOrReturn ->\n                Rust_primitives__hax__folds__fold_range_step_by_return\n            | Some BreakOnly ->\n                Rust_primitives__hax__folds__fold_range_step_by_cf\n            | None -> Rust_primitives__hax__folds__fold_range_step_by\n          in\n          Some (fold_op, [ start; end_; n ], start.typ)\n      | Range { start; end_ } ->\n          let fold_op =\n            match cf with\n            | Some BreakOrReturn ->\n                Rust_primitives__hax__folds__fold_range_return\n            | Some BreakOnly -> Rust_primitives__hax__folds__fold_range_cf\n            | None -> Rust_primitives__hax__folds__fold_range\n          
in\n          Some (fold_op, [ start; end_ ], start.typ)\n      | _ -> None\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    let rec dexpr_unwrapped (expr : A.expr) : B.expr =\n      let span = expr.span in\n      let module M = UB.M in\n      let module MS = (val M.make span) in\n      match expr.e with\n      | Loop { body; kind = ForLoop { it; pat; _ }; state; control_flow; _ } ->\n          let bpat, init =\n            match state with\n            | Some { bpat; init; _ } -> (dpat bpat, dexpr init)\n            | None ->\n                let unit = UB.unit_expr span in\n                (M.pat_PWild ~span ~typ:unit.typ, unit)\n          in\n          let body = dexpr body in\n          let { body; annotation } = extract_loop_annotation body in\n          let it = dexpr it in\n          let pat = dpat pat in\n          let fn : B.expr = UB.make_closure [ bpat; pat ] body body.span in\n          let cf = Option.map ~f:fst control_flow in\n          let f, args =\n            match as_iterator it |> Option.bind ~f:(fn_args_of_iterator cf) with\n            | Some (f, args, typ) ->\n                (* TODO what happens if there is control flow? 
*)\n                let invariant : B.expr =\n                  let default =\n                    let pat = MS.pat_PWild ~typ in\n                    (pat, MS.expr_Literal ~typ:TBool (Bool true))\n                  in\n                  let pat, invariant =\n                    match annotation with\n                    | Some (LoopInvariant { index_pat = Some pat; invariant })\n                      ->\n                        (pat, invariant)\n                    | _ -> default\n                  in\n                  UB.make_closure [ bpat; pat ] invariant invariant.span\n                in\n                (f, args @ [ invariant; init; fn ])\n            | None ->\n                let fold : Concrete_ident.name =\n                  match cf with\n                  | Some BreakOrReturn ->\n                      Rust_primitives__hax__folds__fold_return\n                  | Some BreakOnly -> Rust_primitives__hax__folds__fold_cf\n                  | None -> Core__iter__traits__iterator__Iterator__fold\n                in\n                (fold, [ it; init; fn ])\n          in\n          UB.call f args span (dty span expr.typ)\n      | Loop { body; kind = WhileLoop { condition; _ }; state; control_flow; _ }\n        ->\n          let bpat, init =\n            match state with\n            | Some { bpat; init; _ } -> (dpat bpat, dexpr init)\n            | None ->\n                let unit = UB.unit_expr span in\n                (M.pat_PWild ~span ~typ:unit.typ, unit)\n          in\n          let body = dexpr body in\n          let { body; annotation = annotation1 } =\n            extract_loop_annotation body\n          in\n          let { body; annotation = annotation2 } =\n            extract_loop_annotation body\n          in\n          let invariant, variant =\n            expect_invariant_variant annotation1 annotation2\n          in\n          let invariant =\n            match invariant with\n            | Some (LoopInvariant { index_pat = None; invariant }) 
-> invariant\n            | _ -> MS.expr_Literal ~typ:TBool (Bool true)\n          in\n          let variant =\n            match variant with\n            | Some (LoopVariant variant) -> variant\n            | _ ->\n                let kind = { size = S32; signedness = Unsigned } in\n                let e =\n                  UB.M.expr_Literal ~typ:(TInt kind) ~span:body.span\n                    (Int { value = \"0\"; negative = false; kind })\n                in\n                UB.call Rust_primitives__hax__int__from_machine [ e ] e.span\n                  (TApp\n                     {\n                       ident =\n                         `Concrete\n                           (Concrete_ident.of_name ~value:false\n                              Hax_lib__int__Int);\n                       args = [];\n                     })\n          in\n          let condition = dexpr condition in\n          let condition : B.expr =\n            M.expr_Closure ~params:[ bpat ] ~body:condition ~captures:[]\n              ~span:condition.span\n              ~typ:(TArrow ([ bpat.typ ], condition.typ))\n          in\n          let body : B.expr =\n            M.expr_Closure ~params:[ bpat ] ~body ~captures:[]\n              ~typ:(TArrow ([ bpat.typ ], body.typ))\n              ~span:body.span\n          in\n          let fold_operator : Concrete_ident.name =\n            match control_flow with\n            | Some (BreakOrReturn, _) -> Rust_primitives__hax__while_loop_return\n            | Some (BreakOnly, _) -> Rust_primitives__hax__while_loop_cf\n            | None -> Rust_primitives__hax__while_loop\n          in\n          let invariant : B.expr =\n            UB.make_closure [ bpat ] invariant invariant.span\n          in\n          let variant = UB.make_closure [ bpat ] variant variant.span in\n          (* The invariant should come before the condition. This allows to use the invariant\n             to prove panic freedom of the condition. 
*)\n          UB.call fold_operator\n            [ invariant; condition; variant; init; body ]\n            span (dty span expr.typ)\n      | Loop _ ->\n          Error.unimplemented ~issue_id:933 ~details:\"Unhandled loop kind\" span\n      | [%inline_arms \"dexpr'.*\" - Loop - Break - Continue - Return] ->\n          map (fun e -> B.{ e; typ = dty expr.span expr.typ; span = expr.span })\n      | _ -> .\n    [@@inline_ands bindings_of dexpr - dexpr' - dloop_kind - dloop_state]\n\n    [%%inline_defs \"Item.*\"]\n  end\n\n  include Implem\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_functionalize_loops.mli",
    "content": "open! Prelude\n\nmodule Make\n    (F :\n      Features.T\n        with type continue = Features.Off.continue\n         and type early_exit = Features.Off.early_exit\n         and type break = Features.Off.break) : sig\n  include module type of struct\n    module FA = F\n\n    module FB = struct\n      include F\n      include Features.Off.Loop\n      include Features.Off.While_loop\n      include Features.Off.For_loop\n      include Features.Off.For_index_loop\n      include Features.Off.State_passing_loop\n      include Features.Off.Fold_like_loop\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_hoist_disjunctive_patterns.ml",
    "content": "(* This phase transforms deep disjunctive patterns in equivalent\n   shallow ones. For example `Some(1 | 2)` becomes `Some(1) | Some(2)` *)\n\nopen! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      open Ast.Make (F)\n      module U = Ast_utils.Make (F)\n      module Visitors = Ast_visitors.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      let hoist_disjunctions =\n        object (self)\n          inherit [_] Visitors.map\n\n          method! visit_pat () p =\n            let return_pat p' = { p = p'; span = p.span; typ = p.typ } in\n\n            (* When there is a list of subpaterns, we use the distributivity of nested\n               disjunctions: (a | b, c | d) gives (a, c) | (a, d) | (b, c) | (b,d) *)\n            let rec treat_args cases = function\n              | { p = POr { subpats }; _ } :: tail ->\n                  treat_args\n                    (List.concat_map\n                       ~f:(fun subpat ->\n                         List.map ~f:(fun args -> subpat :: args) cases)\n                       subpats)\n                    tail\n              | pat :: tail ->\n                  let pat = self#visit_pat () pat in\n                  treat_args (List.map ~f:(fun args -> pat :: args) cases) tail\n              | [] -> cases\n            in\n            let subpats_to_disj subpats =\n              match subpats with\n              | [ pat ] -> pat\n              | _ -> POr { subpats } |> return_pat\n            in\n\n            (* When there is one subpattern, we check if it is a disjunction,\n               and if it is, we hoist it. 
*)\n            let treat_subpat pat to_pattern =\n              let subpat = self#visit_pat () pat in\n              match subpat with\n              | { p = POr { subpats }; span; _ } ->\n                  return_pat\n                    (POr\n                       {\n                         subpats =\n                           List.map\n                             ~f:(fun pat ->\n                               { p = to_pattern pat; span; typ = p.typ })\n                             subpats;\n                       })\n              | _ -> p\n            in\n\n            match p.p with\n            | PConstruct { constructor; fields; is_record; is_struct } ->\n                let fields_as_pat =\n                  List.rev_map fields ~f:(fun arg -> self#visit_pat () arg.pat)\n                in\n                let subpats =\n                  List.map (treat_args [ [] ] fields_as_pat)\n                    ~f:(fun fields_as_pat ->\n                      let fields =\n                        (* exn justification: `rev_map fields` and `fields` have the same length *)\n                        List.map2_exn fields_as_pat fields\n                          ~f:(fun pat { field; _ } -> { field; pat })\n                      in\n                      PConstruct { constructor; fields; is_record; is_struct }\n                      |> return_pat)\n                in\n\n                subpats_to_disj subpats\n            | PArray { args } ->\n                let subpats =\n                  List.map\n                    ~f:(fun args -> PArray { args } |> return_pat)\n                    (treat_args [ [] ]\n                       (List.rev_map args ~f:(fun arg -> self#visit_pat () arg)))\n                in\n                subpats_to_disj subpats\n            | POr { subpats } ->\n                let subpats = List.map ~f:(self#visit_pat ()) subpats in\n                POr\n                  {\n                    subpats =\n                      List.concat_map\n       
                 ~f:(function\n                          | { p = POr { subpats }; _ } -> subpats | p -> [ p ])\n                        subpats;\n                  }\n                |> return_pat\n            | PAscription { typ; typ_span; pat } ->\n                treat_subpat pat (fun pat -> PAscription { typ; typ_span; pat })\n            | PBinding { subpat = Some (pat, as_pat); mut; mode; typ; var } ->\n                treat_subpat pat (fun pat ->\n                    PBinding\n                      { subpat = Some (pat, as_pat); mut; mode; typ; var })\n            | PDeref { subpat; witness } ->\n                treat_subpat subpat (fun subpat -> PDeref { subpat; witness })\n            | PWild | PConstant _ | PBinding { subpat = None; _ } -> p\n        end\n\n      let ditems = List.map ~f:(hoist_disjunctions#visit_item ())\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_hoist_disjunctive_patterns.mli",
    "content": "(** This phase eliminates nested disjunctive patterns (leaving only shallow\n    disjunctions). It moves the disjunctions up to the top-level pattern. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_local_mutation.ml",
    "content": "(* TODO: handle Exn report *)\nopen! Prelude\nopen Side_effect_utils\n\nmodule%inlined_contents Make\n    (F :\n      Features.T\n        with type mutable_reference = Features.Off.mutable_reference\n         and type mutable_pointer = Features.Off.mutable_pointer\n         and type raw_pointer = Features.Off.raw_pointer\n         and type arbitrary_lhs = Features.Off.arbitrary_lhs\n         and type nontrivial_lhs = Features.Off.nontrivial_lhs\n         and type monadic_action = Features.Off.monadic_action\n         and type monadic_binding = Features.Off.monadic_binding\n         and type for_index_loop = Features.Off.for_index_loop) =\nstruct\n  open Ast\n  module FA = F\n\n  module FB = struct\n    include F\n    include Features.Off.Mutable_variable\n    include Features.On.State_passing_loop\n  end\n\n  include\n    Phase_utils.MakeBase (F) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module UA = Ast_utils.Make (F)\n    module UB = Ast_utils.Make (FB)\n\n    module S = struct\n      include Features.SUBTYPE.Id\n      include Features.SUBTYPE.On.State_passing_loop\n    end\n\n    module SI = MakeSI (FB)\n\n    module Instructions = struct\n      type t = {\n        expr_level : UB.TypedLocalIdent.t list;\n        fun_level : UB.TypedLocalIdent.t list;\n        loop_level : UB.TypedLocalIdent.t list;\n        drop_expr : bool;\n      }\n\n      let zero =\n        { expr_level = []; fun_level = []; loop_level = []; drop_expr = false }\n    end\n\n    let free_assigned_variables =\n      UA.Reducers.free_assigned_variables (function _ -> .)\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    let rec dpat' (span : span) (p : A.pat') : B.pat' =\n      match p with\n      | [%inline_arms \"dpat'.*\" - PBinding - PDeref] -> auto\n      | PBinding { var : Local_ident.t; typ; subpat; _ } ->\n          PBinding\n            {\n             
 mut = Immutable;\n              mode = ByValue;\n              var;\n              typ = dty span typ;\n              subpat = Option.map ~f:(dpat *** Fn.id) subpat;\n            }\n      | PDeref { subpat; _ } -> (dpat subpat).p\n\n    (* [s] is the list of variables the last expression should return, packed in a tuple *)\n    and dexpr_s (s : Instructions.t) (expr : A.expr) : B.expr =\n      let dexpr_same e = dexpr_s s e in\n      let rec dexpr e = dexpr_s { s with expr_level = []; drop_expr = false } e\n      and dloop_state = [%inline_body dloop_state] in\n      let span = expr.span in\n      let local_vars_expr =\n        let vars =\n          List.map\n            ~f:(fun (i, typ) : B.expr -> { e = LocalVar i; typ; span })\n            s.loop_level\n        in\n        match vars with [ v ] -> v | _ -> UB.make_tuple_expr ~span vars\n      in\n      match expr.e with\n      | Let\n          {\n            monadic = None;\n            lhs;\n            rhs =\n              {\n                e =\n                  Assign\n                    { lhs = LhsLocalVar { var; typ }; e = value; witness = _ };\n                _;\n              };\n            body;\n          } ->\n          let h (type a) (f : a list -> a) (x : a) (y : a) =\n            match lhs.p with PWild -> y | _ -> f [ x; y ]\n          in\n          let body = dexpr_same body in\n          {\n            e =\n              Let\n                {\n                  monadic = None;\n                  lhs =\n                    h UB.make_tuple_pat (dpat lhs)\n                      (UB.make_var_pat var (dty span typ) span);\n                  rhs =\n                    h (UB.make_tuple_expr ~span) (UB.unit_expr span)\n                      (dexpr_s\n                         { s with expr_level = []; drop_expr = false }\n                         value);\n                  body;\n                };\n            typ = body.typ;\n            span = expr.span;\n          }\n      | Let { monadic = Some 
_; _ } -> .\n      | Let { monadic = None; lhs; rhs; body } ->\n          let drop_expr = [%matches? A.PWild] lhs.p in\n          let rhs_vars =\n            free_assigned_variables#visit_expr () rhs\n            |> Set.to_list\n            |> List.map ~f:(fun (i, t) -> (i, dty span t))\n          in\n          let vars_pat =\n            List.map ~f:(fun (i, t) -> UB.make_var_pat i t span) rhs_vars\n            |> UB.make_tuple_pat\n          in\n          let lhs = dpat lhs in\n          let lhs' =\n            if List.is_empty rhs_vars then lhs\n            else if drop_expr then vars_pat\n            else UB.make_tuple_pat [ vars_pat; lhs ]\n          in\n          let body = dexpr_same body in\n          {\n            e =\n              Let\n                {\n                  monadic = None;\n                  lhs = lhs';\n                  rhs = dexpr_s { s with expr_level = rhs_vars; drop_expr } rhs;\n                  body;\n                };\n            typ = body.typ;\n            span = expr.span;\n          }\n      | Assign { e; lhs = LhsLocalVar { var; _ }; _ } ->\n          let vars =\n            List.map\n              ~f:(fun (i, typ) : B.expr ->\n                if Local_ident.equal i var then\n                  dexpr_s { s with expr_level = []; drop_expr = false } e\n                else { e = LocalVar i; typ; span })\n              s.expr_level\n          in\n          let vars =\n            match vars with [ v ] -> v | _ -> UB.make_tuple_expr ~span vars\n          in\n          if s.drop_expr then vars\n          else UB.make_tuple_expr ~span [ vars; UB.unit_expr span ]\n      | Assign _ -> .\n      | Closure { params; body; captures } ->\n          let observable_mutations =\n            free_assigned_variables#visit_expr () expr\n          in\n          if observable_mutations |> Set.is_empty |> not then\n            Error.raise\n              {\n                kind =\n                  ClosureMutatesParentBindings\n                   
 {\n                      bindings =\n                        Set.to_list observable_mutations\n                        |> List.map ~f:(fun (Local_ident.{ name; _ }, _) ->\n                               name);\n                    };\n                span;\n              };\n          let s =\n            {\n              s with\n              expr_level =\n                (UA.Reducers.free_assigned_variables (function _ -> .))\n                  #visit_expr () body\n                |> Set.to_list\n                |> List.map ~f:(fun (i, t) -> (i, dty span t));\n              drop_expr = false;\n            }\n          in\n          {\n            e =\n              Closure\n                {\n                  params = List.map ~f:dpat params;\n                  body = dexpr_s s body;\n                  captures =\n                    List.map\n                      ~f:\n                        (dexpr_s\n                           Instructions.zero\n                           (* TODO: what to do with captures? We discard them entirely for now. Maybe we should remove that from the AST. 
*))\n                      captures;\n                };\n            typ = dty span expr.typ;\n            span = expr.span;\n          }\n      | If { cond; then_; else_ } ->\n          let then_ = dexpr_same then_ in\n          let else_ =\n            Option.value ~default:(UA.unit_expr expr.span) else_\n            |> dexpr_same |> Option.some\n          in\n          let cond =\n            dexpr_s { s with expr_level = []; drop_expr = false } cond\n          in\n          { e = If { cond; then_; else_ }; typ = then_.typ; span = expr.span }\n      | Match { scrutinee; arms } ->\n          let arms =\n            let dexpr = dexpr_same in\n            let rec darm = [%inline_body darm]\n            and darm' = [%inline_body darm'] in\n            List.map ~f:darm arms\n          in\n          let typ =\n            match arms with [] -> UB.never_typ | hd :: _ -> hd.arm.body.typ\n          in\n          let scrutinee =\n            dexpr_s { s with expr_level = []; drop_expr = false } scrutinee\n          in\n          { e = Match { scrutinee; arms }; typ; span = expr.span }\n      | Break { e; label; witness; _ } ->\n          let w = Features.On.state_passing_loop in\n          {\n            e =\n              Break\n                {\n                  e = dexpr_same e;\n                  acc = Some (local_vars_expr, w);\n                  label;\n                  witness;\n                };\n            span = expr.span;\n            typ = local_vars_expr.typ;\n          }\n      | Return { e; witness } ->\n          {\n            e = Return { e = dexpr e; witness };\n            span = expr.span;\n            typ = dty expr.span expr.typ;\n          }\n      | Continue { acc = None; label; witness; _ } ->\n          let w = Features.On.state_passing_loop in\n          let e = local_vars_expr in\n          {\n            e = Continue { acc = Some (e, w); label; witness };\n            span = expr.span;\n            typ = e.typ;\n          }\n      | 
Loop { body; kind; state; label; witness; _ } ->\n          let variables_to_output = s.expr_level in\n          let drop_expr = s.drop_expr in\n          (* [adapt]: should we reorder shadowings? *)\n          let observable_mutations, adapt =\n            let set =\n              free_assigned_variables#visit_expr () expr\n              |> Set.map\n                   (module UB.TypedLocalIdent)\n                   ~f:(fun (i, t) -> (i, dty span t))\n            in\n            let idents_of_set = Set.map (module Local_ident) ~f:fst set in\n            let idents_of_variables_to_output =\n              variables_to_output |> List.map ~f:fst\n              |> Set.of_list (module Local_ident)\n            in\n            (* if we mutate exactly s.expr_level, return that in this order *)\n            if Set.equal idents_of_set idents_of_variables_to_output then\n              (variables_to_output, false)\n            else (set |> Set.to_list, true)\n          in\n          let s =\n            {\n              s with\n              expr_level = observable_mutations;\n              loop_level = observable_mutations;\n              drop_expr = true;\n            }\n          in\n          let empty_s = { s with expr_level = []; drop_expr = false } in\n          let state : B.loop_state option =\n            if List.is_empty observable_mutations then\n              Option.map ~f:(dloop_state span) state\n            else\n              Some\n                (let bpat' =\n                   List.map\n                     ~f:(fun (i, t) -> UB.make_var_pat i t span)\n                     observable_mutations\n                   |> UB.make_tuple_pat\n                 in\n                 let init' =\n                   List.map\n                     ~f:(fun (i, typ) : B.expr -> { e = LocalVar i; typ; span })\n                     observable_mutations\n                   |> UB.make_tuple_expr ~span\n                 in\n                 let witness = 
Features.On.state_passing_loop in\n                 match state with\n                 | None -> { init = init'; bpat = bpat'; witness }\n                 | Some { init; bpat; _ } ->\n                     {\n                       init =\n                         UB.make_tuple_expr ~span\n                           [ init'; dexpr_s empty_s init ];\n                       bpat = UB.make_tuple_pat [ bpat'; dpat bpat ];\n                       witness;\n                     })\n          in\n          let kind =\n            let dexpr = dexpr_s empty_s in\n            [%inline_body dloop_kind] span kind\n          in\n          let body = dexpr_s s body in\n          (* we deal with a for loop: this is always a unit expression (i.e. no [break foo] with [foo] non-unit allowed) *)\n          let typ = List.map ~f:snd observable_mutations |> UB.make_tuple_typ in\n          let loop : B.expr =\n            {\n              e =\n                Loop { body; kind; state; label; witness; control_flow = None };\n              typ;\n              span;\n            }\n          in\n          let vars =\n            if adapt && not (List.is_empty variables_to_output) then\n              (* here, we need to introduce the shadowings as bindings *)\n              let out =\n                UB.make_tuple_expr ~span\n                @@ List.map\n                     ~f:(fun (ident, typ) ->\n                       B.{ e = LocalVar ident; typ; span })\n                     variables_to_output\n              in\n              let lhs =\n                UB.make_tuple_pat\n                @@ List.map\n                     ~f:(fun (ident, typ) -> UB.make_var_pat ident typ span)\n                     observable_mutations\n              in\n              B.\n                {\n                  e = Let { monadic = None; lhs; rhs = loop; body = out };\n                  span;\n                  typ = out.typ;\n                }\n            else loop\n          in\n          if drop_expr 
then vars\n          else UB.make_tuple_expr ~span [ vars; UB.unit_expr span ]\n      | [%inline_arms\n          \"dexpr'.*\" - Let - Assign - Closure - Loop - If - Match - Break\n          - Return] ->\n          map (fun e ->\n              let e' =\n                B.{ e; typ = dty expr.span expr.typ; span = expr.span }\n              in\n              match e with\n              | If _ | Match _ | Loop _ | Assign _ -> e'\n              | _ when List.is_empty s.expr_level -> e'\n              | _ ->\n                  let vars =\n                    List.map\n                      ~f:(fun (i, typ) : B.expr ->\n                        { e = LocalVar i; typ; span })\n                      s.expr_level\n                    |> UB.make_tuple_expr ~span\n                  in\n                  if s.drop_expr then\n                    let effect_e' =\n                      snd (SI.Hoist.collect_and_hoist_effects e')\n                    in\n                    if SI.SideEffects.reads_local_mut_only effect_e' then vars\n                    else\n                      {\n                        vars with\n                        e =\n                          Let\n                            {\n                              monadic = None;\n                              lhs = UB.M.pat_PWild ~typ:e'.typ ~span:e'.span;\n                              rhs = e';\n                              body = vars;\n                            };\n                      }\n                  else UB.make_tuple_expr ~span [ vars; e' ])\n\n    and dexpr_unwrapped e = dexpr_s Instructions.zero e\n    [@@inline_ands bindings_of dexpr - dexpr']\n\n    [%%inline_defs \"Item.*\"]\n  end\n\n  include Implem\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_local_mutation.mli",
    "content": "open! Prelude\n\nmodule Make\n    (F :\n      Features.T\n        with type mutable_reference = Features.Off.mutable_reference\n         and type mutable_pointer = Features.Off.mutable_pointer\n         and type raw_pointer = Features.Off.raw_pointer\n         and type arbitrary_lhs = Features.Off.arbitrary_lhs\n         and type nontrivial_lhs = Features.Off.nontrivial_lhs\n         and type monadic_action = Features.Off.monadic_action\n         and type monadic_binding = Features.Off.monadic_binding\n         and type for_index_loop = Features.Off.for_index_loop) : sig\n  include module type of struct\n    module FA = F\n\n    module FB = struct\n      include F\n      include Features.Off.Mutable_variable\n      include Features.On.State_passing_loop\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_newtype_as_refinement.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      module A = Ast.Make (F)\n      module Visitors = Ast_visitors.Make (F)\n      open A\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      module Attrs = Attr_payloads.Make (F) (Error)\n\n      let visitor =\n        object\n          inherit [_] Visitors.map as super\n\n          method! visit_expr () e =\n            let e = super#visit_expr () e in\n            match e.e with\n            | App { f = { e = GlobalVar f; _ }; args = [ inner ]; _ }\n              when Ast.Global_ident.eq_name Hax_lib__Refinement__new f\n                   || Ast.Global_ident.eq_name Hax_lib__RefineAs__into_checked f\n                   || Ast.Global_ident.eq_name Hax_lib__Refinement__get_mut f\n                   || Ast.Global_ident.eq_name Hax_lib__Refinement__get f ->\n                { e with e = Ascription { typ = e.typ; e = inner } }\n            | _ -> e\n\n          method! visit_item () i =\n            match i.v with\n            | Type\n                {\n                  name;\n                  generics;\n                  variants = [ { arguments = [ (_, ty, _) ]; _ } ];\n                  _;\n                }\n              when Attrs.find_unique_attr i.attrs\n                     ~f:\n                       ([%eq: Types.ha_payload] NewtypeAsRefinement\n                       >> Fn.flip Option.some_if ())\n                   |> Option.is_some ->\n                { i with v = TyAlias { name; generics; ty } }\n            | _ -> super#visit_item () i\n        end\n\n      let ditems = List.map ~f:(visitor#visit_item ())\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_newtype_as_refinement.mli",
    "content": "(** This phase transforms annotated struct definitions into (refined) type\n    aliases. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_reconstruct_asserts.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      open Ast.Make (F)\n      module U = Ast_utils.Make (F)\n      module Visitors = Ast_visitors.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      let reconstruct_assert =\n        object (self)\n          inherit [_] Visitors.map as super\n\n          method! visit_expr () e =\n            let extract_block e =\n              let* { e; _ } = U.D.expr_Block e in\n              let* { f; args; _ } = U.D.expr_App e in\n              let* nta = U.D.expr_GlobalVar f in\n              match args with\n              | [ { e = App { f = { e = GlobalVar panic; _ }; _ }; _ } ] ->\n                  Some (nta, panic)\n              | _ -> None\n            in\n            let extract_app e =\n              let* { f; args; _ } = U.D.expr_App e in\n              let* nta = U.D.expr_GlobalVar f in\n              let* arg = U.D.list_1 args in\n              let* { body; _ } = U.D.expr_Let arg in\n              let* { e; _ } = U.D.expr_Block body in\n              let* { f; _ } = U.D.expr_App e in\n              let* panic = U.D.expr_GlobalVar f in\n              Some (nta, panic)\n            in\n            let extract e =\n              let* { cond; then_; _ } = U.D.expr_If e in\n              let* nta, panic =\n                extract_app then_ <|> fun _ -> extract_block then_\n              in\n              Some (panic, nta, cond)\n            in\n            match extract e with\n            | Some (panic, nta, cond)\n              when Ast.Global_ident.eq_name Rust_primitives__hax__never_to_any\n                     nta\n                   && (Ast.Global_ident.eq_name Core__panicking__panic panic\n                      || Ast.Global_ident.eq_name Core__panicking__assert_failed\n                           panic) ->\n 
               let cond_expr = self#visit_expr () cond in\n\n                let prop =\n                  match cond_expr.e with\n                  (* assert! and assert_eq! *)\n                  | App { f = { e = GlobalVar fnot; _ }; args = [ prop ]; _ }\n                    when Ast.Global_ident.eq_name Core__ops__bit__Not__not fnot\n                    ->\n                      prop\n                  (* assert_ne! *)\n                  | _ ->\n                      {\n                        cond_expr with\n                        e =\n                          App\n                            {\n                              f =\n                                {\n                                  e =\n                                    GlobalVar\n                                      (Ast.Global_ident.of_name ~value:true\n                                         Core__ops__bit__Not__not);\n                                  span = cond_expr.span;\n                                  typ = TArrow ([ TBool ], TBool);\n                                };\n                              args = [ cond_expr ];\n                              generic_args = [];\n                              bounds_impls = [];\n                              trait = None;\n                            };\n                      }\n                in\n\n                {\n                  e with\n                  e =\n                    App\n                      {\n                        f =\n                          {\n                            e =\n                              GlobalVar\n                                (Ast.Global_ident.of_name ~value:true\n                                   Hax_lib__assert);\n                            span = e.span;\n                            typ =\n                              TArrow\n                                ( [ TBool ],\n                                  TApp { ident = `TupleType 0; args = [] } );\n                          };\n   
                     args = [ prop ];\n                        generic_args = [];\n                        bounds_impls = [];\n                        trait = None;\n                      };\n                }\n            | _ -> super#visit_expr () e\n        end\n\n      let ditems = List.map ~f:(reconstruct_assert#visit_item ())\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_reconstruct_asserts.mli",
    "content": "(** This phase recognizes desugared `assert!(...)` to rewrite into\n    `hax_lib::assert(..)`. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_reconstruct_for_index_loops.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make (FA : Features.T) = struct\n  open Ast\n\n  module FB = struct\n    include FA\n    include Features.On.For_index_loop\n  end\n\n  include\n    Phase_utils.MakeBase (FA) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module UA = Ast_utils.Make (FA)\n    module UB = Ast_utils.Make (FB)\n\n    module S = struct\n      include Features.SUBTYPE.Id\n      include Features.SUBTYPE.On.For_index_loop\n    end\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    let rec dloop_kind (span : span) (k : A.loop_kind) : B.loop_kind =\n      match k with\n      | ForLoop\n          {\n            it =\n              {\n                e =\n                  App\n                    {\n                      f = { e = GlobalVar (`Concrete into_iter_meth); _ };\n                      args =\n                        [\n                          {\n                            e =\n                              Construct\n                                {\n                                  constructor = `Concrete range_ctor;\n                                  is_record = true;\n                                  is_struct = true;\n                                  fields =\n                                    [\n                                      (`Concrete start_field, start);\n                                      (`Concrete end_field, end_);\n                                    ];\n                                  base = None;\n                                };\n                            _;\n                          };\n                        ];\n                      _ (* TODO: see issue #328 *);\n                    };\n                typ;\n                _;\n              };\n            pat =\n              {\n                p =\n                  PBinding\n                    { mut = Immutable; 
mode = ByValue; var; subpat = None; _ };\n                _;\n              };\n            _;\n          }\n        when Concrete_ident.eq_name\n               Core__iter__traits__collect__IntoIterator__into_iter\n               into_iter_meth\n             && Concrete_ident.eq_name Core__ops__range__Range__start\n                  start_field\n             && Concrete_ident.eq_name Core__ops__range__Range range_ctor\n             && Concrete_ident.eq_name Core__ops__range__Range__end end_field ->\n          ForIndexLoop\n            {\n              start = dexpr start;\n              end_ = dexpr end_;\n              var;\n              var_typ = dty span typ;\n              witness = Features.On.for_index_loop;\n            }\n      | [%inline_arms \"dloop_kind.*\"] -> auto\n    [@@inline_ands bindings_of dexpr]\n\n    [%%inline_defs \"Item.*\"]\n  end\n\n  include Implem\n  module FA = FA\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_reconstruct_for_index_loops.mli",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) : sig\n  include module type of struct\n    module FA = F\n\n    module FB = struct\n      include F\n      include Features.On.For_index_loop\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_reconstruct_for_loops.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make\n    (FA :\n      Features.T\n    (* with type raw_pointer = Features.off *)\n    (*  and type mutable_pointer = Features.off *)) =\nstruct\n  open Ast\n\n  module FB = struct\n    include FA\n    include Features.On.For_loop\n  end\n\n  include\n    Phase_utils.MakeBase (FA) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module UA = Ast_utils.Make (FA)\n    module UB = Ast_utils.Make (FB)\n\n    module S = struct\n      include Features.SUBTYPE.Id\n      include Features.SUBTYPE.On.For_loop\n    end\n\n    module For = struct\n      [@@@warning \"-9\"]\n\n      open A\n\n      type t = {\n        it : expr;\n        pat : pat;\n        body : expr;\n        state : loop_state option;\n        label : string option;\n        witness : FA.loop;\n      }\n      [@@deriving show]\n\n      let extract (e : expr) : t option =\n        let e = UA.Mappers.normalize_borrow_mut#visit_expr () e in\n        match e.e with\n        | Match\n            {\n              scrutinee = it;\n              arms =\n                [\n                  {\n                    arm =\n                      {\n                        arm_pat =\n                          {\n                            p =\n                              PBinding\n                                {\n                                  mut = Mutable _;\n                                  mode = ByValue;\n                                  var = iter_variable;\n                                  subpat = None;\n                                };\n                          };\n                        body =\n                          {\n                            e =\n                              Loop\n                                {\n                                  label;\n                                  kind = UnconditionalLoop;\n           
                       state;\n                                  witness;\n                                  body =\n                                    {\n                                      e =\n                                        Let\n                                          {\n                                            monadic = None;\n                                            lhs = { p = PWild };\n                                            rhs =\n                                              {\n                                                e =\n                                                  Match\n                                                    {\n                                                      scrutinee =\n                                                        {\n                                                          e =\n                                                            App\n                                                              {\n                                                                f =\n                                                                  {\n                                                                    e =\n                                                                      GlobalVar\n                                                                        (`Concrete\n                                                                           next_meth);\n                                                                  };\n                                                                args =\n                                                                  [\n                                                                    {\n                                                                      e =\n                                                                        Borrow\n                                                                          {\n                      
                                                      kind =\n                                                                              Mut\n                                                                                _;\n                                                                            e =\n                                                                              {\n                                                                                e =\n                                                                                LocalVar\n                                                                                next_iter_variable;\n                                                                              };\n                                                                          };\n                                                                    };\n                                                                  ];\n                                                                _\n                                                                (* TODO: see issue #328 *);\n                                                              };\n                                                        };\n                                                      arms =\n                                                        [\n                                                          {\n                                                            arm =\n                                                              {\n                                                                arm_pat =\n                                                                  {\n                                                                    p =\n                                                                      PConstruct\n                                                                        {\n                                                         
                 constructor =\n                                                                            `Concrete\n                                                                              none_ctor;\n                                                                          fields =\n                                                                            [];\n                                                                          _;\n                                                                        };\n                                                                  };\n                                                                body =\n                                                                  {\n                                                                    e =\n                                                                      App\n                                                                        {\n                                                                          f =\n                                                                            {\n                                                                              e =\n                                                                                GlobalVar\n                                                                                never_to_any;\n                                                                            };\n                                                                          args =\n                                                                            [\n                                                                              {\n                                                                                e =\n                                                                                Break\n                                                                                {\n                                  
                                              e =\n                                                                                {\n                                                                                e =\n                                                                                GlobalVar\n                                                                                (`TupleCons\n                                                                                0);\n                                                                                };\n                                                                                };\n                                                                              };\n                                                                            ];\n                                                                          _\n                                                                          (* TODO: see issue #328 *);\n                                                                        };\n                                                                  };\n                                                              };\n                                                          };\n                                                          {\n                                                            arm =\n                                                              {\n                                                                arm_pat =\n                                                                  {\n                                                                    p =\n                                                                      PConstruct\n                                                                        {\n                                                                          constructor =\n                                                                        
    `Concrete\n                                                                              some_ctor;\n                                                                          fields =\n                                                                            [\n                                                                              {\n                                                                                pat;\n                                                                              };\n                                                                            ];\n                                                                          _;\n                                                                        };\n                                                                  };\n                                                                body;\n                                                              };\n                                                          };\n                                                        ];\n                                                    };\n                                              };\n                                            body =\n                                              { e = GlobalVar (`TupleCons 0) };\n                                          };\n                                    };\n                                  _;\n                                };\n                          };\n                      };\n                  };\n                ];\n            }\n          when [%eq: local_ident] iter_variable next_iter_variable\n               && Concrete_ident.eq_name\n                    Core__iter__traits__iterator__Iterator__next next_meth\n               && Concrete_ident.eq_name Core__option__Option__None none_ctor\n               && Concrete_ident.eq_name Core__option__Option__Some some_ctor\n               && Global_ident.eq_name 
Rust_primitives__hax__never_to_any\n                    never_to_any ->\n            let body =\n              match body.e with\n              | Let\n                  {\n                    lhs = { p = PWild };\n                    rhs;\n                    body = { e = GlobalVar (`TupleCons 0) };\n                  }\n                when UA.is_unit_typ rhs.typ ->\n                  rhs\n              | _ -> body\n            in\n\n            Some { it; pat; body; state; label; witness }\n        | _ -> None\n               [@ocamlformat \"disable\"]\n    end\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    let rec dexpr_unwrapped (expr : A.expr) : B.expr =\n      let h = [%inline_body dexpr_unwrapped] in\n      match For.extract expr with\n      | Some { it; pat; body; label; state; witness } ->\n          {\n            e =\n              Loop\n                {\n                  body = dexpr body;\n                  kind =\n                    ForLoop\n                      {\n                        it = dexpr it;\n                        pat = dpat pat;\n                        witness = Features.On.for_loop;\n                      };\n                  state = Option.map ~f:(dloop_state expr.span) state;\n                  label;\n                  witness = S.loop expr.span witness;\n                  control_flow = None;\n                };\n            span = expr.span;\n            typ = UB.unit_typ;\n          }\n      | None -> h expr\n    [@@inline_ands bindings_of dexpr]\n\n    [%%inline_defs \"Item.*\"]\n  end\n\n  include Implem\n  module FA = FA\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_reconstruct_for_loops.mli",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) : sig\n  include module type of struct\n    module FA = F\n\n    module FB = struct\n      include F\n      include Features.On.For_loop\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_reconstruct_question_marks.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make (FA : Features.T) = struct\n  open Ast\n\n  module FB = struct\n    include FA\n    include Features.On.Question_mark\n  end\n\n  include\n    Phase_utils.MakeBase (FA) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module UA = Ast_utils.Make (FA)\n    module UB = Ast_utils.Make (FB)\n\n    module S = struct\n      include Features.SUBTYPE.Id\n      include Features.SUBTYPE.On.Question_mark\n    end\n\n    module QuestionMarks = struct\n      [@@@warning \"-9\"]\n\n      open A\n\n      (** The types supported for [e] in a [e?] expression *)\n      type qm_kind = QMResult of { success : ty; error : ty } | QMOption of ty\n\n      (** Interpret a type [t] as a [qm_kind] *)\n      let qm_kind_of_typ span (t : ty) : qm_kind =\n        let is_result = Global_ident.eq_name Core__result__Result in\n        let is_option = Global_ident.eq_name Core__option__Option in\n        match t with\n        | TApp { ident; args = [ GType s; GType e ] } when is_result ident ->\n            QMResult { success = s; error = e }\n        | TApp { ident; args = [ GType s ] } when is_option ident -> QMOption s\n        | _ ->\n            Error.assertion_failure span\n              (\"expected something of type Option<_> or Result<_, _>, instead, \\\n                got: \"\n              ^ [%show: ty] t)\n\n      (** Expects [impl] to be an impl. expr. 
for the trait\n          `std::ops::FromResidual` for the type [Result<_, _>], and extract its\n          parent [From] impl expr *)\n      let expect_residual_impl_result (impl : impl_expr) : impl_expr option =\n        match impl with\n        | {\n         kind = ImplApp { args = [ from_impl ]; _ };\n         goal =\n           {\n             trait;\n             args =\n               [\n                 GType (TApp { ident = arg1; _ });\n                 GType (TApp { ident = arg2; _ });\n               ];\n           };\n        }\n          when Concrete_ident.eq_name Core__ops__try_trait__FromResidual trait\n               && Global_ident.eq_name Core__result__Result arg1\n               && Global_ident.eq_name Core__result__Result arg2 ->\n            Some from_impl\n        | _ -> None\n\n      (** Expects [t] to be [Result<S, E>], and returns [(S, E)] *)\n      let expect_result_type (t : ty) : (ty * ty) option =\n        match t with\n        | TApp { ident; args = [ GType s; GType e ] }\n          when Global_ident.eq_name Core__result__Result ident ->\n            Some (s, e)\n        | _ -> None\n\n      (** Construct [Result<S,E>] *)\n      let make_result_type (success : ty) (error : ty) : ty =\n        let ident = Global_ident.of_name ~value:false Core__result__Result in\n        TApp { ident; args = [ GType success; GType error ] }\n\n      (** Retype a [Err::<_, E>(x)] literal, as [Err::<success, E>(x)] *)\n      let retype_err_literal (e : expr) (success : ty) (error : ty) =\n        match e.e with\n        | Construct { constructor; _ }\n          when Global_ident.eq_name Core__result__Result__Err constructor ->\n            { e with typ = make_result_type success error }\n        | _ -> e\n\n      (** [map_err e error_dest impl] creates the expression [e.map_err(from)]\n          with the proper types and impl informations. 
*)\n      let map_err (e : expr) (error_dest : ty) impl : expr option =\n        let* success, error_src = expect_result_type e.typ in\n        let* impl = expect_residual_impl_result impl in\n        if [%equal: ty] error_src error_dest then Some e\n        else\n          let from_typ = TArrow ([ error_src ], error_dest) in\n          let from =\n            UA.call ~impl Core__convert__From__from [] e.span from_typ\n          in\n          let call =\n            UA.call Core__result__Impl__map_err [ e; from ] e.span\n              (make_result_type success error_dest)\n          in\n          Some call\n\n      (** [extract e] returns [Some (x, ty)] if [e] was a `y?` desugared by\n          rustc. `y` is `x` plus possibly a coercion. [ty] is the return type of\n          the function. *)\n      let extract (e : expr) : (expr * ty) option =\n        let extract_return (e : expr) =\n          match e.e with\n          | Return\n              {\n                e =\n                  {\n                    e =\n                      App\n                        {\n                          f = { e = GlobalVar f };\n                          args = [ { e = LocalVar residual_var; _ } ];\n                          trait = Some (impl, _);\n                        };\n                    typ = return_typ;\n                    _;\n                  };\n                _;\n              } ->\n              Some (f, residual_var, return_typ, impl)\n          | _ -> None\n        in\n        let extract_pat_app_bd (p : pat) : (global_ident * local_ident) option =\n          match p.p with\n          | PConstruct\n              {\n                constructor;\n                fields =\n                  [\n                    {\n                      pat =\n                        {\n                          p =\n                            PBinding { mut = Immutable; var; subpat = None; _ };\n                          _;\n                        };\n                      
_;\n                    };\n                  ];\n                _;\n              } ->\n              Some (constructor, var)\n          | _ -> None\n        in\n        match e.e with\n        | Match\n            {\n              scrutinee =\n                { e = App { f = { e = GlobalVar n; _ }; args = [ expr ] }; _ };\n              arms =\n                [\n                  { arm = { arm_pat = pat_break; body }; _ };\n                  {\n                    arm =\n                      {\n                        arm_pat = pat_continue;\n                        body = { e = LocalVar continue_var; _ };\n                      };\n                    _;\n                  };\n                ];\n            }\n        (*[@ocamlformat \"disable\"]*)\n          when Global_ident.eq_name Core__ops__try_trait__Try__branch n ->\n            let* body =\n              UA.Expect.concrete_app1 Rust_primitives__hax__never_to_any body\n            in\n            let* f, residual_var, fun_return_typ, residual_impl =\n              extract_return body\n            in\n            let* f_break, residual_var' = extract_pat_app_bd pat_break in\n            let* f_continue, continue_var' = extract_pat_app_bd pat_continue in\n            let*? _ = [%equal: local_ident] residual_var residual_var' in\n            let*? _ = [%equal: local_ident] continue_var continue_var' in\n            let*? 
_ =\n              Global_ident.eq_name Core__ops__control_flow__ControlFlow__Break\n                f_break\n              && Global_ident.eq_name\n                   Core__ops__control_flow__ControlFlow__Continue f_continue\n              && Global_ident.eq_name\n                   Core__ops__try_trait__FromResidual__from_residual f\n            in\n            let expr =\n              let kind = qm_kind_of_typ e.span in\n              match (kind expr.typ, kind fun_return_typ) with\n              | ( QMResult { error = local_err; _ },\n                  QMResult { error = return_err; _ } ) ->\n                  let expr = retype_err_literal expr e.typ local_err in\n                  map_err expr return_err residual_impl\n                  |> Option.value ~default:expr\n              | QMOption _, QMOption _ -> expr\n              | _ ->\n                  Error.assertion_failure e.span\n                    \"expected expr.typ and fun_return_typ to be both Options \\\n                     or both Results\"\n            in\n            Some (expr, fun_return_typ)\n        | _ -> None\n    end\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    let rec dexpr_unwrapped (expr : A.expr) : B.expr =\n      let h = [%inline_body dexpr_unwrapped] in\n      match QuestionMarks.extract expr with\n      | Some (e, return_typ) ->\n          {\n            e =\n              QuestionMark\n                {\n                  e = dexpr e;\n                  return_typ = dty e.span return_typ;\n                  witness = Features.On.question_mark;\n                };\n            span = expr.span;\n            typ = dty expr.span expr.typ;\n          }\n      | None -> h expr\n    [@@inline_ands bindings_of dexpr]\n\n    [%%inline_defs \"Item.*\"]\n  end\n\n  include Implem\n  module FA = FA\nend\n[@@add \"subtype.ml\"]\n\n(* module _ (F: Features.T): Phase_utils.PHASE = Make(F) *)\n"
  },
  {
    "path": "engine/lib/phases/phase_reconstruct_question_marks.mli",
    "content": "(** In THIR, there are no construct for question marks. Instead, Rustc desugars\n    `e?` into the following:\n\n    {@rust[\n      match core::ops::try_trait::branch(y) {\n          core::ops::control_flow::Break(residual) => {\n              never_to_any(\n                  {return core::ops::try_trait::from_residual(residual)},\n              )\n          }\n          core::ops::control_flow::Continue(val) => val,\n      })\n    ]}\n\n    This phase does the opposite rewrite.\n\n    While `e?` in Rust might implies an implicit coercion, in our AST, a\n    question mark is expected to already be of the right type. This phase\n    inlines a coercion (of the shape `x.map_err(from)`, in the case of a\n    `Result`). *)\n\nmodule Make (F : Features.T) : sig\n  include module type of struct\n    module FA = F\n\n    (** This phase outputs an AST with question marks. *)\n    module FB = struct\n      include F\n      include Features.On.Question_mark\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_reconstruct_while_loops.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make (FA : Features.T) = struct\n  open Ast\n\n  module FB = struct\n    include FA\n    include Features.On.While_loop\n  end\n\n  include\n    Phase_utils.MakeBase (FA) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module UA = Ast_utils.Make (FA)\n    module UB = Ast_utils.Make (FB)\n\n    module S = struct\n      include Features.SUBTYPE.Id\n      include Features.SUBTYPE.On.While_loop\n    end\n\n    module While = struct\n      [@@@warning \"-9\"]\n\n      open A\n\n      type t = {\n        condition : expr;\n        body : expr;\n        state : loop_state option;\n        label : string option;\n        witness : FA.loop;\n      }\n      [@@deriving show]\n\n      let expect_never_to_any (e : expr) : expr option =\n        match e.e with\n        | App { f = { e = GlobalVar f }; args = [ x ]; _ }\n          when Global_ident.eq_name Rust_primitives__hax__never_to_any f ->\n            Some x\n        | _ -> None\n\n      let expect_break_unit (e : expr) : unit option =\n        match e.e with\n        | Break { e = { e = GlobalVar (`TupleCons 0) } } -> Some ()\n        | _ -> None\n\n      let strip_block (e : expr) : expr =\n        match e.e with Block { e; safety_mode = Safe; _ } -> e | _ -> e\n\n      let expect_ite (e : expr) : (expr * expr * expr option) option =\n        match e.e with\n        | If { cond; then_; else_ } -> Some (cond, then_, else_)\n        | _ -> None\n\n      let extract (e : expr) : t option =\n        let e = UA.Mappers.normalize_borrow_mut#visit_expr () e in\n        match e.e with\n        | Loop { label; kind = UnconditionalLoop; state; witness; body; _ } ->\n            let body = strip_block body in\n            let* condition, body, else_ = expect_ite body in\n            let* else_ = else_ in\n            let else_ = strip_block else_ in\n            let* 
else_ = expect_never_to_any else_ in\n            let else_ = strip_block else_ in\n            let* else_ = expect_never_to_any else_ in\n            let* _ = expect_break_unit else_ in\n            Some { condition; body; state; label; witness }\n        | _ -> None\n    end\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    let rec dexpr_unwrapped (expr : A.expr) : B.expr =\n      let h = [%inline_body dexpr_unwrapped] in\n      match While.extract expr with\n      | Some { condition; body; state; label; witness } ->\n          {\n            e =\n              Loop\n                {\n                  body = dexpr body;\n                  kind =\n                    WhileLoop\n                      {\n                        condition = dexpr condition;\n                        witness = Features.On.while_loop;\n                      };\n                  state = Option.map ~f:(dloop_state expr.span) state;\n                  label;\n                  witness = S.loop expr.span witness;\n                  control_flow = None;\n                };\n            span = expr.span;\n            typ = UB.unit_typ;\n          }\n      | None -> h expr\n    [@@inline_ands bindings_of dexpr]\n\n    [%%inline_defs \"Item.*\"]\n  end\n\n  include Implem\n  module FA = FA\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_reconstruct_while_loops.mli",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) : sig\n  include module type of struct\n    module FA = F\n\n    module FB = struct\n      include F\n      include Features.On.While_loop\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_reject.ml",
    "content": "let make_metadata rejection_phase =\n  Phase_utils.Metadata.make (Diagnostics.Phase.Reject rejection_phase)\n\nmodule Arbitrary_lhs (FA : Features.T) = struct\n  module FB = struct\n    include FA\n    include Features.Off.Arbitrary_lhs\n  end\n\n  include\n    Feature_gate.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Feature_gate.DefaultSubtype\n\n        let arbitrary_lhs = reject\n        let metadata = make_metadata ArbitraryLhs\n      end)\nend\n\nmodule _ (FA : Features.T) : Phase_utils.PHASE = Arbitrary_lhs (FA)\n\nmodule Continue (FA : Features.T) = struct\n  module FB = struct\n    include FA\n    include Features.Off.Continue\n  end\n\n  include\n    Feature_gate.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Feature_gate.DefaultSubtype\n\n        let continue = reject\n        let metadata = make_metadata Continue\n      end)\nend\n\nmodule _ (FA : Features.T) : Phase_utils.PHASE = Continue (FA)\n\nmodule Question_mark (FA : Features.T) = struct\n  module FB = struct\n    include FA\n    include Features.Off.Question_mark\n  end\n\n  include\n    Feature_gate.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Feature_gate.DefaultSubtype\n\n        let question_mark = reject\n        let metadata = make_metadata QuestionMark\n      end)\nend\n\nmodule _ (FA : Features.T) : Phase_utils.PHASE = Question_mark (FA)\n\nmodule RawOrMutPointer (FA : Features.T) = struct\n  module FB = struct\n    include FA\n    include Features.Off.Raw_pointer\n    include Features.Off.Mutable_pointer\n  end\n\n  include\n    Feature_gate.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Feature_gate.DefaultSubtype\n\n        let mutable_pointer = reject\n        let raw_pointer = reject\n        let metadata = make_metadata RawOrMutPointer\n      end)\nend\n\nmodule _ (FA : Features.T) 
: Phase_utils.PHASE = RawOrMutPointer (FA)\n\nmodule EarlyExit (FA : Features.T) = struct\n  module FB = struct\n    include FA\n    include Features.Off.Early_exit\n  end\n\n  include\n    Feature_gate.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Feature_gate.DefaultSubtype\n\n        let early_exit = reject\n        let metadata = make_metadata EarlyExit\n      end)\nend\n\nmodule As_pattern (FA : Features.T) = struct\n  module FB = struct\n    include FA\n    include Features.Off.As_pattern\n  end\n\n  include\n    Feature_gate.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Feature_gate.DefaultSubtype\n\n        let as_pattern = reject\n        let metadata = make_metadata AsPattern\n      end)\nend\n\nmodule Dyn (FA : Features.T) = struct\n  module FB = struct\n    include FA\n    include Features.Off.Dyn\n  end\n\n  include\n    Feature_gate.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Feature_gate.DefaultSubtype\n\n        let dyn = reject\n        let metadata = make_metadata Dyn\n      end)\nend\n\nmodule Trait_item_default (FA : Features.T) = struct\n  module FB = struct\n    include FA\n    include Features.Off.Trait_item_default\n  end\n\n  include\n    Feature_gate.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Feature_gate.DefaultSubtype\n\n        let trait_item_default = reject\n        let metadata = make_metadata TraitItemDefault\n      end)\nend\n\nmodule Unsafe (FA : Features.T) = struct\n  module FB = struct\n    include FA\n    include Features.Off.Unsafe\n  end\n\n  include\n    Feature_gate.Make (FA) (FB)\n      (struct\n        module A = FA\n        module B = FB\n        include Feature_gate.DefaultSubtype\n\n        let unsafe = reject\n        let metadata = make_metadata Unsafe\n      end)\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_reject_impl_type_method.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      open Ast.Make (F)\n      module U = Ast_utils.Make (F)\n      module Visitors = Ast_visitors.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      let reject_anon_assoc_ty =\n        object\n          inherit [_] Visitors.map as super\n\n          method! visit_ty span t =\n            match t with\n            | TAssociatedType { item; _ }\n              when Concrete_ident.is_anon_assoc_ty item ->\n                Error.unimplemented ~issue_id:1965\n                  ~details:\n                    \"`impl` types are not supported in type signatures of \\\n                     associated items.\"\n                  (Option.value_exn span)\n            | _ -> super#visit_ty span t\n\n          method! visit_item _ i =\n            try super#visit_item (Some i.span) i\n            with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->\n              let error = Diagnostics.pretty_print_context_kind context kind in\n              let cast_item : item -> Ast.Full.item = Stdlib.Obj.magic in\n              let ast = cast_item i |> Print_rust.pitem_str in\n              let msg =\n                error ^ \"\\nLast available AST for this item:\\n\\n\" ^ ast\n              in\n              make_hax_error_item i.span i.ident msg\n        end\n\n      let ditems = List.map ~f:(reject_anon_assoc_ty#visit_item None)\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_reject_impl_type_method.mli",
    "content": "(** This phase rejects `impl T` types in trait items *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_reorder_fields.ml",
    "content": "(** This phase re-order fields in structs according to the attribute\n    [AttrPayload::Order] (if any). *)\n\nopen! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      open Ast.Make (F)\n      module U = Ast_utils.Make (F)\n      module M = Ast_builder.Make (F)\n      module Visitors = Ast_visitors.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      module Attrs = Attr_payloads.MakeBase (Error)\n\n      let order_of_argument = thd3 >> Attrs.order\n\n      let ditems =\n        List.map ~f:(fun item ->\n            match item.v with\n            | Type ({ variants; _ } as o) ->\n                let variants =\n                  let f (v : variant) =\n                    let arguments =\n                      List.mapi\n                        ~f:(fun i ->\n                          order_of_argument >> Option.value ~default:i &&& Fn.id)\n                        v.arguments\n                      |> List.stable_sort ~compare:(fun (i, _) (j, _) ->\n                             Int.compare i j)\n                      |> List.map ~f:snd\n                    in\n                    { v with arguments }\n                  in\n                  List.map ~f variants\n                in\n                { item with v = Type { o with variants } }\n            | _ -> item)\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_reorder_fields.mli",
    "content": "(** This phase re-order fields in structs according to the attribute\n    [AttrPayload::Order] (if any). *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_rewrite_control_flow.ml",
    "content": "(* This phase rewrites: `if c {return a}; b` as `if c {return a; b} else {b}`\n   and does the equivalent transformation for pattern matchings.\n   It rewrites the body of loops considering `break` and `continue`\n   as `return` to place them in return position. If a loop contains\n   a `return` it places it is rewritten inside a pattern matching over the result. *)\n\nopen! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      open Ast.Make (F)\n      module U = Ast_utils.Make (F)\n      module M = Ast_builder.Make (F)\n      module Visitors = Ast_visitors.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      let has_cf =\n        object (_self)\n          inherit [_] Visitors.reduce as super\n          method zero = false\n          method plus = ( || )\n\n          method! visit_expr' break_continue e =\n            match e with\n            | Return _ -> true\n            | (Break _ | Continue _) when break_continue -> true\n            | _ -> super#visit_expr' break_continue e\n        end\n\n      let loop_return_type =\n        object (_self)\n          inherit [_] Visitors.reduce as super\n          method zero = (U.unit_typ, None)\n          method plus l r = if [%eq: ty] (fst l) U.unit_typ then r else l\n\n          method! visit_expr' () e =\n            match e with\n            | Return { e; witness; _ } -> (e.typ, Some witness)\n            | _ -> super#visit_expr' () e\n        end\n\n      let rewrite_control_flow =\n        object (self)\n          inherit [_] Visitors.map as super\n\n          method! 
visit_expr in_loop e =\n            let loop_with_return (loop : expr) stmts_after final pat =\n              let return_type, witness = loop_return_type#visit_expr () loop in\n\n              let typ =\n                U.M.ty_cf ~continue_type:loop.typ ~break_type:return_type\n              in\n              let loop = { loop with typ } in\n              let span = loop.span in\n              let id = U.fresh_local_ident_in [] \"ret\" in\n              let module MS = (val U.M.make span) in\n              let mk_cf_pat = U.M.pat_Constructor_CF ~span ~typ in\n              let return_expr =\n                let inner_e = MS.expr_LocalVar ~typ:return_type id in\n                match witness with\n                | Some witness ->\n                    MS.expr_Return ~typ:return_type ~witness ~inner_e\n                | None -> inner_e\n              in\n              let arms =\n                [\n                  MS.arm\n                    (mk_cf_pat `Break (U.make_var_pat id return_type span))\n                    return_expr;\n                  MS.arm (mk_cf_pat `Continue pat)\n                    (U.make_lets stmts_after final |> self#visit_expr in_loop);\n                ]\n              in\n              MS.expr_Match ~scrutinee:loop ~arms ~typ:return_type\n            in\n            match e.e with\n            (* This is supposed to improve performance but it might actually make it worse in some cases *)\n            | _ when not (has_cf#visit_expr true e) -> e\n            | Loop loop ->\n                let return_inside = has_cf#visit_expr false loop.body in\n                let new_body = self#visit_expr true loop.body in\n                let loop_expr =\n                  {\n                    e with\n                    e =\n                      Loop\n                        {\n                          loop with\n                          body = { new_body with typ = loop.body.typ };\n                        };\n                  }\n               
 in\n                if return_inside then\n                  let id = U.fresh_local_ident_in [] \"loop_res\" in\n                  let pat = U.make_var_pat id loop_expr.typ loop_expr.span in\n                  let module MS = (val U.M.make loop_expr.span) in\n                  let final = MS.expr_LocalVar ~typ:loop_expr.typ id in\n                  loop_with_return loop_expr [] final pat\n                else loop_expr\n            | Let _ -> (\n                (* Collect let bindings to get the sequence\n                   of \"statements\", find the first \"statement\" that is a\n                   control flow containing a return. Rewrite it.\n                *)\n                let stmts, final = U.collect_let_bindings e in\n                let inline_in_branch branch p stmts_after final =\n                  let branch_stmts, branch_final =\n                    U.collect_let_bindings branch\n                  in\n                  let stmts_to_add =\n                    match (p, branch_final) with\n                    (* This avoids adding `let _ = ()` *)\n                    | { p = PWild; _ }, { e = GlobalVar (`TupleCons 0); _ } ->\n                        stmts_after\n                    (* This avoids adding `let x = x` *)\n                    | { p = PBinding { var; _ }; _ }, { e = LocalVar evar; _ }\n                      when Local_ident.equal var evar ->\n                        stmts_after\n                    | stmt -> stmt :: stmts_after\n                  in\n                  U.make_lets (branch_stmts @ stmts_to_add) final\n                in\n                let stmts_before, stmt_and_stmts_after =\n                  List.split_while stmts ~f:(fun (_, e) ->\n                      match e.e with\n                      | (If _ | Match _) when has_cf#visit_expr in_loop e ->\n                          false\n                      | Loop _ when has_cf#visit_expr false e -> false\n                      | Return _ | Break _ | Continue _ -> false\n      
                | _ -> true)\n                in\n                match stmt_and_stmts_after with\n                | (p, ({ e = Loop loop; _ } as rhs)) :: stmts_after ->\n                    let new_body = self#visit_expr true loop.body in\n                    let loop_expr =\n                      {\n                        rhs with\n                        e =\n                          Loop\n                            {\n                              loop with\n                              body = { new_body with typ = loop.body.typ };\n                            };\n                      }\n                    in\n                    U.make_lets stmts_before\n                      (loop_with_return loop_expr stmts_after final p)\n                | (p, ({ e = If { cond; then_; else_ }; _ } as rhs))\n                  :: stmts_after ->\n                    (* We know there is no \"return\" in the condition\n                       so we must rewrite the if *)\n                    let then_ = inline_in_branch then_ p stmts_after final in\n                    let else_ =\n                      Some\n                        (match else_ with\n                        | Some else_ ->\n                            inline_in_branch else_ p stmts_after final\n                        | None -> U.make_lets stmts_after final)\n                    in\n                    U.make_lets stmts_before\n                      { rhs with e = If { cond; then_; else_ } }\n                    |> self#visit_expr in_loop\n                | (p, ({ e = Match { scrutinee; arms }; _ } as rhs))\n                  :: stmts_after ->\n                    let arms =\n                      List.map arms ~f:(fun arm ->\n                          let body =\n                            inline_in_branch arm.arm.body p stmts_after final\n                          in\n                          { arm with arm = { arm.arm with body } })\n                    in\n                    U.make_lets 
stmts_before\n                      { rhs with e = Match { scrutinee; arms } }\n                    |> self#visit_expr in_loop\n                (* The statements coming after a \"return\" are useless. *)\n                | (_, ({ e = Return _ | Break _ | Continue _; _ } as rhs)) :: _\n                  ->\n                    U.make_lets stmts_before rhs |> self#visit_expr in_loop\n                | _ ->\n                    let stmts =\n                      List.map stmts ~f:(fun (p, e) ->\n                          (p, self#visit_expr in_loop e))\n                    in\n                    U.make_lets stmts (self#visit_expr in_loop final))\n            | _ -> super#visit_expr in_loop e\n        end\n\n      let ditems = List.map ~f:(rewrite_control_flow#visit_item false)\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_rewrite_control_flow.mli",
    "content": "(** This phase finds control flow expression (`if` or `match`) with a `return`\n    expression in one of the branches. We replace them by replicating what comes\n    after in all the branches. This allows the `return` to be eliminated by\n    `drop_needless_returns`. This phase should come after\n    `phase_local_mutation`. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_rewrite_local_self.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      open Ast.Make (F)\n      module U = Ast_utils.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      let ditem i =\n        match i.v with\n        | Trait ({ items; _ } as t) ->\n            let items =\n              List.map\n                ~f:\n                  ((object\n                      inherit [_] U.Visitors.map as super\n\n                      method! visit_impl_expr () ie =\n                        match super#visit_impl_expr () ie with\n                        | { kind = LocalBound { id }; _ }\n                          when [%eq: string] id \"i0\" ->\n                            { ie with kind = Self }\n                        | ie -> ie\n                   end)\n                     #visit_trait_item\n                     ())\n                items\n            in\n            { i with v = Trait { t with items } }\n        | _ -> i\n\n      let ditems = List.map ~f:ditem\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_rewrite_local_self.mli",
    "content": "(** Rewrites, in traits and impls, local bounds refereing to `Self` into the\n    impl expr of kind `Self`. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_simplify_hoisting.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      open Ast.Make (F)\n      module U = Ast_utils.Make (F)\n      module Visitors = Ast_visitors.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      let inline_matches =\n        object (self)\n          inherit [_] Visitors.map as super\n\n          method! visit_expr () e =\n            match e with\n            | {\n             e =\n               Let\n                 {\n                   monadic = None;\n                   lhs =\n                     {\n                       p =\n                         PBinding\n                           {\n                             mut = Immutable;\n                             mode = ByValue;\n                             var;\n                             subpat = None;\n                             _;\n                           };\n                       _;\n                     };\n                   rhs;\n                   body;\n                 };\n             _;\n            }\n              when Local_ident.is_side_effect_hoist_var var ->\n                let body, count =\n                  (object\n                     inherit [_] Visitors.mapreduce as super\n                     method zero = 0\n                     method plus = ( + )\n\n                     method! 
visit_expr () e =\n                       match e.e with\n                       | LocalVar v when [%eq: Local_ident.t] v var -> (rhs, 1)\n                       | _ -> super#visit_expr () e\n                  end)\n                    #visit_expr\n                    () body\n                in\n                if [%eq: int] count 1 then self#visit_expr () body\n                else super#visit_expr () e\n            | _ -> super#visit_expr () e\n        end\n\n      let ditems = List.map ~f:(inline_matches#visit_item ())\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_simplify_hoisting.mli",
    "content": "(** This phase rewrites `let pat = match ... { ... => ..., ... => return ... }; e`\n    into `match ... { ... => let pat = ...; e}`. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_simplify_match_return.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      open Ast.Make (F)\n      module U = Ast_utils.Make (F)\n      module Visitors = Ast_visitors.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      let inline_matches =\n        object\n          inherit [_] Visitors.map as super\n\n          method! visit_expr () e =\n            match e with\n            | {\n             e =\n               Let\n                 {\n                   monadic = None;\n                   lhs;\n                   rhs =\n                     {\n                       e =\n                         Match\n                           {\n                             scrutinee;\n                             arms =\n                               [\n                                 arm;\n                                 ({\n                                    arm =\n                                      {\n                                        body =\n                                          {\n                                            e = Return _ as return;\n                                            span = return_span;\n                                            _;\n                                          };\n                                        guard = None;\n                                        _;\n                                      };\n                                    _;\n                                  } as diverging_arm);\n                               ];\n                           };\n                       _;\n                     } as match_expr;\n                   body;\n                 };\n             _;\n            } ->\n                let arm_body = arm.arm.body in\n                let arm_pat = arm.arm.arm_pat in\n                let 
arm_pat, let_expr =\n                  ((* if the match produces only a variable *)\n                   let* var =\n                     match arm_body.e with LocalVar v -> Some v | _ -> None\n                   in\n                   let found = ref false in\n                   let arm_pat =\n                     (object\n                        inherit [_] Visitors.map as super\n\n                        method! visit_pat () p =\n                          match p.p with\n                          | PBinding b when [%eq: Local_ident.t] b.var var ->\n                              found := true;\n                              lhs\n                          | _ -> super#visit_pat () p\n                     end)\n                       #visit_pat\n                       () arm_pat\n                   in\n                   let*? _ = !found in\n                   Some (arm_pat, body))\n                  |> Option.value\n                       ~default:\n                         ( arm_pat,\n                           {\n                             e with\n                             e =\n                               Let { monadic = None; lhs; rhs = arm_body; body };\n                           } )\n                in\n                let arm =\n                  { arm with arm = { arm_pat; body = let_expr; guard = None } }\n                in\n                let diverging_arm =\n                  {\n                    diverging_arm with\n                    arm =\n                      {\n                        diverging_arm.arm with\n                        body = { e = return; span = return_span; typ = e.typ };\n                      };\n                  }\n                in\n                let result =\n                  let e' = Match { scrutinee; arms = [ arm; diverging_arm ] } in\n                  let span = match_expr.span in\n                  { span; typ = e.typ; e = e' }\n                in\n                super#visit_expr () result\n            | 
_ -> super#visit_expr () e\n        end\n\n      let ditems = List.map ~f:(inline_matches#visit_item ())\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_simplify_match_return.mli",
    "content": "(** This phase rewrites `let pat = match ... { ... => ..., ... => return ... }; e`\n    into `match ... { ... => let pat = ...; e}`. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_simplify_question_marks.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make (FA : Features.T) = struct\n  open Ast\n\n  module FB = struct\n    include FA\n    include Features.On.Question_mark\n  end\n\n  include\n    Phase_utils.MakeBase (FA) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module UA = Ast_utils.Make (FA)\n    module UB = Ast_utils.Make (FB)\n\n    module S = struct\n      include Features.SUBTYPE.Id\n      include Features.SUBTYPE.On.Question_mark\n    end\n\n    module QuestionMarks = struct\n      [@@@warning \"-9\"]\n\n      open A\n\n      (** The types supported for [e] in a [e?] expression *)\n      type qm_kind = QMResult of { success : ty; error : ty } | QMOption of ty\n\n      (** Interpret a type [t] as a [qm_kind] *)\n      let qm_kind_of_typ span (t : ty) : qm_kind =\n        let is_result = Global_ident.eq_name Core__result__Result in\n        let is_option = Global_ident.eq_name Core__option__Option in\n        match t with\n        | TApp { ident; args = [ GType s; GType e ] } when is_result ident ->\n            QMResult { success = s; error = e }\n        | TApp { ident; args = [ GType s ] } when is_option ident -> QMOption s\n        | _ ->\n            Error.assertion_failure span\n              (\"expected something of type Option<_> or Result<_, _>, instead, \\\n                got: \"\n              ^ [%show: ty] t)\n\n      (** Expects [impl] to be an impl. expr. 
for the trait\n          `std::ops::FromResidual` for the type [Result<_, _>], and extract its\n          parent [From] impl expr *)\n      let expect_residual_impl_result (impl : impl_expr) : impl_expr option =\n        match impl with\n        | {\n         kind = ImplApp { args = [ from_impl ]; _ };\n         goal =\n           {\n             trait;\n             args =\n               [\n                 GType (TApp { ident = arg1; _ });\n                 GType (TApp { ident = arg2; _ });\n               ];\n           };\n        }\n          when Concrete_ident.eq_name Core__ops__try_trait__FromResidual trait\n               && Global_ident.eq_name Core__result__Result arg1\n               && Global_ident.eq_name Core__result__Result arg2 ->\n            Some from_impl\n        | _ -> None\n\n      (** Expects [t] to be [Result<S, E>], and returns [(S, E)] *)\n      let expect_result_type (t : ty) : (ty * ty) option =\n        match t with\n        | TApp { ident; args = [ GType s; GType e ] }\n          when Global_ident.eq_name Core__result__Result ident ->\n            Some (s, e)\n        | _ -> None\n\n      (** Construct [Result<S,E>] *)\n      let make_result_type (success : ty) (error : ty) : ty =\n        let ident = Global_ident.of_name ~value:false Core__result__Result in\n        TApp { ident; args = [ GType success; GType error ] }\n\n      (** Retype a [Err::<_, E>(x)] literal, as [Err::<success, E>(x)] *)\n      let retype_err_literal (e : expr) (success : ty) (error : ty) =\n        match e.e with\n        | Construct { constructor; _ }\n          when Global_ident.eq_name Core__result__Result__Err constructor ->\n            { e with typ = make_result_type success error }\n        | _ -> e\n\n      let convert_from (e : expr) (error_dest : ty) impl : expr option =\n        let error_src = e.typ in\n        let* impl = expect_residual_impl_result impl in\n        let*? 
_ = [%eq: ty] error_src error_dest |> not in\n        let from_typ = TArrow ([ error_src ], error_dest) in\n        let impl_generic_args = [ GType error_dest; GType error_src ] in\n        Some\n          (UA.call ~impl_generic_args ~impl Core__convert__From__from [ e ]\n             e.span from_typ)\n\n      (** [map_err e error_dest impl] creates the expression [e.map_err(from)]\n          with the proper types and impl informations. *)\n      let map_err (e : expr) (error_dest : ty) impl : expr option =\n        let* success, error_src = expect_result_type e.typ in\n        let* impl = expect_residual_impl_result impl in\n        let from_typ = TArrow ([ error_src ], error_dest) in\n        let from = UA.call ~impl Core__convert__From__from [] e.span from_typ in\n        let call =\n          UA.call Core__result__Impl__map_err [ e; from ] e.span\n            (make_result_type success error_dest)\n        in\n        Some call\n\n      let mk_pconstruct ~is_struct ~is_record ~span ~typ\n          (constructor : Concrete_ident_generated.t)\n          (fields : (Concrete_ident_generated.t * pat) list) =\n        let constructor = Global_ident.of_name ~value:true constructor in\n        let fields =\n          List.map\n            ~f:(fun (field, pat) ->\n              let field = Global_ident.of_name ~value:true field in\n              { field; pat })\n            fields\n        in\n        let p = PConstruct { constructor; fields; is_record; is_struct } in\n        { p; span; typ }\n\n      (** [extract e] returns [Some (x, ty)] if [e] was a `y?` desugared by\n          rustc. `y` is `x` plus possibly a coercion. [ty] is the return type of\n          the function. 
*)\n      let extract (e : expr) : expr option =\n        let extract_return (e : expr) =\n          match e.e with\n          | Return\n              {\n                e =\n                  {\n                    e =\n                      App\n                        {\n                          f = { e = GlobalVar f };\n                          args = [ { e = LocalVar residual_var; _ } ];\n                          trait = Some (impl, _);\n                        };\n                    typ = return_typ;\n                    _;\n                  };\n                witness;\n              } ->\n              Some (f, residual_var, return_typ, impl, witness)\n          | _ -> None\n        in\n        let extract_pat_app_bd (p : pat) : (global_ident * local_ident) option =\n          match p.p with\n          | PConstruct\n              {\n                constructor = name;\n                fields =\n                  [\n                    {\n                      pat =\n                        {\n                          p =\n                            PBinding { mut = Immutable; var; subpat = None; _ };\n                          _;\n                        };\n                      _;\n                    };\n                  ];\n                _;\n              } ->\n              Some (name, var)\n          | _ -> None\n        in\n        match e.e with\n        | Match\n            {\n              scrutinee =\n                { e = App { f = { e = GlobalVar n; _ }; args = [ expr ] }; _ };\n              arms =\n                [\n                  { arm = { arm_pat = pat_break; body }; _ };\n                  { arm = { arm_pat = pat_continue; body = continue_expr }; _ };\n                ];\n            }\n          when Global_ident.eq_name Core__ops__try_trait__Try__branch n ->\n            let* body =\n              UA.Expect.concrete_app1 Rust_primitives__hax__never_to_any body\n            in\n            let* f, residual_var, 
fun_return_typ, residual_impl, return_witness\n                =\n              extract_return body\n            in\n            let* f_break, residual_var' = extract_pat_app_bd pat_break in\n            let* f_continue, continue_var' = extract_pat_app_bd pat_continue in\n            let continue_expr =\n              Option.value\n                (UA.Expect.borrow continue_expr)\n                ~default:continue_expr\n            in\n            let continue_expr = UA.unbox_underef_expr continue_expr in\n            let* continue_var = UA.Expect.local_var continue_expr in\n            let*? _ = [%equal: local_ident] residual_var residual_var' in\n            let*? _ = [%equal: local_ident] continue_var continue_var' in\n            let*? _ =\n              Global_ident.eq_name Core__ops__control_flow__ControlFlow__Break\n                f_break\n              && Global_ident.eq_name\n                   Core__ops__control_flow__ControlFlow__Continue f_continue\n              && Global_ident.eq_name\n                   Core__ops__try_trait__FromResidual__from_residual f\n            in\n            let kind = qm_kind_of_typ e.span in\n            let span = expr.span in\n            let mk_var name : local_ident =\n              { name; id = Local_ident.mk_id Expr (-1) }\n            in\n            let mk_cons =\n              mk_pconstruct ~is_struct:false ~is_record:false ~span\n                ~typ:expr.typ\n            in\n            let expr =\n              match (kind expr.typ, kind fun_return_typ) with\n              | ( QMResult { error = local_err; success = local_success },\n                  QMResult { error = return_err; _ } ) ->\n                  let var_ok, var_err = (mk_var \"ok\", mk_var \"err\") in\n                  let arm_ok : A.arm =\n                    let pat = UA.make_var_pat var_ok local_success span in\n                    let arm_pat =\n                      mk_cons Core__result__Result__Ok\n                        [ 
(Core__result__Result__Ok__0, pat) ]\n                    in\n                    let body =\n                      { typ = local_success; e = LocalVar var_ok; span }\n                    in\n                    { arm = { arm_pat; body; guard = None }; span }\n                  in\n                  let arm_err =\n                    let pat = UA.make_var_pat var_err local_err span in\n                    let arm_pat =\n                      mk_cons Core__result__Result__Err\n                        [ (Core__result__Result__Err__0, pat) ]\n                    in\n                    let err = { typ = local_err; e = LocalVar var_err; span } in\n                    let err =\n                      convert_from err return_err residual_impl\n                      |> Option.value ~default:err\n                    in\n                    let err =\n                      UA.call_Constructor Core__result__Result__Err false\n                        [ err ] e.span fun_return_typ\n                    in\n                    let e = Return { e = err; witness = return_witness } in\n                    let return = { typ = local_success; e; span } in\n                    { arm = { arm_pat; body = return; guard = None }; span }\n                  in\n                  let arms, typ = ([ arm_ok; arm_err ], local_success) in\n                  { e = Match { scrutinee = expr; arms }; typ; span }\n              | QMOption local_success, QMOption _ ->\n                  let var_some = mk_var \"some\" in\n                  let arm_some : A.arm =\n                    let pat = UA.make_var_pat var_some local_success span in\n                    let arm_pat =\n                      mk_cons Core__option__Option__Some\n                        [ (Core__option__Option__Some__0, pat) ]\n                    in\n                    let body =\n                      { typ = local_success; e = LocalVar var_some; span }\n                    in\n                    { arm = { arm_pat; body; guard = 
None }; span }\n                  in\n                  let arm_none =\n                    let arm_pat = mk_cons Core__option__Option__None [] in\n                    let none =\n                      UA.call_Constructor Core__option__Option__None false []\n                        e.span fun_return_typ\n                    in\n                    let e = Return { e = none; witness = return_witness } in\n                    let return = { typ = local_success; e; span } in\n                    { arm = { arm_pat; body = return; guard = None }; span }\n                  in\n                  let arms, typ = ([ arm_some; arm_none ], local_success) in\n                  { e = Match { scrutinee = expr; arms }; typ; span }\n              | _ ->\n                  Error.assertion_failure e.span\n                    \"expected expr.typ and fun_return_typ to be both Options \\\n                     or both Results\"\n            in\n            Some expr\n        | _ -> None\n    end\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    let rec dexpr_unwrapped (expr : A.expr) : B.expr =\n      QuestionMarks.extract expr |> Option.value ~default:expr\n      |> [%inline_body dexpr_unwrapped]\n    [@@inline_ands bindings_of dexpr]\n\n    [%%inline_defs \"Item.*\"]\n  end\n\n  include Implem\n  module FA = FA\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_simplify_question_marks.mli",
    "content": "(** In THIR, there is no construct for question marks. Instead, Rustc desugars\n    `e?` into the following:\n\n    {@rust[\n      match core::ops::try_trait::branch(y) {\n          core::ops::control_flow::Break(residual) => {\n              never_to_any(\n                  {return core::ops::try_trait::from_residual(residual)},\n              )\n          }\n          core::ops::control_flow::Continue(val) => val,\n      })\n    ]}\n\n    This phase does the opposite rewrite.\n\n    While `e?` in Rust might imply an implicit coercion, in our AST, a\n    question mark is expected to already be of the right type. This phase\n    inlines a coercion (of the shape `x.map_err(from)`, in the case of a\n    `Result`). *)\n\nopen! Prelude\n\n(** This phase can be applied to any feature set. *)\nmodule Make (F : Features.T) : sig\n  include module type of struct\n    module FA = F\n\n    (** This phase outputs an AST with question marks. *)\n    module FB = struct\n      include F\n      include Features.On.Question_mark\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_sort_items.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      module A = Ast.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      module Attrs = Attr_payloads.MakeBase (Error)\n\n      let ditems items =\n        let module Deps = Dependencies.Make (F) in\n        Deps.sort items\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_sort_items.mli",
    "content": "(** This phase sorts items so that each item comes after the items it depends\n    on. This is done by sorting namespaces with the same property, and then\n    sorting items within each namespace, trying as much as possible to respect\n    the original order. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_sort_items_namespace_wise.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      module A = Ast.Make (F)\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      module Attrs = Attr_payloads.MakeBase (Error)\n\n      let ditems items =\n        let module Deps = Dependencies.Make (F) in\n        Deps.sort_namespace_wise items\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_sort_items_namespace_wise.mli",
    "content": "(** This phase sorts items so that each item comes after the items it depends\n    on. This is done by sorting namespaces with the same property, and then\n    sorting items within each namespace, trying as much as possible to respect\n    the original order. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_specialize.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      module A = Ast.Make (F)\n      module FB = F\n      module B = Ast.Make (F)\n      module U = Ast_utils.Make (F)\n      module Visitors = Ast_visitors.Make (F)\n      open A\n\n      open struct\n        open Concrete_ident_generated\n\n        module FnReplace = struct\n          type t =\n            span:Span.t ->\n            typ:ty ->\n            f:expr ->\n            args:expr list ->\n            generic_args:generic_value list ->\n            bounds_impls:impl_expr list ->\n            trait:(impl_expr * generic_value list) option ->\n            expr\n\n          (** Retype a function application: this concretize the types, using\n              concrete types from arguments. *)\n          let retype (fn : t) : t =\n           fun ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait ->\n            let f =\n              let typ =\n                if List.is_empty args then f.typ\n                else TArrow (List.map ~f:(fun e -> e.typ) args, typ)\n              in\n              { f with typ }\n            in\n            fn ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait\n\n          (** Gets rid of trait and impl informations. *)\n          let remove_traits (fn : t) : t =\n           fun ~span ~typ ~f ~args ~generic_args:_ ~bounds_impls:_ ~trait:_ ->\n            fn ~span ~typ ~f ~args ~generic_args:[] ~bounds_impls:[] ~trait:None\n\n          (** Monomorphize a function call: this removes any impl references,\n              and concretize types. 
*)\n          let monorphic (fn : t) : t = remove_traits (retype fn)\n\n          let name name : t =\n           fun ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait ->\n            let name = Ast.Global_ident.of_name ~value:true name in\n            let f = { f with e = GlobalVar name } in\n            let e = App { args; f; generic_args; bounds_impls; trait } in\n            { typ; span; e }\n\n          let and_then (f1 : t) (f2 : expr -> expr) : t =\n           fun ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait ->\n            f1 ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait |> f2\n\n          let map_args (fn : int -> expr -> expr) : t -> t =\n           fun g ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait ->\n            let args = List.mapi ~f:fn args in\n            g ~span ~typ ~f ~args ~generic_args ~bounds_impls ~trait\n        end\n\n        type pattern = {\n          fn : t;\n          fn_replace : FnReplace.t;\n          args : (expr -> bool) list;\n          ret : ty -> bool;\n        }\n        (** A pattern that helps matching against function applications *)\n\n        type ('a, 'b) predicate = 'a -> 'b option\n        (** Instead of working directly with boolean predicate, we work with `_\n            -> _ option` so that we can chain them *)\n\n        (** Constructs a predicate out of predicates and names *)\n        let mk' (args : ('a, 'b) predicate list) (ret : ('c, 'd) predicate)\n            (fn : t) (fn_replace : FnReplace.t) : pattern =\n          let args = List.map ~f:(fun p x -> p x |> Option.is_some) args in\n          let ret t = ret t |> Option.is_some in\n          { fn; fn_replace; args; ret }\n\n        let mk (args : ('a, 'b) predicate list) (ret : ('c, 'd) predicate)\n            (fn : t) (fn_replace : t) : pattern =\n          mk' args ret fn (FnReplace.name fn_replace |> FnReplace.monorphic)\n\n        open struct\n          let etyp (e : expr) : ty = e.typ\n          let tref = function TRef 
{ typ; _ } -> Some typ | _ -> None\n\n          let tapp0 = function\n            | TApp { ident; args = [] } -> Some ident\n            | _ -> None\n\n          let ( >>& ) (f1 : ('a, 'b) predicate) (f2 : ('b, 'c) predicate) :\n              ('a, 'c) predicate =\n           fun x -> Option.bind (f1 x) ~f:f2\n\n          let eq : 'a 'b. eq:('a -> 'b -> bool) -> 'a -> ('b, 'b) predicate =\n           fun ~eq x x' -> if eq x x' then Some x' else None\n\n          let eq_global_ident :\n              t -> (Ast.Global_ident.t, Ast.Global_ident.t) predicate =\n            eq ~eq:Ast.Global_ident.eq_name\n\n          let erase : 'a. ('a, unit) predicate = fun _ -> Some ()\n\n          let ( ||. ) (type a b) (f : (a, b) predicate) (g : (a, b) predicate) :\n              (a, b) predicate =\n           fun x ->\n            match (f x, g x) with Some a, _ | _, Some a -> Some a | _ -> None\n\n          let is_int : (ty, unit) predicate =\n            tapp0 >>& eq_global_ident Hax_lib__int__Int >>& erase\n\n          let is_machine_int : (ty, unit) predicate =\n           fun t ->\n            match t with\n            | TInt _\n            | TRef { typ = TInt _; _ }\n            | TRef { typ = TRef { typ = TInt _; _ }; _ } ->\n                Some ()\n            | _ -> None\n\n          let is_prop : (ty, unit) predicate =\n            tapp0 >>& eq_global_ident Hax_lib__prop__Prop >>& erase\n\n          let is_bool : (ty, unit) predicate = function\n            | TBool\n            | TRef { typ = TBool; _ }\n            | TRef { typ = TRef { typ = TBool; _ }; _ } ->\n                Some ()\n            | _ -> None\n\n          let any _ = Some ()\n          let int_any = mk [ etyp >> is_int ] any\n          let int_int_any = mk [ etyp >> is_int; etyp >> is_int ] any\n          let any_int = mk [ any ] is_int\n          let rint_any = mk [ etyp >> (tref >>& is_int) ] any\n\n          let rint_rint_any =\n            mk [ etyp >> (tref >>& is_int); etyp >> (tref >>& is_int) 
] any\n\n          let any_rint = mk [ any ] (tref >>& is_int)\n\n          let mint_mint_any =\n            mk [ etyp >> is_machine_int; etyp >> is_machine_int ] any\n\n          let mint_any = mk [ etyp >> is_machine_int ] any\n          let bool_prop = mk [ etyp >> is_bool ] is_prop\n          let prop_bool = mk [ etyp >> is_prop ] is_bool\n\n          let arrow : (ty, ty list) predicate = function\n            | TArrow (ts, t) -> Some (ts @ [ t ])\n            | _ -> None\n\n          let a_to_b a b : _ predicate =\n            arrow >> fun x ->\n            let* t, u =\n              match x with Some [ a; b ] -> Some (a, b) | _ -> None\n            in\n            let* a = a t in\n            let* b = b u in\n            Some (a, b)\n        end\n\n        let int_replacements =\n          [\n            mint_mint_any Core__ops__arith__Add__add\n              Rust_primitives__hax__machine_int__add;\n            mint_mint_any Core__ops__arith__Sub__sub\n              Rust_primitives__hax__machine_int__sub;\n            mint_mint_any Core__ops__arith__Mul__mul\n              Rust_primitives__hax__machine_int__mul;\n            mint_mint_any Core__ops__arith__Div__div\n              Rust_primitives__hax__machine_int__div;\n            mint_mint_any Core__ops__arith__Rem__rem\n              Rust_primitives__hax__machine_int__rem;\n            mint_mint_any Core__ops__bit__Shl__shl\n              Rust_primitives__hax__machine_int__shl;\n            mint_mint_any Core__ops__bit__Shr__shr\n              Rust_primitives__hax__machine_int__shr;\n            mint_mint_any Core__ops__bit__BitXor__bitxor\n              Rust_primitives__hax__machine_int__bitxor;\n            mint_mint_any Core__ops__bit__BitAnd__bitand\n              Rust_primitives__hax__machine_int__bitand;\n            mint_mint_any Core__ops__bit__BitOr__bitor\n              Rust_primitives__hax__machine_int__bitor;\n            mint_any Core__ops__bit__Not__not\n              
Rust_primitives__hax__machine_int__not;\n            mint_mint_any Core__cmp__PartialOrd__gt\n              Rust_primitives__hax__machine_int__gt;\n            mint_mint_any Core__cmp__PartialOrd__ge\n              Rust_primitives__hax__machine_int__ge;\n            mint_mint_any Core__cmp__PartialOrd__lt\n              Rust_primitives__hax__machine_int__lt;\n            mint_mint_any Core__cmp__PartialOrd__le\n              Rust_primitives__hax__machine_int__le;\n            mint_mint_any Core__cmp__PartialEq__ne\n              Rust_primitives__hax__machine_int__ne;\n            mint_mint_any Core__cmp__PartialEq__eq\n              Rust_primitives__hax__machine_int__eq;\n            mint_any Core__ops__arith__Neg__neg Rust_primitives__arithmetic__neg;\n            int_int_any Core__ops__arith__Add__add\n              Rust_primitives__hax__int__add;\n            int_int_any Core__ops__arith__Sub__sub\n              Rust_primitives__hax__int__sub;\n            int_int_any Core__ops__arith__Mul__mul\n              Rust_primitives__hax__int__mul;\n            int_int_any Core__ops__arith__Div__div\n              Rust_primitives__hax__int__div;\n            int_int_any Core__ops__arith__Rem__rem\n              Rust_primitives__hax__int__rem;\n            int_any Core__ops__arith__Neg__neg Rust_primitives__hax__int__neg;\n            rint_rint_any Core__cmp__PartialOrd__gt\n              Rust_primitives__hax__int__gt;\n            rint_rint_any Core__cmp__PartialOrd__ge\n              Rust_primitives__hax__int__ge;\n            rint_rint_any Core__cmp__PartialOrd__lt\n              Rust_primitives__hax__int__lt;\n            rint_rint_any Core__cmp__PartialOrd__le\n              Rust_primitives__hax__int__le;\n            rint_rint_any Core__cmp__PartialEq__ne Rust_primitives__hax__int__ne;\n            rint_rint_any Core__cmp__PartialEq__eq Rust_primitives__hax__int__eq;\n            any_int Hax_lib__abstraction__Abstraction__lift\n              
Rust_primitives__hax__int__from_machine;\n            any_int Hax_lib__int__ToInt__to_int\n              Rust_primitives__hax__int__from_machine;\n            int_any Hax_lib__abstraction__Concretization__concretize\n              Rust_primitives__hax__int__into_machine;\n          ]\n\n        let prop_replacements =\n          let name_from_bool = Hax_lib__prop__constructors__from_bool in\n          let prop_type =\n            let ident =\n              Ast.Global_ident.of_name ~value:false Hax_lib__prop__Prop\n            in\n            TApp { ident; args = [] }\n          in\n          let bool_prop__from_bool f = bool_prop f name_from_bool in\n          let poly n f g =\n            let args =\n              let prop_or_bool = is_bool ||. is_prop in\n              List.init n ~f:(fun _ ->\n                  etyp\n                  >> (prop_or_bool\n                     ||. (a_to_b prop_or_bool prop_or_bool >> erase)))\n            in\n            let promote_bool (e : A.expr) =\n              match e.typ with\n              | TBool -> U.call name_from_bool [ e ] e.span prop_type\n              | _ -> e\n            in\n            mk' args is_prop f\n              (FnReplace.map_args\n                 (fun _ e ->\n                   let e = promote_bool e in\n                   match e.e with\n                   | Closure { params; body; captures } ->\n                       let body = promote_bool body in\n                       { e with e = Closure { params; body; captures } }\n                   | _ -> e)\n                 (FnReplace.name g |> FnReplace.monorphic))\n          in\n          [\n            bool_prop__from_bool Hax_lib__abstraction__Abstraction__lift;\n            bool_prop__from_bool Hax_lib__prop__ToProp__to_prop;\n            bool_prop__from_bool Core__convert__Into__into;\n            bool_prop__from_bool Core__convert__From__from;\n            (* Transform inherent methods on Prop *)\n            poly 2 Hax_lib__prop__Impl__and 
Hax_lib__prop__constructors__and;\n            poly 2 Hax_lib__prop__Impl__or Hax_lib__prop__constructors__or;\n            poly 1 Hax_lib__prop__Impl__not Hax_lib__prop__constructors__not;\n            poly 2 Hax_lib__prop__Impl__eq Hax_lib__prop__constructors__eq;\n            poly 2 Hax_lib__prop__Impl__ne Hax_lib__prop__constructors__ne;\n            poly 2 Hax_lib__prop__Impl__implies\n              Hax_lib__prop__constructors__implies;\n            (* Transform standalone functions in `prop` *)\n            poly 2 Hax_lib__prop__implies Hax_lib__prop__constructors__implies;\n            poly 1 Hax_lib__prop__forall Hax_lib__prop__constructors__forall;\n            poly 1 Hax_lib__prop__exists Hax_lib__prop__constructors__exists;\n            (* Transform core `&`, `|`, `!` on `Prop` *)\n            poly 2 Core__ops__bit__BitAnd__bitand\n              Hax_lib__prop__constructors__and;\n            poly 2 Core__ops__bit__BitOr__bitor Hax_lib__prop__constructors__or;\n            poly 1 Core__ops__bit__Not__not Hax_lib__prop__constructors__not;\n          ]\n\n        let replacements = List.concat [ int_replacements; prop_replacements ]\n      end\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      module Attrs = Attr_payloads.Make (F) (Error)\n\n      (** Drop `from` or `into` when they are of type `T -> T`, for any `T`. *)\n      let remove_from_into_identity =\n        object\n          inherit [_] Visitors.map as super\n\n          method! 
visit_expr () e =\n            let e =\n              match e.e with\n              | App { f = { e = GlobalVar f; _ }; args = [ x ]; _ }\n                when [%equal: ty] e.typ x.typ\n                     && (Ast.Global_ident.eq_name Core__convert__Into__into f\n                        || Ast.Global_ident.eq_name Core__convert__From__from f\n                        ) ->\n                  x\n              | _ -> e\n            in\n            super#visit_expr () e\n        end\n\n      let visitor =\n        object (self)\n          inherit [_] Visitors.map as super\n\n          method! visit_expr () e =\n            match e.e with\n            | App\n                {\n                  f = { e = GlobalVar f; _ } as f';\n                  args = l;\n                  trait;\n                  generic_args;\n                  bounds_impls;\n                } -> (\n                let l = List.map ~f:(self#visit_expr ()) l in\n                let matching =\n                  List.filter\n                    (List.mapi ~f:(fun i x -> (i, x)) replacements)\n                    ~f:(fun (_, { fn; args; ret; fn_replace = _ }) ->\n                      Ast.Global_ident.eq_name fn f\n                      && ret e.typ\n                      &&\n                      match List.for_all2 args l ~f:apply with\n                      | Ok r -> r\n                      | _ -> false)\n                in\n                match matching with\n                | [ (_, { fn_replace; _ }) ] ->\n                    let e =\n                      fn_replace ~args:l ~typ:e.typ ~span:e.span ~generic_args\n                        ~bounds_impls ~trait ~f:f'\n                    in\n                    self#visit_expr () e\n                | [] -> (\n                    (* In this case we need to avoid recursing again through the arguments *)\n                    let visited =\n                      let args = [] in\n                      let e' =\n                        App { f = f'; 
args; trait; generic_args; bounds_impls }\n                      in\n                      super#visit_expr () { e with e = e' }\n                    in\n                    match visited.e with\n                    | App { f; trait; generic_args; bounds_impls; _ } ->\n                        {\n                          visited with\n                          e =\n                            App\n                              { f; args = l; trait; generic_args; bounds_impls };\n                        }\n                    | _ -> super#visit_expr () e)\n                | r ->\n                    let msg =\n                      \"Found multiple matching patterns: \"\n                      ^ [%show: int list] (List.map ~f:fst r)\n                    in\n                    Stdio.prerr_endline msg;\n                    U.Debug.expr e;\n                    Error.assertion_failure e.span msg)\n            | _ -> super#visit_expr () e\n        end\n\n      let ditems (l : A.item list) : B.item list =\n        List.map\n          ~f:(visitor#visit_item () >> remove_from_into_identity#visit_item ())\n          l\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_specialize.mli",
    "content": "(** This phase specializes certain specific method applications (according to\n    their name and the type it is being used on) into plain functions.\n\n    This is useful especially for math integers: the methods of the traits `Add`,\n    `Sub`, `Mul` etc. are mapped to \"primitive\" functions in backends (e.g.\n    Prims.whatever in FStar). *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_traits_specs.ml",
    "content": "open! Prelude\n\nmodule Make (F : Features.T) =\n  Phase_utils.MakeMonomorphicPhase\n    (F)\n    (struct\n      let phase_id = [%auto_phase_name auto]\n\n      module A = Ast.Make (F)\n      module FB = F\n      module B = Ast.Make (F)\n      module U = Ast_utils.Make (F)\n      module BVisitors = Ast_visitors.Make (F)\n      open A\n\n      module Error = Phase_utils.MakeError (struct\n        let ctx = Diagnostics.Context.Phase phase_id\n      end)\n\n      let mk_name ident kind = Concrete_ident.with_suffix kind ident\n\n      module Attrs = Attr_payloads.Make (F) (Error)\n\n      let ditems (l : A.item list) : B.item list =\n        let (module Attrs) = Attrs.with_items l in\n        let f' (item : item) : item =\n          let v =\n            match item.v with\n            | Trait { name; generics; items; safety } ->\n                let f attrs (item : trait_item) =\n                  let mk role kind =\n                    let ti_ident = mk_name item.ti_ident kind in\n                    {\n                      item with\n                      ti_ident;\n                      ti_attrs =\n                        [\n                          Attr_payloads.to_attr TraitMethodNoPrePost\n                            item.ti_span;\n                        ]\n                        @ (List.filter\n                             ~f:\n                               [%matches?\n                                 Types.AssociatedItem { role = role'; _ }, _ when \n                                 [%eq: Types.ha_assoc_role] role role']\n                             attrs\n                          |> List.map ~f:(uncurry Attr_payloads.to_attr));\n                    }\n                  in\n                  match item.ti_v with\n                  | TIFn (TArrow (inputs, output)) ->\n                      [\n                        {\n                          (mk Types.Requires `Pre) with\n                          ti_v = TIFn (TArrow (inputs, TBool));\n  
                      };\n                        {\n                          (mk Types.Ensures `Post) with\n                          ti_v = TIFn (TArrow (inputs @ [ output ], TBool));\n                        };\n                      ]\n                  | TIFn _ -> [ (* REFINEMENTS FOR CONSTANTS? *) ]\n                  | TIType _ -> [ (* TODO REFINEMENTS FOR TYPES *) ]\n                  | TIDefault _ -> [ (* TODO REFINEMENTS FOR DEFAULT ITEMS *) ]\n                in\n                let items =\n                  List.concat_map\n                    ~f:(fun item ->\n                      let attrs = Attr_payloads.payloads item.ti_attrs in\n                      let ti_attrs =\n                        attrs\n                        |> List.filter\n                             ~f:\n                               (fst\n                               >> [%matches?\n                                    Types.AssociatedItem\n                                      { role = Ensures | Requires; _ }]\n                               >> not)\n                        |> List.map ~f:(uncurry Attr_payloads.to_attr)\n                      in\n                      f attrs item @ [ { item with ti_attrs } ])\n                    items\n                in\n                Trait { name; generics; items; safety }\n            | Impl { generics; self_ty; of_trait; items; parent_bounds; safety }\n              ->\n                let f (item : impl_item) =\n                  let mk kind =\n                    let ii_ident = mk_name item.ii_ident kind in\n                    { item with ii_ident }\n                  in\n                  let default =\n                    {\n                      e = Literal (Bool true);\n                      span = item.ii_span;\n                      typ = TBool;\n                    }\n                  in\n                  match item.ii_v with\n                  | IIFn { params = []; _ } -> []\n                  | IIFn { body; params } ->\n    
                  (* We always need to produce a pre and a post\n                         condition implementation for each method in\n                         the impl. *)\n                      [\n                        (let params, body =\n                           match Attrs.associated_fn Requires item.ii_attrs with\n                           | Some (_, params, body) -> (params, body)\n                           | None -> (params, default)\n                         in\n                         { (mk `Pre) with ii_v = IIFn { body; params } });\n                        (let params, body =\n                           match Attrs.associated_fn Ensures item.ii_attrs with\n                           | Some (_, params, body) -> (params, body)\n                           | None ->\n                               (* There is no explicit post-condition\n                                  on this method. We need to define a\n                                  trivial one. *)\n                               (* Post-condition *always* an extra\n                                  argument in final position for the\n                                  output. 
*)\n                               let out_ident =\n                                 U.fresh_local_ident_in\n                                   (U.Reducers.collect_local_idents\n                                      #visit_impl_item () item\n                                   |> Set.to_list)\n                                   \"out\"\n                               in\n                               let pat =\n                                 U.make_var_pat out_ident body.typ body.span\n                               in\n                               let typ = body.typ in\n                               let out =\n                                 { pat; typ; typ_span = None; attrs = [] }\n                               in\n                               (params @ [ out ], default)\n                         in\n                         { (mk `Post) with ii_v = IIFn { body; params } });\n                      ]\n                  | IIType _ -> []\n                in\n                let items =\n                  List.concat_map ~f:(fun item -> f item @ [ item ]) items\n                in\n                Impl\n                  { generics; self_ty; of_trait; items; parent_bounds; safety }\n            | v -> v\n          in\n          { item with v }\n        in\n        let f item =\n          try f' item\n          with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->\n            let error = Diagnostics.pretty_print_context_kind context kind in\n            let msg = error in\n            B.make_hax_error_item item.span item.ident msg\n        in\n        List.map ~f l\n    end)\n"
  },
  {
    "path": "engine/lib/phases/phase_traits_specs.mli",
    "content": "(** This phase adds specification to traits. For each method `f` in a trait, we\n    add a `f_pre` and a `f_post`. *)\n\nmodule Make : Phase_utils.UNCONSTRAINTED_MONOMORPHIC_PHASE\n"
  },
  {
    "path": "engine/lib/phases/phase_transform_hax_lib_inline.ml",
    "content": "open! Prelude\nopen! Ast\n\nmodule%inlined_contents Make (F : Features.T) = struct\n  module FA = F\n\n  module FB = struct\n    include F\n    include Features.On.Quote\n  end\n\n  include\n    Phase_utils.MakeBase (F) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module UA = Ast_utils.Make (F)\n    module UB = Ast_utils.Make (FB)\n    module Visitors = Ast_visitors.Make (FB)\n    module Attrs = Attr_payloads.Make (F) (Error)\n\n    module S = struct\n      module A = FA\n      module B = FB\n      include Features.SUBTYPE.Id\n\n      let quote _ _ = Features.On.quote\n    end\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    (** Patterns are \"stored\" in a [match None { Some <PAT> => (), _ => () }]\n        dummy expression. *)\n    let extract_pattern (e : B.expr) : B.pat option =\n      match e.e with\n      | Block\n          {\n            e =\n              {\n                e =\n                  Match\n                    {\n                      arms =\n                        [\n                          {\n                            arm =\n                              {\n                                arm_pat =\n                                  { p = PConstruct { fields = [ arg ]; _ }; _ };\n                                _;\n                              };\n                            _;\n                          };\n                          _;\n                        ];\n                      _;\n                    };\n                _;\n              };\n            _;\n          } ->\n          Some arg.pat\n      | _ -> None\n\n    (** Extracts the first global_ident found in a pattern *)\n    let first_global_ident (pat : B.pat) : global_ident option =\n      UB.Reducers.collect_global_idents#visit_pat () pat |> Set.choose\n\n    let counter = ref 0\n\n    let rec dexpr' span (expr : A.expr') : B.expr' 
=\n      quote_of_expr' span expr\n      |> Option.map ~f:(fun quote : B.expr' -> B.Quote quote)\n      |> Option.value_or_thunk ~default:(fun _ ->\n             [%inline_body dexpr'] span expr)\n\n    and quote_of_expr (expr : A.expr) = quote_of_expr' expr.span expr.e\n\n    and quote_of_expr' span (expr : A.expr') =\n      match expr with\n      | App { f = { e = GlobalVar f; _ }; args = [ payload ]; _ }\n        when Global_ident.eq_name Hax_lib__inline f\n             || Global_ident.eq_name Hax_lib__inline_unsafe f ->\n          let bindings, str = dexpr payload |> UB.collect_let_bindings in\n          let str =\n            match\n              UB.Expect.(block >> Option.bind ~f:borrow >> Option.bind ~f:deref)\n                str\n            with\n            | Some { e = Literal (String str); _ } -> str\n            | _ ->\n                Error.assertion_failure span\n                  \"Malformed call to 'inline': cannot find string payload.\"\n          in\n          let code : B.quote_content list =\n            List.map bindings ~f:(fun (pat, e) ->\n                match\n                  UB.Expect.pbinding_simple pat\n                  |> Option.map ~f:(fun ((i, _) : Local_ident.t * _) -> i.name)\n                with\n                | Some \"_constructor\" ->\n                    let id =\n                      extract_pattern e\n                      |> Option.bind ~f:first_global_ident\n                      |> Option.value_or_thunk ~default:(fun _ ->\n                             Error.assertion_failure span\n                               \"Could not extract pattern (case constructor): \\\n                                this may be a bug in the quote macros in \\\n                                hax-lib.\")\n                    in\n                    B.Expr { e with e = GlobalVar id }\n                | Some \"_pat\" ->\n                    let pat =\n                      extract_pattern e\n                      |> Option.value_or_thunk 
~default:(fun _ ->\n                             Error.assertion_failure span\n                               \"Could not extract pattern (case pat): this may \\\n                                be a bug in the quote macros in hax-lib.\")\n                    in\n                    Pattern pat\n                | Some \"_ty\" ->\n                    let typ =\n                      match pat.typ with\n                      | TApp { args = [ GType typ ]; _ } -> typ\n                      | _ ->\n                          Stdio.prerr_endline @@ \"-pat->\" ^ [%show: B.pat] pat;\n                          Stdio.prerr_endline @@ \"-expr->\"\n                          ^ [%show: B.expr'] e.e;\n                          Error.assertion_failure span\n                            \"Malformed call to 'inline': expected type \\\n                             `Option<_>`.\"\n                    in\n                    Typ typ\n                | _ -> Expr e)\n          in\n          let verbatim = split_str ~on:\"SPLIT_QUOTE\" str in\n          let contents =\n            let rec f verbatim (code : B.quote_content list) =\n              match (verbatim, code) with\n              | s :: s', code :: code' -> B.Verbatim s :: code :: f s' code'\n              | [ s ], [] -> [ Verbatim s ]\n              | [], [] -> []\n              | _ ->\n                  Error.assertion_failure span\n                  @@ \"Malformed call to 'inline'.\" ^ \"\\nverbatim=\"\n                  ^ [%show: string list] verbatim\n                  ^ \"\\ncode=\"\n                  ^ [%show: B.quote_content list] code\n            in\n            f verbatim code\n          in\n          Some { contents; witness = Features.On.quote }\n      | _ -> None\n    [@@inline_ands bindings_of dexpr - dexpr']\n\n    [%%inline_defs \"Item.*\" - ditems]\n\n    let ditems items =\n      let find_parent_item :\n          Attr_payloads.UId.t -> (Attr_payloads.AssocRole.t * A.item) option =\n        List.concat_map\n       
   ~f:(fun (item : A.item) ->\n            Attrs.raw_associated_item item.attrs\n            |> List.map ~f:(fun (role, child_uid) -> (child_uid, (role, item))))\n          items\n        |> Map.of_alist_exn (module Attr_payloads.UId)\n        |> Map.find\n      in\n      (* If [item] can be interpreted as a quote, return a `Quote` item *)\n      let item_as_quote (item : A.item) =\n        let* body =\n          match item.v with\n          | Fn { body = { e = Block { e; _ }; _ }; _ } -> Some e\n          | _ -> None\n        in\n        let* uid = Attrs.uid item.attrs in\n        let* role, parent = find_parent_item uid in\n        let*? () = [%equal: Attr_payloads.AssocRole.t] ItemQuote role in\n        let replace = Attrs.late_skip parent.attrs in\n        let* role =\n          Attrs.find_unique_attr\n            ~f:(function ItemQuote q -> Some q | _ -> None)\n            item.attrs\n        in\n        let origin : item_quote_origin =\n          {\n            item_kind = UA.kind_of_item parent;\n            item_ident = parent.ident;\n            position =\n              (if replace then `Replace\n               else\n                 match role.position with After -> `After | Before -> `Before);\n          }\n        in\n        let quote =\n          quote_of_expr body\n          |> Option.value_or_thunk ~default:(fun _ ->\n                 Error.assertion_failure item.span\n                 @@ \"Malformed `Quote` item: `quote_of_expr` failed. \\\n                     Expression was:\\n\"\n                 ^ [%show: A.expr] body)\n        in\n        let attrs =\n          let is_late_skip =\n            [%matches? 
Types.ItemStatus (Included { late_skip = true })]\n          in\n          item.attrs |> Attr_payloads.payloads\n          |> List.filter ~f:(fst >> is_late_skip >> not)\n          |> List.map ~f:(fun (v, span) -> Attr_payloads.to_attr v span)\n        in\n        let A.{ span; ident; _ } = item in\n        Some B.{ v = Quote { quote; origin }; span; ident; attrs }\n      in\n      (* Wraps [item_as_quote] to handle exns and fallback to the original item if the item is not a quote. *)\n      let f i =\n        try\n          item_as_quote i\n          |> Option.map ~f:(fun i -> [ i ])\n          |> Option.value ~default:(ditem i)\n        with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->\n          let error = Diagnostics.pretty_print_context_kind context kind in\n          let cast_item : A.item -> Ast.Full.item = Stdlib.Obj.magic in\n          let ast = cast_item i |> Print_rust.pitem_str in\n          let msg = error ^ \"\\nLast available AST for this item:\\n\\n\" ^ ast in\n          [ B.make_hax_error_item i.span i.ident msg ]\n      in\n      List.concat_map ~f items\n  end\n\n  include Implem\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_transform_hax_lib_inline.mli",
    "content": "(** This phase transforms nodes like:\n    {@rust[\n      hax_lib::inline({\n        let _KIND = ...;\n        ...\n        let _KIND = ...;\n        \"payload\"\n      })\n    ]}\n\n    into [hax_lib::inline(\"payload'\")] where [payload'] is a string with all the\n    binding names substituted.\n\n    Note: above `_KIND` can be `_expr`, `_pat`, `_ty` or `_constructor`. *)\n\nmodule Make (F : Features.T) : sig\n  include module type of struct\n    module FB = struct\n      include F\n      include Features.On.Quote\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n    module FA = F\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases/phase_trivialize_assign_lhs.ml",
    "content": "open! Prelude\n\nmodule%inlined_contents Make (F : Features.T) = struct\n  open Ast\n  module FA = F\n\n  module FB = struct\n    include F\n    include Features.Off.Nontrivial_lhs\n    include Features.On.Construct_base\n    include Features.On.Slice\n  end\n\n  include\n    Phase_utils.MakeBase (F) (FB)\n      (struct\n        let phase_id = [%auto_phase_name auto]\n      end)\n\n  module Implem : ImplemT.T = struct\n    let metadata = metadata\n\n    module S = struct\n      include Features.SUBTYPE.Id\n      include Features.SUBTYPE.On.Construct_base\n      include Features.SUBTYPE.On.Slice\n    end\n\n    module UA = Ast_utils.Make (F)\n    module UB = Ast_utils.Make (FB)\n\n    [%%inline_defs dmutability + dsafety_kind]\n\n    let rec updater_of_lhs (lhs : A.lhs) (rhs : B.expr) (span : span) :\n        (Local_ident.t * B.ty) * B.expr =\n      match lhs with\n      | LhsLocalVar { var; typ } -> ((var, dty span typ), rhs)\n      | LhsVecRef { e; _ } -> updater_of_lhs e rhs span\n      | LhsFieldAccessor { e; field; _ } -> (\n          let lhs = UA.expr_of_lhs span e |> dexpr in\n          match lhs.typ with\n          | TApp { ident; _ } ->\n              let rhs =\n                UB.M.expr_Construct ~constructor:ident\n                  ~is_record:true (* TODO: might not be, actually *)\n                  ~is_struct:true\n                  ~fields:[ (field, rhs) ]\n                  ~base:(Some (lhs, Features.On.construct_base))\n                  ~span ~typ:lhs.typ\n              in\n              updater_of_lhs e rhs span\n          | _ -> Error.raise { kind = ArbitraryLHS; span })\n      | LhsArrayAccessor { e; typ = _; index; _ } ->\n          let lhs = UA.expr_of_lhs span e |> dexpr in\n          let update_at : Concrete_ident.name =\n            let is_array_slice_or_vec =\n              match lhs.typ with\n              | TSlice _ | TArray _ -> true\n              | TApp { ident; _ } -> Global_ident.eq_name Alloc__vec__Vec ident\n       
       | _ -> false\n            in\n            if is_array_slice_or_vec then\n              let index_typ =\n                match index.typ with TRef { typ; _ } -> typ | _ -> index.typ\n              in\n              match index_typ with\n              | TInt { size = SSize; signedness = Unsigned } ->\n                  Rust_primitives__hax__monomorphized_update_at__update_at_usize\n              | TApp { ident; _ }\n                when Global_ident.eq_name Core__ops__range__Range ident ->\n                  Rust_primitives__hax__monomorphized_update_at__update_at_range\n              | TApp { ident; _ }\n                when Global_ident.eq_name Core__ops__range__RangeFrom ident ->\n                  Rust_primitives__hax__monomorphized_update_at__update_at_range_from\n              | TApp { ident; _ }\n                when Global_ident.eq_name Core__ops__range__RangeTo ident ->\n                  Rust_primitives__hax__monomorphized_update_at__update_at_range_to\n              | TApp { ident; _ }\n                when Global_ident.eq_name Core__ops__range__RangeFull ident ->\n                  Rust_primitives__hax__monomorphized_update_at__update_at_range_full\n              | _ -> Rust_primitives__hax__update_at\n            else Rust_primitives__hax__update_at\n          in\n          let vec_elem_type =\n            match lhs.typ with\n            | TApp { ident; args = [ GType inner; _ ] }\n              when Global_ident.eq_name Alloc__vec__Vec ident ->\n                Some inner\n            | _ -> None\n          in\n          let vec_typ = lhs.typ in\n          let lhs =\n            match vec_elem_type with\n            | Some ty ->\n                UB.call Alloc__vec__Impl_1__as_slice [ lhs ] span\n                  (TSlice { witness = Features.On.slice; ty })\n            | None -> lhs\n          in\n          let rhs = UB.call update_at [ lhs; dexpr index; rhs ] span lhs.typ in\n          let rhs =\n            if Option.is_some vec_elem_type 
then\n              UB.call Alloc__slice__Impl__to_vec [ rhs ] span vec_typ\n            else rhs\n          in\n\n          updater_of_lhs e rhs span\n      | LhsArbitraryExpr _ -> Error.raise { kind = ArbitraryLHS; span }\n\n    and dexpr_unwrapped (expr : A.expr) : B.expr =\n      let span = expr.span in\n      match expr.e with\n      | Assign { lhs; e; witness } ->\n          let (var, typ), inner_e = updater_of_lhs lhs (dexpr e) span in\n          let lhs : B.lhs = LhsLocalVar { var; typ } in\n          UB.M.expr_Assign ~lhs ~inner_e ~witness ~span ~typ:UB.unit_typ\n      | [%inline_arms \"dexpr'.*\" - Assign] ->\n          map (fun e -> B.{ e; typ = dty span expr.typ; span })\n    [@@inline_ands bindings_of dexpr - dlhs - dexpr']\n\n    [%%inline_defs \"Item.*\"]\n  end\n\n  include Implem\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/phases/phase_trivialize_assign_lhs.mli",
    "content": "module Make (F : Features.T) : sig\n  include module type of struct\n    module FA = F\n\n    module FB = struct\n      include F\n      include Features.Off.Nontrivial_lhs\n      include Features.On.Construct_base\n      include Features.On.Slice\n    end\n\n    module A = Ast.Make (F)\n    module B = Ast.Make (FB)\n    module ImplemT = Phase_utils.MakePhaseImplemT (A) (B)\n  end\n\n  include ImplemT.T\nend\n"
  },
  {
    "path": "engine/lib/phases.ml",
    "content": "[%%phases_index ()]\n\nmodule Reject = Phase_reject\n"
  },
  {
    "path": "engine/lib/prelude.ml",
    "content": "include Base\ninclude Utils\ninclude Ppx_yojson_conv_lib.Yojson_conv.Primitives\n"
  },
  {
    "path": "engine/lib/print_rust.ml",
    "content": "open! Prelude\nopen Ast\nopen Ast.Full\n\nmodule View = struct\n  include Concrete_ident.MakeRenderAPI (struct\n    include Concrete_ident.DefaultNamePolicy\n\n    let anonymous_field_transform field = \"_\" ^ field\n  end)\n\n  let to_definition_name id = (render id).name\nend\n\nmodule AnnotatedString = struct\n  module T = struct\n    type t = (span * string) list [@@deriving show]\n\n    let empty : t = []\n    let append : t -> t -> t = List.append\n\n    let concat ?(sep : t option) (l : t list) : t =\n      List.concat\n      @@ match sep with None -> l | Some (sep : t) -> List.intersperse ~sep l\n\n    let pure : span -> string -> t = fun meta s -> [ (meta, s) ]\n    let ( & ) = append\n    let to_string = List.map ~f:snd >> String.concat ~sep:\"\"\n    let split_re = Re.Pcre.regexp \"[\\t\\n ]+|[^A-Za-z0-9_]\"\n\n    let split =\n      let open Re.Pcre in\n      full_split ~rex:split_re\n      >> List.concat_map ~f:(function\n           | (Text s | Delim s | Group (_, s)) when not (String.is_empty s) ->\n               [ s ]\n           | _ -> [])\n\n    let tokenize : t -> t =\n      List.concat_map ~f:(fun (span, s) -> split s |> List.map ~f:(tup2 span))\n  end\n\n  include T\n\n  module Output = struct\n    type t = { string : string; map : (int * int * string) list }\n    [@@deriving show, yojson]\n\n    let convert (v : T.t) : t =\n      (* let annotations, map = *)\n      let map =\n        List.map v ~f:(fun (span, s) -> (String.length s, Span.id_of span, s))\n      in\n      (*   List.fold v ~init:([], []) ~f:(fun (annotations, acc) (span, s) -> *)\n      (*       let len = String.length s in *)\n      (*       let i, annotations = *)\n      (*         match List.findi ~f:(Fn.const @@ equal_span span) annotations with *)\n      (*         | Some (i, _) -> (i, annotations) *)\n      (*         | None -> (List.length annotations, annotations @ [ span ]) *)\n      (*       in *)\n      (*       (annotations, (len, i) :: acc)) *)\n      
(* in *)\n      { map; string = T.to_string v }\n\n    let raw_string : t -> string = fun { string; _ } -> string\n  end\nend\n\nlet re_matches rex (s : string) : bool =\n  try Re.Pcre.pmatch ~rex s with _ -> false\n\nmodule Raw = struct\n  open AnnotatedString\n\n  let pliteral span (e : literal) : AnnotatedString.t =\n    let pnegative = function true -> \"-\" | _ -> \"\" in\n    pure span\n    @@\n    match e with\n    | String s -> \"\\\"\" ^ String.escaped s ^ \"\\\"\"\n    | Char c -> \"'\" ^ Char.to_string c ^ \"'\"\n    | Int { value; _ } -> value\n    | Float { value; kind; negative } ->\n        pnegative negative ^ value ^ show_float_kind kind\n    | Bool b -> Bool.to_string b\n\n  let pprimitive_ident span : _ -> AnnotatedString.t =\n    pure span << function\n    | Deref -> \"deref\"\n    | Cast -> \"cast\"\n    | LogicalOp op -> \"BinOp::\" ^ [%show: logical_op] op\n\n  let rec pglobal_ident' prefix span (e : global_ident) : AnnotatedString.t =\n    let ( ! ) s = pure span (prefix ^ s) in\n    match e with\n    | `Concrete c ->\n        !(let s = View.show c in\n          if String.equal \"_\" s then \"_anon\" else s)\n    | `Primitive p -> pprimitive_ident span p\n    | `TupleType n -> ![%string \"tuple%{Int.to_string n}\"]\n    | `TupleCons n -> ![%string \"Tuple%{Int.to_string n}\"]\n    | `TupleField (n, _) -> ![%string \"proj_tuple%{Int.to_string n}\"]\n    | `Projector o -> pglobal_ident' \"proj_\" span (o :> global_ident)\n\n  let pglobal_ident = pglobal_ident' \"\"\n\n  let plocal_ident span (e : Local_ident.t) : AnnotatedString.t =\n    let name =\n      match String.chop_prefix ~prefix:\"impl \" e.name with\n      | Some name ->\n          \"impl_\"\n          ^ String.map\n              ~f:(function\n                | 'a' .. 'z' as letter -> letter\n                | 'A' .. 
'Z' as letter -> letter\n                | _ -> '_')\n              name\n      | _ -> e.name\n    in\n    let name = if String.equal name \"_\" then \"_anon\" else name in\n    pure span name\n\n  let dmutability span : _ -> AnnotatedString.t =\n    pure span << function Mutable _ -> \"mut \" | _ -> \"\"\n\n  let dbinding_mode span =\n    pure span << function ByValue -> \"\" | ByRef _ -> \"&\"\n\n  let pborrow_kind span = pure span << function Mut _ -> \"mut \" | _ -> \"\"\n\n  let rec last_of_global_ident (g : global_ident) span =\n    match g with\n    | `Concrete c -> View.to_definition_name c\n    | `Projector c -> last_of_global_ident (c :> global_ident) span\n    | _ ->\n        Diagnostics.report\n          {\n            context = DebugPrintRust;\n            kind =\n              AssertionFailure\n                {\n                  details =\n                    \"[last_of_global_ident] was given a non-concrete global \\\n                     ident\";\n                };\n            span = Span.to_thir span;\n            owner_id = Span.owner_hint span;\n          };\n        \"print_rust_last_of_global_ident_error\"\n\n  let rec pty span (e : ty) =\n    let ( ! 
) = pure span in\n    match e with\n    | TBool -> !\"bool\"\n    | TChar -> !\"char\"\n    | TInt _k -> !\"int\"\n    | TFloat _k -> !\"float\"\n    | TStr -> !\"String\"\n    | TApp { ident; args = [] } -> pglobal_ident span ident\n    | TApp { ident; args } ->\n        let args : AnnotatedString.t =\n          List.map ~f:(pgeneric_value span) args |> concat ~sep:!\", \"\n        in\n        pglobal_ident span ident & !\"<\" & args & !\">\"\n    | TArray { typ; length } -> !\"[\" & pty span typ & !\";\" & pexpr length & !\"]\"\n    | TSlice { ty; _ } -> !\"[\" & pty span ty & !\"]\"\n    | TRawPointer _ -> !\"raw_pointer!()\"\n    | TRef { typ; mut; _ } -> !\"&\" & dmutability span mut & pty span typ\n    | TParam i -> plocal_ident span i\n    | TArrow (inputs, output) ->\n        let arrow =\n          List.map ~f:(pty span) (inputs @ [ output ]) |> concat ~sep:!\" -> \"\n        in\n        !\"arrow!(\" & arrow & !\")\"\n    | TAssociatedType _ -> !\"proj_asso_type!()\"\n    | TOpaque ident -> !(View.show ident)\n    | TDyn { goals; _ } ->\n        let goals =\n          concat ~sep:!\" + \" (List.map ~f:(pdyn_trait_goal span) goals)\n        in\n        !\"dyn(\" & goals & !\")\"\n\n  and pdyn_trait_goal span { trait; non_self_args } =\n    let ( ! ) = pure span in\n    let args =\n      List.map ~f:(pgeneric_value span) non_self_args |> concat ~sep:!\", \"\n    in\n    !(View.show trait)\n    & if List.is_empty args then empty else !\"<\" & args & !\">\"\n\n  and pgeneric_value span (e : generic_value) : AnnotatedString.t =\n    match e with\n    | GLifetime _ -> pure span \"lifetime!(something)\"\n    | GType t -> pty span t\n    | _ -> pure span \"generic_value!(todo)\"\n\n  and ppat (e : pat) =\n    let ( ! 
) = pure e.span in\n    match e.p with\n    | PWild -> !\"_\"\n    | PAscription { typ; pat; _ } ->\n        !\"pat_ascription!(\" & ppat pat & !\" as \" & pty e.span typ & !\")\"\n    | PConstruct { constructor; fields; is_record; _ } ->\n        pglobal_ident e.span constructor\n        &\n        if List.is_empty fields then !\"\"\n        else if is_record then\n          !\"{\"\n          & concat ~sep:!\", \"\n              (List.map\n                 ~f:(fun { field; pat } ->\n                   !(last_of_global_ident field e.span) & !\":\" & ppat pat)\n                 fields)\n          & !\"}\"\n        else\n          !\"(\"\n          & concat ~sep:!\", \" (List.map ~f:(fun { pat; _ } -> ppat pat) fields)\n          & !\")\"\n    | POr { subpats } -> concat ~sep:!\" | \" (List.map ~f:ppat subpats)\n    | PArray { args } -> !\"[\" & concat ~sep:!\",\" (List.map ~f:ppat args) & !\"]\"\n    | PDeref { subpat; _ } -> !\"&\" & ppat subpat\n    | PConstant { lit } -> pliteral e.span lit\n    | PBinding { mut; mode; var; typ = _; subpat } ->\n        let subpat =\n          match subpat with Some (p, _) -> !\" @ \" & ppat p | None -> !\"\"\n        in\n        dbinding_mode e.span mode & dmutability e.span mut\n        & plocal_ident e.span var & subpat\n\n  and psupported_monads span m =\n    let ( ! ) = pure span in\n    match m with\n    | MException t -> !\"MException<\" & pty span t & !\">\"\n    | MResult t -> !\"MResult<\" & pty span t & !\">\"\n    | MOption -> !\"MOption\"\n\n  and pquote span quote =\n    let ( ! ) = pure span in\n    !\"quote!(\"\n    & List.map\n        ~f:(function\n          | Verbatim code -> !code\n          | Expr e -> pexpr e\n          | Pattern p -> ppat p\n          | Typ t -> pty span t)\n        quote.contents\n      |> concat ~sep:!\"\"\n    & !\")\"\n\n  and pexpr' (e : expr) =\n    let ( ! 
) = pure e.span in\n    match e.e with\n    | If { cond; then_; else_ } ->\n        let else_ =\n          match else_ with Some e -> !\" else {\" & pexpr e & !\"}\" | None -> !\"\"\n        in\n        !\"(\" & !\"if \" & pexpr cond & !\"{\" & pexpr then_ & !\"}\" & else_ & !\")\"\n    | App { f; args; generic_args; _ } ->\n        let args = concat ~sep:!\",\" @@ List.map ~f:pexpr args in\n        let generic_args =\n          let f = pgeneric_value e.span in\n          if List.is_empty generic_args then !\"\"\n          else !\"::<\" & (concat ~sep:!\",\" @@ List.map ~f generic_args) & !\">\"\n        in\n        pexpr f & generic_args & !\"(\" & args & !\")\"\n    | Literal l -> pliteral e.span l\n    | Block { e; safety_mode; _ } -> (\n        let e = !\"{\" & pexpr e & !\"}\" in\n        match safety_mode with Safe -> e | Unsafe _ -> !\"unsafe \" & e)\n    | Array l -> !\"[\" & concat ~sep:!\",\" (List.map ~f:pexpr l) & !\"]\"\n    | Construct { is_record = false; constructor; fields; _ } ->\n        let fields = List.map ~f:(snd >> pexpr) fields |> concat ~sep:!\",\" in\n        pglobal_ident e.span constructor & !\"(\" & fields & !\")\"\n    | Construct { is_record = true; constructor; fields; base; _ } ->\n        let fields =\n          List.map\n            ~f:(fun (field, value) ->\n              !(last_of_global_ident field e.span) & !\":\" & pexpr value)\n            fields\n          |> concat ~sep:!\",\"\n        in\n        let base =\n          match base with\n          | Some (base, _) -> !\"..(\" & pexpr base & !\")\"\n          | _ -> !\"\"\n        in\n        pglobal_ident e.span constructor & !\"{\" & fields & !\",\" & base & !\"}\"\n    | Match { scrutinee; arms } ->\n        let arms =\n          List.map\n            ~f:(fun { arm = { arm_pat; body; guard }; _ } ->\n              let guard : t =\n                guard\n                |> Option.map\n                     ~f:\n                       (fun { guard = IfLet { lhs; rhs; _ }; _ 
} ->\n                          !\" if let \" & ppat lhs & !\" = \" & pexpr rhs\n                         : guard -> t)\n                |> Option.value ~default:!\"\"\n              in\n              ppat arm_pat & guard & !\" => {\" & pexpr body & !\"}\")\n            arms\n          |> concat ~sep:!\",\"\n        in\n        !\"(match (\" & pexpr scrutinee & !\") {\" & arms & !\"})\"\n    (* | Let { monadic = Some _; _ } -> !\"monadic_let!()\" *)\n    | Let { monadic; lhs; rhs; body } ->\n        (* TODO: here, [rhs.typ]! *)\n        let lhs_typ = pty lhs.span lhs.typ in\n        let rhs_typ = pty rhs.span rhs.typ in\n        let note =\n          if String.equal (to_string lhs_typ) (to_string rhs_typ) then !\"\"\n          else !\"#[note(\\\"rhs.typ=\" & rhs_typ & !\"\\\")]\\n\"\n        in\n        let monadic =\n          match monadic with\n          | Some (m, _) ->\n              !\"#[monadic_let(\" & psupported_monads e.span m & !\")]\"\n          | _ -> !\"\"\n        in\n        note & monadic & !\"let \" & ppat lhs & !\": \" & lhs_typ & !\" = {\"\n        & pexpr rhs & !\"};\" & pexpr body\n    | LocalVar local_ident -> plocal_ident e.span local_ident\n    | GlobalVar global_ident -> pglobal_ident e.span global_ident\n    | Ascription { e = e'; typ } ->\n        !\"(\" & pexpr e' & !\" as \" & pty e.span typ & !\")\"\n    | MacroInvokation { macro; args; _ } ->\n        pglobal_ident e.span macro & !\"!(\" & !args & !\")\"\n    | Assign { lhs; e; _ } -> !\"(\" & plhs lhs e.span & !\" = \" & pexpr e & !\")\"\n    | Loop { body; kind; state; _ } -> (\n        let header =\n          match kind with\n          | UnconditionalLoop -> !\"loop\"\n          | WhileLoop { condition; _ } -> !\"while \" & pexpr condition\n          | ForLoop { it; pat; _ } ->\n              !\"for \" & ppat pat & !\" in (\" & pexpr it & !\")\"\n          | ForIndexLoop { start; end_; var; _ } ->\n              !\"for \" & plocal_ident e.span var & !\" in (\" & pexpr start\n      
        & !\")..(\" & pexpr end_ & !\")\"\n        in\n        let body_wrapper body =\n          match state with\n          | Some { bpat; _ } -> !\"|\" & ppat bpat & !\"| {\" & body & !\"}\"\n          | None -> body\n        in\n        let main = header & !\" { \" & body_wrapper (pexpr body) & !\" }\" in\n        match state with\n        | Some { init; _ } -> !\"(\" & main & !\")(\" & pexpr init & !\")\"\n        | None -> main)\n    | Break { e; _ } -> !\"(break (\" & pexpr e & !\"))\"\n    | Continue { acc = None; _ } -> !\"continue\"\n    | Continue { acc = Some (e, _); _ } ->\n        !\"state_passing_continue!(\" & pexpr e & !\")\"\n    | Return { e; _ } -> !\"(return \" & pexpr e & !\")\"\n    | QuestionMark { e; _ } -> !\"(\" & pexpr e & !\")?\"\n    | Borrow { kind; e; _ } ->\n        !\"&\" & pborrow_kind e.span kind & !\"(\" & pexpr e & !\")\"\n    | AddressOf _ -> !\"address_of\"\n    | EffectAction _ -> !\"EffectAction\"\n    | Closure { params; body; _ } ->\n        let params = List.map ~f:ppat params |> concat ~sep:!\",\" in\n        !\"(|\" & params & !\"| {\" & pexpr body & !\"})\"\n    | Quote quote -> pquote e.span quote\n  (* | _ -> \"todo!()\" *)\n\n  and plhs (e : lhs) span =\n    let ( ! ) = pure span in\n    match e with\n    | LhsFieldAccessor { e; field; _ } ->\n        let field =\n          match field with\n          | `Projector field -> (field :> global_ident)\n          | _ -> field\n        in\n        plhs e span & !\".\" & !(last_of_global_ident field span)\n    | LhsArrayAccessor { e; index; _ } ->\n        plhs e span & !\"[\" & pexpr index & !\"]\"\n    | LhsLocalVar { var; _ } -> plocal_ident span var\n    | LhsVecRef { e; _ } -> plhs e span\n    | LhsArbitraryExpr { e; _ } -> pexpr e\n\n  and pexpr (e : expr) =\n    let ( ! ) = pure e.span in\n    let need_braces = [%matches? Let _ | Loop _] e.e in\n    let e = pexpr' e in\n    if need_braces then !\"{\" & e & !\"}\" else e\n\n  let pattr (attr : attr) =\n    let ( ! 
) = pure attr.span in\n    match attr.kind with\n    | Tool { path; tokens } -> !\"#[\" & !path & !\"(\" & !tokens & !\")\" & !\"]\"\n    | DocComment { kind = _; body } -> !\"/**\" & !body & !\"*/\"\n\n  let pattrs attrs = List.map ~f:pattr attrs |> concat\n\n  let pgeneric_param_kind span (pk : generic_param_kind) =\n    let ( ! ) = pure span in\n    match pk with\n    | GPLifetime _ -> (empty, !\": 'unk\")\n    | GPType -> (empty, empty)\n    | GPConst { typ } -> (!\"const \", !\":\" & pty span typ)\n\n  let pgeneric_param (p : generic_param) =\n    let prefix, suffix = pgeneric_param_kind p.span p.kind in\n    let name =\n      match p.ident.name with\n      | \"_\" -> \"Anonymous\"\n      | \"Self\" -> \"Self_\"\n      | name -> name\n    in\n    let id = plocal_ident p.span { p.ident with name } in\n    pattrs p.attrs & prefix & id & suffix\n\n  let pgeneric_params (pl : generic_param list) =\n    match pl with\n    | { span; _ } :: _ ->\n        let ( ! ) = pure span in\n        !\"<\" & concat ~sep:!\", \" (List.map ~f:pgeneric_param pl) & !\">\"\n    | _ -> empty\n\n  let ptrait_goal span { trait; args } =\n    let ( ! ) = pure span in\n    let args = List.map ~f:(pgeneric_value span) args |> concat ~sep:!\", \" in\n    !(View.show trait)\n    & if List.is_empty args then empty else !\"<\" & args & !\">\"\n\n  let pprojection_predicate span (pp : projection_predicate) =\n    let ( ! ) = pure span in\n    pp.impl.goal.args\n    |> List.find_map ~f:(function GType ty -> Some ty | _ -> None)\n    |> Option.map ~f:(pty span)\n    |> Option.value ~default:!\"unknown_self\"\n    & !\" :\"\n    & !(View.show pp.impl.goal.trait)\n    & !\"<\"\n    & !(View.to_definition_name pp.assoc_item)\n    & !\" = \" & pty span pp.typ & !\">\"\n\n  let pgeneric_constraint span (p : generic_constraint) =\n    let ( ! 
) = pure span in\n    match p with\n    | GCLifetime _ -> !\"'unk: 'unk\"\n    | GCType { goal; _ } -> !\"_: \" & ptrait_goal span goal\n    | GCProjection pp -> pprojection_predicate span pp\n\n  let pgeneric_constraints span (constraints : generic_constraint list) =\n    if List.is_empty constraints then empty\n    else\n      let ( ! ) = pure span in\n      !\" where \"\n      & concat ~sep:!\",\" (List.map ~f:(pgeneric_constraint span) constraints)\n\n  let pvariant_body span { name = _; arguments; attrs = _; is_record } =\n    let ( ! ) = pure span in\n    if is_record then\n      !\"{\"\n      & concat ~sep:!\",\"\n          (List.map arguments ~f:(fun (id, ty, attrs) ->\n               pattrs attrs & !(View.to_definition_name id) & !\":\" & pty span ty))\n      & !\"}\"\n    else\n      !\"(\"\n      & concat ~sep:!\",\"\n          (List.map arguments ~f:(fun (_, ty, attrs) ->\n               pattrs attrs & pty span ty))\n      & !\")\"\n\n  let pvariant span (variant : variant) =\n    let ( ! ) = pure span in\n    pattrs variant.attrs\n    & !(View.to_definition_name variant.name)\n    & pvariant_body span variant\n\n  let pvariants span variants =\n    let ( ! ) = pure span in\n    concat ~sep:!\", \" (List.map ~f:(pvariant span) variants)\n\n  let pparam span ({ pat; typ; typ_span; attrs } : param) =\n    let ( ! ) = pure span in\n    pattrs attrs & ppat pat & !\": \"\n    & pty (Option.value ~default:pat.span typ_span) typ\n\n  let pparams span (l : param list) =\n    let ( ! ) = pure span in\n    !\"(\" & List.map ~f:(pparam span) l |> concat ~sep:!\",\" & !\")\"\n\n  let ptrait_item (ti : trait_item) =\n    let ( ! 
) = pure ti.ti_span in\n    let generics = pgeneric_params ti.ti_generics.params in\n    let bounds = pgeneric_constraints ti.ti_span ti.ti_generics.constraints in\n    let ident = !(View.to_definition_name ti.ti_ident) in\n    pattrs ti.ti_attrs\n    &\n    match ti.ti_v with\n    | TIType _ -> !\"type \" & ident & !\": TodoPrintRustBoundsTyp;\"\n    | TIFn ty ->\n        let inputs, output =\n          match ty with\n          | TArrow (inputs, output) -> (inputs, output)\n          | ty -> ([], ty)\n        in\n        let return_type = pty ti.ti_span output in\n        let params =\n          List.map ~f:(fun typ -> !\"_: \" & pty ti.ti_span typ) inputs\n          |> concat ~sep:!\",\"\n        in\n        !\"fn \" & ident & generics & !\"(\" & params & !\") -> \" & return_type\n        & bounds & !\";\"\n    | TIDefault { params; body; _ } ->\n        let params = pparams ti.ti_span params in\n        let generics_constraints =\n          pgeneric_constraints ti.ti_span ti.ti_generics.constraints\n        in\n        let return_type = pty ti.ti_span body.typ in\n        let body = pexpr body in\n        !\"fn \" & ident & generics & !\"(\" & params & !\") -> \" & return_type\n        & generics_constraints & !\"{\" & body & !\"}\"\n\n  let pimpl_item (ii : impl_item) =\n    let span = ii.ii_span in\n    let ( ! ) = pure span in\n    let generics = pgeneric_params ii.ii_generics.params in\n    let bounds = pgeneric_constraints span ii.ii_generics.constraints in\n    let ident = !(View.to_definition_name ii.ii_ident) in\n    pattrs ii.ii_attrs\n    &\n    match ii.ii_v with\n    | IIType _ -> !\"type \" & ident & !\": TodoPrintRustBoundsTyp;\"\n    | IIFn { body; params } ->\n        let return_type = pty span body.typ in\n        !\"fn \" & ident & generics & pparams span params & !\" -> \" & return_type\n        & bounds & !\"{\" & pexpr body & !\"}\"\n\n  let pitem (e : item) =\n    let exception NotImplemented in\n    let ( ! 
) = pure e.span in\n    try\n      let pi =\n        match e.v with\n        | Fn { name; body; generics; params; safety } ->\n            let return_type = pty e.span body.typ in\n            (match safety with Safe -> !\"fn \" | Unsafe _ -> !\"unsafe fn \")\n            & !(View.to_definition_name name)\n            & pgeneric_params generics.params\n            & pparams e.span params & !\" -> \" & return_type\n            & pgeneric_constraints e.span generics.constraints\n            & !\"{\" & pexpr body & !\"}\"\n        | TyAlias { name; generics; ty } ->\n            !\"type \"\n            & !(View.to_definition_name name)\n            & pgeneric_params generics.params\n            & pgeneric_constraints e.span generics.constraints\n            & !\"=\" & pty e.span ty & !\";\"\n        | Type { name; generics; variants = [ variant ]; is_struct = true } ->\n            !\"struct \"\n            & !(View.to_definition_name name)\n            & pgeneric_params generics.params\n            & pgeneric_constraints e.span generics.constraints\n            & pvariant_body e.span variant\n            & if variant.is_record then !\"\" else !\";\"\n        | Type { name; generics; variants; _ } ->\n            !\"enum \"\n            & !(View.to_definition_name name)\n            & pgeneric_params generics.params\n            & pgeneric_constraints e.span generics.constraints\n            &\n            if List.is_empty variants then empty\n            else !\"{\" & pvariants e.span variants & !\"}\"\n        | Trait { name; generics; items; safety } ->\n            let safety =\n              match safety with Safe -> !\"\" | Unsafe _ -> !\"unsafe \"\n            in\n            safety & !\"trait \"\n            & !(View.to_definition_name name)\n            & pgeneric_params generics.params\n            & pgeneric_constraints e.span generics.constraints\n            & !\"{\"\n            & List.map ~f:ptrait_item items |> concat ~sep:!\"\\n\"\n            & 
!\"}\"\n        | Impl { generics; self_ty; of_trait; items; parent_bounds = _; safety }\n          ->\n            let trait =\n              pglobal_ident e.span (`Concrete (fst of_trait))\n              & !\"<\"\n              & concat ~sep:!\",\"\n                  (List.map ~f:(pgeneric_value e.span) (snd of_trait))\n              & !\">\"\n            in\n            let safety =\n              match safety with Safe -> !\"\" | Unsafe _ -> !\"unsafe \"\n            in\n            safety & !\"impl \"\n            & pgeneric_params generics.params\n            & trait & !\" for \" & pty e.span self_ty\n            & pgeneric_constraints e.span generics.constraints\n            & !\"{\"\n            & List.map ~f:pimpl_item items |> concat ~sep:!\"\\n\"\n            & !\"}\"\n        | Quote { quote; _ } -> pquote e.span quote & !\";\"\n        | _ -> raise NotImplemented\n      in\n      pattrs e.attrs & pi\n    with NotImplemented ->\n      !(\"\\n/** print_rust: pitem: not implemented  (item: \"\n       ^ [%show: concrete_ident] e.ident\n       ^ \") */\\nconst _: () = ();\\n\")\nend\n\nlet rustfmt (s : string) : string =\n  match\n    Hax_io.request (PrettyPrintRust s) ~expected:\"PrettyPrintedRust\" (function\n      | Types.PrettyPrintedRust s -> Some s\n      | _ -> None)\n  with\n  | Ok formatted -> formatted\n  | Err error ->\n      let err =\n        [%string\n          \"\\n\\n\\\n           #######################################################\\n\\\n           ########### WARNING: Failed formatting ###########\\n\\\n           %{error}\\n\\\n           STRING:\\n\\\n           %{s}\\n\\\n           #######################################################\\n\"]\n      in\n      Stdio.prerr_endline err;\n      s\n\nexception RetokenizationFailure\n\nlet rustfmt_annotated' (x : AnnotatedString.t) : AnnotatedString.t =\n  let original = AnnotatedString.tokenize x in\n  let tokens = AnnotatedString.(to_string x |> rustfmt |> split) in\n  let is_symbol = 
re_matches AnnotatedString.split_re in\n  let all_symbol = List.for_all ~f:(snd >> is_symbol) in\n  let f (original, result) s =\n    let last =\n      List.hd result |> Option.map ~f:fst\n      |> Option.value_or_thunk ~default:Span.dummy\n    in\n    let original', tuple =\n      match List.split_while ~f:(snd >> String.equal s >> not) original with\n      | prev, (span, s') :: original' ->\n          assert (String.equal s s');\n          if all_symbol prev then\n            (* it is fine to skip symbols *)\n            (original', (span, s))\n          else if is_symbol s then\n            (* if [s] is a symbol as well, this is fine *)\n            (original, (Span.dummy (), s))\n          else (\n            Stdio.prerr_endline @@ \"\\n##### RUSTFMT TOKEN ERROR #####\";\n            Stdio.prerr_endline @@ \"s=\" ^ s;\n            raise RetokenizationFailure)\n      | _ -> (original, (last, s))\n    in\n    (original', tuple :: result)\n  in\n  let r = snd @@ List.fold_left tokens ~init:(original, []) ~f in\n  List.rev r\n\nlet rustfmt_annotated (x : AnnotatedString.t) : AnnotatedString.t =\n  let rf = Option.value ~default:\"\" (Sys.getenv \"HAX_RUSTFMT\") in\n  if String.equal rf \"no\" then x\n  else try rustfmt_annotated' x with RetokenizationFailure -> x\n\nmodule type T = sig\n  val pitem : item -> AnnotatedString.Output.t\n  val pitems : item list -> AnnotatedString.Output.t\n  val pitem_str : item -> string\n  val pexpr_str : expr -> string\n  val pty_str : ty -> string\nend\n\nmodule Traditional : T = struct\n  let pitem : item -> AnnotatedString.Output.t =\n    Raw.pitem >> rustfmt_annotated >> AnnotatedString.Output.convert\n\n  let pitems : item list -> AnnotatedString.Output.t =\n    List.concat_map ~f:Raw.pitem\n    >> rustfmt_annotated >> AnnotatedString.Output.convert\n\n  let pitem_str : item -> string = pitem >> AnnotatedString.Output.raw_string\n\n  let pexpr_str (e : expr) : string =\n    let e = Raw.pexpr e in\n    let ( ! 
) = AnnotatedString.pure @@ Span.dummy () in\n    let ( & ) = AnnotatedString.( & ) in\n    let prefix = \"fn expr_wrapper() {\" in\n    let suffix = \"}\" in\n    let item = !prefix & e & !suffix in\n    rustfmt_annotated item |> AnnotatedString.Output.convert\n    |> AnnotatedString.Output.raw_string |> Stdlib.String.trim\n    |> String.chop_suffix_if_exists ~suffix\n    |> String.chop_prefix_if_exists ~prefix\n    |> Stdlib.String.trim\n\n  let pty_str (e : ty) : string =\n    let e = Raw.pty (Span.dummy ()) e in\n    let ( ! ) = AnnotatedString.pure @@ Span.dummy () in\n    let ( & ) = AnnotatedString.( & ) in\n    let prefix = \"type TypeWrapper = \" in\n    let suffix = \";\" in\n    let item = !prefix & e & !suffix in\n    rustfmt_annotated item |> AnnotatedString.Output.convert\n    |> AnnotatedString.Output.raw_string |> Stdlib.String.trim\n    |> String.chop_suffix_if_exists ~suffix\n    |> String.chop_prefix_if_exists ~prefix\n    |> Stdlib.String.trim\nend\n\n(* module Experimental : T = struct *)\n(*   module GenericRustPrinter = Generic_rust_printer.Make (Features.Full) *)\n\n(*   let pitem : item -> AnnotatedString.Output.t = *)\n(*     GenericRustPrinter.item () *)\n(*     >> Generic_printer_api.AnnotatedString.to_spanned_strings *)\n(*     >> AnnotatedString.Output.convert *)\n\n(*   let pitems : item list -> AnnotatedString.Output.t = *)\n(*     GenericRustPrinter.items () *)\n(*     >> Generic_printer_api.AnnotatedString.to_spanned_strings *)\n(*     >> AnnotatedString.Output.convert *)\n\n(*   let pexpr : expr -> AnnotatedString.Output.t = *)\n(*     GenericRustPrinter.expr () *)\n(*     >> Generic_printer_api.AnnotatedString.to_spanned_strings *)\n(*     >> AnnotatedString.Output.convert *)\n\n(*   let pitem_str : item -> string = *)\n(*     GenericRustPrinter.item () >> Generic_printer_api.AnnotatedString.to_string *)\n\n(*   let pexpr_str : expr -> string = *)\n(*     GenericRustPrinter.expr () >> Generic_printer_api.AnnotatedString.to_string 
*)\n\n(*   let pty_str : ty -> string = *)\n(*     GenericRustPrinter.ty () >> Generic_printer_api.AnnotatedString.to_string *)\n(* end *)\n\nlet experimental =\n  Sys.getenv \"HAX_ENGINE_EXPERIMENTAL_RUST_PRINTER\" |> Option.is_some\n\ninclude\n  (val if experimental then failwith \"todo\" (*module Experimental : T*)\n       else (module Traditional : T))\n"
  },
  {
    "path": "engine/lib/print_rust.mli",
    "content": "open Ast.Full\n\nmodule AnnotatedString : sig\n  module Output : sig\n    type t [@@deriving show, yojson]\n\n    val raw_string : t -> string\n  end\nend\n\nval pitem : item -> AnnotatedString.Output.t\nval pitems : item list -> AnnotatedString.Output.t\nval pitem_str : item -> string\nval pexpr_str : expr -> string\nval pty_str : ty -> string\n"
  },
  {
    "path": "engine/lib/profiling.ml",
    "content": "open Prelude\n\n(** Is profiling enabled? *)\nlet enabled = ref false\n\n(** Profiles the function `f`, that operates in a given context over a given\n    quantity of things it is processing. *)\nlet profile (type b) (context : Diagnostics.Context.t) (quantity : int)\n    (f : unit -> b) : b =\n  if !enabled (* `!` derefs, it's not a negation *) then (\n    let time0 = Core.Time_ns.now () in\n    let mem0 = Core.Gc.minor_words () in\n    let finalize errored =\n      if !enabled (* `!` derefs, it's not a negation *) then\n        let time1 = Core.Time_ns.now () in\n        let mem1 = Core.Gc.minor_words () in\n        let time_ns = Core.Time_ns.diff time1 time0 in\n        let memory = mem1 - mem0 in\n        Hax_io.write\n          (Types.ProfilingData\n             {\n               context = Diagnostics.Context.display context;\n               time_ns =\n                 Core.Time_ns.Span.to_int63_ns time_ns |> Int63.to_string;\n               memory = Int.to_string memory;\n               quantity = Int.to_int64 quantity;\n               errored;\n             })\n      else ()\n    in\n    try\n      let result = f () in\n      finalize false;\n      result\n    with e ->\n      finalize true;\n      raise e)\n  else f ()\n"
  },
  {
    "path": "engine/lib/rust_engine_types.ml",
    "content": "(** This module re-exports and renames a subset of `Types`. `Types` contains\n    both the modules from the frontend and from the Rust engine. Thus, some\n    types are deduplicated, and get renamed. *)\n\nmodule Renamed = struct\n  type arm = Types.arm2\n  type attribute = Types.attribute2\n  type attribute_kind = Types.attribute_kind2\n  type binding_mode = Types.binding_mode2\n  type borrow_kind = Types.borrow_kind2\n  type def_id = Types.def_id_inner\n  type global_id = Types.global_id\n  type expr_kind = Types.expr_kind2\n  type impl_expr = Types.impl_expr2\n  type param = Types.param2\n  type pat_kind = Types.pat_kind2\n  type projection_predicate = Types.projection_predicate2\n  type region = Types.region2\n  type span = Types.span2\nend\n\ninclude Types\ninclude Renamed\n"
  },
  {
    "path": "engine/lib/side_effect_utils.ml",
    "content": "open! Prelude\n\nmodule MakeSI\n    (F :\n      Features.T\n        with type monadic_binding = Features.Off.monadic_binding\n         and type for_index_loop = Features.Off.for_index_loop) =\nstruct\n  module AST = Ast.Make (F)\n  module U = Ast_utils.Make (F)\n  include Ast\n  include AST\n  module Visitors = Ast_visitors.Make (F)\n\n  module SideEffects = struct\n    (* TODO: consider non-terminaison and closed-mutation *)\n    type t = {\n      reads_local_mut : U.Sets.TypedLocalIdent.t;  (** only free variables *)\n      writes_local_mut : U.Sets.TypedLocalIdent.t;  (** only free variables *)\n      deep_mutation : bool;\n      return : ty option;\n      continue : ty option option; (* TODO: continue with labels *)\n      break : ty option; (* TODO: break with labels *)\n    }\n    [@@deriving show]\n\n    let zero : t =\n      {\n        reads_local_mut = Set.empty (module U.TypedLocalIdent);\n        writes_local_mut = Set.empty (module U.TypedLocalIdent);\n        deep_mutation = false;\n        return = None;\n        continue = None;\n        break = None;\n      }\n\n    let plus : t -> t -> t =\n      let merge_ty x y =\n        if not @@ U.ty_equality x y then\n          Diagnostics.failure ~context:(Other \"side_effect_utils.ml\")\n            ~span:(Span.dummy ())\n            (AssertionFailure\n               {\n                 details =\n                   \"Expected two exact same types, got x=\"\n                   ^ (x |> U.LiftToFullAst.ty |> Print_rust.pty_str)\n                   ^ \" and y=\"\n                   ^ (y |> U.LiftToFullAst.ty |> Print_rust.pty_str);\n               })\n        else x\n      in\n      let merge_opts (type x) (f : x -> x -> x) (a : x option) (b : x option) =\n        match (a, b) with\n        | Some a, Some b -> Some (f a b)\n        | Some a, None | None, Some a -> Some a\n        | None, None -> None\n      in\n      fun x y ->\n        {\n          reads_local_mut = Set.union x.reads_local_mut 
y.reads_local_mut;\n          writes_local_mut = Set.union x.writes_local_mut y.writes_local_mut;\n          deep_mutation = x.deep_mutation || y.deep_mutation;\n          return = merge_opts merge_ty x.return y.return;\n          continue =\n            merge_opts\n              (fun x y ->\n                match (x, y) with\n                | Some x, Some y -> Some (merge_ty x y)\n                | _ -> None)\n              x.continue y.continue;\n          break = merge_opts merge_ty x.break y.break;\n        }\n\n    let reads (var : Local_ident.t) (ty : ty) =\n      {\n        zero with\n        reads_local_mut = Set.singleton (module U.TypedLocalIdent) (var, ty);\n      }\n\n    let writes (var : Local_ident.t) (ty : ty) =\n      {\n        zero with\n        writes_local_mut = Set.singleton (module U.TypedLocalIdent) (var, ty);\n      }\n\n    let no_deep_mut_or_cf : t -> bool =\n      [%matches?\n        {\n          deep_mutation = false;\n          return = None;\n          continue = None;\n          break = None;\n          _;\n        }]\n\n    let reads_local_mut_only : t -> bool =\n     fun x -> no_deep_mut_or_cf x && Set.is_empty x.writes_local_mut\n\n    let commute : t -> t -> bool =\n      curry @@ function\n      | ( ({ reads_local_mut = xr; writes_local_mut = xw; _ } as x),\n          ({ reads_local_mut = yr; writes_local_mut = yw; _ } as y) )\n        when no_deep_mut_or_cf x && no_deep_mut_or_cf y ->\n          let open Set in\n          let x = union xw xr in\n          let y = union yw yr in\n          is_empty @@ union (inter xw y) (inter yw x)\n      | x, y when reads_local_mut_only x || reads_local_mut_only y -> true\n      | _ -> false\n\n    class ['s] monoid =\n      object\n        method private zero = zero\n        method private plus = plus\n      end\n\n    let without_rw_vars (vars : U.Sets.Local_ident.t) (effects : t) =\n      let without = Set.filter ~f:(fst >> Set.mem vars >> not) in\n      {\n        effects with\n        
writes_local_mut = without effects.writes_local_mut;\n        reads_local_mut = without effects.reads_local_mut;\n      }\n  end\n\n  module Hoist = struct\n    type binding = pat * expr [@@deriving show]\n    type t = { lbs : binding list; effects : SideEffects.t } [@@deriving show]\n\n    let plus x y : t =\n      let effects = SideEffects.plus x.effects y.effects in\n      { lbs = x.lbs @ y.lbs; effects }\n\n    let zero : t = { lbs = []; effects = SideEffects.zero }\n    let flbs { lbs; _ } = lbs\n    let feff { effects; _ } = effects\n    let no_lbs effects = { lbs = []; effects }\n\n    class ['s] monoid =\n      object\n        method private zero = zero\n        method private plus = plus\n      end\n\n    class ['s] bool_monoid =\n      object\n        method private zero = false\n        method private plus = ( && )\n      end\n\n    module CollectContext = struct\n      type t = { mutable fresh_id : int }\n\n      let fresh_local_ident (self : t) : Local_ident.t =\n        self.fresh_id <- self.fresh_id + 1;\n        {\n          name = \"hoist\" ^ Int.to_string self.fresh_id;\n          id = Local_ident.mk_id SideEffectHoistVar (-1) (* todo *);\n        }\n\n      let empty = { fresh_id = 0 }\n    end\n\n    module HoistSeq = struct\n      let ( let* ) x f = Option.bind ~f x\n\n      let many (ctx : CollectContext.t) (l : (expr * t) list)\n          (next : expr list -> t -> expr * t) =\n        let fresh () = CollectContext.fresh_local_ident ctx in\n        let effects, l =\n          List.fold_right l ~init:(SideEffects.zero, [])\n            ~f:(fun (expr, { lbs; effects = effects0 }) (effects, l) ->\n              ( SideEffects.plus effects0 effects,\n                (if\n                   SideEffects.reads_local_mut_only effects0\n                   && SideEffects.commute effects0 effects\n                 then (lbs, expr)\n                 else\n                   let var =\n                     (* if the body is a local variable, use that,\n     
                   otherwise get a fresh one *)\n                     match snd @@ U.collect_let_bindings expr with\n                     (* TODO: this optimization is disabled because it fails in cases like f(x, {x = 3; x}) *)\n                     | { e = LocalVar var; _ } when false -> var\n                     | _ -> fresh ()\n                   in\n                   ( lbs @ [ (U.make_var_pat var expr.typ expr.span, expr) ],\n                     { expr with e = LocalVar var } ))\n                :: l ))\n        in\n        let lbs = List.concat_map ~f:fst l in\n        next (List.map ~f:snd l) { lbs; effects }\n\n      let err_hoist_invariant span (type r) (location : string) : r =\n        Diagnostics.failure ~context:(Other \"HoistSeq\") ~span\n          (AssertionFailure\n             {\n               details =\n                 \"[HoistSeq.many] broke its invariant (location:\" ^ location\n                 ^ \")\";\n             })\n\n      let one (ctx : CollectContext.t) (e : expr * t)\n          (next : expr -> t -> expr * t) =\n        many ctx [ e ] (function\n          | [ e ] -> next e\n          | _ -> err_hoist_invariant (fst e).span Stdlib.__LOC__)\n    end\n\n    let let_of_binding ((pat, rhs) : pat * expr) (body : expr) : expr =\n      U.make_let pat rhs body\n\n    let lets_of_bindings (bindings : (pat * expr) list) (body : expr) : expr =\n      List.fold_right ~init:body ~f:let_of_binding bindings\n\n    let collect_and_hoist_effects_object =\n      object (self)\n        (* inherit [_] expr_mapreduce *)\n        inherit [_] Visitors.mapreduce as super\n        inherit [_] monoid as m\n\n        (* method visit_t _ x = (x, m#zero) *)\n        (* method visit_mutability _ _ x = (x, m#zero) *)\n\n        (* Collecting effects bottom up *)\n        method! 
visit_lhs (env : CollectContext.t) lhs =\n          match lhs with\n          | LhsLocalVar { var; typ } ->\n              (LhsLocalVar { var; typ }, no_lbs @@ SideEffects.writes var typ)\n          | LhsArbitraryExpr { e; witness } ->\n              let deep_mutation =\n                (object\n                   inherit [_] Visitors.reduce as _super\n                   inherit [_] bool_monoid as _m\n\n                   (* method visit_t _ _ = m#zero *)\n                   (* method visit_mutability _ _ _ = m#zero *)\n                   (* method! visit_Deref _ _ _ = true *)\n                   method! visit_item () _ = false\n                end)\n                  #visit_expr\n                  () e\n              in\n              ( LhsArbitraryExpr { e; witness },\n                no_lbs { SideEffects.zero with deep_mutation } )\n          | _ -> super#visit_lhs env lhs\n\n        method! visit_expr (env : CollectContext.t) e =\n          match e.e with\n          | LocalVar v -> (e, no_lbs (SideEffects.reads v e.typ))\n          | QuestionMark { e = e'; return_typ; witness } ->\n              HoistSeq.one env (self#visit_expr env e') (fun e' effects ->\n                  let effects =\n                    m#plus effects\n                      (no_lbs\n                         { SideEffects.zero with return = Some return_typ })\n                  in\n                  ( { e with e = QuestionMark { e = e'; return_typ; witness } },\n                    effects ))\n          | Return { e = e'; witness } ->\n              HoistSeq.one env (self#visit_expr env e') (fun e' effects ->\n                  ( { e with e = Return { e = e'; witness } },\n                    m#plus effects\n                      (no_lbs { SideEffects.zero with return = Some e'.typ }) ))\n          | Break { e = e'; label; acc; witness } ->\n              HoistSeq.one env (self#visit_expr env e') (fun e' effects ->\n                  ( { e with e = Break { e = e'; acc; label; witness } },\n 
                   m#plus effects\n                      (no_lbs { SideEffects.zero with break = Some e'.typ }) ))\n          | Continue { acc = e'; label; witness } -> (\n              let ceffect =\n                no_lbs\n                  {\n                    SideEffects.zero with\n                    continue = Some (Option.map ~f:(fun (e, _) -> e.typ) e');\n                  }\n              in\n              match e' with\n              | Some (e', witness') ->\n                  HoistSeq.one env (self#visit_expr env e') (fun e' effects ->\n                      ( {\n                          e with\n                          e =\n                            Continue\n                              { acc = Some (e', witness'); label; witness };\n                        },\n                        m#plus ceffect effects ))\n              | None -> (e, ceffect))\n          | Loop { body; kind; state; label; witness; control_flow } ->\n              let kind' =\n                match kind with\n                | UnconditionalLoop -> []\n                | ForLoop { it; _ } -> [ self#visit_expr env it ]\n                | WhileLoop { condition; _ } ->\n                    [ self#visit_expr env condition ]\n                | _ -> .\n              in\n              let state' =\n                Option.map\n                  ~f:(fun { init; _ } -> self#visit_expr env init)\n                  state\n              in\n              let kind_state = kind' @ Option.to_list state' in\n              (* effects to realize before the loop *)\n              (* let effects_before = List.fold ~init:zero ~f:plus kind_state in *)\n              HoistSeq.many env kind_state (fun l effects ->\n                  let kind =\n                    match (l, kind) with\n                    | condition :: ([ _ ] | []), WhileLoop { witness; _ } ->\n                        WhileLoop { condition; witness }\n                    | it :: ([ _ ] | []), ForLoop { pat; witness; _ } ->\n          
              ForLoop { pat; witness; it }\n                    | ([ _ ] | []), UnconditionalLoop -> UnconditionalLoop\n                    | _, ForIndexLoop _ -> .\n                    | _ -> HoistSeq.err_hoist_invariant e.span Stdlib.__LOC__\n                  in\n                  let state =\n                    match (l, state) with\n                    | (_ :: [ state ] | [ state ]), Some { witness; bpat; _ } ->\n                        Some { witness; bpat; init = state }\n                    | ([ _ ] | []), None -> None\n                    | _ -> HoistSeq.err_hoist_invariant e.span Stdlib.__LOC__\n                  in\n                  (* by now, the \"inputs\" of the loop are hoisted as let if needed *)\n                  let body, { lbs; effects = body_effects } =\n                    self#visit_expr env body\n                  in\n                  (* the loop construction **handles** the effect continue and break *)\n                  let body_effects =\n                    no_lbs { body_effects with continue = None; break = None }\n                  in\n                  let effects = m#plus effects body_effects in\n                  let body = lets_of_bindings lbs body in\n                  ( {\n                      e with\n                      e =\n                        Loop { body; kind; state; label; witness; control_flow };\n                    },\n                    effects ))\n          | If { cond; then_; else_ } ->\n              HoistSeq.one env (self#visit_expr env cond) (fun cond effects ->\n                  let then_, { lbs = lbs_then; effects = ethen } =\n                    self#visit_expr env then_\n                  in\n                  let else_, { lbs = lbs_else; effects = eelse } =\n                    match Option.map ~f:(self#visit_expr env) else_ with\n                    | Some (else_, eelse) -> (Some else_, eelse)\n                    | None -> (None, m#zero)\n                  in\n                  let then_ = 
lets_of_bindings lbs_then then_ in\n                  let else_ = Option.map ~f:(lets_of_bindings lbs_else) else_ in\n                  let effects =\n                    m#plus (m#plus (no_lbs ethen) (no_lbs eelse)) effects\n                  in\n                  ({ e with e = If { cond; then_; else_ } }, effects))\n          | App { f; args; generic_args; trait; bounds_impls } ->\n              HoistSeq.many env\n                (List.map ~f:(self#visit_expr env) (f :: args))\n                (fun l effects ->\n                  let f, args =\n                    match l with\n                    | f :: args -> (f, args)\n                    | _ -> HoistSeq.err_hoist_invariant e.span Stdlib.__LOC__\n                  in\n                  ( {\n                      e with\n                      e = App { f; args; generic_args; trait; bounds_impls };\n                    },\n                    effects ))\n          | Literal _ -> (e, m#zero)\n          | Block { e; safety_mode; witness } ->\n              HoistSeq.one env (self#visit_expr env e) (fun e effects ->\n                  ({ e with e = Block { e; safety_mode; witness } }, effects))\n          | Array l ->\n              HoistSeq.many env\n                (List.map ~f:(self#visit_expr env) l)\n                (fun l effects -> ({ e with e = Array l }, effects))\n          | Construct\n              { constructor; is_record; is_struct; fields = []; base = None } ->\n              ( {\n                  e with\n                  e =\n                    Construct\n                      {\n                        constructor;\n                        is_record;\n                        is_struct;\n                        fields = [];\n                        base = None;\n                      };\n                },\n                m#zero )\n          | Construct { constructor; is_struct; is_record; fields; base } ->\n              HoistSeq.many env\n                (List.map ~f:(self#visit_expr env)\n    
               (Option.to_list (Option.map ~f:fst base)\n                   @ List.map ~f:snd fields))\n                (fun l effects ->\n                  let base, fields_expr =\n                    match (l, base) with\n                    | hd :: tl, Some (_, witness) -> (Some (hd, witness), tl)\n                    | _, None -> (None, l)\n                    | _ -> HoistSeq.err_hoist_invariant e.span Stdlib.__LOC__\n                  in\n                  let fields =\n                    match List.zip (List.map ~f:fst fields) fields_expr with\n                    | Ok fields -> fields\n                    | Unequal_lengths ->\n                        HoistSeq.err_hoist_invariant e.span Stdlib.__LOC__\n                  in\n                  ( {\n                      e with\n                      e =\n                        Construct\n                          { constructor; is_struct; is_record; fields; base };\n                    },\n                    effects ))\n          | Match { scrutinee; arms } ->\n              let arms, eff_arms =\n                let arms =\n                  List.map ~f:(self#visit_arm env) arms\n                  (* materialize letbindings in each arms *)\n                  |> List.map ~f:(fun ({ arm; span }, ({ lbs; effects } : t)) ->\n                         let arm =\n                           { arm with body = lets_of_bindings lbs arm.body }\n                         in\n                         (({ arm; span } : arm), { lbs = []; effects }))\n                     (* cancel effects that concern variables introduced in pats  *)\n                  |> List.map ~f:(fun (arm, { lbs; effects }) ->\n                         let vars =\n                           U.Reducers.variables_of_pat arm.arm.arm_pat\n                         in\n                         let effects =\n                           SideEffects.without_rw_vars vars effects\n                         in\n                         (arm, { lbs; effects }))\n     
           in\n                ( List.map ~f:fst arms,\n                  List.fold ~init:m#zero ~f:m#plus (List.map ~f:snd arms) )\n              in\n              HoistSeq.one env (self#visit_expr env scrutinee)\n                (fun scrutinee effects ->\n                  ( { e with e = Match { scrutinee; arms } },\n                    m#plus eff_arms effects ))\n          | Let { monadic = Some _; _ } -> .\n          | Let { monadic = None; lhs; rhs; body } ->\n              let rhs, { lbs = rhs_lbs; effects = rhs_effects } =\n                self#visit_expr env rhs\n              in\n              let body, { lbs = body_lbs; effects = body_effects } =\n                self#visit_expr env body\n              in\n              let lbs = rhs_lbs @ ((lhs, rhs) :: body_lbs) in\n              let effects = SideEffects.plus rhs_effects body_effects in\n              (body, { lbs; effects })\n          | GlobalVar _ -> (e, m#zero)\n          | Ascription { e = e'; typ } ->\n              HoistSeq.one env (self#visit_expr env e') (fun e' eff ->\n                  ({ e with e = Ascription { e = e'; typ } }, eff))\n          | MacroInvokation _ -> (e, m#zero)\n          | Assign { lhs; e = e'; witness } ->\n              (* TODO: here, LHS should really have no effect... 
This is not fine *)\n              let lhs, lhs_effects = self#visit_lhs env lhs in\n              HoistSeq.one env (self#visit_expr env e') (fun e' effects ->\n                  let effects = m#plus effects lhs_effects in\n                  ({ e with e = Assign { e = e'; lhs; witness } }, effects))\n          | Borrow { kind; e = e'; witness } ->\n              let kind, kind_effects = self#visit_borrow_kind env kind in\n              HoistSeq.one env (self#visit_expr env e') (fun e' effects ->\n                  let effects = m#plus kind_effects effects in\n                  ({ e with e = Borrow { kind; e = e'; witness } }, effects))\n          | AddressOf { mut; e = e'; witness } ->\n              let mut, mut_effects = (mut, m#zero) in\n              HoistSeq.one env (self#visit_expr env e') (fun e' effects ->\n                  let effects = m#plus mut_effects effects in\n                  ({ e with e = AddressOf { mut; e = e'; witness } }, effects))\n          | Closure { params; body; captures } ->\n              let body, body_effects =\n                let body, { lbs; effects } = self#visit_expr env body in\n                let vars =\n                  Set.union_list (module Local_ident)\n                  @@ List.map ~f:U.Reducers.variables_of_pat params\n                in\n                let body = lets_of_bindings lbs body in\n                let effects =\n                  {\n                    (SideEffects.without_rw_vars vars effects) with\n                    return = None;\n                  }\n                in\n                (body, { lbs = []; effects })\n              in\n              ({ e with e = Closure { params; body; captures } }, body_effects)\n              (* HoistSeq.many env *)\n              (*   (List.map ~f:(super#visit_expr env) captures) *)\n              (*   (fun captures effects -> *)\n              (*     let effects = m#plus body_effects effects in *)\n              (*     ({ e with e = Closure { params; body; 
captures } }, effects)) *)\n          | EffectAction _ ->\n              Diagnostics.failure\n                ~context:(Other \"collect_and_hoist_effects_object\") ~span:e.span\n                (Unimplemented\n                   { issue_id = None; details = Some \"EffectAction\" })\n          | Quote _ -> (e, m#zero)\n      end\n\n    let collect_and_hoist_effects (e : expr) : expr * SideEffects.t =\n      let e, { lbs; effects } =\n        collect_and_hoist_effects_object#visit_expr CollectContext.empty e\n      in\n      (lets_of_bindings lbs e, effects)\n  end\nend\n\nmodule%inlined_contents Hoist\n    (F :\n      Features.T\n        with type monadic_binding = Features.Off.monadic_binding\n         and type for_index_loop = Features.Off.for_index_loop) =\nstruct\n  module FA = F\n\n  module FB = struct\n    include F\n  end\n\n  module UA = Ast_utils.Make (F)\n  module UB = Ast_utils.Make (FB)\n  module A = Ast.Make (F)\n  module B = Ast.Make (FB)\n\n  module S = struct\n    include Features.SUBTYPE.Id\n  end\n\n  open MakeSI (F)\n\n  [%%inline_defs dmutability + dsafety_kind]\n\n  module ID = struct\n    (* OCaml is not able to understand A.expr is the same as B.expr........... *)\n    [%%inline_defs dexpr]\n  end\n\n  open ID\n\n  let dexpr (expr : A.expr) : B.expr =\n    Hoist.collect_and_hoist_effects expr |> fst |> dexpr\n\n  [%%inline_defs \"Item.*\"]\n\n  let metadata = Phase_utils.Metadata.make HoistSideEffects\nend\n[@@add \"subtype.ml\"]\n"
  },
  {
    "path": "engine/lib/span.ml",
    "content": "open! Prelude\n\nmodule FreshId = struct\n  let current = ref 1\n\n  let make () =\n    let id = !current in\n    current := id + 1;\n    id\nend\n\nmodule Imported = struct\n  type span = { filename : file_name; hi : loc; lo : loc }\n  and loc = { col : int; line : int }\n\n  and file_name =\n    | Real of real_file_name\n    | CfgSpec of string\n    | Anon of string\n    | MacroExpansion of string\n    | ProcMacroSourceCode of string\n    | CliCrateAttr of string\n    | Custom of string\n    | DocTest of string\n    | InlineAsm of string\n\n  and real_file_name =\n    | LocalPath of string\n    | Remapped of { local_path : string option; virtual_name : string }\n  [@@deriving show, yojson, sexp, compare, eq, hash]\n\n  let file_name_of_thir : Types.file_name -> file_name = function\n    | Real x ->\n        Real\n          (match x with\n          | LocalPath x -> LocalPath x\n          | Remapped { local_path; virtual_name } ->\n              Remapped { local_path; virtual_name })\n    | CfgSpec x -> CfgSpec x\n    | Anon x -> Anon x\n    | MacroExpansion x -> MacroExpansion x\n    | ProcMacroSourceCode x -> ProcMacroSourceCode x\n    | CliCrateAttr x -> CliCrateAttr x\n    | Custom x -> Custom x\n    | DocTest x -> DocTest x\n    | InlineAsm x -> InlineAsm x\n\n  let loc_of_thir ({ col; line } : Types.loc) : loc =\n    { col = Int.of_string col; line = Int.of_string line }\n\n  let span_of_thir (s : Types.span) : span =\n    {\n      filename = file_name_of_thir s.filename;\n      hi = loc_of_thir s.hi;\n      lo = loc_of_thir s.lo;\n    }\n\n  let file_name_to_thir : file_name -> Types.file_name = function\n    | Real x ->\n        Real\n          (match x with\n          | LocalPath x -> LocalPath x\n          | Remapped { local_path; virtual_name } ->\n              Remapped { local_path; virtual_name })\n    | CfgSpec x -> CfgSpec x\n    | Anon x -> Anon x\n    | MacroExpansion x -> MacroExpansion x\n    | ProcMacroSourceCode x -> 
ProcMacroSourceCode x\n    | CliCrateAttr x -> CliCrateAttr x\n    | Custom x -> Custom x\n    | DocTest x -> DocTest x\n    | InlineAsm x -> InlineAsm x\n\n  let loc_to_thir ({ col; line } : loc) : Types.loc =\n    { col = Int.to_string col; line = Int.to_string line }\n\n  let span_to_thir (s : span) : Types.span =\n    {\n      filename = file_name_to_thir s.filename;\n      hi = loc_to_thir s.hi;\n      lo = loc_to_thir s.lo;\n    }\n\n  let display_loc (l : loc) : string =\n    Int.to_string l.col ^ \":\" ^ Int.to_string l.line\n\n  let display_span (s : span) : string =\n    let file =\n      match s.filename with\n      | Real (LocalPath path) -> path\n      | s -> [%show: file_name] s\n    in\n    \"<\" ^ file ^ \" \" ^ display_loc s.lo ^ \"→\" ^ display_loc s.hi ^ \">\"\nend\n\ntype owner_id = OwnerId of int\n[@@deriving show, yojson, sexp, compare, eq, hash]\n\nlet owner_id_list = ref []\nlet owner_id_list_len = ref 0\n\nlet fresh_owner_id (owner : Types.def_id) : owner_id =\n  let next_id = OwnerId !owner_id_list_len in\n  owner_id_list := owner :: !owner_id_list;\n  owner_id_list_len := !owner_id_list_len + 1;\n  next_id\n\n(** This state changes the behavior of `of_thir`: the hint placed into this\n    state will be inserted automatically by `of_thir`. The field `owner_hint`\n    shall be used solely for reporting to the user, not for any logic within the\n    engine. 
*)\nlet state_owner_hint : owner_id option ref = ref None\n\nlet with_owner_hint (type t) (owner : Types.def_id) (f : unit -> t) : t =\n  let previous = !state_owner_hint in\n  state_owner_hint := Some (fresh_owner_id owner);\n  let result = f () in\n  state_owner_hint := previous;\n  result\n\ntype t = { id : int; data : Imported.span list; owner_hint : owner_id option }\n[@@deriving show, yojson, sexp, compare, eq, hash]\n\nlet display { id = _; data; _ } =\n  match data with\n  | [] -> \"<dummy>\"\n  | [ span ] -> Imported.display_span span\n  | spans -> List.map ~f:Imported.display_span spans |> String.concat ~sep:\"∪\"\n\nlet of_thir span =\n  {\n    data = [ Imported.span_of_thir span ];\n    id = FreshId.make ();\n    owner_hint = !state_owner_hint;\n  }\n\nlet to_thir { data; _ } = List.map ~f:Imported.span_to_thir data\n\nlet union_list spans =\n  let data = List.concat_map ~f:(fun { data; _ } -> data) spans in\n  let owner_hint = List.hd spans |> Option.bind ~f:(fun s -> s.owner_hint) in\n  { data; id = FreshId.make (); owner_hint }\n\nlet union x y = union_list [ x; y ]\n\nlet dummy () =\n  { id = FreshId.make (); data = []; owner_hint = !state_owner_hint }\n\nlet id_of { id; _ } = id\nlet refresh_id span = { span with id = FreshId.make () }\nlet default = { id = 0; data = []; owner_hint = None }\n\nlet owner_hint span =\n  span.owner_hint\n  |> Option.map ~f:(fun (OwnerId id) ->\n         Option.value_exn\n           (List.nth !owner_id_list (!owner_id_list_len - id - 1)))\n\nlet to_rust_ast_span span : Rust_engine_types.span =\n  let owner_hint =\n    let f (did : Types.def_id) : Types.def_id2 = { contents = did.contents } in\n    owner_hint span |> Option.map ~f\n  in\n  {\n    data = List.map ~f:Imported.span_to_thir span.data;\n    id = Int.to_int64 span.id;\n    owner_hint;\n  }\n\nlet from_rust_ast_span (span : Rust_engine_types.span) : t =\n  let owner_hint =\n    let f (did : Types.def_id2) : Types.def_id = { contents = did.contents } in\n    
Option.map ~f span.owner_hint\n  in\n  {\n    data = List.map ~f:Imported.span_of_thir span.data;\n    id = Int.of_int64_exn span.id;\n    owner_hint = Option.map ~f:fresh_owner_id owner_hint;\n  }\n"
  },
  {
    "path": "engine/lib/span.mli",
    "content": "type t [@@deriving show, yojson, sexp, compare, eq, hash]\n\nval display : t -> string\n\nval of_thir : Types.span -> t\n(** Imports a THIR span as a hax span *)\n\nval to_thir : t -> Types.span list\n(** Exports a hax span to THIR spans (a hax span might be a collection of spans)\n*)\n\nval union_list : t list -> t\nval union : t -> t -> t\n\nval dummy : unit -> t\n(** Generates a dummy span: this should be avoided at all cost. *)\n\nval id_of : t -> int\n(** Lookup the internal unique identifier of a span. *)\n\nval refresh_id : t -> t\n(** Replaces the internal identifier by a fresh one. This can be useful for\n    debugging. *)\n\nval default : t\n(** A default span can be useful when a span is required in some computation\n    that never reports error and when we know the span will go away. Using this\n    should be avoided. *)\n\nval with_owner_hint : Types.def_id -> (unit -> 't) -> 't\n(** Inserts a hint about the fact that, in function `f`, we're translating spans\n    that are \"owned\" by an item `owner`. This should be used only in\n    `import_thir`, also, the hint shall be used only to enhance user reporting,\n    not for any logic within the engine. *)\n\nval owner_hint : t -> Types.def_id option\n(** Looks up the owner hint for a span. This should be used for user reports\n    only. *)\n\nval to_rust_ast_span : t -> Rust_engine_types.span\n(** Converts this span to a Rust engine span. *)\n\nval from_rust_ast_span : Rust_engine_types.span -> t\n"
  },
  {
    "path": "engine/lib/subtype.ml",
    "content": "open! Prelude\n\nmodule Make\n    (FA : Features.T)\n    (FB : Features.T)\n    (S : Features.SUBTYPE.T with module A = FA and module B = FB) =\nstruct\n  open Ast\n  module A = Ast.Make (FA)\n  module B = Ast.Make (FB)\n  module UA = Ast_utils.Make (FA)\n  module UB = Ast_utils.Make (FB)\n  module FA = FA\n\n  let dsafety_kind (span : Span.t) (safety : A.safety_kind) : B.safety_kind =\n    match safety with Safe -> Safe | Unsafe w -> Unsafe (S.unsafe span w)\n\n  let dmutability (span : Span.t) (type a b) (s : Span.t -> a -> b)\n      (mutability : a mutability) : b mutability =\n    match mutability with\n    | Mutable w -> Mutable (s span w)\n    | Immutable -> Immutable\n\n  let rec dty (span : span) (ty : A.ty) : B.ty =\n    match ty with\n    | TBool -> TBool\n    | TChar -> TChar\n    | TInt k -> TInt k\n    | TFloat k -> TFloat k\n    | TStr -> TStr\n    | TApp { ident; args } ->\n        TApp { ident; args = List.map ~f:(dgeneric_value span) args }\n    | TArray { typ; length } ->\n        TArray { typ = dty span typ; length = dexpr length }\n    | TSlice { witness; ty } ->\n        TSlice { witness = S.slice span witness; ty = dty span ty }\n    | TRef { witness; typ; mut; region } ->\n        TRef\n          {\n            witness = S.reference span witness;\n            typ = dty span typ;\n            mut = dmutability span S.mutable_reference mut;\n            region;\n          }\n    | TParam local_ident -> TParam local_ident\n    | TArrow (inputs, output) ->\n        TArrow (List.map ~f:(dty span) inputs, dty span output)\n    | TAssociatedType { impl; item } ->\n        TAssociatedType { impl = dimpl_expr span impl; item }\n    | TOpaque ident -> TOpaque ident\n    | TRawPointer { witness } ->\n        TRawPointer { witness = S.raw_pointer span witness }\n    | TDyn { witness; goals } ->\n        TDyn\n          {\n            witness = S.dyn span witness;\n            goals = List.map ~f:(ddyn_trait_goal span) goals;\n          
}\n\n  and ddyn_trait_goal (span : span) (r : A.dyn_trait_goal) : B.dyn_trait_goal =\n    {\n      trait = r.trait;\n      non_self_args = List.map ~f:(dgeneric_value span) r.non_self_args;\n    }\n\n  and dtrait_goal (span : span) (r : A.trait_goal) : B.trait_goal =\n    { trait = r.trait; args = List.map ~f:(dgeneric_value span) r.args }\n\n  and dimpl_ident (span : span) (r : A.impl_ident) : B.impl_ident =\n    { goal = dtrait_goal span r.goal; name = r.name }\n\n  and dprojection_predicate (span : span) (r : A.projection_predicate) :\n      B.projection_predicate =\n    {\n      impl = dimpl_expr span r.impl;\n      assoc_item = r.assoc_item;\n      typ = dty span r.typ;\n    }\n\n  and dimpl_expr (span : span) (i : A.impl_expr) : B.impl_expr =\n    { kind = dimpl_expr_kind span i.kind; goal = dtrait_goal span i.goal }\n\n  and dimpl_expr_kind (span : span) (i : A.impl_expr_kind) : B.impl_expr_kind =\n    match i with\n    | Self -> Self\n    | Concrete tr -> Concrete (dtrait_goal span tr)\n    | LocalBound { id } -> LocalBound { id }\n    | Parent { impl; ident } ->\n        Parent { impl = dimpl_expr span impl; ident = dimpl_ident span ident }\n    | Projection { impl; item; ident } ->\n        Projection\n          { impl = dimpl_expr span impl; item; ident = dimpl_ident span ident }\n    | ImplApp { impl; args } ->\n        ImplApp\n          {\n            impl = dimpl_expr span impl;\n            args = List.map ~f:(dimpl_expr span) args;\n          }\n    | Dyn -> Dyn\n    | Builtin tr -> Builtin (dtrait_goal span tr)\n\n  and dgeneric_value (span : span) (generic_value : A.generic_value) :\n      B.generic_value =\n    match generic_value with\n    | GLifetime { lt; witness } ->\n        GLifetime { lt; witness = S.lifetime span witness }\n    | GType t -> GType (dty span t)\n    | GConst e -> GConst (dexpr e)\n\n  and dborrow_kind (span : span) (borrow_kind : A.borrow_kind) : B.borrow_kind =\n    match borrow_kind with\n    | Shared -> Shared\n    | 
Unique -> Unique\n    | Mut witness -> Mut (S.mutable_reference span witness)\n\n  and dpat (p : A.pat) : B.pat =\n    { p = dpat' p.span p.p; span = p.span; typ = dty p.span p.typ }\n\n  and dpat' (span : span) (pat : A.pat') : B.pat' =\n    match pat with\n    | PWild -> PWild\n    | PAscription { typ; typ_span; pat } ->\n        PAscription { typ = dty span typ; pat = dpat pat; typ_span }\n    | PConstruct { constructor; is_record; is_struct; fields } ->\n        PConstruct\n          {\n            constructor;\n            is_record;\n            is_struct;\n            fields = List.map ~f:(dfield_pat span) fields;\n          }\n    | POr { subpats } -> POr { subpats = List.map ~f:dpat subpats }\n    | PArray { args } -> PArray { args = List.map ~f:dpat args }\n    | PConstant { lit } -> PConstant { lit }\n    | PBinding { mut; mode; var : Local_ident.t; typ; subpat } ->\n        PBinding\n          {\n            mut = dmutability span S.mutable_variable mut;\n            mode = dbinding_mode span mode;\n            var;\n            typ = dty span typ;\n            subpat = Option.map ~f:(dpat *** S.as_pattern span) subpat;\n          }\n    | PDeref { subpat; witness } ->\n        PDeref { subpat = dpat subpat; witness = S.reference span witness }\n\n  and dfield_pat (_span : span) (p : A.field_pat) : B.field_pat =\n    { field = p.field; pat = dpat p.pat }\n\n  and dbinding_mode (span : span) (binding_mode : A.binding_mode) :\n      B.binding_mode =\n    match binding_mode with\n    | ByValue -> ByValue\n    | ByRef (kind, witness) ->\n        ByRef (dborrow_kind span kind, S.reference span witness)\n\n  and dsupported_monads (span : span) (m : A.supported_monads) :\n      B.supported_monads =\n    match m with\n    | MException t -> MException (dty span t)\n    | MResult t -> MResult (dty span t)\n    | MOption -> MOption\n\n  and dexpr (e : A.expr) : B.expr =\n    try dexpr_unwrapped e\n    with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->\n  
    let typ : B.ty =\n        try dty e.span e.typ\n        with Diagnostics.SpanFreeError.Exn (Data (_context, _kind)) ->\n          UB.HaxFailure.Build.ty \"\"\n      in\n      UB.hax_failure_expr e.span typ (context, kind) (UA.LiftToFullAst.expr e)\n\n  and dexpr_unwrapped (e : A.expr) : B.expr =\n    { e = dexpr' e.span e.e; span = e.span; typ = dty e.span e.typ }\n\n  and dexpr' (span : span) (expr : A.expr') : B.expr' =\n    match expr with\n    | If { cond; then_; else_ } ->\n        If\n          {\n            cond = dexpr cond;\n            then_ = dexpr then_;\n            else_ = Option.map ~f:dexpr else_;\n          }\n    | App { f; args; generic_args; bounds_impls; trait } ->\n        let dgeneric_values = List.map ~f:(dgeneric_value span) in\n        App\n          {\n            f = dexpr f;\n            args = List.map ~f:dexpr args;\n            generic_args = dgeneric_values generic_args;\n            bounds_impls = List.map ~f:(dimpl_expr span) bounds_impls;\n            trait = Option.map ~f:(dimpl_expr span *** dgeneric_values) trait;\n          }\n    | Literal lit -> Literal lit\n    | Array l -> Array (List.map ~f:dexpr l)\n    | Construct { constructor; is_record; is_struct; fields; base } ->\n        Construct\n          {\n            constructor;\n            is_record;\n            is_struct;\n            fields = List.map ~f:(map_snd dexpr) fields;\n            base = Option.map ~f:(dexpr *** S.construct_base span) base;\n          }\n    | Match { scrutinee; arms } ->\n        Match { scrutinee = dexpr scrutinee; arms = List.map ~f:darm arms }\n    | Let { monadic; lhs; rhs; body } ->\n        Let\n          {\n            monadic =\n              Option.map\n                ~f:(dsupported_monads span *** S.monadic_binding span)\n                monadic;\n            lhs = dpat lhs;\n            rhs = dexpr rhs;\n            body = dexpr body;\n          }\n    | Block { e; safety_mode; witness } ->\n        Block\n          {\n     
       e = dexpr e;\n            safety_mode = dsafety_kind span safety_mode;\n            witness = S.block span witness;\n          }\n    | LocalVar local_ident -> LocalVar local_ident\n    | GlobalVar global_ident -> GlobalVar global_ident\n    | Ascription { e; typ } -> Ascription { e = dexpr e; typ = dty span typ }\n    | MacroInvokation { macro; args; witness } ->\n        MacroInvokation { macro; args; witness = S.macro span witness }\n    | Assign { lhs; e; witness } ->\n        Assign\n          {\n            lhs = dlhs span lhs;\n            e = dexpr e;\n            witness = S.mutable_variable span witness;\n          }\n    | Loop { body; kind; state; label; witness; control_flow } ->\n        Loop\n          {\n            body = dexpr body;\n            kind = dloop_kind span kind;\n            state = Option.map ~f:(dloop_state span) state;\n            label;\n            control_flow =\n              Option.map\n                ~f:\n                  (( function\n                   | A.BreakOnly -> B.BreakOnly\n                   | A.BreakOrReturn -> B.BreakOrReturn )\n                  *** S.fold_like_loop span)\n                control_flow;\n            witness = S.loop span witness;\n          }\n    | Break { e; acc; label; witness } ->\n        Break\n          {\n            e = dexpr e;\n            acc = Option.map ~f:(dexpr *** S.state_passing_loop span) acc;\n            label;\n            witness = (S.break span *** S.loop span) witness;\n          }\n    | Return { e; witness } ->\n        Return { e = dexpr e; witness = S.early_exit span witness }\n    | QuestionMark { e; return_typ; witness } ->\n        QuestionMark\n          {\n            e = dexpr e;\n            return_typ = dty span return_typ;\n            witness = S.question_mark span witness;\n          }\n    | Continue { acc; label; witness = w1, w2 } ->\n        Continue\n          {\n            acc = Option.map ~f:(dexpr *** S.state_passing_loop span) acc;\n       
     label;\n            witness = (S.continue span w1, S.loop span w2);\n          }\n    | Borrow { kind; e; witness } ->\n        Borrow\n          {\n            kind = dborrow_kind span kind;\n            e = dexpr e;\n            witness = S.reference span witness;\n          }\n    | EffectAction { action; argument } ->\n        EffectAction\n          { action = S.monadic_action span action; argument = dexpr argument }\n    | AddressOf { mut; e; witness } ->\n        AddressOf\n          {\n            mut = dmutability span S.mutable_pointer mut;\n            e = dexpr e;\n            witness = S.raw_pointer span witness;\n          }\n    | Closure { params; body; captures } ->\n        Closure\n          {\n            params = List.map ~f:dpat params;\n            body = dexpr body;\n            captures = List.map ~f:dexpr captures;\n          }\n    | Quote quote -> Quote (dquote span quote)\n\n  and dquote (span : span) ({ contents; witness } : A.quote) : B.quote =\n    let f = function\n      | A.Verbatim code -> B.Verbatim code\n      | Expr e -> Expr (dexpr e)\n      | Pattern p -> Pattern (dpat p)\n      | Typ p -> Typ (dty span p)\n    in\n    { contents = List.map ~f contents; witness = S.quote span witness }\n\n  and dloop_kind (span : span) (k : A.loop_kind) : B.loop_kind =\n    match k with\n    | UnconditionalLoop -> UnconditionalLoop\n    | WhileLoop { condition; witness } ->\n        WhileLoop\n          { condition = dexpr condition; witness = S.while_loop span witness }\n    | ForLoop { it; pat; witness } ->\n        ForLoop\n          { it = dexpr it; pat = dpat pat; witness = S.for_loop span witness }\n    | ForIndexLoop { start; end_; var; var_typ; witness } ->\n        ForIndexLoop\n          {\n            start = dexpr start;\n            end_ = dexpr end_;\n            var;\n            var_typ = dty span var_typ;\n            witness = S.for_index_loop span witness;\n          }\n\n  and dloop_state (span : span) (s : 
A.loop_state) : B.loop_state =\n    {\n      init = dexpr s.init;\n      bpat = dpat s.bpat;\n      witness = S.state_passing_loop span s.witness;\n    }\n\n  and darm (a : A.arm) : B.arm = { span = a.span; arm = darm' a.arm }\n\n  and darm' (a : A.arm') : B.arm' =\n    {\n      arm_pat = dpat a.arm_pat;\n      body = dexpr a.body;\n      guard = Option.map ~f:dguard a.guard;\n    }\n\n  and dguard (a : A.guard) : B.guard =\n    { span = a.span; guard = dguard' a.span a.guard }\n\n  and dguard' (span : span) (guard : A.guard') : B.guard' =\n    match guard with\n    | IfLet { lhs; rhs; witness } ->\n        IfLet\n          {\n            lhs = dpat lhs;\n            rhs = dexpr rhs;\n            witness = S.match_guard span witness;\n          }\n\n  and dlhs (span : span) (lhs : A.lhs) : B.lhs =\n    match lhs with\n    | LhsFieldAccessor { e; field; typ; witness } ->\n        LhsFieldAccessor\n          {\n            e = dlhs span e;\n            field;\n            typ = dty span typ;\n            witness = S.nontrivial_lhs span witness;\n          }\n    | LhsArrayAccessor { e; index; typ; witness } ->\n        LhsArrayAccessor\n          {\n            e = dlhs span e;\n            index = dexpr index;\n            typ = dty span typ;\n            witness = S.nontrivial_lhs span witness;\n          }\n    | LhsLocalVar { var; typ } -> LhsLocalVar { var; typ = dty span typ }\n    | LhsVecRef { e; typ; witness } ->\n        LhsVecRef\n          {\n            e = dlhs span e;\n            typ = dty span typ;\n            witness = S.nontrivial_lhs span witness;\n          }\n    | LhsArbitraryExpr { e; witness } ->\n        LhsArbitraryExpr { e = dexpr e; witness = S.arbitrary_lhs span witness }\n\n  module Item = struct\n    (* TODO: remove span argument *)\n    let dgeneric_param _span ({ ident; span; attrs; kind } : A.generic_param) :\n        B.generic_param =\n      let kind =\n        match kind with\n        | GPLifetime { witness } ->\n            
B.GPLifetime { witness = S.lifetime span witness }\n        | GPType -> GPType\n        | GPConst { typ } -> GPConst { typ = dty span typ }\n      in\n      { ident; span; kind; attrs }\n\n    let dgeneric_constraint (span : span)\n        (generic_constraint : A.generic_constraint) : B.generic_constraint =\n      match generic_constraint with\n      | GCLifetime (lf, witness) -> B.GCLifetime (lf, S.lifetime span witness)\n      | GCType impl_ident -> B.GCType (dimpl_ident span impl_ident)\n      | GCProjection projection ->\n          B.GCProjection (dprojection_predicate span projection)\n\n    let dgenerics (span : span) (g : A.generics) : B.generics =\n      {\n        params = List.map ~f:(dgeneric_param span) g.params;\n        constraints = List.map ~f:(dgeneric_constraint span) g.constraints;\n      }\n\n    let dparam (span : span) (p : A.param) : B.param =\n      {\n        pat = dpat p.pat;\n        typ = dty (Option.value ~default:span p.typ_span) p.typ;\n        typ_span = p.typ_span;\n        attrs = p.attrs;\n      }\n\n    let dvariant (span : span) (v : A.variant) : B.variant =\n      {\n        name = v.name;\n        arguments = List.map ~f:(map_snd3 @@ dty span) v.arguments;\n        is_record = v.is_record;\n        attrs = v.attrs;\n      }\n\n    let rec dtrait_item' (span : span) (ti : A.trait_item') : B.trait_item' =\n      match ti with\n      | TIType idents -> TIType (List.map ~f:(dimpl_ident span) idents)\n      | TIFn t -> TIFn (dty span t)\n      | TIDefault { params; body; witness } ->\n          TIDefault\n            {\n              params = List.map ~f:(dparam span) params;\n              body = dexpr body;\n              witness = S.trait_item_default span witness;\n            }\n\n    and dtrait_item (ti : A.trait_item) : B.trait_item =\n      {\n        ti_span = ti.ti_span;\n        ti_generics = dgenerics ti.ti_span ti.ti_generics;\n        ti_v = dtrait_item' ti.ti_span ti.ti_v;\n        ti_ident = ti.ti_ident;\n        
ti_attrs = ti.ti_attrs;\n      }\n\n    let rec dimpl_item' (span : span) (ii : A.impl_item') : B.impl_item' =\n      match ii with\n      | IIType { typ; parent_bounds } ->\n          IIType\n            {\n              typ = dty span typ;\n              parent_bounds =\n                List.map ~f:(dimpl_expr span *** dimpl_ident span) parent_bounds;\n            }\n      | IIFn { body; params } ->\n          IIFn { body = dexpr body; params = List.map ~f:(dparam span) params }\n\n    and dimpl_item (ii : A.impl_item) : B.impl_item =\n      {\n        ii_span = ii.ii_span;\n        ii_generics = dgenerics ii.ii_span ii.ii_generics;\n        ii_v = dimpl_item' ii.ii_span ii.ii_v;\n        ii_ident = ii.ii_ident;\n        ii_attrs = ii.ii_attrs;\n      }\n\n    let rec ditem (i : A.item) : B.item list =\n      try ditem_unwrapped i\n      with Diagnostics.SpanFreeError.Exn (Data (context, kind)) ->\n        let error = Diagnostics.pretty_print_context_kind context kind in\n        let cast_item : A.item -> Ast.Full.item = Stdlib.Obj.magic in\n        let ast = cast_item i |> Print_rust.pitem_str in\n        let msg = error ^ \"\\nLast available AST for this item:\\n\\n\" ^ ast in\n        [ B.make_hax_error_item i.span i.ident msg ]\n\n    and ditem_unwrapped (item : A.item) : B.item list =\n      [\n        {\n          v = ditem' item.span item.v;\n          span = item.span;\n          ident = item.ident;\n          attrs = item.attrs;\n        };\n      ]\n\n    and ditem' (span : span) (item : A.item') : B.item' =\n      match item with\n      | Fn { name; generics; body; params; safety } ->\n          B.Fn\n            {\n              name;\n              generics = dgenerics span generics;\n              body = dexpr body;\n              params = List.map ~f:(dparam span) params;\n              safety = dsafety_kind span safety;\n            }\n      | Type { name; generics; variants; is_struct } ->\n          B.Type\n            {\n              name;\n   
           generics = dgenerics span generics;\n              variants = List.map ~f:(dvariant span) variants;\n              is_struct;\n            }\n      | TyAlias { name; generics; ty } ->\n          B.TyAlias\n            { name; generics = dgenerics span generics; ty = dty span ty }\n      | IMacroInvokation { macro; argument; span; witness } ->\n          B.IMacroInvokation\n            { macro; argument; span; witness = S.macro span witness }\n      | Trait { name; generics; items; safety } ->\n          B.Trait\n            {\n              name;\n              generics = dgenerics span generics;\n              items = List.map ~f:dtrait_item items;\n              safety = dsafety_kind span safety;\n            }\n      | Impl\n          {\n            generics;\n            self_ty;\n            of_trait = trait_id, trait_generics;\n            items;\n            parent_bounds;\n            safety;\n          } ->\n          B.Impl\n            {\n              generics = dgenerics span generics;\n              self_ty = dty span self_ty;\n              of_trait =\n                (trait_id, List.map ~f:(dgeneric_value span) trait_generics);\n              items = List.map ~f:dimpl_item items;\n              parent_bounds =\n                List.map ~f:(dimpl_expr span *** dimpl_ident span) parent_bounds;\n              safety = dsafety_kind span safety;\n            }\n      | Alias { name; item } -> B.Alias { name; item }\n      | Use { path; is_external; rename } -> B.Use { path; is_external; rename }\n      | Quote { quote; origin } -> Quote { quote = dquote span quote; origin }\n      | HaxError e -> B.HaxError e\n      | NotImplementedYet -> B.NotImplementedYet\n\n    let ditems = List.concat_map ~f:ditem\n  end\n\n  include Item\nend\n"
  },
  {
    "path": "engine/lib/untyped_phases/gen.js",
    "content": "#!/usr/bin/env node\nconst { readdirSync, readFileSync } = require('fs');\n\nlet f = s => ((s.split(\"include module type\")[0] || \"\").match(/(with|and) type[^)]*/g) || []).join(\"\").split('and type').map(x => x.replace(/(with|and) type/g, '').trim()).filter(x => x).map(x => x.split('=')[1].trim().split('.').slice(1));\n\nlet phases = readdirSync(\"../phases\").filter(x => x.endsWith(\".mli\")).map(filename => ({\n  filename,\n  contents: f(readFileSync(\"../phases/\" + filename).toString()),\n}));\n\nlet rejections = readFileSync(\"../phases/phase_reject.ml\")\n  .toString()\n  .split('\\n')\n  .map(s => s.match(/^module ([a-z][a-z_]+)/i)?.[1])\n  .filter(s => s);\n\nconsole.log(`\nopen Prelude\n    \nmodule type PHASE_FULL =\n  Phase_utils.PHASE\n    with module FA = Features.Full\n     and module FB = Features.Full\n     and module A = Ast.Full\n     and module B = Ast.Full\n\nmodule BindPhaseFull (A : PHASE_FULL) (B : PHASE_FULL) : PHASE_FULL = struct\n  include Phase_utils.BindPhase (A) (B)\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\nend\n\nmodule IdentityFull : PHASE_FULL = struct\n  include Phase_utils.Identity (Features.Full)\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\nend\n\nlet bind (module A : PHASE_FULL) (module B : PHASE_FULL) : (module PHASE_FULL) =\n  (module BindPhaseFull (A) (B))\n\nlet bind_list : (module PHASE_FULL) list -> (module PHASE_FULL) =\n  List.reduce ~f:bind\n  >> Option.value ~default:(module IdentityFull : PHASE_FULL)\n\n`);\n\n\nfor (let phase of phases) {\n  let name_lc = phase.filename.replace(/^phase_/, \"\").replace(/[.]mli$/, \"\");\n  let name = name_lc.replace(/^(.)/, l => l.toUpperCase());\n  phase.name_lc = name_lc;\n  phase.name = name;\n  phase.module_expression = `Phases.${name}`;\n}\n\n\nfor (let rejection of rejections) {\n  let name = 'Reject_' + rejection.replace(/^(.)/, l => 
l.toLowerCase()).replace(/[A-Z]/g, c => `_${c}`).toLowerCase();\n  phases.push({\n    name_lc: name.toLowerCase(),\n    name,\n    module_expression: 'Phase_reject.' + rejection,\n    contents: [],\n  });\n}\n\nphases.push({\n  name_lc: \"hoist_side_effects\",\n  name: 'Hoist_side_effects',\n  module_expression: 'Side_effect_utils.Hoist',\n  contents: [\n    ['Off', 'monadic_binding'],\n    ['Off', 'for_index_loop'],\n  ],\n});\n\n\nfor (let phase of phases) {\n  let { name, module_expression } = phase;\n\n  console.log(`\nmodule ${name} : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n    ${phase.contents.map(([status, f]) => `include ${status}.${f.replace(/^(.)/, l => l.toUpperCase())}`).join('\\n')}\n  end\n\n  module Phase = ${module_expression} (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        ${phase.contents.map(([status, f]) =>\n    `let ${f} = ` + (status == 'On' ? 'fun _ _ -> Features.On.' 
+ f : 'reject')\n  ).join('\\n')}\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n`);\n}\n\n\nfor (let phase of phases) {\n  console.log(`let ${phase.name_lc} : (module PHASE_FULL) = (module ${phase.name})`)\n}\nconsole.log(`let phases_list : (module PHASE_FULL) list = [${phases.map(p => p.name_lc).join(';')}]`)\n\n\nconsole.log(`\nlet phase_of_name: string -> (module PHASE_FULL) option = \n    function\n    ${phases.map(p => `| \"${p.name_lc}\" -> Some ${p.name_lc}`).join('')}\n    | _ -> None\n\nlet phases: string list = [${phases.map(p => `\"${p.name_lc}\"`).join(';')}]\n\n(*\n${phases.map(p => `${p.name_lc}`).join(', ')}\n*)\n`);\n\n\n\n\n"
  },
  {
    "path": "engine/lib/untyped_phases/untyped_phases.ml",
    "content": "open Prelude\n\nmodule type PHASE_FULL =\n  Phase_utils.PHASE\n    with module FA = Features.Full\n     and module FB = Features.Full\n     and module A = Ast.Full\n     and module B = Ast.Full\n\nmodule BindPhaseFull (A : PHASE_FULL) (B : PHASE_FULL) : PHASE_FULL = struct\n  include Phase_utils.BindPhase (A) (B)\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\nend\n\nmodule IdentityFull : PHASE_FULL = struct\n  include Phase_utils.Identity (Features.Full)\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\nend\n\nlet bind (module A : PHASE_FULL) (module B : PHASE_FULL) : (module PHASE_FULL) =\n  (module BindPhaseFull (A) (B))\n\nlet bind_list : (module PHASE_FULL) list -> (module PHASE_FULL) =\n  List.reduce ~f:bind\n  >> Option.value ~default:(module IdentityFull : PHASE_FULL)\n\nmodule And_mut_defsite : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n    include On.Mutable_variable\n    include On.Mutable_reference\n    include On.Nontrivial_lhs\n    include On.Arbitrary_lhs\n    include On.Reference\n  end\n\n  module Phase = Phases.And_mut_defsite (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let mutable_variable = fun _ _ -> Features.On.mutable_variable\n        let mutable_reference = fun _ _ -> Features.On.mutable_reference\n        let nontrivial_lhs = fun _ _ -> Features.On.nontrivial_lhs\n        let arbitrary_lhs = fun _ _ -> Features.On.arbitrary_lhs\n        let reference = fun _ _ -> Features.On.reference\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n        
       ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Bundle_cycles : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Bundle_cycles (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Cf_into_monads : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n    include Off.Monadic_action\n    include Off.Monadic_binding\n  end\n\n  module Phase = Phases.Cf_into_monads (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let monadic_action = reject\n        let monadic_binding = reject\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let 
metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Direct_and_mut : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n    include Off.Raw_pointer\n    include Off.Mutable_pointer\n  end\n\n  module Phase = Phases.Direct_and_mut (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let raw_pointer = reject\n        let mutable_pointer = reject\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Drop_blocks : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Drop_blocks (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = 
Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Drop_match_guards : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Drop_match_guards (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Drop_references : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n    include Off.Raw_pointer\n    include Off.Mutable_reference\n  end\n\n  module Phase = Phases.Drop_references (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let raw_pointer = reject\n        let mutable_reference = reject\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> 
Phase.ditems >> to_full_ast\nend\n\nmodule Drop_return_break_continue : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Drop_return_break_continue (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Drop_sized_trait : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Drop_sized_trait (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Explicit_conversions : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module 
ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Explicit_conversions (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Functionalize_loops : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n    include Off.Continue\n    include Off.Early_exit\n    include Off.Break\n  end\n\n  module Phase = Phases.Functionalize_loops (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let continue = reject\n        let early_exit = reject\n        let break = reject\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Hoist_disjunctive_patterns : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open 
Features\n    include On\n  end\n\n  module Phase = Phases.Hoist_disjunctive_patterns (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Local_mutation : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n    include Off.Mutable_reference\n    include Off.Mutable_pointer\n    include Off.Raw_pointer\n    include Off.Arbitrary_lhs\n    include Off.Nontrivial_lhs\n    include Off.Monadic_action\n    include Off.Monadic_binding\n    include Off.For_index_loop\n  end\n\n  module Phase = Phases.Local_mutation (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let mutable_reference = reject\n        let mutable_pointer = reject\n        let raw_pointer = reject\n        let arbitrary_lhs = reject\n        let nontrivial_lhs = reject\n        let monadic_action = reject\n        let monadic_binding = reject\n        let for_index_loop = reject\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item 
list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Newtype_as_refinement : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Newtype_as_refinement (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reconstruct_asserts : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Reconstruct_asserts (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reconstruct_for_index_loops : PHASE_FULL = struct\n  
module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Reconstruct_for_index_loops (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reconstruct_for_loops : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Reconstruct_for_loops (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reconstruct_question_marks : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = 
Phases.Reconstruct_question_marks (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reconstruct_while_loops : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Reconstruct_while_loops (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reject_impl_type_method : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Reject_impl_type_method (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = 
ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reorder_fields : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Reorder_fields (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Rewrite_control_flow : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Rewrite_control_flow (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] 
Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Rewrite_local_self : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Rewrite_local_self (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Simplify_hoisting : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Simplify_hoisting (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> 
List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Simplify_match_return : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Simplify_match_return (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Simplify_question_marks : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Simplify_question_marks (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Sort_items : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  
module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Sort_items (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Sort_items_namespace_wise : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Sort_items_namespace_wise (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Specialize : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Specialize (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        
module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Traits_specs : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Traits_specs (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Transform_hax_lib_inline : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Transform_hax_lib_inline (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n              
 ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Trivialize_assign_lhs : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phases.Trivialize_assign_lhs (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reject_arbitrary_lhs : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phase_reject.Arbitrary_lhs (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n 
   List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reject_continue : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phase_reject.Continue (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reject_question_mark : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phase_reject.Question_mark (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reject_raw_or_mut_pointer : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  
module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phase_reject.RawOrMutPointer (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reject_early_exit : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phase_reject.EarlyExit (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reject_as_pattern : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phase_reject.As_pattern (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) 
(ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reject_dyn : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phase_reject.Dyn (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reject_trait_item_default : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phase_reject.Trait_item_default (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            
(CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Reject_unsafe : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n  end\n\n  module Phase = Phase_reject.Unsafe (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nmodule Hoist_side_effects : PHASE_FULL = struct\n  module FA = Features.Full\n  module FB = Features.Full\n  module A = Ast.Full\n  module B = Ast.Full\n\n  module ExpectedFA = struct\n    open Features\n    include On\n    include Off.Monadic_binding\n    include Off.For_index_loop\n  end\n\n  module Phase = Side_effect_utils.Hoist (ExpectedFA)\n\n  module Coerce =\n    Feature_gate.Make (Features.Full) (ExpectedFA)\n      (struct\n        module A = Features.Full\n        module B = ExpectedFA\n        include Feature_gate.DefaultSubtype\n\n        let monadic_binding = reject\n        let for_index_loop = reject\n\n        let metadata =\n          Phase_reject.make_metadata\n            (CoercionForUntypedPhase\n               ([%show: Diagnostics.Phase.t] 
Phase.metadata.current_phase))\n      end)\n\n  let metadata = Phase.metadata\n  let to_full_ast : Phase.B.item list -> Ast.Full.item list = Stdlib.Obj.magic\n\n  let ditems =\n    List.map ~f:Coerce.ditem >> List.concat >> Phase.ditems >> to_full_ast\nend\n\nlet and_mut_defsite : (module PHASE_FULL) = (module And_mut_defsite)\nlet bundle_cycles : (module PHASE_FULL) = (module Bundle_cycles)\nlet cf_into_monads : (module PHASE_FULL) = (module Cf_into_monads)\nlet direct_and_mut : (module PHASE_FULL) = (module Direct_and_mut)\nlet drop_blocks : (module PHASE_FULL) = (module Drop_blocks)\nlet drop_match_guards : (module PHASE_FULL) = (module Drop_match_guards)\nlet drop_references : (module PHASE_FULL) = (module Drop_references)\n\nlet drop_return_break_continue : (module PHASE_FULL) =\n  (module Drop_return_break_continue)\n\nlet drop_sized_trait : (module PHASE_FULL) = (module Drop_sized_trait)\nlet explicit_conversions : (module PHASE_FULL) = (module Explicit_conversions)\nlet functionalize_loops : (module PHASE_FULL) = (module Functionalize_loops)\n\nlet hoist_disjunctive_patterns : (module PHASE_FULL) =\n  (module Hoist_disjunctive_patterns)\n\nlet local_mutation : (module PHASE_FULL) = (module Local_mutation)\nlet newtype_as_refinement : (module PHASE_FULL) = (module Newtype_as_refinement)\nlet reconstruct_asserts : (module PHASE_FULL) = (module Reconstruct_asserts)\n\nlet reconstruct_for_index_loops : (module PHASE_FULL) =\n  (module Reconstruct_for_index_loops)\n\nlet reconstruct_for_loops : (module PHASE_FULL) = (module Reconstruct_for_loops)\n\nlet reconstruct_question_marks : (module PHASE_FULL) =\n  (module Reconstruct_question_marks)\n\nlet reconstruct_while_loops : (module PHASE_FULL) =\n  (module Reconstruct_while_loops)\n\nlet reject_impl_type_method : (module PHASE_FULL) =\n  (module Reject_impl_type_method)\n\nlet reorder_fields : (module PHASE_FULL) = (module Reorder_fields)\nlet rewrite_control_flow : (module PHASE_FULL) = (module 
Rewrite_control_flow)\nlet rewrite_local_self : (module PHASE_FULL) = (module Rewrite_local_self)\nlet simplify_hoisting : (module PHASE_FULL) = (module Simplify_hoisting)\nlet simplify_match_return : (module PHASE_FULL) = (module Simplify_match_return)\n\nlet simplify_question_marks : (module PHASE_FULL) =\n  (module Simplify_question_marks)\n\nlet sort_items : (module PHASE_FULL) = (module Sort_items)\n\nlet sort_items_namespace_wise : (module PHASE_FULL) =\n  (module Sort_items_namespace_wise)\n\nlet specialize : (module PHASE_FULL) = (module Specialize)\nlet traits_specs : (module PHASE_FULL) = (module Traits_specs)\n\nlet transform_hax_lib_inline : (module PHASE_FULL) =\n  (module Transform_hax_lib_inline)\n\nlet trivialize_assign_lhs : (module PHASE_FULL) = (module Trivialize_assign_lhs)\nlet reject_arbitrary_lhs : (module PHASE_FULL) = (module Reject_arbitrary_lhs)\nlet reject_continue : (module PHASE_FULL) = (module Reject_continue)\nlet reject_question_mark : (module PHASE_FULL) = (module Reject_question_mark)\n\nlet reject_raw_or_mut_pointer : (module PHASE_FULL) =\n  (module Reject_raw_or_mut_pointer)\n\nlet reject_early_exit : (module PHASE_FULL) = (module Reject_early_exit)\nlet reject_as_pattern : (module PHASE_FULL) = (module Reject_as_pattern)\nlet reject_dyn : (module PHASE_FULL) = (module Reject_dyn)\n\nlet reject_trait_item_default : (module PHASE_FULL) =\n  (module Reject_trait_item_default)\n\nlet reject_unsafe : (module PHASE_FULL) = (module Reject_unsafe)\nlet hoist_side_effects : (module PHASE_FULL) = (module Hoist_side_effects)\n\nlet phases_list : (module PHASE_FULL) list =\n  [\n    and_mut_defsite;\n    bundle_cycles;\n    cf_into_monads;\n    direct_and_mut;\n    drop_blocks;\n    drop_match_guards;\n    drop_references;\n    drop_return_break_continue;\n    drop_sized_trait;\n    explicit_conversions;\n    functionalize_loops;\n    hoist_disjunctive_patterns;\n    local_mutation;\n    newtype_as_refinement;\n    reconstruct_asserts;\n  
  reconstruct_for_index_loops;\n    reconstruct_for_loops;\n    reconstruct_question_marks;\n    reconstruct_while_loops;\n    reject_impl_type_method;\n    reorder_fields;\n    rewrite_control_flow;\n    rewrite_local_self;\n    simplify_hoisting;\n    simplify_match_return;\n    simplify_question_marks;\n    sort_items;\n    sort_items_namespace_wise;\n    specialize;\n    traits_specs;\n    transform_hax_lib_inline;\n    trivialize_assign_lhs;\n    reject_arbitrary_lhs;\n    reject_continue;\n    reject_question_mark;\n    reject_raw_or_mut_pointer;\n    reject_early_exit;\n    reject_as_pattern;\n    reject_dyn;\n    reject_trait_item_default;\n    reject_unsafe;\n    hoist_side_effects;\n  ]\n\nlet phase_of_name : string -> (module PHASE_FULL) option = function\n  | \"and_mut_defsite\" -> Some and_mut_defsite\n  | \"bundle_cycles\" -> Some bundle_cycles\n  | \"cf_into_monads\" -> Some cf_into_monads\n  | \"direct_and_mut\" -> Some direct_and_mut\n  | \"drop_blocks\" -> Some drop_blocks\n  | \"drop_match_guards\" -> Some drop_match_guards\n  | \"drop_references\" -> Some drop_references\n  | \"drop_return_break_continue\" -> Some drop_return_break_continue\n  | \"drop_sized_trait\" -> Some drop_sized_trait\n  | \"explicit_conversions\" -> Some explicit_conversions\n  | \"functionalize_loops\" -> Some functionalize_loops\n  | \"hoist_disjunctive_patterns\" -> Some hoist_disjunctive_patterns\n  | \"local_mutation\" -> Some local_mutation\n  | \"newtype_as_refinement\" -> Some newtype_as_refinement\n  | \"reconstruct_asserts\" -> Some reconstruct_asserts\n  | \"reconstruct_for_index_loops\" -> Some reconstruct_for_index_loops\n  | \"reconstruct_for_loops\" -> Some reconstruct_for_loops\n  | \"reconstruct_question_marks\" -> Some reconstruct_question_marks\n  | \"reconstruct_while_loops\" -> Some reconstruct_while_loops\n  | \"reject_impl_type_method\" -> Some reject_impl_type_method\n  | \"reorder_fields\" -> Some reorder_fields\n  | \"rewrite_control_flow\" -> 
Some rewrite_control_flow\n  | \"rewrite_local_self\" -> Some rewrite_local_self\n  | \"simplify_hoisting\" -> Some simplify_hoisting\n  | \"simplify_match_return\" -> Some simplify_match_return\n  | \"simplify_question_marks\" -> Some simplify_question_marks\n  | \"sort_items\" -> Some sort_items\n  | \"sort_items_namespace_wise\" -> Some sort_items_namespace_wise\n  | \"specialize\" -> Some specialize\n  | \"traits_specs\" -> Some traits_specs\n  | \"transform_hax_lib_inline\" -> Some transform_hax_lib_inline\n  | \"trivialize_assign_lhs\" -> Some trivialize_assign_lhs\n  | \"reject_arbitrary_lhs\" -> Some reject_arbitrary_lhs\n  | \"reject_continue\" -> Some reject_continue\n  | \"reject_question_mark\" -> Some reject_question_mark\n  | \"reject_raw_or_mut_pointer\" -> Some reject_raw_or_mut_pointer\n  | \"reject_early_exit\" -> Some reject_early_exit\n  | \"reject_as_pattern\" -> Some reject_as_pattern\n  | \"reject_dyn\" -> Some reject_dyn\n  | \"reject_trait_item_default\" -> Some reject_trait_item_default\n  | \"reject_unsafe\" -> Some reject_unsafe\n  | \"hoist_side_effects\" -> Some hoist_side_effects\n  | _ -> None\n\nlet phases : string list =\n  [\n    \"and_mut_defsite\";\n    \"bundle_cycles\";\n    \"cf_into_monads\";\n    \"direct_and_mut\";\n    \"drop_blocks\";\n    \"drop_match_guards\";\n    \"drop_references\";\n    \"drop_return_break_continue\";\n    \"drop_sized_trait\";\n    \"explicit_conversions\";\n    \"functionalize_loops\";\n    \"hoist_disjunctive_patterns\";\n    \"local_mutation\";\n    \"newtype_as_refinement\";\n    \"reconstruct_asserts\";\n    \"reconstruct_for_index_loops\";\n    \"reconstruct_for_loops\";\n    \"reconstruct_question_marks\";\n    \"reconstruct_while_loops\";\n    \"reject_impl_type_method\";\n    \"reorder_fields\";\n    \"rewrite_control_flow\";\n    \"rewrite_local_self\";\n    \"simplify_hoisting\";\n    \"simplify_match_return\";\n    \"simplify_question_marks\";\n    \"sort_items\";\n    
\"sort_items_namespace_wise\";\n    \"specialize\";\n    \"traits_specs\";\n    \"transform_hax_lib_inline\";\n    \"trivialize_assign_lhs\";\n    \"reject_arbitrary_lhs\";\n    \"reject_continue\";\n    \"reject_question_mark\";\n    \"reject_raw_or_mut_pointer\";\n    \"reject_early_exit\";\n    \"reject_as_pattern\";\n    \"reject_dyn\";\n    \"reject_trait_item_default\";\n    \"reject_unsafe\";\n    \"hoist_side_effects\";\n  ]\n\n(*\nand_mut_defsite, bundle_cycles, cf_into_monads, direct_and_mut, drop_blocks, drop_match_guards, drop_references, drop_return_break_continue, drop_sized_trait, explicit_conversions, functionalize_loops, hoist_disjunctive_patterns, local_mutation, newtype_as_refinement, reconstruct_asserts, reconstruct_for_index_loops, reconstruct_for_loops, reconstruct_question_marks, reconstruct_while_loops, reject_impl_type_method, reorder_fields, rewrite_control_flow, rewrite_local_self, simplify_hoisting, simplify_match_return, simplify_question_marks, sort_items, sort_items_namespace_wise, specialize, traits_specs, transform_hax_lib_inline, trivialize_assign_lhs, reject_arbitrary_lhs, reject_continue, reject_question_mark, reject_raw_or_mut_pointer, reject_early_exit, reject_as_pattern, reject_dyn, reject_trait_item_default, reject_unsafe, hoist_side_effects\n*)\n"
  },
  {
    "path": "engine/lib/utils.ml",
    "content": "open Base\n\nlet ( << ) f g x = f (g x)\nlet ( >> ) f g x = g (f x)\nlet ( &&& ) (f : 'a -> 'b) (g : 'a -> 'c) (x : 'a) : 'b * 'c = (f x, g x)\n\nlet ( *** ) (f : 'a -> 'b) (g : 'c -> 'd) ((l, r) : 'a * 'c) : 'b * 'd =\n  (f l, g r)\n\nlet map_fst f = f *** Fn.id\nlet map_snd g = Fn.id *** g\nlet map_fst3 f (x, y, z) = (f x, y, z)\nlet map_snd3 f (x, y, z) = (x, f y, z)\nlet map_thd3 f (x, y, z) = (x, y, f z)\nlet fst3 (x, _, _) = x\nlet snd3 (_, y, _) = y\nlet thd3 (_, _, z) = z\nlet curry f x y = f (x, y)\nlet uncurry f (x, y) = f x y\nlet curry3 f x y z = f (x, y, z)\nlet uncurry3 f (x, y, z) = f x y z\nlet tup2 a b = (a, b)\nlet swap (a, b) = (b, a)\nlet apply f x = f x\nlet ( let* ) x f = Option.bind ~f x\nlet some_if_true = function true -> Some () | _ -> None\n\nlet expect_singleton : 'a. 'a list -> 'a option = function\n  | [ x ] -> Some x\n  | _ -> None\n\n(** [let*? () = guard in body] acts as a guard: if [guard] holds, then [body] is\n    executed, otherwise [None] is returned. *)\nlet ( let*? ) (type a) (x : bool) (f : unit -> a option) =\n  let* () = some_if_true x in\n  f ()\n\nlet map_first_letter (f : string -> string) (s : string) =\n  let first, rest = String.(prefix s 1, drop_prefix s 1) in\n  f first ^ rest\n\nlet rec split_list_once ~equal ~needle ~acc subject =\n  match subject with\n  | [] -> (List.rev acc, [])\n  | hd :: tl ->\n      if List.is_prefix subject ~prefix:needle ~equal then\n        (List.rev acc, List.drop subject (List.length needle))\n      else split_list_once ~equal ~needle ~acc:(hd :: acc) tl\n\nlet split_list ~equal ~needle (subject : 'a list) : 'a list list =\n  let rec h l =\n    match split_list_once ~equal ~needle ~acc:[] l with\n    | l, [] -> [ l ]\n    | l, r -> l :: h r\n  in\n  h subject\n\n(** Map over a list with a option-returning function. Returns `Some` iff every\n    calls to `f` returned `Some`. 
*)\nlet rec maybe_map ~(f : 'a -> 'b option) (l : 'a list) : 'b list option =\n  match l with\n  | hd :: tl ->\n      let* hd = f hd in\n      let* tl = maybe_map ~f tl in\n      Some (hd :: tl)\n  | [] -> Some []\n\nlet first_letter s = String.prefix s 1\nlet is_uppercase s = String.equal s (String.uppercase s)\nlet is_lowercase s = String.equal s (String.lowercase s)\nlet start_uppercase = first_letter >> is_uppercase\nlet start_lowercase = first_letter >> is_lowercase\nlet string_to_int s = try Some (Int.of_string s) with _ -> None\n\nlet split_str (s : string) ~(on : string) : string list =\n  split_list ~equal:Char.equal ~needle:(String.to_list on) (String.to_list s)\n  |> List.map ~f:String.of_char_list\n\nlet last_init (l : 'a list) : ('a list * 'a) option =\n  Option.both (List.drop_last l) (List.last l)\n\nlet inits (type a) (l : a list) : (a list * a) list =\n  List.fold_map ~init:[]\n    ~f:(fun trace x ->\n      let trace = trace @ [ x ] in\n      (trace, (trace, x)))\n    l\n  |> snd\n\nlet sequence (l : 'a option list) : 'a list option =\n  List.fold_right\n    ~f:(fun x acc ->\n      match (acc, x) with Some acc, Some x -> Some (x :: acc) | _ -> None)\n    ~init:(Some []) l\n\nlet ( <|> ) x f = match x with Some x -> Some x | None -> f ()\nlet tabsize = 2\nlet newline_indent depth : string = \"\\n\" ^ String.make (tabsize * depth) ' '\n\nmodule MyInt64 = struct\n  include Base.Int64\n\n  let t_of_yojson (json : Yojson.Safe.t) : t =\n    match json with\n    | `Intlit s -> of_string s\n    | `Int i -> of_int i\n    | _ -> failwith \"Couldn't parse MyInt64.t\"\n\n  let yojson_of_t (int64 : t) : Yojson.Safe.t = `Intlit (to_string int64)\nend\n\ninclude (\n  struct\n    let id = ref 0\n\n    let tempfile_path ~suffix =\n      id := !id + 1;\n      Core.Filename.(\n        concat temp_dir_name (\"hax-debug-\" ^ Int.to_string !id ^ suffix))\n  end :\n    sig\n      val tempfile_path : suffix:string -> string\n      (** Generates a temporary file path that 
ends with `suffix` *)\n    end)\n\nmodule List = struct\n  include Base.List\n\n  let zip_opt : 'a 'b. 'a list -> 'b list -> ('a * 'b) list option =\n   fun x y ->\n    match zip x y with Ok result -> Some result | Unequal_lengths -> None\n\n  let longest_prefix (type t) ~(eq : t -> t -> bool) (l : t list list) : t list\n      =\n    match l with\n    | [] -> []\n    | hd :: tl ->\n        let tl = ref tl in\n        let f x =\n          let exception Stop in\n          try\n            tl :=\n              List.map !tl ~f:(function\n                | y :: tl when eq x y -> tl\n                | _ -> raise Stop);\n            true\n          with Stop -> false\n        in\n        List.take_while ~f hd\nend\n"
  },
  {
    "path": "engine/names/Cargo.toml",
    "content": "[package]\nname = \"hax-engine-names\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\ndescription = \"Dummy crate containing all the Rust names the hax engine should be aware of\"\n\n[dependencies]\nhax-lib-protocol = {path = \"../../hax-lib-protocol\"}\nhax-lib = {path = \"../../hax-lib\"}\n\n[package.metadata.release]\nrelease = false\n"
  },
  {
    "path": "engine/names/README.md",
    "content": "# `hax-engine-names`\n\n## Purpose of the crate\nThe crate `hax-engine-names` is a dummy crate that contains all the\nRust names the engine should be aware of.\n\nFor instance, the engine needs to know about `Some` and `None` to\nreconstruct loops: Rust desugars `for .. in iterator {..}` loops into\nloops with pattern matching on `iterator.next()`, which returns an\noption.\n\n## How to edit this crate\nIf you need a special treatment for a Rust name in the engine, you\nshould just add a piece of code that is using it.\n\nFor example, to make the name `Some` available to the engine, one\ncould add the following function at the end of the `src/lib.rs` file:\n\n```rust\nfn some(x: Option<()>) {\n    match x {\n        Some(_) => (),\n        _ => (),\n    }\n}\n```\n\nNote this will also make `Option` available.\n\n## How names are generated in OCaml\nThe subcrate `hax-engine-names-extract` runs `cargo hax into json` on\nthe crate `hax-engine-names`, and extracts all the names it finds,\nalong with other information.\n\nThose names are compiled into the enumeration type\n`Concrete_ident_generated.name`. You can look at those names by\nrunning `hax-engine-names-extract | less`. As an example,\n`core::option::Option::None` is made available as the\n`Core__option__Option__None` variant.\n\n## How to match a name in the engine\nThe functions `Concrete_ident.eq_name` and `Global_ident.eq_name`\nallow for comparing `Concrete_ident.t` and `Global_ident.t` with\n`Concrete_ident_generated.name`.\n\nFor example, the expression `Concrete_ident.eq_name\nCore__option__Option__None my_concrete_ident` checks whether the\nconcrete ident `my_concrete_ident` is `core::option::Option::None`.\n\n## How to build a concrete ident out of a name\nSee the function `Concrete_ident.of_name`.\n\n"
  },
  {
    "path": "engine/names/extract/Cargo.toml",
    "content": "[package]\nname = \"hax-engine-names-extract\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\ndescription = \"Helper binary generating an OCaml module\"\n\n\n[build-dependencies]\nserde.workspace = true\nserde_json.workspace = true\nhax-engine-names.workspace = true\nhax-adt-into.workspace = true\ntempfile.version = \"3.9\"\n\n[features]\ndefault = [\"extract_names_mode\"]\nextract_names_mode = []\n\n[lints.rust]\nunexpected_cfgs = { level = \"warn\", check-cfg = ['cfg(feature, values(\"rustc\"))'] }\n\n[package.metadata.release]\nrelease = false\n"
  },
  {
    "path": "engine/names/extract/build.rs",
    "content": "use serde_json::Value;\nuse std::process::{Command, Stdio};\n\n/// Instead of depending on `hax_frontend_exporter` (that links to\n/// rustc and exposes a huge number of type definitions and their\n/// impls), we just inline a small module here that contains the three\n/// type definition we need. See the module for complementary\n/// informations.\n#[path = \"../../../frontend/exporter/src/types/def_id.rs\"]\nmod hax_frontend_exporter_def_id;\nuse hax_frontend_exporter_def_id::*;\n\nmod id_table {\n    //! Shim to make `def_id.rs` build. Replaces the `id_table` interner with a plain `Arc`.\n    use serde::{Deserialize, Serialize};\n    use std::sync::Arc;\n\n    #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]\n    pub struct Node<T> {\n        pub value: Arc<T>,\n        pub id: u32,\n    }\n\n    impl<T> std::ops::Deref for Node<T> {\n        type Target = T;\n        fn deref(&self) -> &Self::Target {\n            self.value.as_ref()\n        }\n    }\n}\n\n/// Name of the current crate\nconst HAX_ENGINE_NAMES_CRATE: &str = \"hax_engine_names\";\n/// Path `a::b` needs to be compiled to a OCaml variant name, `::` is\n/// replaced with `SEPARATOR`\nconst SEPARATOR: &str = \"__\";\n/// \"Key\" for OCaml quoted strings\nconst ESCAPE_KEY: &str = \"hax_escape_ocaml_json\";\n\nfn uppercase_first_letter(s: &str) -> String {\n    let mut c = s.chars();\n    match c.next() {\n        None => String::new(),\n        Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),\n    }\n}\n\nfn disambiguator_to_str(disambiguator: u32) -> String {\n    if disambiguator == 0 {\n        \"\".into()\n    } else {\n        format!(\"_{disambiguator}\")\n    }\n}\n\nfn def_path_item_to_str(path_item: DefPathItem) -> String {\n    match path_item {\n        DefPathItem::TypeNs(s)\n        | DefPathItem::ValueNs(s)\n        | DefPathItem::MacroNs(s)\n        | DefPathItem::LifetimeNs(s) => s,\n        DefPathItem::CrateRoot { name } => 
uppercase_first_letter(&name),\n        DefPathItem::Impl => \"Impl\".into(),\n        DefPathItem::ForeignMod => \"ForeignMod\".into(),\n        DefPathItem::Use => \"Use\".into(),\n        DefPathItem::GlobalAsm => \"GlobalAsm\".into(),\n        DefPathItem::Closure => \"Closure\".into(),\n        DefPathItem::Ctor => \"Ctor\".into(),\n        DefPathItem::AnonConst => \"AnonConst\".into(),\n        DefPathItem::PromotedConst => \"PromotedConst\".into(),\n        DefPathItem::OpaqueTy => \"OpaqueTy\".into(),\n        DefPathItem::OpaqueLifetime(..) => \"OpaqueLifetime\".into(),\n        DefPathItem::AnonAssocTy(..) => \"AnonAssocTy\".into(),\n        DefPathItem::SyntheticCoroutineBody => \"SyntheticCoroutineBody\".into(),\n        DefPathItem::NestedStatic => \"NestedStatic\".into(),\n        DefPathItem::LateAnonConst => \"LateAnonConst\".into(),\n        DefPathItem::DesugaredAnonymousLifetime => \"DesugaredAnonymousLifetime\".into(),\n    }\n}\n\nfn disambiguated_def_path_item_to_str(defpath: &DisambiguatedDefPathItem) -> String {\n    let data = def_path_item_to_str(defpath.data.clone());\n    let disambiguator = disambiguator_to_str(defpath.disambiguator);\n    format!(\"{data}{disambiguator}\")\n}\n\n/// Replaces the crate name `HAX_ENGINE_NAMES_CRATE` by `\"rust_primitives\"`.\nfn rename_krate(value: &mut Value) {\n    match value {\n        Value::Object(map) => {\n            for (key, val) in map.iter_mut() {\n                if let Value::String(s) = val\n                    && key == \"krate\"\n                    && s == \"hax_engine_names\"\n                {\n                    *s = \"rust_primitives\".to_string();\n                }\n                rename_krate(val);\n            }\n        }\n        Value::Array(v) => v.iter_mut().for_each(rename_krate),\n        _ => {}\n    }\n}\n\nfn def_id_to_str(def_id: &DefId) -> (Value, String) {\n    let crate_name = if def_id.krate == HAX_ENGINE_NAMES_CRATE {\n        \"rust_primitives\"\n    } else 
{\n        &def_id.krate\n    };\n\n    // Update the crate name in the json output as well.\n    let mut json = serde_json::to_value(def_id).unwrap();\n    rename_krate(&mut json);\n\n    let crate_name = uppercase_first_letter(crate_name);\n    let path = [crate_name]\n        .into_iter()\n        .chain(def_id.path.iter().map(disambiguated_def_path_item_to_str))\n        .collect::<Vec<String>>()\n        .join(SEPARATOR);\n    (json, path)\n}\n\n/// Checks whether a def id refers to a macro or not.\n/// We don't want to extract macro names.\nfn is_macro(did: &DefId) -> bool {\n    let Some(last) = did.contents.value.path.last() else {\n        return false;\n    };\n    matches!(last.data, DefPathItem::MacroNs { .. })\n}\n\n/// Checks wether a def id refers to a syntactic item (see `syntactic_item.rs` in hax' exporter)\nfn is_synthetic(did: &DefId) -> bool {\n    &did.contents.value.krate == hax_frontend_exporter_def_id::SYNTHETIC_CRATE_NAME\n}\n\nfn reader_to_str(s: String) -> String {\n    let json: Value = match serde_json::from_str(&s) {\n        Ok(v) => v,\n        Err(e) => panic!(\"error while parsing JSON: {e}\\n\\nString was: {}\", &s),\n    };\n    let def_ids: Vec<DefId> = serde_json::from_value(json[\"def_ids\"].clone()).unwrap();\n    let impl_infos = json[\"impl_infos\"].clone();\n\n    let def_ids = def_ids\n        .into_iter()\n        .filter(|did| !is_macro(did))\n        .filter(|did| !is_synthetic(did))\n        .map(|did| {\n            let (json, krate_name) = def_id_to_str(&did);\n            (serde_json::to_string(&json).unwrap(), krate_name)\n        })\n        .collect::<Vec<_>>();\n\n    const TAB: &str = \"    \";\n    let mut result = String::new();\n    result += &format!(\n        \"type t = \\n{TAB}  {}[@@deriving show, yojson, compare, sexp, eq, hash]\\n\",\n        def_ids\n            .iter()\n            .map(|(_, def_name)| format!(\"{def_name}\"))\n            .collect::<Vec<_>>()\n            .join(&format!(\"\\n{TAB}| 
\"))\n    );\n\n    result += \"\\n\";\n    result += \"include (val Base.Comparator.make ~compare ~sexp_of_t)\";\n    result += \"\\n\";\n    result += \"module Values = struct\\n\";\n    for (json, name) in &def_ids {\n        result += &format!(\n            \"{TAB}let parsed_{name} = Types.def_id_of_yojson (Yojson.Safe.from_string {}{ESCAPE_KEY}|{}|{ESCAPE_KEY}{})\\n\",\n            \"{\", json, \"}\"\n        );\n    }\n    result += \"end\\n\\n\";\n\n    result += &format!(\n        \"let def_id_of: t -> Types.def_id = function\\n{TAB}  {}\\n\\n\",\n        def_ids\n            .iter()\n            .map(|(_, n)| format!(\"{n} -> Values.parsed_{n}\"))\n            .collect::<Vec<_>>()\n            .join(&format!(\"\\n{TAB}| \"))\n    );\n\n    result += &format!(\n        \"let impl_infos_json_list = match Yojson.Safe.from_string {}{ESCAPE_KEY}|{}|{ESCAPE_KEY}{} with | `List l -> l | _ -> failwith \\\"Expected a list of `def_id * impl_infos`\\\"\\n\\n\",\n        \"{\",\n        serde_json::to_string(&impl_infos).unwrap(),\n        \"}\"\n    );\n    result += &format!(\n        \"let impl_infos = Base.List.map ~f:(function | `List [did; ii] -> (Types.def_id_of_yojson did, Types.impl_infos_of_yojson ii) | _ -> failwith \\\"Expected tuple\\\") impl_infos_json_list\"\n    );\n\n    result\n}\n\nfn get_json() -> String {\n    let mut cmd =\n        Command::new(std::env::var(\"HAX_CARGO_COMMAND_PATH\").unwrap_or(\"cargo-hax\".to_string()));\n    cmd.args([\n        \"hax\",\n        \"-C\",\n        \"-p\",\n        \"hax-engine-names\",\n        \"--lib\",\n        \";\",\n        \"json\",\n        \"--include-extra\",\n        \"-o\",\n        \"-\",\n    ])\n    .stdout(Stdio::piped())\n    .stderr(Stdio::piped());\n\n    let out = cmd.output().unwrap();\n    let stdout = String::from_utf8(out.stdout).unwrap();\n    let stderr = String::from_utf8(out.stderr).unwrap();\n    eprintln!(\"{}\", stderr);\n    stdout\n}\n\nfn main() {\n    std::fs::write(\n        
format!(\"{}/module.ml\", std::env::var(\"OUT_DIR\").unwrap()),\n        reader_to_str(get_json()),\n    )\n    .unwrap()\n}\n"
  },
  {
    "path": "engine/names/extract/src/main.rs",
    "content": "const OCAML_MODULE: &str = include_str!(concat!(env!(\"OUT_DIR\"), \"/module.ml\"));\n\nfn main() {\n    println!(\"{}\", OCAML_MODULE);\n}\n"
  },
  {
    "path": "engine/names/src/crypto_abstractions.rs",
    "content": "use hax_lib_protocol::crypto::*;\n\nfn crypto_abstractions() {\n    let bytes = vec![0u8; 32];\n    let iv = AEADIV::from_bytes(&bytes);\n    let key = AEADKey::from_bytes(AEADAlgorithm::Chacha20Poly1305, &bytes);\n\n    let (cipher_text, _tag) = aead_encrypt(key, iv, &bytes, &bytes);\n    let iv = AEADIV::from_bytes(&bytes);\n    let key = AEADKey::from_bytes(AEADAlgorithm::Chacha20Poly1305, &bytes);\n    let _ = aead_decrypt(key, iv, &bytes, &cipher_text, AEADTag::from_bytes(&bytes));\n\n    let p = DHElement::from_bytes(&bytes);\n    let s = DHScalar::from_bytes(&bytes);\n    dh_scalar_multiply(DHGroup::X25519, s.clone(), p);\n    dh_scalar_multiply_base(DHGroup::X25519, s);\n\n    let _ = hmac(HMACAlgorithm::Sha256, &bytes, &bytes);\n\n    let _ = 1u64.to_le_bytes();\n    let slice = &bytes[0..1];\n    let _ = slice.len();\n    let _ = slice.to_vec();\n    let _ = [slice, slice].concat();\n    let mut v = vec![0];\n    v.extend_from_slice(slice);\n    v.truncate(1);\n\n    let _ = hash(HashAlgorithm::Sha256, &bytes);\n    let _ = cipher_text.clone();\n}\n"
  },
  {
    "path": "engine/names/src/lib.rs",
    "content": "#![allow(dead_code)]\n#![feature(try_trait_v2)]\n#![feature(allocator_api)]\n\nextern crate alloc;\n/* This is a dummy Rust file. Every path used in this file will be\n * exported and made available automatically in OCaml. */\n\nmod crypto_abstractions;\n\nfn dummy_hax_concrete_ident_wrapper<I: core::iter::Iterator<Item = u8>>(x: I, mut y: I) {\n    let _: core::result::Result<u8, u8> = core::result::Result::Ok(0);\n    let _: core::result::Result<u8, u8> = core::result::Result::Err(0);\n    let _ = x.fold(0, |a, b| a + b);\n    let _ = y.next();\n    let _: core::ops::ControlFlow<u8> = core::ops::ControlFlow::Break(0);\n    let _: core::ops::ControlFlow<u8> = core::ops::ControlFlow::Continue(());\n    let mut v = vec![()];\n    v[0];\n    v[0] = ();\n    let _ = v.as_slice().to_vec();\n    use std::ops::FromResidual;\n    let _ = Result::<String, i64>::from_residual(Err(3u8));\n    let _ = Box::new(());\n    let _: Option<alloc::alloc::Global> = None;\n    let _: Option<()> = Some(());\n    let _: Option<()> = None;\n    let _ = Option::<()>::None.is_some();\n    let _: Result<(), u32> = Result::Err(3u8).map_err(u32::from);\n    assert!(true);\n    assert_eq!(1, 1);\n    hax_lib::assert!(true);\n    hax_lib::_internal_loop_invariant(|_: usize| true);\n    hax_lib::_internal_while_loop_invariant(hax_lib::Prop::from(true));\n    hax_lib::_internal_loop_decreases(hax_lib::Int::_unsafe_from_str(\"0\"));\n\n    fn props() {\n        use hax_lib::prop::*;\n        let x = Prop::from_bool(true);\n        constructors::from_bool(true);\n        constructors::and(x, x);\n        constructors::or(x, x);\n        constructors::not(x);\n        constructors::eq(x, x);\n        constructors::ne(x, x);\n        constructors::implies(x, x);\n        constructors::forall(|_: ()| x);\n        constructors::exists(|_: ()| x);\n\n        Prop::from_bool(true);\n        Prop::and(x, x);\n        Prop::or(x, x);\n        Prop::not(x);\n        Prop::eq(x, x);\n        
Prop::ne(x, x);\n        Prop::implies(x, x);\n\n        true.to_prop();\n\n        forall(|_: ()| x);\n        exists(|_: ()| x);\n        implies(x, x);\n    }\n\n    let _ = [()].into_iter();\n    let _: u16 = 6u8.into();\n    let _ = 1..2;\n    let _ = 1..;\n    let _ = ..;\n    let _ = ..1;\n\n    let _ = [\n        std::ops::ControlFlow::Break(()),\n        std::ops::ControlFlow::Continue(()),\n    ];\n\n    fn iterator_functions<It: Iterator + Clone>(it: It) {\n        let _ = it.clone().step_by(2);\n        let _ = it.clone().enumerate();\n        let _ = [()].chunks_exact(2);\n        let _ = [()].iter();\n        let _ = (&[()] as &[()]).iter();\n    }\n\n    {\n        use hax_lib::*;\n        let a: Int = 3u8.lift();\n        let _: Int = 3u8.to_int();\n        let _ = a.clone().pow2();\n        let _ = Int::_unsafe_from_str(\"1\");\n        let _: u32 = a.concretize();\n    }\n\n    fn index_mut<I, T: std::ops::IndexMut<I>>(mut x: T, index: I) {\n        x.index_mut(index);\n    }\n\n    fn question_mark_result<A, B: From<A>>(x: A) -> Result<(), B> {\n        Err(x)?;\n        Ok(())\n    }\n\n    let _ = hax_lib::inline(\"\");\n    let _: () = hax_lib::inline_unsafe(\"\");\n    let _: () = hax_lib::any_to_unit(());\n    use hax_lib::{RefineAs, Refinement};\n\n    fn refinements<T: Refinement + Clone, U: RefineAs<T>>(x: T, y: U) -> T {\n        let _ = x.clone().get_mut();\n        T::new(x.get());\n        y.into_checked()\n    }\n\n    const _: () = {\n        use core::{cmp::*, ops::*};\n        fn arith<\n            X: Add<Output = X>\n                + Sub<Output = X>\n                + Mul<Output = X>\n                + Div<Output = X>\n                + Rem<Output = X>\n                + BitXor<Output = X>\n                + BitAnd<Output = X>\n                + BitOr<Output = X>\n                + Shl<Output = X>\n                + Shr<Output = X>\n                + Neg<Output = X>\n                + Not<Output = X>\n                + 
PartialOrd\n                + Eq\n                + Ord\n                + Copy,\n        >(\n            x: X,\n        ) -> X {\n            let _ = x < x && x > x && x <= x && x >= x && x == x && x != x;\n            (x ^ x & !x) + x / x * x - x | (-x) % x << x >> x\n        }\n    };\n\n    fn dummy<T: core::ops::Try<Output = ()>>(z: T) {\n        let _ = T::from_output(());\n        let _ = z.branch();\n    }\n\n    let s: &str = \"123\";\n    let ptr: *const u8 = s.as_ptr();\n\n    unsafe {\n        let _ = *ptr.offset(1) as char;\n    }\n\n    const _: () = {\n        use std::ops::DerefMut;\n        fn f<T: DerefMut>(x: T) {\n            let _: &mut _ = { x }.deref_mut();\n        }\n        use std::ops::Deref;\n        fn g<T: Deref>(x: T) {\n            let _: &_ = { x }.deref();\n        }\n    };\n\n    #[derive(PartialEq)]\n    struct Foo();\n}\n\nmacro_rules! impl_arith {\n    ($name:ident$(,)?) => {\n        mod $name {\n            fn add() {}\n            fn sub() {}\n            fn mul() {}\n            fn div() {}\n            fn rem() {}\n            fn neg() {}\n            fn bit_xor() {}\n            fn bit_and() {}\n            fn bit_or() {}\n            fn shl() {}\n            fn shr() {}\n            fn eq() {}\n            fn lt() {}\n            fn le() {}\n            fn ne() {}\n            fn ge() {}\n            fn gt() {}\n        }\n    };\n    ($name:ident,$($rest:tt)*) => {\n        impl_arith!($name);\n        impl_arith!($($rest)*);\n    }\n}\n\nimpl_arith!(u8, u16, u32, u64, u128, usize);\nimpl_arith!(i8, i16, i32, i64, i128, isize);\n\nfn offset() {}\n\nfn unsize() {}\n\n/// Hax additions\nmod hax {\n    fn failure() {}\n    struct Failure;\n    enum Never {}\n\n    // Only useful when HAX_CORE_EXTRACTION_MODE in `on`\n    enum MutRef {}\n\n    struct Tuple2(u8, u8);\n    fn deref_op() {}\n    fn cast_op() {}\n    fn logical_op_and() {}\n    fn logical_op_or() {}\n\n    fn while_loop() {}\n    fn while_loop_cf() {}\n    fn 
while_loop_return() {}\n    fn repeat() {}\n    fn update_at() {}\n    mod monomorphized_update_at {\n        fn update_at_usize() {}\n        fn update_at_range() {}\n        fn update_at_range_from() {}\n        fn update_at_range_to() {}\n        fn update_at_range_full() {}\n    }\n    // TODO: Should that live here? (this is F* specific)\n    fn array_of_list() {}\n    fn never_to_any() {}\n\n    mod folds {\n        fn fold_range() {}\n        fn fold_range_cf() {}\n        fn fold_range_return() {}\n        fn fold_range_step_by() {}\n        fn fold_range_step_by_cf() {}\n        fn fold_range_step_by_return() {}\n        fn fold_enumerated_slice() {}\n        fn fold_enumerated_slice_cf() {}\n        fn fold_enumerated_slice_return() {}\n        fn fold_enumerated_chunked_slice() {}\n        fn fold_enumerated_chunked_slice_cf() {}\n        fn fold_enumerated_chunked_slice_return() {}\n        fn fold_chunked_slice() {}\n        fn fold_chunked_slice_cf() {}\n        fn fold_chunked_slice_return() {}\n        fn fold_cf() {}\n        fn fold_return() {}\n    }\n\n    /// The engine uses this `dropped_body` symbol as a marker value\n    /// to signal that a item was extracted without body.\n    fn dropped_body() {}\n\n    mod int {\n        fn add() {}\n        fn sub() {}\n        fn div() {}\n        fn mul() {}\n        fn rem() {}\n        fn neg() {}\n\n        fn le() {}\n        fn lt() {}\n        fn ge() {}\n        fn gt() {}\n\n        fn eq() {}\n        fn ne() {}\n\n        fn from_machine() {}\n        fn into_machine() {}\n    }\n\n    mod machine_int {\n        fn add() {}\n        fn sub() {}\n        fn div() {}\n        fn mul() {}\n        fn rem() {}\n\n        fn not() {}\n        fn bitxor() {}\n        fn bitor() {}\n        fn bitand() {}\n        fn shl() {}\n        fn shr() {}\n\n        fn eq() {}\n        fn ne() {}\n        fn le() {}\n        fn lt() {}\n        fn ge() {}\n        fn gt() {}\n\n        fn 
add_with_overflow() {}\n        fn sub_with_overflow() {}\n        fn mul_with_overflow() {}\n        fn cmp() {}\n    }\n\n    mod control_flow_monad {\n        trait ControlFlowMonad {\n            fn lift() {}\n        }\n        mod mexception {\n            fn run() {}\n        }\n        mod mresult {\n            fn run() {}\n        }\n        mod moption {\n            fn run() {}\n        }\n    }\n    fn box_new() {}\n\n    mod explicit_monadic {\n        fn lift() {}\n        fn pure() {}\n    }\n}\n\nmod arithmetic {\n    fn neg() {}\n}\n"
  },
  {
    "path": "engine/utils/generate_from_ast/README.md",
    "content": "# `generate_from_ast`\n\n## `generate_from_ast visitors`\nThis binary reads the AST module of hax and creates **standalone**\nvisitors. We need to define visitors and the types of the AST in two\nseparate modules. Otherwise, each time we instantiate the AST functor,\nwe end up re-defining every single visitor. Since the AST functor is\ninstantiated a lot, this used to lead to huge memory consumption while\nbuilding.\n\nThis binary takes an OCaml module that defines types as input and\noutputs an OCaml module defining visitors for those types.\n\nNote that this binary relies on the structure and naming of the AST of\nhax; it is not intended for any other use.\n\n## `generate_from_ast ast_builder`\nGenerates helpers to build node in the AST.\n"
  },
  {
    "path": "engine/utils/generate_from_ast/codegen_ast_builder.ml",
    "content": "open Base\nopen Utils\nopen Types\n\nlet rec print_ty (t : Type.t) =\n  if String.is_prefix t.typ ~prefix:\"prim___tuple_\" then\n    \"(\" ^ String.concat ~sep:\" * \" (List.map t.args ~f:print_ty) ^ \")\"\n  else\n    \"(\"\n    ^ (if List.is_empty t.args then \"\"\n       else \"(\" ^ String.concat ~sep:\", \" (List.map t.args ~f:print_ty) ^ \") \")\n    ^ t.typ ^ \")\"\n\nlet print_record_or_tuple is_record x =\n  let l, sep, r = if is_record then (\"{\", \";\", \"}\") else (\"(\", \",\", \")\") in\n  l ^ String.concat ~sep (List.map ~f:fst x) ^ r\n\nlet print_record = print_record_or_tuple true\nlet print_tuple = print_record_or_tuple false\n\nlet mk_builder (provided_fields : string list)\n    ((record, enum) : Datatype.t * Datatype.t) =\n  let ty = record.name in\n  let record, variants =\n    match (record.kind, enum.kind) with\n    | Record record, Variant variants -> (record, variants)\n    | _ -> failwith \"mk_builder: bad kinds of datatypes\"\n  in\n  let record_names = List.map ~f:fst record in\n  let args =\n    record\n    |> List.filter\n         ~f:(fst >> List.mem ~equal:[%eq: string] provided_fields >> not)\n    |> List.filter ~f:(fun (_, ty) -> not ([%eq: string] ty.Type.typ enum.name))\n    |> List.map ~f:(fun (name, ty) -> (true, name, ty))\n  in\n  let field_name_raw, _ =\n    List.find ~f:(fun (_, ty) -> [%eq: string] ty.Type.typ enum.name) record\n    |> Option.value_exn\n  in\n  List.map\n    ~f:(fun Variant.{ name; payload } ->\n      let extra_lb = ref \"\" in\n      let args =\n        args\n        @\n        match payload with\n        | VariantPayload.Record fields ->\n            fields\n            |> List.map ~f:(fun (name, ty) ->\n                   ( true,\n                     (if List.mem ~equal:[%eq: string] record_names name then (\n                        let name' = \"inner_\" ^ name in\n                        (* if not ([%eq: string] field_name_raw name) then *)\n                        extra_lb :=\n      
                    !extra_lb ^ \"let \" ^ name ^ \" = \" ^ name' ^ \" in\\n\";\n                        name')\n                      else name),\n                     ty ))\n        | Tuple types ->\n            List.mapi ~f:(fun i ty -> (false, \"x\" ^ Int.to_string i, ty)) types\n        | None -> []\n      in\n      let sargs =\n        List.map\n          ~f:(fun (named, name, ty) ->\n            (if named then \"~\" else \"\") ^ \"(\" ^ name ^ \":\" ^ print_ty ty ^ \")\")\n          args\n        |> String.concat ~sep:\" \"\n      in\n      let head = \"let \" ^ ty ^ \"_\" ^ name ^ \" \" ^ sargs ^ \": \" ^ ty ^ \" = \" in\n      let spayload =\n        match payload with\n        | Record record -> print_record record\n        | Tuple types ->\n            List.mapi ~f:(fun i ty -> (\"x\" ^ Int.to_string i, ty)) types\n            |> print_tuple\n        | None -> \"\"\n      in\n      let body =\n        \"let \" ^ field_name_raw ^ \": \" ^ enum.name ^ \" = \" ^ !extra_lb ^ \"\\n\"\n        ^ name ^ \" \" ^ spayload ^ \" in\"\n      in\n      let body = body ^ print_record record in\n      head ^ body)\n    variants\n  |> String.concat ~sep:\"\\n\\n\"\n\nlet mk datatypes =\n  let find name =\n    List.find ~f:(fun dt -> [%eq: string] dt.Datatype.name name) datatypes\n    |> Option.value_exn\n  in\n  let data =\n    [\n      (find \"expr\", find \"expr'\");\n      (find \"pat\", find \"pat'\");\n      (find \"item\", find \"item'\");\n      (find \"guard\", find \"guard'\");\n      (find \"trait_item\", find \"trait_item'\");\n      (find \"impl_expr\", find \"impl_expr_kind\");\n    ]\n  in\n  let body = data |> List.map ~f:(mk_builder []) |> String.concat ~sep:\"\\n\\n\" in\n  let spanned =\n    data |> List.map ~f:(mk_builder [ \"span\" ]) |> String.concat ~sep:\"\\n\\n\"\n  in\n  {|\nopen! Prelude\nopen! 
Ast\n\n\nmodule Make (F : Features.T) = struct\n  open Ast.Make(F)\n\nmodule Explicit = struct\n|}\n  ^ body\n  ^ {|\nend\n\n  module type SPAN = sig val span: span end\n  module Make(Span: SPAN) = struct\n    open Span\n    |}\n  ^ spanned ^ {|\n  end\n\nend\n|}\n"
  },
  {
    "path": "engine/utils/generate_from_ast/codegen_ast_destruct.ml",
    "content": "open Base\nopen Utils\nopen Types\n\nlet rec print_ty (t : Type.t) =\n  if String.is_prefix t.typ ~prefix:\"prim___tuple_\" then\n    \"(\" ^ String.concat ~sep:\" * \" (List.map t.args ~f:print_ty) ^ \")\"\n  else\n    \"(\"\n    ^ (if List.is_empty t.args then \"\"\n       else \"(\" ^ String.concat ~sep:\", \" (List.map t.args ~f:print_ty) ^ \") \")\n    ^ t.typ ^ \")\"\n\nlet print_record_or_tuple is_record x =\n  let l, sep, r = if is_record then (\"{\", \";\", \"}\") else (\"(\", \",\", \")\") in\n  l ^ String.concat ~sep (List.map ~f:fst x) ^ r\n\nlet print_record = print_record_or_tuple true\nlet print_tuple = print_record_or_tuple false\n\nlet print_record_type_or_tuple is_record x =\n  let l, sep, r = if is_record then (\"{\", \";\", \"}\") else (\"(\", \"*\", \")\") in\n  l\n  ^ String.concat ~sep\n      (List.map\n         ~f:(fun (name, ty) ->\n           (if is_record then name ^ \":\" else \"\") ^ print_ty ty)\n         x)\n  ^ r\n\nlet print_record_type = print_record_type_or_tuple true\n\nlet print_tuple_type =\n  List.map ~f:(fun ty -> (\"\", ty)) >> print_record_type_or_tuple false\n\nlet mk_builder ((record, enum) : Datatype.t * Datatype.t) =\n  let ty = record.name in\n  let record, variants =\n    match (record.kind, enum.kind) with\n    | Record record, Variant variants -> (record, variants)\n    | _ -> failwith \"mk_builder: bad kinds of datatypes\"\n  in\n  let field_name_raw, _ =\n    List.find ~f:(fun (_, ty) -> [%eq: string] ty.Type.typ enum.name) record\n    |> Option.value_exn\n  in\n  List.map\n    ~f:(fun Variant.{ name; payload } ->\n      let id = ty ^ \"_\" ^ name in\n      let inline_record = id in\n      let type_decl =\n        \"\\ntype \" ^ inline_record ^ \" = \"\n        ^\n        match payload with\n        | Record record -> print_record_type record\n        | Tuple types -> types |> print_tuple_type\n        | None -> \"unit\"\n      in\n      let head =\n        \"\\nlet \" ^ id ^ \" (value: \" ^ ty ^ 
\")\" ^ \": \" ^ inline_record\n        ^ \" option =\"\n      in\n      let spayload =\n        match payload with\n        | Record record -> print_record record\n        | Tuple types ->\n            List.mapi ~f:(fun i ty -> (\"x\" ^ Int.to_string i, ty)) types\n            |> print_tuple\n        | None -> \"\"\n      in\n      type_decl ^ head ^ \"\\n  match value.\" ^ field_name_raw ^ \" with\\n    | \"\n      ^ name ^ \" \" ^ spayload ^ \" -> Some \"\n      ^ (if String.is_empty spayload then \"()\" else spayload)\n      ^ if List.length variants |> [%eq: int] 1 then \"\" else \"\\n    | _ -> None\")\n    variants\n  |> String.concat ~sep:\"\\n\\n\"\n\nlet mk datatypes =\n  let find name =\n    List.find ~f:(fun dt -> [%eq: string] dt.Datatype.name name) datatypes\n    |> Option.value_exn\n  in\n  let data =\n    [\n      (find \"expr\", find \"expr'\");\n      (find \"pat\", find \"pat'\");\n      (find \"item\", find \"item'\");\n      (find \"guard\", find \"guard'\");\n      (find \"trait_item\", find \"trait_item'\");\n      (find \"impl_expr\", find \"impl_expr_kind\");\n    ]\n  in\n  let body = data |> List.map ~f:mk_builder |> String.concat ~sep:\"\\n\\n\" in\n  {|\nopen! Prelude\nopen! Ast\n\nmodule Make (F : Features.T) = struct\n  open Ast.Make(F)\n\n|}\n  ^ body ^ {|\n\nend\n|}\n"
  },
  {
    "path": "engine/utils/generate_from_ast/codegen_printer.ml",
    "content": "open Base\nopen Utils\nopen Types\n\ntype state = { names_with_doc : string list }\n\nlet ( let* ) x f = Option.bind ~f x\nlet super_types_list = [ \"expr\"; \"pat\"; \"guard\"; \"arm\"; \"item\" ]\n\nlet get_super_type ty =\n  List.find ~f:(fun s -> String.equal (s ^ \"'\") ty) super_types_list\n\nlet get_child_type ty =\n  if List.mem ~equal:String.equal super_types_list ty then Some (ty ^ \"'\")\n  else None\n\nlet do_not_override_prefix = \"_do_not_override_\"\n\nlet is_hidden_method =\n  let list =\n    [\n      \"expr'_App\";\n      \"expr'_Construct\";\n      \"ty_TApp\";\n      \"lhs_LhsFieldAccessor\";\n      \"local_ident\";\n      \"pat'_PConstruct\";\n      \"expr'_GlobalVar\";\n      \"variant\";\n      \"item'_Type\";\n    ]\n  in\n  List.mem ~equal:[%eq: string] list\n\nlet lazy_doc_manual_definitions = [ \"_do_not_override_lazy_of_generics\" ]\n\nlet rec of_ty (state : state) (call_method : string -> ty:string -> string)\n    (t : Type.t) : ((unit -> string) -> string -> string) option =\n  let* args =\n    List.fold t.args ~init:(Some []) ~f:(fun acc x ->\n        let* acc = acc in\n        let* x = of_ty state call_method x in\n        Some (x :: acc))\n    |> Option.map ~f:List.rev\n  in\n  match (t.typ, args) with\n  | \"option\", [ inner ] ->\n      Some\n        (fun pos value ->\n          \"(match \" ^ value ^ \" with | None -> None | Some value -> Some (\"\n          ^ inner pos \"value\" ^ \"))\")\n  | \"list\", [ inner ] ->\n      Some\n        (fun pos value ->\n          \"(List.map ~f:(fun x -> \" ^ inner pos \"x\" ^ \") \" ^ value ^ \")\")\n  | \"prim___tuple_2\", [ fst; snd ] ->\n      Some\n        (fun pos value ->\n          let base =\n            \"(\"\n            ^ fst pos (\"(fst \" ^ value ^ \")\")\n            ^ \",\"\n            ^ snd pos (\"(snd \" ^ value ^ \")\")\n            ^ \")\"\n          in\n          let mk proj =\n            \"(let x = \" ^ base ^ \"in lazy_doc (fun tuple -> (\" ^ proj\n       
     ^ \" tuple)#p) \" ^ pos () ^ \" x)\"\n          in\n          match List.map ~f:(is_lazy_doc_typ state) t.args with\n          | [ false; true ] -> mk \"snd\"\n          | [ true; false ] -> mk \"fst\"\n          | _ -> base)\n      (* if String.is_prefix ~prefix:\"F.\" (List.nth t.args 1 |> Option.value ~default:\"\") then \"(let x = \" ^ base ^ \"in lazy_doc x)\" else base) *)\n  | \"prim___tuple_3\", [ fst; snd; thd ] ->\n      Some\n        (fun pos value ->\n          \"(let (value1, value2, value3) = \" ^ value ^ \" in (\"\n          ^ fst pos \"value1\" ^ \",\" ^ snd pos \"value2\" ^ \",\" ^ thd pos \"value3\"\n          ^ \"))\")\n  | _ when List.mem ~equal:[%eq: string] state.names_with_doc t.typ ->\n      Some\n        (fun pos value ->\n          \"(print#\" ^ do_not_override_prefix ^ \"lazy_of_\" ^ t.typ\n          ^ (if Option.is_some (get_super_type t.typ) then \" ~super\" else \"\")\n          ^ \" \" ^ pos () ^ \" \" ^ value ^ \")\")\n  | _ -> Some (fun pos value -> \"(\" ^ value ^ \")\")\n\nand string_ty_of_ty' (state : state) (t : Type.t) =\n  if String.is_prefix t.typ ~prefix:\"prim___tuple_\" then\n    let args = List.map t.args ~f:(string_ty_of_ty' state) in\n    let n = List.count args ~f:(String.is_suffix ~suffix:\"lazy_doc)\") in\n    let base =\n      \"(\"\n      ^ String.concat ~sep:\" * \" (List.map t.args ~f:(string_ty_of_ty' state))\n      ^ \")\"\n    in\n    if [%eq: int] n 1 then \"(\" ^ base ^ \" lazy_doc)\" else base\n  else\n    \"(\"\n    ^ (if List.is_empty t.args then \"\"\n       else\n         \"(\"\n         ^ String.concat ~sep:\", \" (List.map t.args ~f:(string_ty_of_ty' state))\n         ^ \") \")\n    ^ t.typ\n    ^ (if List.mem ~equal:[%eq: string] state.names_with_doc t.typ then\n         \" lazy_doc\"\n       else \"\")\n    ^ \")\"\n\nand is_lazy_doc_typ (state : state) = string_ty_of_ty' state >> is_lazy_doc_typ'\nand is_lazy_doc_typ' = String.is_suffix ~suffix:\"lazy_doc)\"\n\nlet string_ty_of_ty (state : 
state) (t : Type.t) =\n  let s = string_ty_of_ty' state t in\n  match s with\n  | \"(generics lazy_doc)\" ->\n      \"((generics lazy_doc * generic_param lazy_doc list * generic_constraint \\\n       lazy_doc list) lazy_doc)\"\n  | _ -> s\n\nlet meth_name' typ_name variant_name =\n  typ_name ^ if String.is_empty variant_name then \"\" else \"_\" ^ variant_name\n\nlet meth_name typ_name variant_name =\n  let meth = meth_name' typ_name variant_name in\n  (if is_hidden_method meth then do_not_override_prefix else \"\") ^ meth\n\nlet print_variant state (call_method : string -> ty:string -> string)\n    (register_position : string option -> string) (super_type : string option)\n    (register_signature : string -> unit) (t_name : string) (v : Variant.t) :\n    string =\n  let meth_name = meth_name t_name v.name in\n  let meth = \"print#\" ^ meth_name in\n  let mk named fields =\n    let head =\n      v.name\n      ^ (if named then \" { \" else \" ( \")\n      ^ String.concat ~sep:(if named then \";\" else \",\") (List.map ~f:fst fields)\n      ^ (if named then \" } \" else \")\")\n      ^ \" -> \"\n    in\n    let args =\n      List.map\n        ~f:(fun (field_name, ty) ->\n          let value =\n            match of_ty state call_method ty with\n            | Some f ->\n                let pos = register_position (Some field_name) in\n                f (fun _ -> pos) field_name\n            | None -> field_name\n          in\n          let name = \"~\" ^ field_name ^ \":\" in\n          (if named then name else \"\") ^ \"(\" ^ value ^ \")\")\n        fields\n    in\n    let call =\n      String.concat ~sep:\" \"\n        (meth\n        :: ((if Option.is_some super_type then [ \"~super\" ] else []) @ args))\n    in\n    let signature =\n      let ty =\n        List.map\n          ~f:(fun (name, ty) ->\n            let name = if named then name ^ \":\" else \"\" in\n            name ^ string_ty_of_ty state ty)\n          fields\n        |> String.concat ~sep:\" -> \"\n   
   in\n      let super =\n        match super_type with\n        | Some super_type -> \" super:(\" ^ super_type ^ \") -> \"\n        | None -> \"\"\n      in\n      register_signature\n        (\"method virtual \" ^ meth_name ^ \" : \" ^ super ^ ty ^ \" -> document\")\n    in\n    head ^ call\n  in\n  \"\\n  | \"\n  ^\n  match v.payload with\n  | Record fields -> mk true fields\n  | None -> v.name ^ \" -> \" ^ meth\n  | Tuple types ->\n      mk false (List.mapi ~f:(fun i ty -> (\"x\" ^ Int.to_string i, ty)) types)\n\nlet catch_errors_for = [ \"expr\"; \"item\"; \"pat\" ]\n\nlet print_datatype state (dt : Datatype.t)\n    (register_entrypoint : string -> unit)\n    (register_position : string -> string -> string option -> string) =\n  let super_type = get_super_type dt.name in\n  let sigs = ref [] in\n  let method_name = do_not_override_prefix ^ \"lazy_of_\" ^ dt.name in\n  let print_variants variants wrapper =\n    let head =\n      \"(**/**) method \" ^ method_name\n      ^ (match super_type with Some t -> \" ~(super: \" ^ t ^ \")\" | _ -> \"\")\n      ^ \" ast_position (value: \" ^ dt.name ^ \"): \" ^ dt.name ^ \" lazy_doc =\"\n    in\n    let body =\n      (if Option.is_some (get_child_type dt.name) then\n         \"\\n    let super = value in\"\n       else \"\")\n      ^ \"\\n    match value with\"\n      ^ String.concat ~sep:\"\"\n          (List.map\n             ~f:(fun variant ->\n               print_variant state\n                 (fun name ~ty:_ -> name)\n                 (register_position dt.name variant.Variant.name)\n                 super_type\n                 (fun s -> sigs := s :: !sigs)\n                 dt.name variant)\n             variants)\n    in\n    let body =\n      \"(print#wrap_\" ^ dt.name ^ \" ast_position value (\" ^ body ^ \"))\"\n    in\n    let body = wrapper body in\n    sigs :=\n      (\"method wrap_\" ^ dt.name ^ \" (_pos: ast_position) (_value: \" ^ dt.name\n     ^ \") (doc: document): document = doc\")\n      :: !sigs;\n   
 let def =\n      head ^ \"lazy_doc (fun (value: \" ^ dt.name ^ \") -> \" ^ body\n      ^ \") ast_position value\"\n    in\n    if List.mem ~equal:[%eq: string] lazy_doc_manual_definitions method_name\n    then \"(* skipping \" ^ method_name ^ \" *) (**/**)\"\n    else def ^ \"(**/**)\"\n  in\n  let main =\n    match dt.kind with\n    | Variant variants -> print_variants variants Fn.id\n    | Record record ->\n        let wrapper =\n          if List.exists ~f:(fst >> [%eq: string] \"span\") record then\n            fun body ->\n            \"print#with_span ~span:value.span (fun _ -> \" ^ body ^ \")\"\n          else Fn.id\n        in\n        let wrapper =\n          if List.mem ~equal:[%eq: string] catch_errors_for dt.name then\n            fun body ->\n            \"print#catch_exn print#error_\" ^ dt.name ^ \" (fun () -> \"\n            ^ wrapper body ^ \")\"\n          else wrapper\n        in\n        print_variants [ { name = \"\"; payload = Record record } ] wrapper\n    | TypeSynonym ty ->\n        print_variants [ { name = \"\"; payload = Tuple [ ty ] } ] (fun x -> x)\n    | _ -> \"(* Not translating \" ^ dt.name ^ \" *)\"\n  in\n  let print =\n    let name = \"print_\" ^ dt.name in\n    let ty = \"ast_position -> \" ^ dt.name ^ \" -> \" in\n    let body =\n      \"fun ast_position x -> (print#\" ^ method_name ^ \" ast_position x)#p\"\n    in\n    if Option.is_none super_type then\n      \"method \" ^ name ^ \": \" ^ ty ^ \" document = \" ^ body\n    else \"\"\n  in\n  let entrypoint =\n    let name = \"entrypoint_\" ^ dt.name in\n    let ty = dt.name ^ \" -> \" in\n    let body = \"print#print_\" ^ dt.name ^ \" AstPos_Entrypoint\" in\n    if Option.is_none super_type then (\n      register_entrypoint (name ^ \" : \" ^ ty ^ \" 'a\");\n      \"method \" ^ name ^ \": \" ^ ty ^ \" document = \" ^ body)\n    else \"\"\n  in\n  String.concat ~sep:\"\\n\\n\" (main :: print :: entrypoint :: !sigs)\n\nlet hardcoded =\n  {|\nmodule LazyDoc = struct\n    type 'a 
lazy_doc =\n      < compact : output -> unit\n      ; pretty : output -> state -> int -> bool -> unit\n      ; requirement : int\n      ; p : document\n      ; v : 'a\n      ; ast_position : ast_position >\n    let lazy_doc : 'a. ('a -> document) -> ast_position -> 'a -> 'a lazy_doc =\n     fun to_document pos value ->\n      let lazy_doc = ref None in\n      let doc () =\n        match !lazy_doc with\n        | None ->\n            let doc = to_document value in\n            lazy_doc := Some doc;\n            doc\n        | Some doc -> doc\n      in\n      object (self)\n        method requirement : requirement = requirement (doc ())\n        method pretty : output -> state -> int -> bool -> unit =\n          fun o s i b -> pretty o s i b (doc ())\n        method compact : output -> unit = fun o -> compact o (doc ())\n        method p = custom (self :> custom)\n        method v = value\n        method ast_position = pos\n      end\nend\nopen LazyDoc\n|}\n\nlet class_prelude =\n  {|\n   method virtual with_span: span:span -> (unit -> document) -> document\n   method virtual catch_exn : (string -> document) -> (unit -> document) -> document\n\n   method virtual _do_not_override_lazy_of_local_ident: _\n   method virtual _do_not_override_lazy_of_concrete_ident: _\n|}\n\nlet mk datatypes =\n  let datatypes =\n    List.filter\n      ~f:(fun dt -> not ([%eq: string] dt.Datatype.name \"mutability\"))\n      datatypes\n  in\n  let state =\n    let names_with_doc = List.map ~f:(fun dt -> dt.name) datatypes in\n    let names_with_doc = \"concrete_ident\" :: \"local_ident\" :: names_with_doc in\n    { names_with_doc }\n  in\n  let positions = ref [ \"AstPos_Entrypoint\"; \"AstPos_NotApplicable\" ] in\n  let entrypoint_types = ref [] in\n  let class_body =\n    List.map\n      ~f:(fun dt ->\n        print_datatype state dt\n          (fun x -> entrypoint_types := x :: !entrypoint_types)\n          (fun ty variant field ->\n            let pos =\n              \"AstPos_\" ^ ty 
^ \"_\" ^ variant\n              ^ match field with Some field -> \"_\" ^ field | _ -> \"\"\n            in\n            positions := pos :: !positions;\n            pos))\n      datatypes\n    |> String.concat ~sep:\"\\n\\n\"\n  in\n  let object_poly = String.concat ~sep:\";\\n \" !entrypoint_types in\n  let object_span_data_map =\n    String.concat ~sep:\"\\n\"\n      (List.map\n         ~f:(fun s ->\n           let n = fst (String.lsplit2_exn ~on:':' s) in\n           \"method \" ^ n ^ \" = obj#\" ^ n)\n         !entrypoint_types)\n  in\n  let object_map =\n    String.concat ~sep:\"\\n\"\n      (List.map\n         ~f:(fun s ->\n           let n = fst (String.lsplit2_exn ~on:':' s) in\n           \"method \" ^ n ^ \" x = f (fun obj -> obj#\" ^ n ^ \" x)\")\n         !entrypoint_types)\n  in\n  Printf.sprintf\n    {|\nopen! Prelude\nopen! Ast\nopen PPrint\ntype ast_position = %s | AstPosition_Quote\n\n%s\n\nmodule Make (F : Features.T) = struct\n   module AST = Ast.Make (F)\n   open Ast.Make (F)\n\n   class virtual base = object (print)\n     %s\n   end\n\n   type ('span_data, 'a) object_type = <\n        span_data : 'span_data;\n        %s\n     >\n\n   let map (type span_data) (type a) (type b)\n           (f: ((span_data, a) object_type -> a) -> b)\n           : (unit, b) object_type = object\n        method span_data: unit = ()\n        %s\n     end\n\n   let map_span_data (type a) (type b) (type t)\n          (obj: (a, t) object_type)\n          (span_data: b)          \n          : (b, t) object_type = object\n        method span_data: b = span_data\n        %s\n     end\nend\n|}\n    (String.concat ~sep:\" | \"\n       (List.dedup_and_sort ~compare:String.compare !positions))\n    hardcoded\n    (class_prelude ^ class_body)\n    object_poly object_map object_span_data_map\n"
  },
  {
    "path": "engine/utils/generate_from_ast/codegen_visitor.ml",
    "content": "(** Give a list of {!Types.Datatype.t}, this file generates an ocaml module of\n    visitors. *)\n\nopen Base\nopen Utils\nopen Types\n\n(** What kind of visitor are we generating? *)\ntype kind = Map | MapReduce | Reduce\n\n(** Helpers around kinds *)\ninclude struct\n  let is_reduce = function MapReduce | Reduce -> true | _ -> false\n  let is_map = function Map | MapReduce -> true | _ -> false\nend\n\n(** Various helpers and constants *)\ninclude struct\n  let method_prefix = \"visit_\"\n  let acc_var_prefix = \"acc___\"\n  let acc_var_param = acc_var_prefix ^ \"param___var\"\n  let payload_var = \"v___payload\"\n  let env_var = \"env___var\"\n  let app = List.filter ~f:(String.is_empty >> not) >> String.concat ~sep:\" \"\n  let parens s = if String.contains s ' ' then \"(\" ^ s ^ \")\" else s\nend\n\n(** Produces a method name given a dot-separated path *)\nlet method_name path =\n  let path = String.split ~on:'.' path in\n  method_prefix ^ String.concat ~sep:\"__\" path\n\n(** Produces a visitor call for a type expression, without applying it. *)\nlet rec of_type' need_parens (t : Type.t) =\n  let f =\n    if String.is_prefix ~prefix:\"'\" t.typ then \"visit_\" ^ t.typ\n    else \"self#\" ^ method_name t.typ\n  in\n  if List.is_empty t.args then f\n  else\n    app (f :: List.map ~f:(of_type' true) t.args)\n    |> if need_parens then parens else Fn.id\n\n(** Produces a complete visitor call for a type expression. *)\nlet of_type typ payload = app [ of_type' false typ; env_var; payload ]\n\nlet acc_var_for_field ((field, _) : Record.field) = acc_var_prefix ^ field\n\n(** Given a list [x1; ...; xN], produces `self#plus x1 (self#plus ... 
(self#plus\n    xN))` *)\nlet self_plus =\n  List.fold_left\n    ~f:(fun acc var ->\n      match acc with\n      | None -> Some var\n      | Some acc -> Some (app [ \"self#plus\"; parens acc; var ]))\n    ~init:None\n  >> Option.value ~default:\"self#zero\"\n\n(** Creates a let expression *)\nlet mk_let ~lhs ~rhs = \"let \" ^ lhs ^ \" = \" ^ rhs ^ \" in \"\n\nlet of_typed_binding ~kind (value, typ, value_binding, acc_binding) =\n  let lhs =\n    [\n      (if is_map kind then [ value_binding ] else []);\n      (if is_reduce kind then [ acc_binding ] else []);\n    ]\n    |> List.concat |> String.concat ~sep:\", \"\n  in\n  let rhs = of_type typ value in\n  mk_let ~lhs ~rhs\n\nlet of_typed_bindings ~kind l =\n  let lbs = List.map ~f:(of_typed_binding ~kind) l |> String.concat ~sep:\"\\n\" in\n  let acc = List.map ~f:(fun (_, _, _, acc) -> acc) l |> self_plus in\n  (lbs, acc)\n\nlet tuple_if ~kind ?(sep = \", \") if_map if_reduce =\n  [\n    (if is_map kind then [ if_map ] else []);\n    (if is_reduce kind then [ if_reduce ] else []);\n  ]\n  |> List.concat |> String.concat ~sep\n\nlet of_record ~kind ~constructor (r : Record.t) =\n  let lbs, acc =\n    List.map\n      ~f:(fun (field, typ) ->\n        (payload_var ^ \".\" ^ field, typ, field, acc_var_for_field (field, typ)))\n      r\n    |> of_typed_bindings ~kind\n  in\n  let record =\n    constructor ^ \"{\" ^ String.concat ~sep:\"; \" (List.map ~f:fst r) ^ \"}\"\n  in\n  let result = tuple_if ~kind record acc in\n  (* let result = record ^ if is_reduce kind then \", \" ^ acc else \"\" in *)\n  lbs ^ \"\\n\" ^ result\n\nlet of_tuple_variant ~kind name (types : Type.t list) =\n  let vars = List.mapi ~f:(fun i _ -> \"x\" ^ Int.to_string i) types in\n  let accs = List.mapi ~f:(fun i _ -> \"a\" ^ Int.to_string i) types in\n  let tuple = vars |> String.concat ~sep:\", \" |> parens in\n  let lbs, acc =\n    List.zip_exn types (List.zip_exn vars accs)\n    |> List.map ~f:(fun (typ, (name, acc)) -> (name, typ, name, 
acc))\n    |> of_typed_bindings ~kind\n  in\n  name ^ \" \" ^ tuple ^ \" -> \" ^ lbs ^ tuple_if ~kind (name ^ \" \" ^ tuple) acc\n\nlet of_variant ~kind (v : Variant.t) =\n  match v.payload with\n  | Tuple l -> of_tuple_variant ~kind v.name l\n  | None -> v.name ^ \" -> \" ^ tuple_if ~kind v.name \"self#zero\"\n  | Record record ->\n      v.name ^ \" \" ^ payload_var ^ \" -> \"\n      ^ of_record ~kind ~constructor:v.name record\n\nlet of_datatype ~kind (dt : Datatype.t) =\n  let body =\n    match dt.kind with\n    | Record record -> of_record ~kind ~constructor:\"\" record\n    | TypeSynonym typ -> of_type typ payload_var\n    | Variant variants ->\n        let arms =\n          List.map ~f:(of_variant ~kind) variants |> String.concat ~sep:\"\\n  | \"\n        in\n        \"match \" ^ payload_var ^ \" with\\n  \" ^ arms\n    | Opaque -> tuple_if ~kind payload_var \"self#zero\"\n  in\n  let meth = method_name dt.name in\n  let self_typ =\n    if Type.is_tuple_name dt.name then\n      String.concat ~sep:\" * \" dt.type_vars |> parens\n    else app [ String.concat ~sep:\", \" dt.type_vars |> parens; dt.name ]\n  in\n  let forall_clause = String.concat ~sep:\" \" dt.type_vars in\n  let arrs =\n    List.map\n      ~f:(fun tvar ->\n        \"'env -> \" ^ tvar ^ \" -> \"\n        ^ (tuple_if ~kind ~sep:\" * \" tvar \"'acc\" |> parens))\n      dt.type_vars\n  in\n  let arrs =\n    arrs @ [ \"'env\"; self_typ; tuple_if ~kind ~sep:\" * \" self_typ \"'acc\" ]\n  in\n  let arrs = List.map ~f:parens arrs |> String.concat ~sep:\" -> \" in\n  let meth_typ =\n    List.filter ~f:(String.is_empty >> not) [ forall_clause; arrs ]\n    |> String.concat ~sep:\".\"\n  in\n  let visitors =\n    List.map ~f:(fun tvar -> \"visit_\" ^ tvar) dt.type_vars |> app\n  in\n  \"method \" ^ meth ^ \" : \" ^ meth_typ ^ \" = fun \" ^ visitors ^ \" \" ^ env_var\n  ^ \" \" ^ payload_var ^ \" -> \" ^ body\n\n(** Hard coded visitors *)\nlet extra_visitors_for = function\n  | Map ->\n      \"        
method visit_list : 'a. ('env -> 'a -> 'a) -> 'env -> 'a list \\\n       -> 'a list\\n\\\n      \\            =\\n\\\n      \\          fun v env -> Base.List.map ~f:(v env)\\n\\n\"\n  | MapReduce ->\n      \"           method visit_list\\n\\\n      \\            : 'a. ('env -> 'a -> 'a * 'acc) -> 'env -> 'a list -> 'a \\\n       list * 'acc\\n\\\n      \\            =\\n\\\n      \\          fun v env ->\\n\\\n      \\            Base.List.fold_map ~init:self#zero ~f:(fun acc x ->\\n\\\n      \\                let x, acc' = v env x in\\n\\\n      \\                (self#plus acc acc', x))\\n\\\n      \\            >> swap\\n\\n\"\n  | Reduce ->\n      \"\\n\\\n      \\          method visit_list : 'a. ('env -> 'a -> 'acc) -> 'env -> 'a \\\n       list -> 'acc =\\n\\\n      \\            fun v env this ->\\n\\\n      \\              Base.List.fold ~init:self#zero\\n\\\n      \\                ~f:(fun acc -> v env >> self#plus acc)\\n\\\n      \\                this\"\n\n(** Make one kind of visitor *)\nlet mk_one ~kind (l : Datatype.t list) : string =\n  let contents =\n    List.map ~f:(of_datatype ~kind) l |> String.concat ~sep:\"\\n\\n\"\n  in\n  let name =\n    [\n      (if is_map kind then [ \"map\" ] else []);\n      (if is_reduce kind then [ \"reduce\" ] else []);\n    ]\n    |> List.concat |> String.concat ~sep:\"\"\n  in\n  let extra_visitors =\n    (* visitor_for_tuples ~kind ^ \"\\n\\n\" ^ *)\n    extra_visitors_for kind\n  in\n  \"class virtual ['self] \" ^ name ^ \" = object (self : 'self)\" ^ contents ^ \"\\n\"\n  ^ extra_visitors ^ \"\\nend\"\n\n(** AST.ml-specific headers *)\nlet header =\n  \"open Ast\\n\\\n   open! 
Utils\\n\\\n   open Base\\n\\n\\\n   module Make =\\n\\\n   functor\\n\\\n  \\  (F : Features.T)\\n\\\n  \\  ->\\n\\\n  \\  struct\\n\\\n  \\    [@@@warning \\\"-27\\\"]\\n\\n\\\n  \\    open Make (F)\\n\"\n\n(** Only certain types should be opaque in AST.ml *)\nlet is_allowed_opaque name =\n  let allowlist =\n    [\n      \"Local_ident.t\";\n      \"bool\";\n      \"char\";\n      \"concrete_ident\";\n      \"global_ident\";\n      \"attr\";\n      \"local_ident\";\n      \"signedness\";\n      \"size\";\n      \"span\";\n      \"string\";\n      \"todo\";\n      \"float_kind\";\n      \"int_kind\";\n      \"item_quote_origin_position\";\n      \"item_kind\";\n    ]\n  in\n  List.mem ~equal:String.equal allowlist name\n  || String.is_prefix ~prefix:\"F.\" name\n\n(** Make all three kinds of visitors for a list of datatypes *)\nlet mk (l : Datatype.t list) : string =\n  let l = Primitive_types.(tuples @ [ option ]) @ l in\n  let opaques =\n    Visitors.collect_undefined_types l\n    |> List.map ~f:(fun name ->\n           Datatype.{ name; type_vars = []; kind = Opaque })\n  in\n  (match\n     Visitors.collect_undefined_types l\n     |> List.filter ~f:(is_allowed_opaque >> not)\n   with\n  | [] -> ()\n  | disallowed ->\n      let msg =\n        \"visitor generation: forbidden opaque type: \"\n        ^ [%show: string list] disallowed\n      in\n      Stdio.prerr_endline msg;\n      failwith msg);\n  let l = opaques @ l in\n  let visitors =\n    List.map ~f:(fun kind -> mk_one ~kind l) [ Map; MapReduce; Reduce ]\n  in\n  let visitors = visitors |> String.concat ~sep:\"\\n\\n\" in\n  [ header; visitors; \"end\" ] |> String.concat ~sep:\"\\n\\n\"\n"
  },
  {
    "path": "engine/utils/generate_from_ast/dune",
    "content": "(executable\n (public_name generate_from_ast)\n (name generate_from_ast)\n (package hax-engine)\n (libraries ppxlib base stdio ppx_deriving_yojson.runtime)\n (preprocess\n  (pps\n   ppxlib.metaquot\n   ppx_deriving.eq\n   ppx_yojson_conv\n   ppx_compare\n   ppx_deriving.show)))\n\n(env\n (_\n  (flags\n   (:standard -warn-error -A -warn-error +8))))\n"
  },
  {
    "path": "engine/utils/generate_from_ast/errors.ml",
    "content": "open Ppxlib\nopen! Ppx_yojson_conv_lib.Yojson_conv.Primitives\n\n(** Define `pp_*` functions for some type of the OCaml ASTs so that we can show\n    them *)\ninclude struct\n  let pp_core_type = Pprintast.core_type\n\n  let pp_label_declaration fmt label_decl =\n    Stdlib.Format.pp_print_string fmt label_decl.pld_name.txt\n\n  let pp_constructor_declaration fmt cons_decl =\n    Stdlib.Format.pp_print_string fmt cons_decl.pcd_name.txt\n\n  let pp_type_declaration fmt type_decl =\n    Pprintast.structure_item fmt\n      {\n        pstr_loc = Astlib.Location.none;\n        pstr_desc = Pstr_type (Nonrecursive, [ type_decl ]);\n      }\nend\n\n(** The type of various error that can occur errors *)\ntype t =\n  | UnsupportedCoreType of core_type\n  | UnsupportedLabelDeclaration of label_declaration\n  | UnsupportedConstructorDeclaration of constructor_declaration\n  | UnsupportedTypeDeclaration of type_declaration\n[@@deriving show]\n\n(** We can't derive yojson for OCaml types. Thus this indirection, that prints\n    payload of `t` as string, and *then* produces JSON. 
*)\nopen struct\n  type t_string =\n    | UnsupportedCoreType of string\n    | UnsupportedLabelDeclaration of string\n    | UnsupportedConstructorDeclaration of string\n    | UnsupportedTypeDeclaration of string\n  [@@deriving show, yojson]\n\n  let into_string : t -> t_string = function\n    | UnsupportedCoreType core_type ->\n        UnsupportedCoreType ([%show: core_type] core_type)\n    | UnsupportedLabelDeclaration label_declaration ->\n        UnsupportedLabelDeclaration\n          ([%show: label_declaration] label_declaration)\n    | UnsupportedConstructorDeclaration constructor_declaration ->\n        UnsupportedConstructorDeclaration\n          ([%show: constructor_declaration] constructor_declaration)\n    | UnsupportedTypeDeclaration type_declaration ->\n        UnsupportedTypeDeclaration ([%show: type_declaration] type_declaration)\nend\n\nlet yojson_of_t (e : t) = into_string e |> [%yojson_of: t_string]\nlet _ = pp_t_string (* just to silence OCaml warning *)\n\nexception Error of t\n"
  },
  {
    "path": "engine/utils/generate_from_ast/generate_from_ast.ml",
    "content": "open Base\nopen Utils\nopen Types\n\nlet _main =\n  let ocaml_file =\n    Stdio.In_channel.stdin |> Lexing.from_channel\n    |> Ppxlib_ast.Parse.implementation\n  in\n  let datatypes =\n    type_declaration_of_structure ocaml_file\n    |> List.filter ~f:(fun (path, _) ->\n           (* We only look at certain types in the AST.ml module *)\n           String.is_prefix ~prefix:\"Make.\" path\n           || List.mem ~equal:String.equal\n                [\n                  \"mutability\"; \"literal\"; \"attrs\"; \"quote\"; \"item_quote_origin\";\n                ]\n                path)\n    |> List.map ~f:(fun (path, td) ->\n           ( String.chop_prefix ~prefix:\"Make.\" path\n             |> Option.value ~default:path,\n             td ))\n    |> List.map ~f:(fun (path, type_decl) ->\n           (path, Datatype.of_ocaml_result type_decl))\n    |> List.filter_map ~f:(fun (path, dt) ->\n           match dt with\n           (* Use path as name, can be useful if used on something else than AST.ml *)\n           | Result.Ok v -> Some Datatype.{ v with name = path }\n           | _ -> None)\n  in\n\n  let data =\n    datatypes\n    |>\n    match Sys.get_argv () with\n    | [| _; \"visitors\" |] -> Codegen_visitor.mk\n    | [| _; \"printer\" |] -> Codegen_printer.mk\n    | [| _; \"ast_builder\" |] -> Codegen_ast_builder.mk\n    | [| _; \"ast_destruct\" |] -> Codegen_ast_destruct.mk\n    | [| _; \"json\" |] ->\n        [%yojson_of: Datatype.t list] >> Yojson.Safe.pretty_to_string\n    | [| _; verb |] ->\n        failwith (\"`generate_from_ast`: unknown action `\" ^ verb ^ \"`\")\n    | _ -> failwith \"`generate_from_ast`: expected one argument\"\n  in\n  (* Stdio.Out_channel.write_all \"/tmp/debug-generated-code.ml\" ~data; *)\n  Stdio.print_endline data\n"
  },
  {
    "path": "engine/utils/generate_from_ast/primitive_types.ml",
    "content": "(** This module encodes several primitive OCaml types as Datatype.t so that\n    visitors can be generated automatically for them as well. *)\n\nopen Base\nopen! Utils\nopen Types\n\n(** Helper to produce type variable. *)\nlet ty_var typ = Type.{ typ; args = [] }\n\n(** Produces a datatype description for tuples of a given length. *)\nlet mk_tuple len =\n  let type_vars = List.init len ~f:(fun i -> \"'t\" ^ Int.to_string i) in\n  let name = Type.tuple_name len in\n  let types = List.map ~f:ty_var type_vars in\n  let payload = VariantPayload.Tuple types in\n  let kind = Datatype.Variant [ Variant.{ name = \"\"; payload } ] in\n  Datatype.{ name; type_vars; kind }\n\n(** Common sizes of tuples. *)\nlet tuples = List.map ~f:mk_tuple [ 2; 3; 4 ]\n\n(** Datatype description for the option type. *)\nlet option =\n  let kind =\n    Datatype.Variant\n      [\n        Variant.\n          { name = \"Some\"; payload = VariantPayload.Tuple [ ty_var \"'a\" ] };\n        Variant.{ name = \"None\"; payload = VariantPayload.None };\n      ]\n  in\n  Datatype.{ name = \"option\"; type_vars = [ \"'a\" ]; kind }\n"
  },
  {
    "path": "engine/utils/generate_from_ast/types.ml",
    "content": "(** This module defines a subset of OCaml inductives as a nice and simple AST *)\n\nopen Base\nopen! Utils\nopen Errors\n\n(** Describe what is a type expression, reflects OCaml's `core_type`. *)\nmodule Type = struct\n  let tuple_prefix = \"prim___tuple_\"\n  let is_tuple_name = String.is_prefix ~prefix:tuple_prefix\n  let tuple_name (len : int) : string = tuple_prefix ^ Int.to_string len\n  let unit_name : string = \"unit___\"\n\n  let lident_to_string lident =\n    Astlib.Longident.flatten lident |> String.concat ~sep:\".\"\n\n  type t = { typ : string; args : t list } [@@deriving show, yojson]\n\n  let tuple args =\n    match args with\n    | [] -> { typ = unit_name; args }\n    | [ typ ] -> typ\n    | _ -> { typ = tuple_name (List.length args); args }\n\n  let unsupported v = raise (Error (UnsupportedCoreType v))\n\n  open Ppxlib\n\n  let rec of_ocaml (t : core_type) : t =\n    match t.ptyp_desc with\n    | Ptyp_var typ -> { typ = \"'\" ^ typ; args = [] }\n    | Ptyp_tuple types -> List.map ~f:of_ocaml types |> tuple\n    | Ptyp_constr (lident, types) ->\n        { typ = lident_to_string lident.txt; args = List.map ~f:of_ocaml types }\n    | _ -> unsupported t\nend\n\n(** Describe what is a record, reflects OCaml's `label_declaration`. 
*)\nmodule Record = struct\n  type field = string * Type.t [@@deriving show, yojson]\n  type t = field list [@@deriving show, yojson]\n\n  let unsupported v = raise (Error (UnsupportedLabelDeclaration v))\n\n  open Ppxlib\n\n  let field_of_ocaml (label_decl : label_declaration) : field =\n    (match label_decl.pld_mutable with\n    | Mutable -> unsupported label_decl\n    | _ -> ());\n    (label_decl.pld_name.txt, Type.of_ocaml label_decl.pld_type)\n\n  let of_ocaml : label_declaration list -> t = List.map ~f:field_of_ocaml\nend\n\n(** Describe what is a variant payload, reflects OCaml's `construtor_arguments`.\n*)\nmodule VariantPayload = struct\n  type t = Record of Record.t | Tuple of Type.t list | None\n  [@@deriving show, yojson]\n\n  open Ppxlib\n\n  let of_ocaml (cons_decl : constructor_arguments) : t =\n    match cons_decl with\n    | Pcstr_tuple [] -> None\n    | Pcstr_tuple [ typ ] -> (\n        match typ.ptyp_desc with\n        | Ptyp_tuple types -> Tuple (List.map ~f:Type.of_ocaml types)\n        | _ -> Tuple [ Type.of_ocaml typ ])\n    | Pcstr_tuple types -> Tuple (List.map ~f:Type.of_ocaml types)\n    | Pcstr_record label_decls -> Record (Record.of_ocaml label_decls)\nend\n\n(** Describe what is a variant, reflects OCaml's `constructor_declaration`. *)\nmodule Variant = struct\n  type t = { name : string; payload : VariantPayload.t }\n  [@@deriving show, yojson]\n\n  let unsupported v = raise (Error (UnsupportedConstructorDeclaration v))\n\n  open Ppxlib\n\n  let of_ocaml (cons_decl : constructor_declaration) : t =\n    if List.is_empty cons_decl.pcd_vars |> not then unsupported cons_decl;\n    let payload = VariantPayload.of_ocaml cons_decl.pcd_args in\n    { name = cons_decl.pcd_name.txt; payload }\nend\n\n(** A result type. *)\nmodule Result = struct\n  type ('r, 'e) t = Ok of 'r | Error of 'e [@@deriving show, yojson]\nend\n\n(** Describe what is a datatype, reflects ppx' `type_declaration`. 
*)\nmodule Datatype = struct\n  type kind =\n    | Record of Record.t\n    | Variant of Variant.t list\n    | TypeSynonym of Type.t\n    | Opaque\n        (** `Opaque` is not produced by `of_ocaml` below; it is used by\n            `codegen_visitor` to generate identity visitors *)\n  [@@deriving show, yojson]\n\n  type t = { name : string; type_vars : string list; kind : kind }\n  [@@deriving show, yojson]\n\n  let unsupported v = raise (Error (UnsupportedTypeDeclaration v))\n\n  let of_ocaml (type_decl : Ppxlib.type_declaration) : t =\n    let open Ppxlib in\n    let name = type_decl.ptype_name.txt in\n    let type_vars =\n      List.map\n        ~f:(fun (t, _) ->\n          match t.ptyp_desc with\n          | Ptyp_var n -> \"'\" ^ n\n          | _ -> unsupported type_decl)\n        type_decl.ptype_params\n    in\n    if List.is_empty type_decl.ptype_cstrs |> not then unsupported type_decl;\n    let kind =\n      match (type_decl.ptype_kind, type_decl.ptype_manifest) with\n      | Ptype_abstract, Some typ -> TypeSynonym (Type.of_ocaml typ)\n      | Ptype_variant cons_decls, None ->\n          Variant (List.map ~f:Variant.of_ocaml cons_decls)\n      | Ptype_record label_decls, None -> Record (Record.of_ocaml label_decls)\n      | _ -> unsupported type_decl\n    in\n    { name; kind; type_vars }\n\n  let of_ocaml_result (type_decl : Ppxlib.type_declaration) :\n      (t, Errors.t) Result.t =\n    try Result.Ok (of_ocaml type_decl) with Errors.Error e -> Result.Error e\nend\n"
  },
  {
    "path": "engine/utils/generate_from_ast/utils.ml",
    "content": "open Base\ninclude Ppx_yojson_conv_lib.Yojson_conv.Primitives\n\nlet ( >> ) f g x = g (f x)\n\nlet type_declaration_of_structure (str : Ppxlib.structure) :\n    (string * Ppxlib.type_declaration) list =\n  let open Ppxlib in\n  let visitor =\n    object (self)\n      inherit Ast_traverse.iter as super\n      val mutable result = []\n      val mutable path_state = []\n\n      method get_path () =\n        List.rev path_state |> List.map ~f:(Option.value ~default:\"<anon>\")\n\n      method get_result () = List.rev result\n\n      method! module_binding mb =\n        let prev_path = path_state in\n        path_state <- mb.pmb_name.txt :: path_state;\n        super#module_binding mb;\n        path_state <- prev_path;\n        ()\n\n      method! type_declaration decl =\n        let path =\n          self#get_path () @ [ decl.ptype_name.txt ] |> String.concat ~sep:\".\"\n        in\n        result <- (path, decl) :: result\n    end\n  in\n  visitor#structure str;\n  visitor#get_result ()\n"
  },
  {
    "path": "engine/utils/generate_from_ast/visitors.ml",
    "content": "(** This module is mostly generated, but hand-edited, it defines visitors for\n    the types defined in module `Types`. *)\n\nopen Base\nopen Types\nopen Utils\n\nclass virtual ['self] reduce =\n  object (self : 'self)\n    method virtual plus : 'acc -> 'acc -> 'acc\n    method virtual zero : 'acc\n    method visit_string (_env : 'env) (_s : string) = self#zero\n\n    method visit_prim___tuple_2 :\n        't0 't1.\n        ('env -> 't0 -> 'acc) ->\n        ('env -> 't1 -> 'acc) ->\n        'env ->\n        't0 * 't1 ->\n        'acc =\n      fun visit_'t0 visit_'t1 env___var v___payload ->\n        match v___payload with\n        | x0, x1 ->\n            let a0 = visit_'t0 env___var x0 in\n            let a1 = visit_'t1 env___var x1 in\n            self#plus a0 a1\n\n    method visit_prim___tuple_3 :\n        't0 't1 't2.\n        ('env -> 't0 -> 'acc) ->\n        ('env -> 't1 -> 'acc) ->\n        ('env -> 't2 -> 'acc) ->\n        'env ->\n        't0 * 't1 * 't2 ->\n        'acc =\n      fun visit_'t0 visit_'t1 visit_'t2 env___var v___payload ->\n        match v___payload with\n        | x0, x1, x2 ->\n            let a0 = visit_'t0 env___var x0 in\n            let a1 = visit_'t1 env___var x1 in\n            let a2 = visit_'t2 env___var x2 in\n            self#plus (self#plus a0 a1) a2\n\n    method visit_prim___tuple_4 :\n        't0 't1 't2 't3.\n        ('env -> 't0 -> 'acc) ->\n        ('env -> 't1 -> 'acc) ->\n        ('env -> 't2 -> 'acc) ->\n        ('env -> 't3 -> 'acc) ->\n        'env ->\n        't0 * 't1 * 't2 * 't3 ->\n        'acc =\n      fun visit_'t0 visit_'t1 visit_'t2 visit_'t3 env___var v___payload ->\n        match v___payload with\n        | x0, x1, x2, x3 ->\n            let a0 = visit_'t0 env___var x0 in\n            let a1 = visit_'t1 env___var x1 in\n            let a2 = visit_'t2 env___var x2 in\n            let a3 = visit_'t3 env___var x3 in\n            self#plus (self#plus (self#plus a0 a1) a2) a3\n\n    method 
visit_option : 'a. ('env -> 'a -> 'acc) -> 'env -> 'a option -> 'acc\n        =\n      fun visit_'a env___var v___payload ->\n        match v___payload with\n        | Some x0 ->\n            let a0 = visit_'a env___var x0 in\n            a0\n        | None -> self#zero\n\n    method visit_Type__t : 'env -> Type.t -> 'acc =\n      fun env___var v___payload ->\n        let acc___typ = self#visit_string env___var v___payload.typ in\n        let acc___args =\n          self#visit_list self#visit_Type__t env___var v___payload.args\n        in\n        self#plus acc___typ acc___args\n\n    method visit_Record__field : 'env -> Record.field -> 'acc =\n      fun env___var v___payload ->\n        self#visit_prim___tuple_2 self#visit_string self#visit_Type__t env___var\n          v___payload\n\n    method visit_Record__t : 'env -> Record.t -> 'acc =\n      fun env___var v___payload ->\n        self#visit_list self#visit_Record__field env___var v___payload\n\n    method visit_VariantPayload__t : 'env -> VariantPayload.t -> 'acc =\n      fun env___var v___payload ->\n        match v___payload with\n        | Record x0 ->\n            let a0 = self#visit_Record__t env___var x0 in\n            a0\n        | Tuple x0 ->\n            let a0 = self#visit_list self#visit_Type__t env___var x0 in\n            a0\n        | None -> self#zero\n\n    method visit_Variant__t : 'env -> Variant.t -> 'acc =\n      fun env___var v___payload ->\n        let acc___name = self#visit_string env___var v___payload.name in\n        let acc___payload =\n          self#visit_VariantPayload__t env___var v___payload.payload\n        in\n        self#plus acc___name acc___payload\n\n    method visit_Result__t :\n        'r 'e.\n        ('env -> 'r -> 'acc) ->\n        ('env -> 'e -> 'acc) ->\n        'env ->\n        ('r, 'e) Result.t ->\n        'acc =\n      fun visit_'r visit_'e env___var v___payload ->\n        match v___payload with\n        | Ok x0 ->\n            let a0 = visit_'r env___var x0 
in\n            a0\n        | Error x0 ->\n            let a0 = visit_'e env___var x0 in\n            a0\n\n    method visit_Datatype__kind : 'env -> Datatype.kind -> 'acc =\n      fun env___var v___payload ->\n        match v___payload with\n        | Record x0 ->\n            let a0 = self#visit_Record__t env___var x0 in\n            a0\n        | Variant x0 ->\n            let a0 = self#visit_list self#visit_Variant__t env___var x0 in\n            a0\n        | TypeSynonym x0 ->\n            let a0 = self#visit_Type__t env___var x0 in\n            a0\n        | Opaque -> self#zero\n\n    method visit_Datatype__t : 'env -> Datatype.t -> 'acc =\n      fun env___var v___payload ->\n        let acc___name = self#visit_string env___var v___payload.name in\n        let acc___type_vars =\n          self#visit_list self#visit_string env___var v___payload.type_vars\n        in\n        let acc___kind = self#visit_Datatype__kind env___var v___payload.kind in\n        self#plus (self#plus acc___name acc___type_vars) acc___kind\n\n    method visit_datatypes : 'env -> Datatype.t list -> 'acc =\n      self#visit_list self#visit_Datatype__t\n\n    method visit_list : 'a. ('env -> 'a -> 'acc) -> 'env -> 'a list -> 'acc =\n      fun v env this ->\n        Base.List.fold ~init:self#zero\n          ~f:(fun acc -> v env >> self#plus acc)\n          this\n  end\n\nlet collect_defined_types =\n  (object\n     inherit [_] reduce as _super\n     method plus = Set.union\n     method zero = Set.empty (module String)\n     method! visit_Datatype__t () dt = Set.singleton (module String) dt.name\n  end)\n    #visit_datatypes\n    ()\n\nlet collect_used_types =\n  (object (self)\n     inherit [_] reduce as super\n     method plus = Set.union\n     method zero = Set.empty (module String)\n\n     method! 
visit_Type__t () t =\n       let typ = t.typ in\n       self#plus\n         (if String.is_prefix ~prefix:\"'\" typ || String.equal typ \"list\" then\n            self#zero\n          else Set.singleton (module String) typ)\n         (super#visit_Type__t () t)\n  end)\n    #visit_datatypes\n    ()\n\nlet collect_undefined_types dts : string list =\n  Set.diff (collect_used_types dts) (collect_defined_types dts) |> Set.to_list\n"
  },
  {
    "path": "engine/utils/hacspeclib-macro-parser/dune",
    "content": "(library\n (name hacspeclib_macro_parser)\n (package hax-engine)\n (libraries yojson angstrom)\n (preprocess\n  (pps\n   ppx_yojson_conv\n   ppx_sexp_conv\n   ppx_compare\n   ppx_hash\n   ppx_deriving.show\n   ppx_deriving.eq\n   ppx_matches)))\n\n(env\n (_\n  (flags\n   (:standard -warn-error \"-A+8\" -w \"-17-7-30-56-32\"))))\n"
  },
  {
    "path": "engine/utils/hacspeclib-macro-parser/hacspeclib_macro_parser.ml",
    "content": "open! Base\nopen Angstrom\nopen Ppx_yojson_conv_lib.Yojson_conv.Primitives\n\nmodule BasicParsers = struct\n  let is_space = function ' ' | '\\t' | '\\n' -> true | _ -> false\n\n  let is_identifier = function\n    | '0' .. '9' | 'a' .. 'z' | 'A' .. 'Z' | '_' -> true\n    | _ -> false\n\n  let is_digit = function '0' .. '9' -> true | _ -> false\n  let spaces = Fn.const () <$> take_while is_space\n  let ignore_spaces p = spaces *> p <* spaces\n  let identifier = ignore_spaces @@ take_while1 is_identifier\n\n  let many1_ignore_underscores p =\n    List.filter_map ~f:Fn.id\n    <$> many1 (Option.some <$> p <|> (Fn.const None <$> char '_'))\n\n  let take_while1_ignore_underscores f =\n    String.of_char_list <$> many1_ignore_underscores (satisfy f)\n\n  let number =\n    ignore_spaces (Int.of_string <$> take_while1_ignore_underscores is_digit)\n\n  let is_hex = function\n    | '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' -> true\n    | _ -> false\n\n  let hex_literal =\n    ignore_spaces (string \"0x\" *> take_while1_ignore_underscores is_hex)\n\n  let comma = Fn.const () <$> ignore_spaces @@ char ','\n  let colon = Fn.const () <$> ignore_spaces @@ char ':'\n  let maybe p = Option.some <$> p <|> return None\n  let parens p = ignore_spaces (char '(') *> p <* ignore_spaces (char ')')\n  let quoted p = ignore_spaces (char '\"') *> p <* ignore_spaces (char '\"')\n  let field name p = string name *> colon *> p\n  let comment = ignore_spaces (string \"//\" *> take_while Char.(( <> ) '\\n'))\n  let ignore_comment = Fn.const () <$> maybe comment\nend\n\nopen BasicParsers\n\nmodule type Parser = sig\n  type t [@@deriving show, yojson, eq]\n\n  val name : string\n  val parser : t Angstrom.t\nend\n\nmodule Make (M : Parser) : sig\n  val parse : string -> (M.t, string) Result.t\nend = struct\n  open M\n\n  let parse input =\n    match parse_string ~consume:All (parens parser <* end_of_input) input with\n    | Ok e -> Ok e\n    | Error e ->\n        Stdlib.prerr_endline @@ 
\"########## Error while parsing: (\" ^ name ^ \")\";\n        Stdlib.prerr_endline input;\n        Error e\nend\n\nmodule Array = struct\n  module M = struct\n    type t = {\n      array_name : string;\n      size : string;\n      typ : string;\n      index_typ : string option;\n    }\n    [@@deriving show, yojson, eq]\n\n    let parser =\n      let* array_name = identifier <* comma in\n      let* size = identifier <* comma in\n      let* typ = identifier in\n      let+ index_typ =\n        maybe @@ (comma *> string \"type_for_indexes\" *> colon *> identifier)\n      in\n      { array_name; size; typ; index_typ }\n\n    let name = \"array\"\n  end\n\n  include M\n  include Make (M)\nend\n\nmodule Bytes = struct\n  module M = struct\n    type t = { bytes_name : string; size : string }\n    [@@deriving show, yojson, eq]\n\n    let parser =\n      let* bytes_name = identifier <* comma in\n      let+ size =\n        identifier\n        (* this covers number and constants, but this leads to namespacing issues... 
*)\n      in\n      { bytes_name; size }\n\n    let name = \"bytes\"\n  end\n\n  include M\n  include Make (M)\nend\n\nmodule UnsignedPublicInteger = struct\n  module M = struct\n    type t = { integer_name : string; bits : int } [@@deriving show, yojson, eq]\n\n    let parser =\n      let* integer_name = identifier <* comma in\n      let+ bits = number in\n      { integer_name; bits }\n\n    let name = \"unsigned_public_integer\"\n  end\n\n  include M\n  include Make (M)\nend\n\nmodule PublicNatMod = struct\n  module M = struct\n    type t = {\n      type_name : string;\n      type_of_canvas : string;\n      bit_size_of_field : int;\n      modulo_value : string;\n    }\n    [@@deriving show, yojson, eq]\n\n    type t' = {\n      type_name : string option;\n      type_of_canvas : string option;\n      bit_size_of_field : int option;\n      modulo_value : string option;\n    }\n\n    let parser' : t' Angstrom.t =\n      let type_name =\n        (fun x acc -> { acc with type_name = Some x })\n        <$> field \"type_name\" identifier\n      in\n      let type_of_canvas =\n        (fun x acc -> { acc with type_of_canvas = Some x })\n        <$> field \"type_of_canvas\" identifier\n      in\n      let bit_size_of_field =\n        (fun x acc -> { acc with bit_size_of_field = Some x })\n        <$> field \"bit_size_of_field\" number\n      in\n      let modulo_value =\n        (fun x acc -> { acc with modulo_value = Some x })\n        <$> field \"modulo_value\" (quoted @@ take_while1 is_hex)\n      in\n      let f =\n        type_name <|> type_of_canvas <|> bit_size_of_field <|> modulo_value\n      in\n      let* f1 = ignore_comment *> f <* comma <* ignore_comment in\n      let* f2 = f <* comma <* ignore_comment in\n      let* f3 = f <* comma <* ignore_comment in\n      let+ f4 = f <* ignore_comment in\n      {\n        type_name = None;\n        type_of_canvas = None;\n        bit_size_of_field = None;\n        modulo_value = None;\n      }\n      |> f1 |> f2 |> f3 |> 
f4\n\n    let parser =\n      let* x = parser' in\n      match x with\n      | {\n       type_name = Some type_name;\n       type_of_canvas = Some type_of_canvas;\n       bit_size_of_field = Some bit_size_of_field;\n       modulo_value = Some modulo_value;\n      } ->\n          return\n            ({ type_name; type_of_canvas; bit_size_of_field; modulo_value } : t)\n      | _ -> fail \"Some fields are missing\"\n\n    let name = \"public_nat_mod\"\n  end\n\n  include M\n  include Make (M)\nend\n"
  },
  {
    "path": "engine/utils/ocaml_of_json_schema/ocaml_of_json_schema.js",
    "content": "const keys = p =>\n    new Set(\n        Object.keys(p)\n            .filter(k => ![\n                'description', 'maxItems', 'minItems'\n            ].includes(k))\n            .filter(k => p?.additionalProperties !== false || k != 'additionalProperties')\n    );\nconst eq = (xs, ys) =>\n    xs.size === ys.size &&\n    [...xs].every((x) => ys.has(x));\n\nlet todo = (todo = \"todo\") => null;\n\nlet assert = (fact, msg = \"assert\") => {\n    if (!fact)\n        throw msg;\n};\n\nlet exact_keys = (o, ...key_list) => {\n    // console.log('exact_keys', o);\n    // console.log('keys=', keys(o));\n    // console.log('agaisnt=', new Set(key_list));\n    return eq(keys(o), new Set(key_list));\n};\n\n\nconst clean = o => {\n    if (o instanceof Object\n        && exact_keys(o, 'allOf')\n        && o.allOf.length == 1\n    ) {\n        let first = o.allOf[0];\n        delete o['allOf'];\n        for (let k in first)\n            o[k] = first[k];\n    }\n    if (o instanceof Object\n        && 'type' in o\n        && o.type instanceof Array\n        && o.type.length === 2\n        && o.type.includes('null')\n    ) {\n        let type = o.type.filter(x => x != 'null')[0];\n        let other = JSON.parse(JSON.stringify(o));\n        other.type = type;\n        for (let k in o)\n            delete o[k];\n        o.anyOf = [other, { type: 'null' }];\n    }\n    if (o instanceof Array) {\n        return o\n            .filter(o => true)\n            .map(clean);\n    }\n    if (o instanceof Object) {\n        delete o['maxItems'];\n        delete o['minItems'];\n        return Object.fromEntries(Object.entries(o).map(([k, v]) => [k, clean(v)]));\n    } else {\n        return o;\n    }\n};\nlet isUpperCase = s => s.toUpperCase() == s;\nlet startsWithUpper = s => isUpperCase(s[0]);\n\nlet makeFirstCharUpper = s => s[0].toUpperCase() + s.slice(1);\nlet makeFirstCharLower = s => s[0].toLowerCase() + s.slice(1);\n\n\nlet variantNameOf = s => {\n    let v = 
makeFirstCharUpper(s);\n    if (['Some', 'None'].includes(v))\n        return v + \"'\";\n    return v;\n};\nlet escapeOCamlKeywords = s => {\n    let ocaml_keywords = [\"and\", \"as\", \"assert\", \"asr\", \"begin\", \"class\", \"constraint\",\n        \"do\", \"done\", \"downto\", \"else\", \"end\", \"exception\", \"external\",\n        \"false\", \"for\", \"fun\", \"function\", \"functor\", \"if\", \"in\",\n        \"include\", \"inherit\", \"initializer\", \"land\", \"lazy\", \"let\",\n        \"lor\", \"lsl\", \"lsr\", \"lxor\", \"match\", \"method\", \"mod\", \"module\",\n        \"mutable\", \"new\", \"nonrec\", \"object\", \"of\", \"open\", \"or\",\n        \"private\", \"rec\", \"sig\", \"struct\", \"then\", \"to\", \"true\", \"try\",\n        \"type\", \"val\", \"virtual\", \"when\", \"while\", \"with\"\n    ];\n    return ocaml_keywords.includes(s) ? s + \"'\" : s;\n}\nlet typeNameOf = s => escapeOCamlKeywords(s.replace(/[A-Z]/g, (l, i) => `${i ? '_' : ''}${l.toLowerCase()}`));\nlet fieldNameOf = s => escapeOCamlKeywords(s);\n\nlet ensureUnique = (() => {\n    let cache = {};\n    return (kind, v, disambiguer) => {\n        let key = JSON.stringify({ kind, v });\n        // TODO: enble check below, find a good solution\n        // if(cache[key])\n        //     throw `dup ${kind}, ${v}`;\n        cache[key] = true;\n        return v;\n    };\n})();\n\nconst util = require('util');\nlet log_full = o => console.error(util.inspect(o, { showHidden: false, depth: null, colors: true }));\n\nlet trace1 = (name, f) => (input) => {\n    let output = f(input);\n    log_full({ name, input, output });\n    return output;\n};\n\nlet ocaml_of_type_expr = (o, path) => {\n    if (!path)\n        throw \"Path missing!\";\n    let { kind, payload } = o;\n    return (({\n        option: type => `(${ocaml_of_type_expr(type, [...path, 'option'])} option)`,\n        unit: _ => `unit`,\n        tuple: types => `(${types.map((t, i) => ocaml_of_type_expr(t, [...path, 'tuple', 
i])).join(' * ')})`,\n        array: type => `(${ocaml_of_type_expr(type, [...path, 'array'])} list)`,\n        boolean: _ => `bool`,\n        string: _ => `string`,\n        char: _ => `char`,\n        integer: _ => ({\n            int64: 'Base.Int64.t',\n            string: 'string',\n            int: 'int'\n        })[o.repr],\n        name: payload => typeNameOf(payload),\n    })[kind] || (_ => {\n        log_full(o);\n        throw \"ocaml_of_type_expr: bad kind \" + kind;\n    }))(payload);\n};\n\n\nlet mk_match = (scrut, arms, path) => {\n    if (!path) {\n        console.trace();\n        throw \"Path missing!\";\n    }\n    // console.log({scrut, arms});\n    return `\nbegin match ${scrut} with\n${[...arms, ['_', `failwith (\"parsing error: ${path} LINE=\" ^ string_of_int __LINE__ ^ \" JSON=\" ^ Yojson.Safe.pretty_to_string ${scrut})`]].map(([pat, expr]) => `${pat} -> ${expr}`).join('\\n|')}\nend\n`;\n};\n\nlet wrap_paren = s => `(${s})`;\n\nlet ocaml_yojson_of_type_expr = (o, subject, path) => {\n    if (!path)\n        throw \"Path missing!\";\n    let { kind, payload } = o;\n    return `(${(({\n        option: type => `match ${subject} with | Option.Some x -> ${ocaml_yojson_of_type_expr(type, 'x', [...path, 'Some'])} | _ -> \\`Null`,\n        unit: _ => `\\`Null`,\n        tuple: types =>\n            `let (${types.map((t, i) => 'x' + i)}) = ${subject} in \\`List [${types.map((t, i) => ocaml_yojson_of_type_expr(t, 'x' + i, [...path, 'tuple', i])).join(';')}]`,\n        array: type =>\n            `\\`List (List.map (fun x -> ${ocaml_yojson_of_type_expr(type, 'x', [...path, 'array'])}) ${subject})`,\n        boolean: _ => `\\`Bool ${subject}`,\n        string: _ => `\\`String ${subject}`,\n        integer: _ => ({\n            string: `\\`Intlit ${subject}`,\n            int64: `\\`Intlit (Int64.to_string ${subject})`,\n            int: `\\`Int ${subject}`\n        })[o.repr],\n        char: _ => `\\`String (Base.Char.to_string ${subject})`,\n        
name: payload => `yojson_of_${typeNameOf(payload)} ${subject}`,\n    })[kind] || (_ => {\n        log_full(o);\n        throw \"ocaml_arms_of_type_expr: bad kind \" + kind;\n    }))(payload)})`;\n};\n\n\nlet ocaml_arms_of_type_expr = (o, path) => {\n    if (!path)\n        throw \"Path missing!\";\n    let { kind, payload } = o;\n    return (({\n        option: type => [\n            [`\\`Null`, `Option.None`],\n            ...ocaml_arms_of_type_expr(type, [...path, 'option']).map(([pat, expr]) => [pat, `Option.Some (${expr})`])\n        ],\n        unit: _ => [[`\\`Null`, '()']],\n        tuple: types => {\n            let sub_matches = types.map((type, i) =>\n                mk_match(`v${i}`, ocaml_arms_of_type_expr(type, [...path, 'tuple', i]), [...path, 'tuple']));\n            return [\n                [`\\`List [${types.map((_, i) => `v${i}`).join(';')}]`,\n                `(${sub_matches.join(',')})`\n                ],\n            ];\n        },\n        array: type => [\n            [`\\`List l`,\n                `List.map (fun x -> ${mk_match('x', ocaml_arms_of_type_expr(type, [...path, 'array']), [...path, 'array'])}) l`\n            ]\n        ],\n        boolean: _ => [[`\\`Bool b`, 'b']],\n        string: _ => [[`\\`String s`, 's']],\n        char: _ => [[`\\`String s`, 'String.get s 0']],\n        integer: _ => ({\n            int64: [\n                [`\\`Int i`, 'Base.Int64.of_int i'],\n                [`\\`Intlit lit`, `(try Base.Int64.of_string lit with | _ -> failwith (\"Base.Int64.of_string failed for \" ^ lit))`]\n            ],\n            string: [\n                [`\\`Int i`, 'string_of_int i'],\n                [`\\`Intlit s`, 's']\n            ],\n            int: [\n                [`\\`Int i`, 'i'],\n                [`\\`Intlit s`, 'Base.Int.of_string s']\n            ]\n        })[o.repr],\n        name: payload => [['remains', `${typeNameOf(payload)}_of_yojson remains`]],\n    })[kind] || (_ => {\n        log_full(o);\n        
throw \"ocaml_arms_of_type_expr: bad kind \" + kind;\n    }))(payload);\n};\n\nlet parse_type_name = s => {\n    if (!s.startsWith('#/definitions/'))\n        throw s;\n    return s.split('/').slice(-1)[0];\n};\n\nlet int_repr_of_format = format =>\n    (format.endsWith('int128') || format == 'uint64' || format == 'uint' /*`uint`s are `usize`s actually, so that's safer to assume it's a uint64, see https://github.com/GREsau/schemars/blob/386e3d7f5ac601795fb4e247291bbef31512ded3/schemars/src/json_schema_impls/primitives.rs#L85C16-L85C21*/)\n        ? 'string'\n        : (format == 'int64' || format == 'uint32' ? 'int64' : 'int');\n\nlet is_type = {\n    option: def => {\n        if (exact_keys(def, 'anyOf')\n            && def.anyOf.length === 2\n            && is_type.expr(def.anyOf[0])\n            && exact_keys(def.anyOf[1], 'type')\n            && def.anyOf[1].type === 'null'\n        )\n            return {\n                kind: 'option',\n                payload: is_type.expr(def.anyOf[0])\n            };\n        return false;\n    },\n\n    unit: def => {\n        if (exact_keys(def, 'type')\n            && def.type === 'null')\n            return {\n                kind: 'unit',\n            };\n        return false;\n    },\n\n    tuple: def => {\n        if (exact_keys(def, 'type', 'items')\n            && def.type === 'array'\n            && def.items instanceof Array\n            && def.items.every(is_type.expr))\n            return {\n                kind: 'tuple',\n                payload: def.items.map(is_type.expr)\n            };\n        return false;\n    },\n\n    array: def => {\n        if (exact_keys(def, 'type', 'items')\n            && def.type === 'array'\n            && is_type.expr(def.items))\n            return {\n                kind: 'array',\n                payload: is_type.expr(def.items),\n            };\n        return false;\n    },\n\n    expr: def =>\n        (exact_keys(def, '$ref') ? 
{\n            kind: 'name', payload: parse_type_name(def['$ref'])\n        } : false)\n        || is_type.option(def)\n        || is_type.array(def)\n        || is_type.unit(def)\n        || is_type.tuple(def)\n        || (def.type === 'integer'\n            ? { kind: 'integer', repr: int_repr_of_format(def.format) }\n            : false)\n        || (def.type === 'string' && def.maxLength === def.minLength && def.minLength === 1\n            ? { kind: 'char' }\n            : false)\n        || ((exact_keys(def, 'type')\n            && ['boolean', 'string'].includes(def.type)\n        ) ? { kind: def.type } : false\n        ) || false,\n\n    record: def => {\n        if ((eq(keys(def), new Set([\"type\", \"required\", \"properties\"]))\n            || eq(keys(def), new Set([\"type\", \"properties\"]))\n        )\n            && def.type === \"object\"\n            && (def.required || []).every(k => typeof k == 'string')\n            && Object.values(def.properties).every(is_type.expr))\n            return Object.fromEntries(Object.entries(def.properties).map(([n, v]) => [n, is_type.expr(v)]));\n        return false;\n    },\n\n    variant: def => {\n        let doc = def.description;\n        if (exporters.enum.guard(def))\n            return def.enum.map(e => ({\n                kind: 'variant',\n                name: e,\n                payloadKind: 'empty',\n                payload: null,\n                doc,\n            }));\n        if (exact_keys(def, 'type', 'required', 'properties')\n            && def.type === 'object'\n            && Object.values(def.properties).length == 1\n        ) {\n            let [name, value] = Object.entries(def.properties)[0];\n            if (is_type.expr(value))\n                return [{\n                    kind: 'variant',\n                    payloadKind: 'expr',\n                    name,\n                    payload: is_type.expr(value),\n                    doc,\n                }];\n            if 
(is_type.record(value))\n                return [{\n                    kind: 'variant',\n                    name,\n                    payloadKind: 'record',\n                    payload: is_type.record(value),\n                    doc,\n                }];\n        }\n        return false;\n    },\n};\n\n// for (let k in is_type) {\n//     is_type[k] = trace1(k, is_type[k]);\n// }\n\n// let trace = (name, f) => (...inputs) => {\n//     let output = f(...inputs);\n//     log_full({f: name, inputs, output});\n//     return output;\n// };\n\nlet export_record = (fields, path, name) => {\n    let record_expression = fields.map(([field, type, _doc], i) => {\n        if (field == 'index' && name == 'def_id_contents') {\n            // This is a hack to always parse Rust DefId indexes to `(0, 0)`\n            return 'index = Base.Int64.(zero, zero, None)';\n        }\n        let p = [...path, 'field_' + field];\n        let sub = mk_match('x', ocaml_arms_of_type_expr(type, p), p);\n        let match = `match List.assoc_opt \"${field}\" l with Option.Some x -> begin ${sub} end | Option.None -> raise (MissingField {field = \"${field}\"; fields = l})`;\n        return `${fieldNameOf(field)} = begin ${match} end`;\n    }).join(';\\n');\n    return [`\\`Assoc l`, `{ ${record_expression} }`];\n};\n\nlet mkdoc = doc => doc ? 
` (** ${doc} *)` : '';\n\nlet exporters = {\n    oneOf: {\n        guard: def => eq(keys(def), new Set([\"oneOf\"])) &&\n            def.oneOf.every(is_type.variant),\n        f: (name, { oneOf }) => {\n            let variants = oneOf.map(is_type.variant).flat();\n            let type = variants.map(({ kind, name: variant_name, payloadKind, payload, doc }) => {\n                doc = mkdoc(doc);\n                let variant = ensureUnique('variant', variantNameOf(variant_name));\n                return ({\n                    record: () => {\n                        let fields = Object.entries(payload).map(([field, value]) =>\n                            fieldNameOf(field) + ' : ' + ocaml_of_type_expr(value, ['rec-variant:' + variant + ':' + field]));\n                        return `${variant} of {${fields.join(';\\n')}}${doc}`;\n                    },\n                    expr: () => `${variant} of (${ocaml_of_type_expr(payload, ['expr-variant:' + variant + ':' + name])})${doc}`,\n                    empty: () => `${variant}${doc}`,\n                }[payloadKind] || (() => {\n                    throw \"bad payloadKind: \" + payloadKind;\n                }))();\n            }).join('\\n     | ');\n            let parse_arms = variants.map(({ kind, name: variant_name, payloadKind, payload }) => {\n                let variant = variantNameOf(variant_name);\n                let wrap = (arms, prefix = '') => [\n                    [`\\`Assoc [\"${variant_name}\", rec_value]`,\n                    prefix + mk_match('rec_value', arms, ['rec-variant_' + variant + '_' + variant_name])\n                    ]\n                ];\n                return ({\n                    record: () => {\n                        let [pat, expr] = export_record(Object.entries(payload), ['rec-variant_' + variant + '_' + variant_name], name);\n                        return wrap([[pat, variant + ' ' + expr]]);\n                    },\n                    expr: () => 
wrap(ocaml_arms_of_type_expr(payload, ['expr-variant(PA):' + name + ':' + variant + ':' + variant_name]), variant + ' '),\n                    empty: () => [[`\\`String \"${variant_name}\"`, variant]],\n                }[payloadKind] || (() => {\n                    throw \"bad payloadKind: \" + payloadKind;\n                }))();\n            }).flat();\n            let parse = mk_match('o', parse_arms, [name + '_of_yojson']);\n            let to_json = `match o with ${variants.map(({ kind, name: variant_name, payloadKind, payload }) => {\n                let variant = variantNameOf(variant_name);\n                let wrap = (x, e) => `${variant} ${x} -> \\`Assoc [\"${variant_name}\", ${e}]`;\n                return ({\n                    record: () => {\n                        let fields = Object.entries(payload);\n                        return wrap(\n                            `{${fields.map(([field, type], i) => `${fieldNameOf(field)}`).join('; ')}}`,\n                            `\\`Assoc [${fields.map(([field, type], i) => `(\"${field}\", ${ocaml_yojson_of_type_expr(type, fieldNameOf(field), [name + ':' + variant, 'variant', field])})`).join('; ')\n                            }]`\n                        );\n                    },\n                    expr: () => wrap('x', ocaml_yojson_of_type_expr(payload, 'x', [name + ':' + variant, 'payload'])),\n                    empty: () => `${variant} -> \\`String \"${variant_name}\"`,\n                }[payloadKind] || (() => {\n                    throw \"bad payloadKind: \" + payloadKind;\n                }))();\n            }).join(' | ')}`;\n            return { type, parse, to_json };\n        },\n    },\n    empty_struct: {\n        guard: def => (eq(keys(def), new Set([\"type\"])) && (def.type == 'object' || def.type == 'null')),\n        f: (name, _) => {\n            return {\n                type: `EmptyStruct${name}`,\n                parse: `EmptyStruct${name}`,\n                to_json: '`Null',\n  
          };\n        },\n    },\n    newtype: {\n        guard: def => !exporters['empty_struct'].guard(def) && is_type.expr(def, [\"try-parse\"]),\n        f: (name, o) => {\n            let path = [name + '-newtype-ref'];\n            let te = is_type.expr(o, path);\n            let ocaml_type = ocaml_of_type_expr(te, path);\n            let arms = ocaml_arms_of_type_expr(te, path);\n            let to_json = ocaml_yojson_of_type_expr(te, `(let Newtype${name} inner = o in inner)`, path);\n            return {\n                type: `Newtype${name} of ${ocaml_type}`,\n                parse: `Newtype${name}(${mk_match('o', arms, path)})`,\n                to_json,\n            };\n        },\n    },\n    // object is a *flat* record\n    object: {\n        guard: def => (eq(keys(def), new Set([\"type\", \"required\", \"properties\"]))\n            || eq(keys(def), new Set([\"type\", \"properties\"]))\n        )\n            && def.type === \"object\"\n            && (def.required || []).every(k => typeof k == 'string')\n            && Object.values(def.properties).every(is_type.expr),\n        f: (name, { required, properties }) => {\n            let fields = Object.entries(properties).map(\n                ([name, prop]) => [name, is_type.expr(prop), prop.description]\n            );\n\n            let [pat, expr] = export_record(fields, ['struct_' + name], name);\n\n            return {\n                type: `{ ${fields.map(([fname, type, doc]) => `${fieldNameOf(fname)} : ${ocaml_of_type_expr(type, ['struct_' + fname + '_' + name])}${mkdoc(doc)}`).join(';\\n')} }`,\n                parse: mk_match('o', [[pat, expr]], ['struct_' + name]),\n                to_json: //`let {${fields.map(([fname, type, doc]) => fieldNameOf(fname)).join(';')}} = o in`\n                    `\\`Assoc [${fields.map(([fname, type, doc]) => `(\"${fname}\", ${ocaml_yojson_of_type_expr(type, 'o.' 
+ fieldNameOf(fname), ['todo'])})`).join('; ')}]`\n            };\n        },\n    },\n    enum: {\n        guard: def => eq(keys(def), new Set([\"type\", \"enum\"]))\n            && def.type == \"string\",\n        f: (name, o) => {\n            assert(o.enum.every(x => typeof x == \"string\"), 'not every enum is a string');\n\n            if (o.enum.length == 0) {\n                return {\n                    type: '|',\n                    parse: 'failwith \"cannot parse an empty type\"',\n                    to_json: 'match o with _ -> .',\n                };\n            }\n\n            let variants = o.enum.map(n => ({\n                Δ: n,\n                variant: ensureUnique('variant', variantNameOf(n)),\n                variantOriginName: n\n            }));\n\n            let parse_string\n                = `match s with ` + variants.map(\n                    ({ Δ, variant }) => `\"${Δ}\" -> ${variant}`\n                ).join(' | ') + ` | s -> failwith (\"unexpected variant [\" ^ s ^ \"] while parsing enum [${name}]\")`;\n\n            return {\n                type: variants.map(({ variant }) => variant).join(' | '),\n                parse: `  match o with\n                        | \\`String s -> (${parse_string})\n                        | _ -> failwith \"expected a string while parsing a ${name}\"\n                       `,\n                to_json: `match o with ${variants.map(({ variant, variantOriginName }) => `${variant} -> \\`String \"${variantOriginName}\"`).join(' | ')}`,\n            };\n        },\n    },\n};\n\nlet export_definition = (name, def) => {\n    let suitable_exporters = Object.entries(exporters).filter(\n        ([_, { guard }]) => guard(def)\n    );\n\n    if (suitable_exporters.length != 1) {\n        console.error(`ERROR: each definition should have exactly one suited exporter, but type \"${name}\" has the following exporter(s): ${JSON.stringify(suitable_exporters.map(([n, _]) => n))}.`);\n        console.error('name', 
name);\n        log_full(def);\n        console.error('xname', name);\n\n        throw \"kind error\";\n    }\n    let [_, { f }] = suitable_exporters[0];\n    name = ensureUnique('type', typeNameOf(name));\n    let r = f(name, def);\n    if (r === null)\n        return `(* type ${name} *)`;\n    let { type, parse, to_json } = r;\n    return { name, type, parse, to_json };\n    // return [{type, parse}]\n    // return `type ${name} = ${type}\\nlet parse_${name} (o: Yojson.Safe.t): ${name} = ${parse}\\n`;\n};\n\nfunction run(str) {\n    let contents = JSON.parse(str);\n    const definitions = clean(contents.definitions);\n\n    let sig = ``;\n\n    let impl = `include struct\nopen struct\n  include Base.Hash.Builtin\n  open Base\n  let bool_of_sexp = bool_of_sexp\n  let string_of_sexp = string_of_sexp\n  let option_of_sexp = option_of_sexp\n  let list_of_sexp = list_of_sexp\n  let int_of_sexp = int_of_sexp\n  let char_of_sexp = char_of_sexp\n  let unit_of_sexp = unit_of_sexp\n  let bool_of_sexp = bool_of_sexp\n\n  let sexp_of_bool = sexp_of_bool\n  let sexp_of_string = sexp_of_string\n  let sexp_of_option = sexp_of_option\n  let sexp_of_list = sexp_of_list\n  let sexp_of_int = sexp_of_int\n  let sexp_of_char = sexp_of_char\n  let sexp_of_unit = sexp_of_unit\n  let sexp_of_bool = sexp_of_bool\n\n  let compare_bool = compare_bool\n  let compare_string = compare_string\n  let compare_option = compare_option\n  let compare_list = compare_list\n  let compare_int = compare_int\n  let compare_char = compare_char\n  let compare_unit = compare_unit\n  let compare_bool = compare_bool\nend\n[@@@warning \"-A\"]\n`;\n\n    impl += `let hax_version = {escape|${contents['$id'].replace(/\\|escape\\}/g, '|_escape}')}|escape}`;\n\n    let items = Object.entries(definitions)\n        .map(([name, def]) => ['Node_for_TyKind' == name ? 'node_for_ty_kind_generated' : name, def])\n        .map(([name, def]) => ['Node_for_DefIdContents' == name ? 
'node_for_def_id_contents_generated' : name, def])\n        .map(([name, def]) => ['Node_for_ItemRefContents' == name ? 'node_for_item_ref_contents_generated' : name, def])\n        .map(\n            ([name, def]) => export_definition(name, def)\n        ).filter(x => x instanceof Object);\n\n    let derive_items = ['show', 'eq', 'hash', 'sexp', 'compare'];\n\n    impl += `\nmodule ParseError = struct\n  exception MissingField of {\n    fields: (string * Yojson.Safe.t) list;\n    field: string\n  }\n\n  let pp = function\n    | MissingField {fields; field} ->\n       \"Missing field [\" ^ field ^ \"], while looking at the following JSON: \" ^ Yojson.Safe.pretty_to_string (\\`Assoc fields)\n    | e -> raise e\nend\n\nopen ParseError\n\n`;\n\n    let derive_clause = derive_items.length ? `[@@deriving ${derive_items.join(', ')}]` : '';\n\n    impl += (\n        'type '\n        + items.map(({ name, type }) =>\n            `${name} = ${type}\\n`\n        ).join('\\nand ')\n        + derive_clause\n    );\n    impl += `\nand node_for__ty_kind = node_for_ty_kind_generated\nand node_for__def_id_contents = node_for_def_id_contents_generated\nand node_for__item_ref_contents = node_for_item_ref_contents_generated\n\n\ntype map_types = ${\"[`TyKind of ty_kind | `DefIdContents of def_id_contents | `ItemRefContents of item_ref_contents]\"}\nlet cache_map: (int64, ${\"[ `Value of map_types | `JSON of Yojson.Safe.t ]\"}) Base.Hashtbl.t = Base.Hashtbl.create (module Base.Int64)\n\nmodule Exn = struct\nlet table_id_node_of_yojson (type t) (name: string) (encode: t -> map_types) (decode: map_types -> t option) (parse: Yojson.Safe.t -> t) (o: Yojson.Safe.t): (t * int64) =\n    let label = \"table_id_node_of_yojson:\" ^ name ^ \": \" in\n    match o with\n    | \\`Assoc alist -> begin\n          let id = match List.assoc_opt \"id\" alist with\n            | Some (\\`Int id) -> Base.Int.to_int64 id\n            | Some (\\`Intlit lit) -> (try Base.Int64.of_string lit with | _ -> 
failwith (label ^ \"Base.Int64.of_string failed for \" ^ lit))\n            | Some bad_json -> failwith (label ^ \"id was expected to be an int, got: \" ^ Yojson.Safe.pretty_to_string bad_json ^ \"\\n\\n\\nfull json: \" ^ Yojson.Safe.pretty_to_string o)\n            | None -> failwith (label ^ \" could not find the key 'id' in the following json: \" ^ Yojson.Safe.pretty_to_string o)\n          in\n          let decode v = decode v |> Base.Option.value_exn ~message:(label ^ \"could not decode value (wrong type)\") in\n          match List.assoc_opt \"value\" alist with\n          | Some json when (match json with \\`Null -> false | _ -> true) ->\n            (parse json, id)\n          | _ ->\n            let value = match Base.Hashtbl.find cache_map id with\n            | None -> failwith (label ^ \"failed to lookup id \" ^ Base.Int64.to_string id)\n            | Some (\\`Value v) -> decode v\n            | Some (\\`JSON json) ->\n                let value = parse json in\n                Base.Hashtbl.set cache_map ~key:id ~data:(\\`Value (encode value));\n                value\n            in (value, id)\n       end\n    | _ -> failwith (label ^ \"expected Assoc\")\n\n`;\n    impl += ('');\n    impl += ('let rec ' + items.map(({ name, type, parse }) =>\n        `${name}_of_yojson (o: Yojson.Safe.t): ${name} = ${parse}`\n    ).join('\\nand '));\n    impl += `\nand node_for__ty_kind_of_yojson (o: Yojson.Safe.t): node_for__ty_kind =\n   let (value, _id) =\n       table_id_node_of_yojson \"TyKind\"\n           (fun value -> \\`TyKind value)\n           (function | \\`TyKind value -> Some value | _ -> None)\n           ty_kind_of_yojson\n           o\n   in\n   {value; id = Base.Int64.zero}\nand node_for__def_id_contents_of_yojson (o: Yojson.Safe.t): node_for__def_id_contents =\n   let (value, _id) =\n       table_id_node_of_yojson \"DefIdContents\"\n           (fun value -> \\`DefIdContents value)\n           (function | \\`DefIdContents value -> Some value | _ -> 
None)\n           def_id_contents_of_yojson\n           o\n   in\n   {value; id = Base.Int64.zero}\nand node_for__item_ref_contents_of_yojson (o: Yojson.Safe.t): node_for__item_ref_contents =\n   let (value, _id) =\n       table_id_node_of_yojson \"ItemRefContents\"\n           (fun value -> \\`ItemRefContents value)\n           (function | \\`ItemRefContents value -> Some value | _ -> None)\n           item_ref_contents_of_yojson\n           o\n   in\n   {value; id = Base.Int64.zero}\n`;\n    impl += ('');\n    impl += ('let rec ' + items.map(({ name, type, parse, to_json }) =>\n        `yojson_of_${name} (o: ${name}): Yojson.Safe.t = ${to_json}`\n    ).join('\\nand '));\n    impl += `\nand yojson_of_node_for__ty_kind {value; id} = yojson_of_node_for_ty_kind_generated {value; id}\nand yojson_of_node_for__def_id_contents {value; id} = yojson_of_node_for_def_id_contents_generated {value; id}\nand yojson_of_node_for__item_ref_contents {value; id} = yojson_of_node_for_item_ref_contents_generated {value; id}\nend\n\nopen struct\n  let catch_parsing_errors (type a b) (label: string) (f: a -> b) (x: a): (b, Base.Error.t) Base.Result.t = \n      try Base.Result.Ok (f x) with\n      | e -> Base.Result.Error (Base.Error.of_exn ~backtrace:\\`Get e)\n  let unwrap = function \n    | Base.Result.Ok value -> value\n    | Base.Result.Error err -> \n        let err =\n            let path = Utils.tempfile_path ~suffix:\".log\" in\n            Core.Out_channel.write_all path\n                ~data:(Base.Error.to_string_hum err);\n            path\n        in\n        prerr_endline [%string {|\nError: could not serialize or deserialize a hax value.\nThis error arises from an incompatibility betwen hax components: hax-engine, cargo-hax and hax-lib.\nPotential fixes:\n  - Make sure the version of \\`hax-lib\\` for the crate your are trying to extract matches the version of hax currently installed (%{hax_version}).\n  - Run \\`cargo clean\\`\n  - Reinstall hax\n\nThe full stack trace 
was dumped to %{err}.\n|}];\n        exit 1\nend\n`;\n\n\n    impl += (items.map(({ name, type, parse, to_json }) =>\n        `\nlet safe_yojson_of_${name} = catch_parsing_errors \"yojson_of_${name}\" Exn.yojson_of_${name}\nlet safe_${name}_of_yojson = catch_parsing_errors \"${name}_of_yojson\" Exn.${name}_of_yojson\nlet yojson_of_${name} x = unwrap (safe_yojson_of_${name} x)\nlet ${name}_of_yojson x = unwrap (safe_${name}_of_yojson x)`\n    ).join('\\n'));\n\n    return impl + ' \\n end';\n}\n\nfunction parse_args() {\n    let [script_name, input_path, output_path, ...rest] = process.argv.slice(1);\n    if (!input_path || !output_path || rest.length) {\n        console.log(`\nUsage: node ${script_name} INPUT_PATH OUTPUT_PATH\n\n   INPUT_PATH and OUTPUT_PATH can be - to denotes stdin or stdout\n`);\n        process.exit();\n    }\n    return { input_path, output_path };\n}\n\nasync function read(stream) {\n    const chunks = [];\n    for await (const chunk of stream) chunks.push(chunk);\n    return Buffer.concat(chunks).toString('utf8');\n}\n\nasync function main() {\n    const fs = require('fs');\n    let { input_path, output_path } = parse_args();\n    let out = run(input_path == '-'\n        ? await read(process.stdin)\n        : fs.readFileSync(input_path, 'utf-8')\n    );\n    output_path == '-'\n        ? process.stdout.write(out)\n        : fs.writeFileSync(output_path, out);\n}\n\nmain();\n\n"
  },
  {
    "path": "engine/utils/ppx_functor_application/README.md",
    "content": "# `ppx_functor_application`\n\n## Motivation\nThe engine consists of numerous phases, implemented as OCaml functors\nparametrized over \"AST features\" (see the book). Two phases can be\nbinded (sequenced) via `Phase_utils.BindPhase` functor.\n\nSince OCaml define (or let users define) infix notations for functor\napplication, combining many phases (functors) results in the following\nchristmas tree looking kinds of code:\n\n```ocaml\nstruct\n    module ARG0 = (Phases.Reject.RawOrMutPointer)(Features.Rust)\n    module ARG1 = (Phases.Transform_hax_lib_inline)(ARG0.FB)\n    module ARG2 = (Phases.Specialize)(ARG1.FB)\n    module ARG3 = (Phases.Drop_sized_trait)(ARG2.FB)\n    module ARG4 = (Phases.Simplify_question_marks)(ARG3.FB)\n    module ARG5 = (Phases.And_mut_defsite)(ARG4.FB)\n    module ARG6 = (Phases.Reconstruct_for_loops)(ARG5.FB)\n    module ARG7 = (Phases.Reconstruct_while_loops)(ARG6.FB)\n    module ARG8 = (Phases.Direct_and_mut)(ARG7.FB)\n    module ARG9 = (Phases.Reject.Arbitrary_lhs)(ARG8.FB)\n    module ARG10 = (Phases.Drop_blocks)(ARG9.FB)\n    module ARG11 = (Phases.Drop_references)(ARG10.FB)\n    module ARG12 = (Phases.Trivialize_assign_lhs)(ARG11.FB)\n    module ARG13 = (Side_effect_utils.Hoist)(ARG12.FB)\n    module ARG14 = (Phases.Simplify_match_return)(ARG13.FB)\n    module ARG15 = (Phases.Drop_needless_returns)(ARG14.FB)\n    module ARG16 = (Phases.Local_mutation)(ARG15.FB)\n    module ARG17 = (Phases.Reject.Continue)(ARG16.FB)\n    module ARG18 = (Phases.Cf_into_monads)(ARG17.FB)\n    module ARG19 = (Phases.Reject.EarlyExit)(ARG18.FB)\n    module ARG20 = (Phases.Functionalize_loops)(ARG19.FB)\n    module ARG21 = (Phases.Reject.As_pattern)(ARG20.FB)\n    module ARG22 = (Phases.Traits_specs)(ARG21.FB)\n    module ARG23 = (Phases.Simplify_hoisting)(ARG22.FB)\n    module ARG24 = (Phases.Newtype_as_refinement)(ARG23.FB)\n    module ARG25 = (SubtypeToInputLanguage)(ARG24.FB)\n    module ARG26 = (Identity)(ARG25.FB)\n    include\n     
   ((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(((BindPhase)(ARG0))(ARG1)))(ARG2)))(ARG3)))(ARG4)))(ARG5)))(ARG6)))(ARG7)))(ARG8)))(ARG9)))(ARG10)))(ARG11)))(ARG12)))(ARG13)))(ARG14)))(ARG15)))(ARG16)))(ARG17)))(ARG18)))(ARG19)))(ARG20)))(ARG21)))(ARG22)))(ARG23)))(ARG24)))(ARG25)))(ARG26)\nend\n```\n\nThe system of phases is supposed to let backends opt-in or out easily\nfor phases. This syntactic limitation was a major issue for that.\n\n## Solution\nThis PPX defines a small DSL that embeds in the OCaml syntax of\nexpressions to provide a nice way of binding phase functors via a\n\`|>\` infix operator.\n\nExample:\n```ocaml\nmodule TransformToInputLanguage =\n  [%functor_application\n  Phases.Reject.RawOrMutPointer(Features.Rust)\n  |> Phases.Transform_hax_lib_inline\n  |> Phases.Specialize\n  |> Phases.Drop_sized_trait\n  |> Phases.Simplify_question_marks\n  |> Phases.And_mut_defsite\n  |> Phases.Reconstruct_for_loops\n  |> Phases.Reconstruct_while_loops\n  |> SubtypeToInputLanguage\n  |> Identity\n  ]\n  [@ocamlformat \"disable\"]\n```\n\nNote: the \`[@ocamlformat \"disable\"]\` annotation is important,\notherwise \`ocamlformat\` tries to format those PPX invocations with its\nrules for expressions, yielding rather ugly looking code...\n\n### Syntax\n - \`Name\`: a module \`Name\`\n - \`Name(X, Y, Z)\`: the application of the functor \`Name\` with three arguments \`X\`, \`Y\` and \`Z\`\n - \`(module <M>)\`: the arbitrary OCaml module expression \`<M>\`\n - \`<a> <b>\`: the application of the module described by \`<a>\` and the module described by \`<b>\`\n - \`(fun X -> <a>)\`: a \"functor\" from \`X\` to \`<a>\`\n - \`<a> |> <b>\`: \`<a>\` bound with \`<b>\`\n"
  },
  {
    "path": "engine/utils/ppx_functor_application/dune",
    "content": "(library\n (name ppx_functor_application)\n (package hax-engine)\n (kind ppx_rewriter)\n (libraries ppxlib base)\n (preprocess\n  (pps ppxlib.metaquot ppx_deriving.eq ppx_deriving.show)))\n\n(env\n (_\n  (flags\n   (:standard -warn-error -A -warn-error +8))))\n"
  },
  {
    "path": "engine/utils/ppx_functor_application/ppx_functor_application.ml",
    "content": "open Base\nopen Ppxlib\nmodule Format = Stdlib.Format\n\nlet name = \"functor_application\"\n\ntype longident = Longident.t\n\nlet show_longident = Longident.name\n\nlet pp_longident (fmt : Format.formatter) (s : longident) : unit =\n  Format.pp_print_string fmt @@ show_longident s\n\nlet string_of_pattern p =\n  let s = Buffer.create 0 in\n  let fmt = Format.formatter_of_buffer s in\n  Pprintast.pattern fmt p;\n  Format.pp_print_flush fmt ();\n  Buffer.contents s\n\nlet string_of_module_expr p =\n  let s = Buffer.create 0 in\n  let fmt = Format.formatter_of_buffer s in\n  Pprintast.module_expr fmt p;\n  Format.pp_print_flush fmt ();\n  Buffer.contents s\n\nlet show_module_expr = string_of_module_expr\n\nlet pp_module_expr (fmt : Format.formatter) (s : module_expr) : unit =\n  Format.pp_print_string fmt @@ string_of_module_expr s\n\n(** Defines a DSL for functor application. *)\ntype module_dsl =\n  | Var of longident\n  | App of module_dsl * module_dsl\n  | ModExpr of module_expr\n  | Abs of string * module_dsl\n  | Pipe of module_dsl list\n  | Meta of module_dsl * (location[@opaque])\n[@@deriving show]\n\nlet var_of_string s = Var (Longident.Lident s)\n\n(** Elaborate a OCaml module expression from a `module_dsl` *)\nlet rec elab ~loc (t : module_dsl) : module_expr =\n  let (module E) = Ast_builder.make loc in\n  let h = elab ~loc in\n  match t with\n  | Meta (x, loc) -> elab ~loc x\n  | Var x -> E.pmod_ident { txt = x; loc }\n  | ModExpr m -> m\n  | App ((Abs (arg, m) | Meta (Abs (arg, m), _)), x) ->\n      E.pmod_structure\n        [\n          E.pstr_module\n          @@ E.module_binding ~name:{ loc; txt = Some arg } ~expr:(h x);\n          E.pstr_include @@ E.include_infos @@ h m;\n        ]\n  | App (f, x) -> E.pmod_apply (h f) (h x)\n  | Pipe (x :: funs) ->\n      let x = h x in\n      let nth_arg nth = \"ARG\" ^ Int.to_string nth in\n      let arg0 = E.pmod_ident { loc; txt = Lident (nth_arg 0) } in\n      let binds =\n        List.mapi\n   
       ~f:(fun i _ ->\n            E.pmod_ident { txt = Lident (nth_arg @@ (i + 1)); loc })\n          funs\n        |> List.fold_left ~init:arg0 ~f:(fun x y ->\n               let bind = E.pmod_ident { loc; txt = Lident \"BindPhase\" } in\n               let ( <| ) = E.pmod_apply in\n               bind <| x <| y)\n      in\n      E.pmod_structure\n      @@ [%stri module ARG0 = [%m x]]\n         :: List.concat_mapi\n              ~f:(fun nth fn ->\n                let nth_var = Var (Ldot (Lident (nth_arg nth), \"FB\")) in\n                let new_arg = App (fn, nth_var) in\n                [\n                  E.pstr_module\n                  @@ E.module_binding\n                       ~name:{ loc; txt = Some (nth_arg @@ (nth + 1)) }\n                       ~expr:(h new_arg);\n                ])\n              funs\n      @ [%str include [%m binds]]\n  | Pipe _ -> failwith \"Illegal pipe: singleton or empty\"\n  | Abs _ -> failwith \"Top-level abstraction\"\n\nlet rec collect_pipes (t : module_dsl) : module_dsl list =\n  match t with\n  | Meta (Pipe l, _) | Pipe l -> List.concat_map ~f:collect_pipes l\n  | _ -> [ t ]\n\n(** Get rid of extra `Pipe` nodes *)\nlet rec normalize (t : module_dsl) : module_dsl =\n  match t with\n  | App (f, x) -> App (normalize f, normalize x)\n  | Abs (x, body) -> Abs (x, normalize body)\n  | ModExpr _ | Var _ -> t\n  | Meta (x, loc) -> Meta (normalize x, loc)\n  | Pipe _ -> (\n      match collect_pipes t with\n      | [] -> failwith \"Empty pipe\"\n      | [ t ] -> t\n      | l -> Pipe l)\n\n(** Recognize a small language embedded in OCaml syntax for applying functors in\n    chain. 
*)\nlet rec parse expr =\n  let r =\n    match expr with\n    | { pexp_desc = Pexp_construct ({ txt; _ }, None); _ } ->\n        (* Parses variables (module names are uppercase, since we are looking at OCaml expressions, so we match on constructors)  *)\n        Var txt\n    | { pexp_desc = Pexp_construct ({ txt; _ }, Some arg); _ } ->\n        (* Parses module applcations (same as above: in expressions, module applications are parsed as constructor applications) *)\n        App (Var txt, parse arg)\n    | [%expr [%e? m1] |> [%e? m2]] ->\n        (* Parses `... |> ...` infix module application *)\n        Pipe [ parse m1; parse m2 ]\n    | [%expr (module [%m? m])] ->\n        (* Parses module expressions (in this case, that corresponds to OCaml module expression) *)\n        ModExpr m\n    | [%expr [%e? f] [%e? x]] ->\n        (* Parses module applications (e.g. `(fun x -> ...) (module YXZ)`) *)\n        App (parse f, parse x)\n    | [%expr fun [%p? x] -> [%e? body]] -> (\n        (* Parses module abstractions (e.g. `fun X -> Z(X)`) *)\n        match x with\n        | { ppat_desc = Ppat_construct ({ txt = Lident x; _ }, None); _ } ->\n            Abs (x, parse body)\n        | _ -> failwith @@ \"Out of language: \" ^ string_of_pattern x)\n    | _ -> failwith @@ \"Out of language: \" ^ Pprintast.string_of_expression expr\n  in\n  Meta (r, expr.pexp_loc)\n\nlet expand ~(ctxt : Expansion_context.Extension.t) (e : expression) :\n    module_expr =\n  let loc = Expansion_context.Extension.extension_point_loc ctxt in\n  let e = parse e |> normalize in\n  elab ~loc e\n\nlet ext =\n  Extension.V3.declare name Extension.Context.module_expr\n    Ast_pattern.(pstr (pstr_eval __ drop ^:: nil))\n    expand\n\nlet rule = Ppxlib.Context_free.Rule.extension ext\nlet () = Ppxlib.Driver.register_transformation ~rules:[ rule ] name\n"
  },
  {
    "path": "engine/utils/ppx_generate_features/README.md",
    "content": "# `ppx_generate_features`\n\nSpecific to `hax-engine`: \n    - generates a `FEATURES` module type;\n    - modules `Off` and `On` of type `FEATURES`, one with every feature type set to `on`, the other with every feature type set to `off`;\n    - a `SUBSET.T` module type that describe a subtyping relation between two modules of type `FEATURES`;\n    - a `SUBSET.Id` module that maps every feature to themselves.\n    \nThis PPX aims to alleviates the pain of adding new features.\n\n"
  },
  {
    "path": "engine/utils/ppx_generate_features/dune",
    "content": "(library\n (name ppx_generate_features)\n (package hax-engine)\n (kind ppx_rewriter)\n (libraries ppxlib base)\n (preprocess\n  (pps ppxlib.metaquot ppx_deriving.eq ppx_deriving.show)))\n\n(env\n (_\n  (flags\n   (:standard -warn-error -A -warn-error +8))))\n"
  },
  {
    "path": "engine/utils/ppx_generate_features/ppx_generate_features.ml",
    "content": "open Base\nopen Ppxlib\n\nlet name = \"declare_features\"\n\nlet uppercase_first_char (s : string) : string =\n  String.(uppercase (prefix s 1) ^ drop_prefix s 1)\n\nlet rename (l : (string * string) list) =\n  let h (s : string) =\n    List.find_map\n      ~f:(fun (s', replace) -> if String.equal s s' then Some replace else None)\n      l\n    |> Option.value ~default:s\n  in\n  object\n    inherit Ast_traverse.map\n    method! string = h\n    method! label = h\n\n    method! longident =\n      let rec r = function\n        | Lapply (x, y) -> Lapply (r x, r y)\n        | Ldot (x, y) -> Ldot (r x, h y)\n        | Lident x -> Lident (h x)\n      in\n      r\n  end\n\nlet expand ~(ctxt : Expansion_context.Extension.t) (features : string list) :\n    structure_item =\n  let loc = Expansion_context.Extension.extension_point_loc ctxt in\n  let (module B) = Ast_builder.make loc in\n  [%stri\n    include struct\n      module type FEATURES = sig\n        include\n          [%m\n        List.map\n          ~f:(fun txt ->\n            (rename [ (\"placeholder\", txt) ])#signature_item\n              [%sigi:\n                type placeholder\n                [@@deriving show, yojson, hash, compare, sexp, hash, eq]])\n          features\n        |> B.pmty_signature]\n      end\n\n      module type T = FEATURES\n\n      module Enumeration = struct\n        [%%i\n        let decl =\n          B.type_declaration ~name:{ loc; txt = \"t\" } ~params:[] ~cstrs:[]\n            ~kind:\n              (Ptype_variant\n                 (List.map\n                    ~f:(fun txt ->\n                      B.constructor_declaration\n                        ~name:{ loc; txt = uppercase_first_char txt }\n                        ~args:(Pcstr_tuple []) ~res:None)\n                    features))\n            ~private_:Public ~manifest:None\n        in\n        B.pstr_type Recursive\n          [\n            {\n              decl with\n              ptype_attributes =\n              
  [\n                  B.attribute ~name:{ loc; txt = \"deriving\" }\n                    ~payload:\n                      (PStr\n                         [%str\n                           show { with_path = false },\n                           yojson,\n                           hash,\n                           compare,\n                           sexp,\n                           hash,\n                           eq]);\n                ];\n            };\n          ]]\n      end\n\n      (*\n      module MapFeatureTypes (T : sig\n        type t [@@deriving show, yojson, hash, eq]\n      end) =\n      struct\n        include T\n\n        include\n          [%m\n          List.concat_map\n            ~f:(fun txt ->\n              (rename\n                 [\n                   (\"placeholder\", txt);\n                   (\"Placeholder\", uppercase_first_char txt);\n                 ])\n                #structure\n                [%str\n                  module Placeholder = struct\n                    type placeholder = Placeholder of T.t [@@deriving show, yojson, hash, eq]\n                  end\n                      \n                  include Placeholder])\n            features\n          |> B.pmod_structure]\n      end\n\n      module On = MapFeatureTypes (struct\n        type t = on [@@deriving show, yojson, hash, eq]\n      end)\n\n      module Off = MapFeatureTypes (struct\n        type t = off [@@deriving show, yojson, hash, eq]\n            end)\n            *)\n\n      module On =\n        [%m\n        List.concat_map\n          ~f:(fun txt ->\n            (rename\n               [\n                 (\"placeholder\", txt); (\"Placeholder\", uppercase_first_char txt);\n               ])\n              #structure\n              [%str\n                module Placeholder : sig\n                  type placeholder\n                  [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\n                  val placeholder : placeholder\n                end 
= struct\n                  type placeholder = Placeholder\n                  [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n\n                  let placeholder = Placeholder\n                end\n\n                include Placeholder])\n          features\n        |> B.pmod_structure]\n\n      module ToFull =\n        [%m\n        List.concat_map\n          ~f:(fun txt ->\n            (rename\n               [\n                 (\"placeholder\", txt); (\"Placeholder\", uppercase_first_char txt);\n               ])\n              #structure\n              [%str let placeholder _ = On.placeholder])\n          features\n        |> B.pmod_structure]\n\n      module Off =\n        [%m\n        List.concat_map\n          ~f:(fun txt ->\n            (rename\n               [\n                 (\"placeholder\", txt); (\"Placeholder\", uppercase_first_char txt);\n               ])\n              #structure\n              [%str\n                module Placeholder = struct\n                  type placeholder = |\n                  [@@deriving show, yojson, hash, compare, sexp, hash, eq]\n                end\n\n                include Placeholder])\n          features\n        |> B.pmod_structure]\n\n      module SUBTYPE = struct\n        module type T = sig\n          module A : FEATURES\n          module B : FEATURES\n\n          include\n            [%m\n          List.map\n            ~f:(fun txt ->\n              (rename [ (\"placeholder\", txt) ])#signature_item\n                [%sigi:\n                  val placeholder : Span.t -> A.placeholder -> B.placeholder])\n            features\n          |> B.pmty_signature]\n        end\n\n        module type MAPPER = sig\n          val map :\n            'a 'b. 
(Span.t -> 'a -> 'b) -> Enumeration.t -> Span.t -> 'a -> 'b\n        end\n\n        module Map (S : T) (Mapper : MAPPER) =\n          [%m\n          let f txt =\n            [%stri\n              let [%p B.ppat_var { loc; txt }] =\n                let kind =\n                  [%e\n                    B.pexp_construct\n                      {\n                        loc;\n                        txt =\n                          Ldot (Lident \"Enumeration\", uppercase_first_char txt);\n                      }\n                      None]\n                in\n                let f =\n                  [%e B.pexp_ident { loc; txt = Ldot (Lident \"S\", txt) }]\n                in\n                Mapper.map f kind]\n          in\n          B.pmod_structure @@ ([%stri include S] :: List.map ~f features)]\n\n        module On =\n          [%m\n          List.concat_map\n            ~f:(fun txt ->\n              (rename\n                 [\n                   (\"placeholder\", txt);\n                   (\"Placeholder\", uppercase_first_char txt);\n                 ])\n                #structure\n                [%str\n                  module Placeholder = struct\n                    let placeholder _span _witness = On.placeholder\n                  end\n\n                  include Placeholder])\n            features\n          |> B.pmod_structure]\n\n        module Reject (R : sig\n          val reject : 'a. 
unit -> 'a\n        end) =\n          [%m\n          List.concat_map\n            ~f:(fun txt ->\n              (rename\n                 [\n                   (\"placeholder\", txt);\n                   (\"Placeholder\", uppercase_first_char txt);\n                 ])\n                #structure\n                [%str\n                  module Placeholder = struct\n                    let placeholder _span _witness = R.reject ()\n                  end\n\n                  include Placeholder])\n            features\n          |> B.pmod_structure]\n\n        module Id =\n          [%m\n          List.map\n            ~f:(fun txt ->\n              [%stri let [%p B.ppat_var { loc; txt }] = fun _span -> Base.Fn.id])\n            features\n          |> B.pmod_structure]\n      end\n    end]\n(* let attrs = *)\n(*   attributes_of_structure_item str *)\n(*   |> List.filter_map ~f:(fun attr -> *)\n(*          match string_of_payload ~loc attr.attr_payload with *)\n(*          | Result.Ok payload -> Some (attr.attr_name.txt, payload) *)\n(*          | _ -> None) *)\n(* in *)\n(* let opens = *)\n(*   List.filter_map *)\n(*     ~f:(fun (name, path) -> *)\n(*       if String.equal name \"add\" then Some path else None) *)\n(*     attrs *)\n(* in *)\n(* (map_inline_nodes opens loc)#structure_item str *)\n\nlet ext =\n  Extension.V3.declare name Extension.Context.structure_item\n    (* Ast_pattern.(pstr ((pstr_eval (pexp_tuple (many __) drop) ^:: nil))) *)\n    Ast_pattern.(\n      pstr (pstr_eval (pexp_tuple (many (pexp_ident @@ lident __))) drop ^:: nil))\n    expand\n\nlet rule = Ppxlib.Context_free.Rule.extension ext\nlet () = Ppxlib.Driver.register_transformation ~rules:[ rule ] name\n"
  },
  {
    "path": "engine/utils/ppx_inline/README.md",
    "content": "# `ppx_inline`\n\nInlines chunks of OCaml AST in place.\n\nRewrite `[%%inline_defs L]`, `let rec ... [@@inline_ands L]`, `[%%inline_arms L]`, `[%%inline_body PATH]` inside nodes `[%%inlined_contents NODE]`, where:\n - `L` is a (`+`/`-`-separated) list of `QUALIFIED-PATH`s specifying which chunk of AST we should inline;\n - `QUALIFIED-PATH` is either a plain `PATH` or `bindings_of PATH` (the latter means all let/and bindings in a `let rec ... and ...` bundle);\n - `PATH` is a `.`-separated list of strings, possibly containing the `*` glob.\n\n## Example:\nFile `some_module.ml`:\n```ocaml\nlet f x = x + 1\nlet g x = x + 2\nlet f' x = x + 3\n\nmodule M = struct\n    let w = 0\n    let x = 1\n    let y = 2\n    let z = 3\nend\n\nlet h x = \"\"\ntype foo = | A | B\nlet i (x: foo) =\n    match x with\n    | A -> 0\n    | B -> 1\n\nlet rec bundle_1 x = bundle_2 x + 1\nand bundle_2 y = bundle_3 + 1\nand bundle_3 z = z + 1\n```\n\nThe module:\n```ocaml\nmodule%inlined_contents [@@add \"some_module.ml\"] Test = struct\n    [%%inline_defs f + g + foo]\n    [%%inline_defs \"M.*\" - z - y]\n\n    let h: int -> string = [%%inline_body h]\n    let i: foo -> int =\n        match i with\n      | [%%inline_arms \"i.*\" - B] -> dummy\n      | B -> 123\n\n    let rec bundle_1 x = bundle_2 x + 123\n        [@@inline_ands bindings_of bundle_1]\nend\n```\n\nWill be rewritten into:\n```ocaml\nmodule%inlined_contents [@@add \"some_module.ml\"] Test = struct\n\n    (* [%%inline_defs f + g + foo] *)\n    let f x = x + 1\n    let g x = x + 2\n    type foo = | A | B\n\n    (* [%%inline_defs \"M.*\" - z - y] *)\n    let w = 0\n    let x = 1\n\n    let h: int -> string = (fun x -> \"\")\n    let i: foo -> int = \n        match i with\n      | A -> 0\n      | B -> 123\n\n    let rec bundle_1 x = bundle_2 x + 123\n    and bundle_2 y = bundle_3 + 1\n    and bundle_3 z = z + 1\nend\n```\n\n"
  },
  {
    "path": "engine/utils/ppx_inline/dune",
    "content": "(library\n (name ppx_inline)\n (package hax-engine)\n (kind ppx_rewriter)\n (libraries ppxlib base)\n (preprocess\n  (pps ppxlib.metaquot ppx_deriving.eq ppx_compare ppx_deriving.show)))\n\n(env\n (_\n  (flags\n   (:standard -warn-error -A -warn-error +8))))\n"
  },
  {
    "path": "engine/utils/ppx_inline/ppx_inline.ml",
    "content": "open Base\nopen Ppxlib\n\nlet name = \"inlined_contents\"\n\nlet cons_lid_of_pattern (p : pattern) =\n  match p.ppat_desc with\n  | Ppat_construct ({ txt; _ }, _) -> Some txt\n  | _ -> None\n\nlet name_of_pattern (p : pattern) =\n  match p.ppat_desc with Ppat_var { txt; _ } -> Some txt | _ -> None\n\nlet name_of_binding b = name_of_pattern b.pvb_pat\n\ntype inlinable_item_kind =\n  | MatchCase of (case[@opaque])\n  | Binding of (value_binding[@opaque])\n  | StrItem of (structure_item[@opaque])\n[@@deriving show]\n\ntype inlinable_item_kind_head = MatchCase | Binding | StrItem\n[@@deriving show]\n\nlet head_of : inlinable_item_kind -> inlinable_item_kind_head = function\n  | MatchCase _ -> MatchCase\n  | Binding _ -> Binding\n  | StrItem _ -> StrItem\n\ntype inlinable_item = { path : string list; kind : inlinable_item_kind }\n[@@deriving show]\n\nlet collect_ast_nodes (result : inlinable_item list ref) =\n  let add (l : inlinable_item list) = result := !result @ l in\n  object\n    inherit [string list] Ast_traverse.map_with_context as super\n\n    method! module_binding path x =\n      let path =\n        match x.pmb_name.txt with Some name -> path @ [ name ] | None -> path\n      in\n      super#module_binding path x\n\n    method! value_binding path x =\n      let path =\n        match name_of_pattern x.pvb_pat with\n        | Some name ->\n            let path = path @ [ name ] in\n            add @@ [ { path; kind = Binding x } ];\n            path\n        | None -> path\n      in\n      super#value_binding path x\n\n    method! 
structure_item path s =\n      (match s.pstr_desc with\n      | Pstr_value (_, bindings) ->\n          List.iter bindings ~f:(fun { pvb_pat; _ } ->\n              match name_of_pattern pvb_pat with\n              | Some n -> add [ { path = path @ [ n ]; kind = StrItem s } ]\n              | _ -> ())\n      | Pstr_type (_, bindings) ->\n          List.iter bindings ~f:(fun { ptype_name = { txt = n; _ }; _ } ->\n              add [ { path = path @ [ n ]; kind = StrItem s } ])\n      | _ -> ());\n      super#structure_item path s\n\n    method! expression path e =\n      let e' = super#expression path e in\n      match e.pexp_desc with\n      | Pexp_match (_, cases) ->\n          add\n          @@ List.filter_map\n               ~f:(fun case ->\n                 match cons_lid_of_pattern case.pc_lhs with\n                 | Some chunk ->\n                     Some\n                       {\n                         path = path @ [ Longident.last_exn chunk ];\n                         kind = MatchCase case;\n                       }\n                 | None -> None)\n               cases;\n          e'\n      | _ -> e'\n  end\n\nlet replace_every_location (location : location) =\n  object\n    inherit Ast_traverse.map\n    method! 
location = Fn.const location\n  end\n\nlet locate_module (name : string) : string =\n  let rec find = function\n    | path when Stdlib.Sys.is_directory path ->\n        Stdlib.Sys.readdir path\n        |> Array.find_map ~f:(fun name ->\n               find @@ Stdlib.Filename.concat path name)\n    | path when String.(Stdlib.Filename.basename path = name) -> Some path\n    | _ -> None\n  in\n  find (Stdlib.Sys.getcwd ())\n  |> Option.value_exn ~message:(\"ppx_inline: could not locate module \" ^ name)\n\nlet inlinable_items_of_module : loc:location -> string -> inlinable_item list =\n  let memo = Hashtbl.create (module String) in\n  fun ~loc path ->\n    Hashtbl.find_or_add memo\n      ~default:(fun () ->\n        let results = ref [] in\n        let _ =\n          locate_module path |> Stdlib.open_in |> Lexing.from_channel\n          |> Parse.implementation |> (replace_every_location loc)#structure\n          |> (collect_ast_nodes results)#structure [ path ]\n        in\n        !results)\n      path\n\nlet inlinable_items_of_modules ~loc : string list -> inlinable_item list =\n  List.concat_map ~f:(inlinable_items_of_module ~loc)\n\ntype not_found_available_item = {\n  path : string list;\n  head : inlinable_item_kind_head;\n  preselected : bool;\n  postselected : bool;\n}\n[@@deriving show]\n\ntype inline_error =\n  | NotFound of {\n      search : string list;\n      available : not_found_available_item list;\n      context : string;\n    }\n  | NotPlusMinusList\n[@@deriving show]\n\nlet display_inline_error = function\n  | NotFound o ->\n      let pre_ = \"A\" in\n      let post_ = \"B\" in\n      let h = String.concat ~sep:\".\" in\n      \"Ppx_inline.NotFound:\\nCannot find any item given glob [\" ^ h o.search\n      ^ \"] (context: \" ^ o.context ^ \").\\nAvailable items: ([\" ^ pre_\n      ^ \"] means preselected, [\" ^ post_ ^ \"] means postselected)\"\n      ^ String.concat ~sep:\"\"\n      @@ List.map\n           ~f:(fun { path = i; head; preselected; 
postselected } ->\n             let kind =\n               (match head with\n               | MatchCase -> \"case\"\n               | Binding -> \"let \"\n               | StrItem -> \"str \")\n               ^ \" \"\n             in\n             \"\\n• \"\n             ^ (if preselected then pre_ else \" \")\n             ^ (if postselected then \" \" else post_)\n             ^ \" \" ^ kind ^ \"\\t\" ^ h i)\n           o.available\n  | NotPlusMinusList -> \"Ppx_inline.NotPlusMinusList\"\n\nexception InlineError of inline_error\n\nlet raise_inline_err x = raise @@ InlineError x\n\ntype flag = Include | Exclude [@@deriving show]\ntype qualifier = AllBindings [@@deriving show]\n\ntype pm_atom = { apath : string list; aqualifier : qualifier option }\n[@@deriving show]\n\nlet rec plus_minus_list_of_expr' (e : expression) : (flag * pm_atom) list =\n  match e with\n  | [%expr [%e? x] + [%e? y]] ->\n      plus_minus_list_of_expr' x @ plus_minus_list_of_expr' y\n  | [%expr [%e? x] - [%e? y]] ->\n      plus_minus_list_of_expr' x\n      @ List.map ~f:(fun (_, v) -> (Exclude, v))\n      @@ plus_minus_list_of_expr' y\n  | _ ->\n      let default () = raise_inline_err NotPlusMinusList in\n      let plus_minus_atom_name (e : expression) : string list option =\n        match e with\n        | { pexp_desc = Pexp_constant (Pconst_string (s, _, _)); _ } ->\n            Some (String.split ~on:'.' s)\n        | { pexp_desc = Pexp_ident { txt; _ }; _ }\n        | { pexp_desc = Pexp_construct ({ txt; _ }, _); _ } ->\n            Some (Longident.flatten_exn txt)\n        | _ -> None\n      in\n      let plus_minus_atom (e : expression) : pm_atom =\n        let h e = Option.value_or_thunk (plus_minus_atom_name e) ~default in\n        match e with\n        | [%expr bindings_of [%e? arg]] ->\n            { apath = h arg; aqualifier = Some AllBindings }\n        (* | [%expr bundle [%e? 
arg]] -> *)\n        (*     { apath = h arg; aqualifier = Some Binding } *)\n        | e -> { apath = h e; aqualifier = None }\n      in\n      [ (Include, plus_minus_atom e) ]\n\nlet plus_minus_list_of_expr (e : expression) : (flag * pm_atom) list option =\n  try Some (plus_minus_list_of_expr' e)\n  with InlineError NotPlusMinusList -> failwith \"InlineError NotPlusMinusList\"\n\nlet elast l =\n  match (List.last l, List.drop_last l) with\n  | Some last, Some init -> Some (init, last)\n  | _ -> None\n\nlet diff_list (type a) (x : a list) (y : a list) ~(equal : a -> a -> bool) :\n    a list =\n  List.filter\n    ~f:(fun elem_x ->\n      List.for_all ~f:(fun elem_y -> not @@ equal elem_x elem_y) y)\n    x\n\nlet attributes_of_structure_item (str : structure_item) =\n  match str.pstr_desc with\n  | Pstr_module { pmb_attributes = attrs; _ } | Pstr_eval (_, attrs) -> attrs\n  | _ -> []\n\nlet string_of_payload ~loc e =\n  Ast_pattern.(\n    parse_res\n    @@ pstr\n         (pstr_eval (pexp_constant @@ pconst_string __ drop drop) drop ^:: nil))\n    loc e Fn.id\n\nlet string_attributes_of_structure_item ~loc (str : structure_item) :\n    (string * string) list =\n  attributes_of_structure_item str\n  |> List.filter_map ~f:(fun attr ->\n         match string_of_payload ~loc attr.attr_payload with\n         | Result.Ok payload -> Some (attr.attr_name.txt, payload)\n         | _ -> None)\n\n(* TODO: ppx_inline reports badly locations (I actually don't use `_loc`...) 
*)\nlet map_inline_nodes opens _loc =\n  let rec match_glob (glob : string list) (against : string list) =\n    match (elast glob, elast against) with\n    | Some (glob, \"*\"), Some (against, _) -> match_glob glob against\n    | _ -> List.is_suffix ~equal:String.equal ~suffix:glob against\n  in\n  let inlinable_items = inlinable_items_of_modules opens in\n  let matches ~loc (glob : string list) : inlinable_item list =\n    List.filter ~f:(fun ({ path; _ } : inlinable_item) -> match_glob glob path)\n    @@ inlinable_items ~loc\n  in\n  let find_one (type a) ~context ~loc (glob : string list)\n      (f : inlinable_item -> (string list * a) list) : (string list * a) list =\n    let selection = matches glob ~loc in\n    match List.concat_map ~f selection with\n    | [] ->\n        let selected_paths = List.map ~f:(fun { path; _ } -> path) selection in\n        raise_inline_err\n        @@ NotFound\n             {\n               search = glob;\n               context;\n               available =\n                 List.map ~f:(fun ({ path; kind } as i) ->\n                     {\n                       path;\n                       head = head_of kind;\n                       preselected =\n                         List.mem ~equal:[%eq: string list] selected_paths path;\n                       postselected = f i |> List.is_empty |> not;\n                     })\n                 @@ inlinable_items ~loc;\n             }\n    | l -> l\n  in\n  let find (type a) ~loc ~context (flags : (flag * pm_atom) list)\n      (f : inlinable_item_kind -> a option) =\n    List.fold_left ~init:[]\n      ~f:(fun acc (flag, path) ->\n        let matches =\n          find_one ~loc ~context path.apath (fun { path = path'; kind = i } ->\n              match (path.aqualifier, i) with\n              | ( Some AllBindings,\n                  StrItem { pstr_desc = Pstr_value (_, bindings); _ } ) ->\n                  let prefix = List.drop_last_exn path' in\n                  List.filter_map\n    
                ~f:(fun b ->\n                      Option.both\n                        (name_of_binding b\n                        |> Option.map ~f:(fun n -> prefix @ [ n ]))\n                        (f (Binding b)))\n                    bindings\n              | _ ->\n                  Option.to_list @@ Option.map ~f:(fun i -> (path', i)) @@ f i)\n        in\n        let acc =\n          match flag with\n          | Include -> acc @ matches\n          | Exclude ->\n              diff_list\n                ~equal:(fun (x, _) (y, _) -> [%eq: string list] x y)\n                acc matches\n        in\n        acc)\n      flags\n    |> List.map ~f:snd\n  in\n\n  object\n    inherit Ast_traverse.map as super\n\n    method! structure e =\n      let e = super#structure e in\n      let each_item e =\n        let loc = e.pstr_loc in\n        match e.pstr_desc with\n        | Pstr_extension\n            ( ( { txt = \"inline_defs\"; _ },\n                PStr [ { pstr_desc = Pstr_eval (payload, _); _ } ] ),\n              _ ) -> (\n            match plus_minus_list_of_expr payload with\n            | Some opts -> (\n                try\n                  find ~context:\"inline_defs\" ~loc opts (function\n                    | StrItem x -> Some x\n                    | _ -> None)\n                with InlineError err ->\n                  let err =\n                    display_inline_error err |> Ast_builder.Default.estring ~loc\n                  in\n                  [%str [%ocaml.error [%e err]]])\n            | _ -> [ e ])\n        | Pstr_value (rf, bindings) ->\n            let binding_names = List.filter_map ~f:name_of_binding bindings in\n            let bindings =\n              let f b =\n                let mk_err s =\n                  { b with pvb_expr = [%expr [%ocaml.error [%e s]]] }\n                in\n                let attr =\n                  b.pvb_attributes\n                  |> List.find ~f:(fun attr ->\n                         String.equal 
attr.attr_name.txt \"inline_ands\")\n                in\n                match attr with\n                | Some { attr_payload; _ } -> (\n                    match attr_payload with\n                    | PStr [ { pstr_desc = Pstr_eval (payload, _); _ } ] -> (\n                        match plus_minus_list_of_expr payload with\n                        | Some opts -> (\n                            try\n                              b\n                              ::\n                              (let bindings =\n                                 find ~context:\"inline_ands\" ~loc opts (function\n                                   | Binding b' -> Some b'\n                                   | _ -> None)\n                               in\n                               List.filter\n                                 ~f:(fun b' ->\n                                   match name_of_binding b' with\n                                   | Some name ->\n                                       List.mem ~equal:String.equal\n                                         binding_names name\n                                       |> not\n                                   | _ -> true)\n                                 bindings)\n                              |> List.dedup_and_sort ~compare:(fun a b ->\n                                     [%compare: string option]\n                                       (name_of_binding a) (name_of_binding b))\n                            with InlineError err ->\n                              let err =\n                                display_inline_error err\n                                |> Ast_builder.Default.estring ~loc\n                              in\n                              [ mk_err err ])\n                        | _ -> [ b ])\n                    | _ -> [ mk_err [%expr \"expected PStr\"] ])\n                | None -> [ b ]\n              in\n\n              List.concat_map ~f bindings\n            in\n            [ { e with pstr_desc 
= Pstr_value (rf, bindings) } ]\n        | _ -> [ e ]\n      in\n      List.concat_map ~f:each_item e\n\n    method! expression e =\n      let e = super#expression e in\n      let loc = e.pexp_loc in\n      match e with\n      | { pexp_desc = Pexp_match (scrut, cases); _ } ->\n          let cases =\n            List.concat_map\n              ~f:(fun case ->\n                match case.pc_lhs with\n                | [%pat? [%inline_arms [%e? e]]] -> (\n                    let pc_rhs_map =\n                      match case.pc_rhs with\n                      | [%expr map [%e? f]] -> fun e -> [%expr [%e f] [%e e]]\n                      | _ -> Fn.id\n                    in\n                    match plus_minus_list_of_expr e with\n                    | Some opts -> (\n                        try\n                          find ~context:\"case\" ~loc opts (function\n                            | MatchCase case -> Some case\n                            | _ -> None)\n                          |> List.map ~f:(fun case ->\n                                 { case with pc_rhs = pc_rhs_map case.pc_rhs })\n                        with InlineError err ->\n                          let err =\n                            display_inline_error err\n                            |> Ast_builder.Default.estring ~loc\n                          in\n                          [\n                            {\n                              case with\n                              pc_lhs = [%pat? [%ocaml.error [%e err]]];\n                            };\n                          ])\n                    | None -> [ case ])\n                | _ -> [ case ])\n              cases\n          in\n          { e with pexp_desc = Pexp_match (scrut, cases) }\n      | [%expr [%inline_body [%e? 
e]]] -> (\n          match plus_minus_list_of_expr e with\n          | Some opts -> (\n              try\n                match\n                  find ~context:\"inline_body\" ~loc opts (function\n                    | Binding { pvb_expr; _ } -> Some pvb_expr\n                    | _ -> None)\n                with\n                | [ x ] -> x\n                | _ -> failwith \"inline_body: matched multiple\"\n              with InlineError err ->\n                let err =\n                  display_inline_error err |> Ast_builder.Default.estring ~loc\n                in\n                [%expr [%ocaml.error [%e err]]])\n          | None -> e)\n      | _ -> e\n  end\n\nlet expand ~(ctxt : Expansion_context.Extension.t) (str : structure_item) :\n    structure_item =\n  let loc = Expansion_context.Extension.extension_point_loc ctxt in\n  let opens =\n    List.filter_map\n      ~f:(fun (name, path) ->\n        if String.equal name \"add\" then Some path else None)\n      (string_attributes_of_structure_item ~loc str)\n  in\n  (map_inline_nodes opens loc)#structure_item str\n\nlet ext =\n  Extension.V3.declare name Extension.Context.structure_item\n    Ast_pattern.(pstr (__ ^:: nil))\n    expand\n\nlet rule = Ppxlib.Context_free.Rule.extension ext\nlet () = Ppxlib.Driver.register_transformation ~rules:[ rule ] name\n"
  },
  {
    "path": "engine/utils/ppx_phases_index/README.md",
    "content": "# `ppx_phases_index`\n\nThis PPX looks for a `phases` folder in the sources, and generate a\nmodule binding for each, inlining the documentation, so that we can\nhave a nice index of all the phases with their documentation.\n"
  },
  {
    "path": "engine/utils/ppx_phases_index/dune",
    "content": "(library\n (name ppx_phases_index)\n (package hax-engine)\n (kind ppx_rewriter)\n (libraries ppxlib base)\n (preprocess\n  (pps ppxlib.metaquot ppx_deriving.eq ppx_compare ppx_deriving.show)))\n\n(env\n (_\n  (flags\n   (:standard -warn-error -A -warn-error +8))))\n"
  },
  {
    "path": "engine/utils/ppx_phases_index/ppx_phases_index.ml",
    "content": "open Base\nopen Ppxlib\n\nlet ( let* ) x f = Option.bind ~f x\n\nlet map_first_letter (f : string -> string) (s : string) =\n  let first, rest = String.(prefix s 1, drop_prefix s 1) in\n  f first ^ rest\n\nlet uppercase_first_char = map_first_letter String.uppercase\n\nlet locate_phases_directory () : string =\n  let rec find path =\n    match path with\n    | path when String.(Stdlib.Filename.basename path = \"phases\") -> Some path\n    | path when Stdlib.Sys.is_directory path ->\n        Stdlib.Sys.readdir path\n        |> Array.filter ~f:(fun name -> not (String.is_prefix ~prefix:\".\" name))\n        |> Array.find_map ~f:(fun name ->\n               find @@ Stdlib.Filename.concat path name)\n    | _ -> None\n  in\n  find (Stdlib.Sys.getcwd ())\n  |> Option.value_exn\n       ~message:\"ppx_phases_index: could not locate folder [phases]\"\n\nlet list_phases loc : (string * string * string * _ option) list =\n  let dir = locate_phases_directory () in\n  Stdlib.Sys.readdir dir |> Array.to_list\n  |> List.filter_map ~f:(fun filename ->\n         let* module_name = String.chop_suffix ~suffix:\".mli\" filename in\n         let* _ =\n           match String.chop_suffix ~suffix:\".pp\" module_name with\n           | Some _ -> None\n           | None -> Some ()\n         in\n         let* phase_name = String.chop_prefix ~prefix:\"phase_\" module_name in\n         let module_name = uppercase_first_char module_name in\n         let phase_name = uppercase_first_char phase_name in\n         Some (filename, module_name, phase_name))\n  |> List.map ~f:(fun (filename, module_name, phase_name) ->\n         let path = Stdlib.Filename.concat dir filename in\n         let str =\n           Stdlib.open_in path |> Lexing.from_channel |> Parse.interface\n         in\n         let str =\n           List.filter\n             ~f:(function { psig_desc = Psig_open _; _ } -> false | _ -> true)\n             str\n         in\n         match str with\n         | [ _ ] -> 
(filename, module_name, phase_name, None)\n         | [ { psig_desc = Psig_attribute attr; _ }; _ ] ->\n             (filename, module_name, phase_name, Some attr)\n         | [] -> failwith (\"Empty phase\" ^ filename)\n         | _ ->\n             failwith\n               (\"Invalid phase\" ^ filename ^ \": got \"\n               ^ Int.to_string (List.length str)))\n\nlet rename (l : (string * string) list) =\n  let h (s : string) =\n    List.find_map\n      ~f:(fun (s', replace) -> if String.equal s s' then Some replace else None)\n      l\n    |> Option.value ~default:s\n  in\n  object\n    inherit Ast_traverse.map\n    method! string = h\n    method! label = h\n\n    method! longident =\n      let rec r = function\n        | Lapply (x, y) -> Lapply (r x, r y)\n        | Ldot (x, y) -> Ldot (r x, h y)\n        | Lident x -> Lident (h x)\n      in\n      r\n  end\n\nlet expand_phases_index ~(ctxt : Expansion_context.Extension.t)\n    (str : structure_item) : structure_item =\n  let loc = Expansion_context.Extension.extension_point_loc ctxt in\n  let (module S) = Ppxlib.Ast_builder.make loc in\n  let modules =\n    list_phases loc\n    |> List.map ~f:(fun (_, module_name, phase_name, attrs) ->\n           let h x = { txt = Lident x; loc } in\n           let original =\n             S.pmod_ident { txt = Ldot (Lident module_name, \"Make\"); loc }\n           in\n           let b =\n             S.module_binding\n               ~name:{ txt = Some phase_name; loc }\n               ~expr:original\n           in\n           let attrs = Option.to_list attrs in\n           let attrs =\n             List.map\n               ~f:(fun attr ->\n                 let n = attr.attr_name in\n                 if String.equal n.txt \"ocaml.text\" then\n                   { attr with attr_name = { n with txt = \"ocaml.doc\" } }\n                 else attr)\n               attrs\n           in\n           let b = { b with pmb_attributes = attrs } in\n           S.pstr_module b)\n  
in\n  S.pstr_include (S.include_infos (S.pmod_structure modules))\n\nlet chop_ml_or_mli str =\n  match String.chop_suffix ~suffix:\".ml\" str with\n  | Some result -> Some result\n  | None -> String.chop_suffix ~suffix:\".mli\" str\n\nlet filename_to_phase_constructor file_name =\n  let phase_name =\n    file_name |> String.rsplit2 ~on:'/' |> Option.map ~f:snd\n    |> Option.value ~default:file_name\n    |> String.chop_prefix ~prefix:\"phase_\"\n    |> Option.value_exn\n         ~message:\n           (\"`[%auto_phase_name]` can only be used in a phase, whose filename \\\n             starts with `phase_`. Current file is: [\" ^ file_name ^ \"]\")\n    |> chop_ml_or_mli\n    |> Option.value_exn\n         ~message:\n           (\"File name [\" ^ file_name\n          ^ \"] was expected to end with a `.ml` or `.mli`\")\n  in\n  phase_name |> String.split ~on:'_'\n  |> List.map ~f:uppercase_first_char\n  |> String.concat\n\nlet expand_add_phase_names ~(ctxt : Expansion_context.Extension.t)\n    (typ : type_declaration) : structure_item =\n  let loc = Expansion_context.Extension.extension_point_loc ctxt in\n  let (module S) = Ppxlib.Ast_builder.make loc in\n  let ptype_kind =\n    match typ.ptype_kind with\n    | Ptype_variant ctors ->\n        let phases = list_phases loc in\n        let extra =\n          List.map\n            ~f:(fun (filename, _, _, _) ->\n              let name = filename_to_phase_constructor filename in\n              let name = { txt = name; loc = S.loc } in\n              let args = Pcstr_tuple [] in\n              S.constructor_declaration ~name ~args ~res:None)\n            phases\n        in\n        Ptype_variant (ctors @ extra)\n    | _ -> failwith \"expected variants\"\n  in\n  let typ = { typ with ptype_kind } in\n  S.pstr_type Recursive [ typ ]\n\nlet expand_auto_phase_name ~(ctxt : Expansion_context.Extension.t)\n    (str : structure_item) : expression =\n  let file_name = Expansion_context.Extension.input_name ctxt in\n  let constructor 
= filename_to_phase_constructor file_name in\n  let loc = Expansion_context.Extension.extension_point_loc ctxt in\n  let (module S) = Ppxlib.Ast_builder.make loc in\n  let txt = Astlib.Longident.parse (\"Diagnostics.Phase.\" ^ constructor) in\n  S.pexp_construct { txt; loc = S.loc } None\n\nlet () =\n  let rule_phases_index =\n    let name = \"phases_index\" in\n    Ppxlib.Context_free.Rule.extension\n      (Extension.V3.declare name Extension.Context.structure_item\n         Ast_pattern.(pstr (__ ^:: nil))\n         expand_phases_index)\n  in\n  let rule_auto_phase_name =\n    let name = \"auto_phase_name\" in\n    Ppxlib.Context_free.Rule.extension\n      (Extension.V3.declare name Extension.Context.expression\n         Ast_pattern.(pstr (__ ^:: nil))\n         expand_auto_phase_name)\n  in\n  let rule_expand_add_phase_names =\n    let name = \"add_phase_names\" in\n    Ppxlib.Context_free.Rule.extension\n      (Extension.V3.declare name Extension.Context.structure_item\n         Ast_pattern.(pstr (pstr_type drop (__ ^:: nil) ^:: nil))\n         expand_add_phase_names)\n  in\n  Ppxlib.Driver.register_transformation\n    ~rules:\n      [ rule_phases_index; rule_auto_phase_name; rule_expand_add_phase_names ]\n    \"ppx_phases_index\"\n"
  },
  {
    "path": "engine/utils/sourcemaps/base64.ml",
    "content": "open Prelude\n\nlet alphabet =\n  \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/\"\n\nlet encode (n : int) : char =\n  assert (n >= 0 && n < 64);\n  String.get alphabet n\n\nlet decode (c : char) : int = String.index alphabet c |> Option.value_exn\n"
  },
  {
    "path": "engine/utils/sourcemaps/dune",
    "content": "(library\n (name sourcemaps)\n (package hax-engine)\n (inline_tests)\n (preprocess\n  (pps ppx_inline_test ppx_yojson_conv ppx_deriving.show ppx_deriving.eq))\n (libraries base))\n\n(include_subdirs unqualified)\n"
  },
  {
    "path": "engine/utils/sourcemaps/location.ml",
    "content": "open Prelude\n\ntype t = { line : int; col : int } [@@deriving eq, yojson]\n\nlet show { line; col } =\n  \"(\" ^ Int.to_string line ^ \":\" ^ Int.to_string col ^ \")\"\n\nlet pp (fmt : Stdlib.Format.formatter) (s : t) : unit =\n  Stdlib.Format.pp_print_string fmt @@ show s\n\nlet default = { line = 0; col = 0 }\nlet plus_cols x cols = { x with col = x.col + cols }\nlet op ( + ) x y = { line = x.line + y.line; col = x.col + y.col }\nlet ( + ) = op ( + )\nlet ( - ) = op ( - )\n\nlet compare (x : t) (y : t) : int =\n  let open Int in\n  if x.line > y.line then 1\n  else if x.line = y.line then\n    if x.col > y.col then 1 else if x.col = y.col then 0 else -1\n  else -1\n"
  },
  {
    "path": "engine/utils/sourcemaps/mappings/dual.ml",
    "content": "type 'a t = { gen : 'a; src : 'a } [@@deriving show, eq, yojson]\n\nlet transpose ~(default : 'a t) ({ gen; src } : 'a option t) : 'a t option =\n  match (gen, src) with\n  | Some gen, None -> Some { gen; src = default.src }\n  | None, Some src -> Some { gen = default.gen; src }\n  | Some gen, Some src -> Some { gen; src }\n  | _ -> None\n\nlet default (type a) (default : a) : a t = { gen = default; src = default }\n"
  },
  {
    "path": "engine/utils/sourcemaps/mappings/instruction.ml",
    "content": "open Prelude\nopen Types\n\ntype t =\n  | ShiftGenLinesResetGenCols of { lines : int }\n  | ShiftGenCols of int\n  | Full of { shift_gen_col : int; shift_src : Location.t; meta : meta }\n[@@deriving show { with_path = false }, eq]\n\nlet encode_one : t -> string * [ `Sep | `NeedsSep ] = function\n  | ShiftGenLinesResetGenCols { lines } -> (String.make lines ';', `Sep)\n  | ShiftGenCols n -> (Vql.encode_base64 [ n ], `NeedsSep)\n  | Full { shift_gen_col; shift_src; meta = { file_offset; name } } ->\n      ( Vql.encode_base64\n          ([ shift_gen_col; file_offset; shift_src.line; shift_src.col ]\n          @ match name with Some name -> [ name ] | None -> []),\n        `NeedsSep )\n\nlet encode : t list -> string =\n  List.map ~f:encode_one\n  >> List.fold_left\n       ~f:(fun (acc, sep) (str, sep') ->\n         let acc =\n           acc\n           ^\n           match (sep, sep') with `NeedsSep, `NeedsSep -> \",\" ^ str | _ -> str\n         in\n         (acc, sep'))\n       ~init:(\"\", `Sep)\n  >> fst\n\nlet decode_one (s : string) : t =\n  match Vql.decode_base64 s with\n  | [ cols ] -> ShiftGenCols cols\n  | shift_gen_col :: file_offset :: line :: col :: rest ->\n      let name = match rest with [ name ] -> Some name | _ -> None in\n      let meta = { file_offset; name } in\n      let shift_src : Location.t = { line; col } in\n      Full { shift_gen_col; shift_src; meta }\n  | _ -> failwith \"??\"\n\nlet rec decode' (s : string) : t option list =\n  if String.is_empty s then []\n  else\n    let n =\n      String.lfindi ~f:(fun _ -> function ';' | ',' -> true | _ -> false) s\n      |> Option.value ~default:(String.length s)\n    in\n    (if n > 0 then Some (decode_one (String.prefix s n))\n     else\n       match String.get s 0 with\n       | ';' -> Some (ShiftGenLinesResetGenCols { lines = 1 })\n       | ',' -> None\n       | _ -> failwith \"should not be possible\")\n    :: decode' (String.drop_prefix s (Int.max 1 n))\n\nlet decode : string -> 
t list = decode' >> List.filter_map ~f:Fn.id\n\nlet eval_one (s : Location.t Dual.t) (i : t) : Location.t Dual.t * meta option =\n  match i with\n  | ShiftGenLinesResetGenCols { lines } ->\n      ({ s with gen = { line = s.gen.line + lines; col = 0 } }, None)\n  | ShiftGenCols i -> ({ s with gen = Location.plus_cols s.gen i }, None)\n  | Full { shift_gen_col; shift_src; meta } ->\n      let gen = Location.plus_cols s.gen shift_gen_col in\n      let src = Location.(s.src + shift_src) in\n      ({ gen; src }, Some meta)\n\nlet to_points ?(init = Dual.default Location.default) : t list -> point list =\n  List.fold_left ~init:(init, []) ~f:(fun (s, acc) i ->\n      let s, r = eval_one s i in\n      (s, (s, r) :: acc))\n  >> snd >> List.rev\n\nlet from_points : point list -> t list =\n  List.folding_map\n    ~init:(Dual.default Location.default, None)\n    ~f:(fun ({ src; gen }, m0) (x, m) ->\n      let d =\n        Location.(Dual.{ Dual.src = x.src - src; Dual.gen = x.gen - gen })\n      in\n      let shift_gen_col = (if Int.(d.gen.line = 0) then d else x).gen.col in\n      let relative_m =\n        Option.map m ~f:(fun m ->\n            match m0 with\n            | Some m0 ->\n                { file_offset = m.file_offset - m0.file_offset; name = None }\n            | None -> m)\n      in\n      let output =\n        (if Int.(d.gen.line = 0) then []\n         else [ ShiftGenLinesResetGenCols { lines = d.gen.line } ])\n        @\n        match relative_m with\n        | Some meta -> [ Full { shift_gen_col; shift_src = d.src; meta } ]\n        | None when Int.(shift_gen_col = 0) -> []\n        | _ when Int.(shift_gen_col = 0) -> []\n        | _ -> [ ShiftGenCols shift_gen_col ]\n      in\n      let x = match m with Some _ -> x | None -> { x with src } in\n      ((x, Option.first_some m m0), output))\n  >> List.concat\n\nlet%test _ =\n  let f = decode >> to_points >> from_points >> encode in\n  [\n    
\";AAAA,SAAS,KAAAA,GAAG,YAAAC,GAAU,UAAAC,SAAc;;;ACApC,SAAS,KAAAC,GAAG,aAAAC,SAAiB;AAC7B,SAAS,YAAAC,SAAgB;AAWlB,IAAMC,IAAN,cAA2BF,EAAsC;AAAA,EAGtE,YAAYG,GAAqB;AAC/B,UAAMA,CAAK;AAIb,SAAAC,IAAa,MAAM,KAAK,SAAS,EAAEC,GAAQ,KAAK,MAAMA,IAAS,EAAE,CAAC;AAClE,SAAAC,IAAa,MAAM,KAAK,SAAS,EAAED,GAAQ,KAAK,MAAMA,IAAS,EAAE,CAAC;AAJhE,SAAK,MAAMA,IAASF,EAAMI;AAAA,EAC5B;AAAA,EAKA,SAAS;AACP,WAAOR,EAAC;AAAA,MAAI,OAAM;AAAA,OAChBA,EAAC,YAAI,KAAK,MAAM,KAAM,GACtBA,EAAC,WACCA,EAAC;AAAA,MAAO,SAAS,KAAKO;AAAA,OAAY,GAAC,GAClC,KACA,KAAK,MAAMD,GACX,KACDN,EAAC;AAAA,MAAO,SAAS,KAAKK;AAAA,OAAY,GAAC,CACrC,CACF;AAAA,EACF;AACF,GAEWI,IAAkB,CAACL,MAAwB;AACpD,MAAI,CAACM,GAAOC,CAAQ,IAAIT,EAASE,EAAMI,CAAa;AACpD,SAAOR,EAAC;AAAA,IAAI,OAAM;AAAA,KAChBA,EAAC,YAAII,EAAMQ,CAAO,GAClBZ,EAAC,WACCA,EAAC;AAAA,IAAO,SAAS,MAAMW,EAASD,IAAQ,CAAC;AAAA,KAAG,GAAC,GAC5C,KACAA,GACA,KACDV,EAAC;AAAA,IAAO,SAAS,MAAMW,EAASD,IAAQ,CAAC;AAAA,KAAG,GAAC,CAC/C,CACF;AACF;;;AD9CAG;AAAA,EACEC,EAAAC,GAAA,MACED,EAACE,GAAA;AAAA,IAAaC,GAAO;AAAA,IAAYC,GAAe;AAAA,GAAK,GACrDJ,EAACK,GAAA;AAAA,IAAgBF,GAAO;AAAA,IAAYC,GAAe;AAAA,GAAK,CAC1D;AAAA,EACA,SAAS,eAAe,MAAM;AAChC;\";\n  ]\n  |> List.for_all ~f:(fun s -> String.equal s (f s))\n\nlet from_spanned : Spanned.t list -> t list = Spanned.to_points >> from_points\n"
  },
  {
    "path": "engine/utils/sourcemaps/mappings/mappings.ml",
    "content": "open Prelude\ninclude Types\n\ntype range = { start : Location.t; end_ : Location.t option }\n[@@deriving show, eq, yojson]\n\nmodule Chunk = struct\n  type t = { gen : range; src : range; meta : meta }\n  [@@deriving show, eq, yojson]\n\n  let compare (x : t) (y : t) = Location.compare x.gen.start y.gen.start\n\n  let from_spanned ((start, end_, meta) : Spanned.t) : t =\n    let gen = { start = start.gen; end_ = end_.gen } in\n    let src = { start = start.src; end_ = end_.src } in\n    { gen; src; meta }\n\n  let to_spanned ({ gen; src; meta } : t) : Spanned.t =\n    ( { gen = gen.start; src = src.start },\n      { gen = gen.end_; src = src.end_ },\n      meta )\n\n  let%test _ =\n    let x = \";AAAA,SAAS,KAAAA,GAAG,YAAAC,GAAU\" in\n    let s = Instruction.(decode x |> to_points) |> Spanned.from_points in\n    [%eq: Spanned.t list] (List.map ~f:(from_spanned >> to_spanned) s) s\n\n  let decode : string -> t list =\n    Instruction.(decode >> to_points >> Spanned.from_points)\n    >> List.map ~f:from_spanned\n\n  let encode : t list -> string =\n    List.map ~f:to_spanned >> Instruction.from_spanned >> Instruction.encode\n\n  let%test _ =\n    let x =\n      \";AAAA,SAAS,KAAAA,GAAG,YAAAC,GAAU,UAAAC,SAAc;;;ACApC,SAAS,KAAAC,GAAG,aAAAC,SAAiB;AAC7B,SAAS,YAAAC,SAAgB;AAWlB,IAAMC,IAAN,cAA2BF,EAAsC\"\n    in\n    decode x |> encode |> [%eq: string] x\nend\n\ninclude Chunk\n"
  },
  {
    "path": "engine/utils/sourcemaps/mappings/mappings.mli",
    "content": "type meta = { file_offset : int; name : int option }\n[@@deriving show, eq, yojson]\n\ntype range = { start : Location.t; end_ : Location.t option }\n[@@deriving show, eq, yojson]\n\nmodule Chunk : sig\n  type t = { gen : range; src : range; meta : meta }\n  [@@deriving show, eq, yojson]\n\n  val compare : t -> t -> int\nend\n\nopen Chunk\n\nval decode : string -> t list\nval encode : t list -> string\n"
  },
  {
    "path": "engine/utils/sourcemaps/mappings/spanned.ml",
    "content": "open Prelude\nopen Types\n\ntype t = Location.t Dual.t * Location.t option Dual.t * meta\n[@@deriving show, eq]\n\nlet to_points (pts : t list) : point list =\n  List.map pts ~f:Option.some\n  |> Fn.flip List.append [ None ]\n  |> List.folding_map ~init:None ~f:(fun acc x ->\n         let prev_end =\n           match (acc, x) with\n           | Some end_, Some (start, _, _)\n             when [%eq: Location.t] start.Dual.gen end_.Dual.gen |> not ->\n               Some end_\n           | Some end_, None -> Some end_\n           | _ -> None\n         in\n         let out, end_ =\n           match x with\n           | Some (start, end_, meta) ->\n               ([ (start, Some meta) ], Dual.transpose ~default:start end_)\n           | None -> ([], None)\n         in\n         ( end_,\n           (prev_end |> Option.map ~f:(fun e -> (e, None)) |> Option.to_list)\n           @ out ))\n  |> List.concat\n\nlet from_points : point list -> t list =\n  List.rev\n  >> List.folding_map\n       ~init:(None, Map.empty (module Int))\n       ~f:(fun (gen_loc_0, src_locs) ((loc_start : _ Dual.t), meta) ->\n         match meta with\n         | Some meta ->\n             let src_loc_0 = Map.find src_locs meta.file_offset in\n             let src_locs =\n               Map.set src_locs ~key:meta.file_offset ~data:loc_start.src\n             in\n             let loc_end = Dual.{ gen = gen_loc_0; src = src_loc_0 } in\n             ((Some loc_start.gen, src_locs), Some (loc_start, loc_end, meta))\n         | None -> ((Some loc_start.gen, src_locs), None))\n  >> List.filter_map ~f:Fn.id >> List.rev\n"
  },
  {
    "path": "engine/utils/sourcemaps/mappings/types.ml",
    "content": "open Prelude\n\ntype meta = { file_offset : int; name : int option }\n[@@deriving show, eq, yojson]\n\ntype point = Location.t Dual.t * meta option [@@deriving show, eq, yojson]\n"
  },
  {
    "path": "engine/utils/sourcemaps/prelude.ml",
    "content": "include Base\ninclude Ppx_yojson_conv_lib.Yojson_conv.Primitives\n\nlet ( << ) f g x = f (g x)\nlet ( >> ) f g x = g (f x)\n"
  },
  {
    "path": "engine/utils/sourcemaps/source_maps.ml",
    "content": "open Prelude\nmodule Location = Location\ninclude Mappings\n\ntype mapping = {\n  gen : range;\n  src : range;\n  source : string;\n  name : string option;\n}\n\ntype t = {\n  mappings : string;\n  sourceRoot : string;\n  sources : string list;\n  sourcesContent : string option list;\n  names : string list;\n  version : int;\n  file : string;\n}\n[@@deriving yojson]\n\nlet dedup_freq (l : string list) : string list =\n  let hashtbl : (string, int) Hashtbl.t = Hashtbl.create (module String) in\n  List.iter ~f:(Hashtbl.incr hashtbl) l;\n  Hashtbl.to_alist hashtbl\n  |> List.sort ~compare:(fun (_, x) (_, y) -> Int.(y - x))\n  |> List.map ~f:fst\n\nlet mk ?(file = \"\") ?(sourceRoot = \"\") ?(sourcesContent = fun _ -> None)\n    (mappings : mapping list) : t =\n  let sources = List.map ~f:(fun x -> x.source) mappings |> dedup_freq in\n  let names = List.filter_map ~f:(fun x -> x.name) mappings |> dedup_freq in\n  let f { gen; src; source; name } =\n    let file_offset, _ =\n      List.findi_exn ~f:(fun _ -> String.equal source) sources\n    in\n    let name =\n      Option.map\n        ~f:(fun name ->\n          List.findi_exn ~f:(fun _ -> String.equal name) names |> fst)\n        name\n    in\n    let meta = { file_offset; name } in\n    Chunk.{ gen; src; meta }\n  in\n  let mappings = List.map mappings ~f |> List.sort ~compare:Chunk.compare in\n  let mappings = Mappings.encode mappings in\n  let sourcesContent = List.map ~f:sourcesContent sources in\n  { mappings; sourceRoot; sourcesContent; sources; names; version = 3; file }\n\nlet to_json = [%yojson_of: t] >> Yojson.Safe.pretty_to_string\n"
  },
  {
    "path": "engine/utils/sourcemaps/source_maps.mli",
    "content": "type range = { start : Location.t; end_ : Location.t option }\n\nmodule Location : sig\n  type t = { line : int; col : int } [@@deriving eq]\nend\n\ntype mapping = {\n  gen : range;\n  src : range;\n  source : string;\n  name : string option;\n}\n(** A source file to generated file mapping *)\n\ntype t = {\n  mappings : string;\n  sourceRoot : string;\n  sources : string list;\n  sourcesContent : string option list;\n  names : string list;\n  version : int;\n  file : string;\n}\n[@@deriving yojson]\n\nval mk :\n  ?file:string ->\n  ?sourceRoot:string ->\n  ?sourcesContent:(string -> string option) ->\n  mapping list ->\n  t\n\nval to_json : t -> string\n"
  },
  {
    "path": "engine/utils/sourcemaps/vql.ml",
    "content": "open Prelude\n\nlet rec encode_one ?(first = true) (n : int) : int list =\n  let n = if first then (Int.abs n lsl 1) + if n < 0 then 1 else 0 else n in\n  let lhs, rhs = (n lsr 5, n land 0b11111) in\n  let last = Int.equal lhs 0 in\n  let output = (if last then 0b000000 else 0b100000) lor rhs in\n  output :: (if last then [] else encode_one ~first:false lhs)\n\nlet encode : int list -> int list = List.concat_map ~f:encode_one\n\nlet encode_base64 : int list -> string =\n  encode >> List.map ~f:Base64.encode >> String.of_char_list\n\nlet rec decode_one' (first : bool) (l : int list) : int * int list =\n  match l with\n  | [] -> (0, [])\n  | hd :: tl ->\n      assert (hd < 64);\n      let c = Int.shift_right hd 5 |> Int.bit_and 0b1 in\n      let last = Int.equal c 0 in\n      if first then\n        let sign = match Int.bit_and hd 0b1 with 1 -> -1 | _ -> 1 in\n        let hd = Int.shift_right hd 1 |> Int.bit_and 0b1111 in\n        if last then (sign * hd, tl)\n        else\n          let next, tl = decode_one' false tl in\n          let value = hd + Int.shift_left next 4 in\n          (sign * value, tl)\n      else\n        let hd = Int.bit_and hd 0b11111 in\n        if last then (hd, tl)\n        else\n          let next, tl = decode_one' false tl in\n          (hd + Int.shift_left next 5, tl)\n\nlet rec decode (l : int list) : int list =\n  match decode_one' true l with n, [] -> [ n ] | n, tl -> n :: decode tl\n\nlet decode_base64 : string -> int list =\n  String.to_list >> List.map ~f:Base64.decode >> decode\n\nlet%test _ =\n  let tests =\n    [ [ 132; 6; 2323; 64; 32; 63; 31; 65; 33 ]; [ 133123232 ]; [ 0; 0; 0 ] ]\n  in\n  let tests = tests @ List.map ~f:(List.map ~f:(fun x -> -x)) tests in\n  List.for_all ~f:(fun x -> [%eq: int list] x (encode x |> decode)) tests\n"
  },
  {
    "path": "engine/utils/universe-hash.sh",
    "content": "#!/usr/bin/env bash\n\n# this script computes the hash of [hax-export-json-schemas], so that\n# whenver this binary change, dune retriggers a generation of\n# `types.ml` (see `../lib/dune`).\n\nfunction fallback() {\n    echo \"${RANDOM}_$(date +%s)\"\n}\n\nfunction hash() {\n    if command -v sha256sum &> /dev/null; then\n        sha256sum < \"$1\"\n    elif command -v md5sum &> /dev/null; then\n        md5sum < \"$1\"\n    elif command -v openssl &> /dev/null; then\n        openssl sha256 < \"$1\"\n    else\n        fallback\n    fi\n}\n\nfunction error() {\n    DIAG=\"looks like it's **NOT** the case!\"\n    if [[ \":$PATH:\" == *\":$HOME/.cargo/bin\"{,/}\":\"* ]]; then\n        DIAG=\"this seems to be the case\"\n    fi\n    echo \"Error: could not find [$1] in PATH.\" >&2\n    echo \"Please make sure that:\" >&2\n    echo '  - you ran Hax''s `setup.sh` script;' >&2\n    echo \"  - you have `~/.cargo/bin` in your PATH ($DIAG).\" >&2\n    exit 1\n}\n\nHAX_JSON_SCHEMA_EXPORTER_BINARY=${HAX_JSON_SCHEMA_EXPORTER_BINARY:-hax-export-json-schemas}\nHAX_ENGINE_NAMES_EXTRACT_BINARY=${HAX_ENGINE_NAMES_EXTRACT_BINARY:-hax-engine-names-extract}\n\nfor binary in \"$HAX_JSON_SCHEMA_EXPORTER_BINARY\" \"$HAX_ENGINE_NAMES_EXTRACT_BINARY\"; do\n    if BIN=$(command -v \"$binary\"); then\n        hash \"$BIN\"\n    else\n        error \"$binary\"\n    fi\ndone\n\n"
  },
  {
    "path": "examples/.envrc",
    "content": "use flake .#examples\n"
  },
  {
    "path": "examples/.gitignore",
    "content": "/*/target\n/*/proofs/lean/extraction\n/*/proofs/proverif/extraction\n/*/proofs/lean/.lake\n"
  },
  {
    "path": "examples/Cargo.toml",
    "content": "[workspace]\nmembers = [\n    \"chacha20\",\n    \"lean_chacha20\",\n    \"lean_barrett\",\n    \"lean_adc\",\n    \"lean_tutorial\",\n    \"limited-order-book\",\n    \"sha256\",\n    \"barrett\",\n    \"kyber_compress\",\n    \"proverif-psk\",\n    \"coq-example\",\n    \"coverage\",\n]\nresolver = \"2\"\n\n[workspace.dependencies]\nhax-lib = { path = \"../hax-lib\" }\nhax-bounded-integers = { path = \"../hax-bounded-integers\" }\n"
  },
  {
    "path": "examples/Makefile",
    "content": ".PHONY: default\ndefault:\n\tmake -C limited-order-book\n\tmake -C chacha20\n\tmake -C sha256\n\tmake -C barrett\n\tmake -C kyber_compress\n\tmake -C proverif-psk\n\tmake -C lean_chacha20\n\tmake -C lean_barrett\n\nclean:\n\tmake -C limited-order-book clean\n\tmake -C chacha20           clean\n\tmake -C sha256             clean\n\tmake -C barrett            clean\n\tmake -C kyber_compress     clean\n\tmake -C proverif-psk       clean\n\tmake -C lean_chacha20      clean\n\tmake -C lean_barrett       clean\n"
  },
  {
    "path": "examples/README.md",
    "content": "# Examples\n\n| Name               | Status of the F\\* extraction |\n| ------------------ | ---------------------------- |\n| chacha20           | Typechecks                   |\n| limited-order-book | Typechecks                   |\n| sha256             | Lax-typechecks               |\n| barrett            | Typechecks                   |\n| kyber_compress     | Typechecks                   |\n\n## How to generate the F\\* code and typecheck it for the examples\n\n<details>\n  <summary><b>Requirements</b></summary>\n\n  First, make sure to have hax installed in PATH. Then:\n\n  * With Nix, `nix develop .#examples` setups a shell automatically for you.\n\n  * Without Nix:\n    1. install F* `v2025.10.06`<!---FSTAR_VERSION--> manually (see https://github.com/FStarLang/FStar/blob/master/INSTALL.md);\n       1. make sure to have `fstar.exe` in PATH;\n       2. or set the `FSTAR_HOME` environment variable.\n    2. clone [Hacl*](https://github.com/hacl-star/hacl-star) somewhere;\n    3. `export HACL_HOME=THE_DIRECTORY_WHERE_YOU_HAVE_HACL_STAR`.\n</details>\n\nTo generate F\\* code for all the example and then typecheck\neverything, just run `make` in this directory.\n\nRunning `make` will run `make` in each example directory, which in\nturn will generate F\\* modules using hax and then typecheck those\nmodules using F\\*.\n\nNote the generated modules live in the\n`<EXAMPLE>/proofs/fstar/extraction` folders.\n\n## Coq\n\nFor those examples, we generated Coq modules without typechecking them.\nThe `<EXAMPLE>/proofs/coq/extraction` folders contain the generated Coq modules.\n\n## Lean\n\nThree examples are fine-tuned to showcase the Lean backend: `lean_barrett`,\n`lean_chacha20`, and `lean_adc`. For all of them, the lean extraction can be\nobtained by running `cargo hax into lean`.\n\n### Barrett\n\nThe *Barrett reduction* allows to compute remainders without using divisions. 
It\nshowcases arithmetic operations, conversions between integer types (namely `i32`\nand `i64`). The Lean backend provides *panicking* arithmetic operations `+?`,\n`-?`, etc, that panic on overflows.\n\nFor the Lean extracted code, we prove panic freedom with regards to those\narithmetic operations, and then we prove that the result is indeed the modulus\n(as long as the absolute value of the input is lower than the bound\n`BARRETT_R`). The proof is made via bit-blasting (using Lean's `bv_decide`). To\nlimit the computation time, the bound `BARRETT_R` was lowered compared to the\nnormal example in the `barrett` folder.\n\nThe proofs are backported in the rust code (in `lean_barrett/src/lib.rs`): doing\n`cargo hax into lean` extracts a valid lean file that contains the proof.\n\nThe proof can be run by doing (requires `lake`):\n\n```sh\ncd lean_barrett/\nmake\n```\n\n### ADC (Addition with Carry)\n\nThe *ADC* (addition with carry) example verifies a 32-bit limb addition with\ncarry, a fundamental building block in multi-precision (bignum) arithmetic.\nIt uses `#[hax_lib::lean::after(...)]` to embed a Lean 4 correctness theorem\ndirectly after the extracted function definition. The precondition and\npostcondition are expressed as pure Lean propositions in a Hoare triple, and\nthe proof is fully automated via `hax_mvcgen` and Lean's `bv_decide`\nbit-blasting procedure.\n\nThe verified property states that the 64-bit sum `a + b + carry_in` is correctly\nsplit into a 32-bit sum and a 1-bit carry output.\n\nThe proof can be run by doing (requires `lake`):\n\n```sh\ncd lean_adc/\nmake\n```\n\n### Chacha20\n\nThe Chacha20 example extracts to Lean, but requires a manual edit to be\nwellformed. It showcases array, vector and slices accesses, as well as loops\n(with loop invariants). 
For the Lean extracted code, we prove panic freedom,\nwhich involves arithmetic on size of arrays.\n\nThis edit and the proofs of panic freedom can be found in\n`lean_chacha20/proofs/lean/extraction/lean_chacha20_manual_edit.lean`.\n\nThe extraction (in `lean_chacha20.lean`) and rerun of the proofs (in\n`lean_chacha20_manual_edit.lean`) can be done by doing (requires `lake`):\n\n```sh\ncd lean_chacha20/\nmake\n```\n"
  },
  {
    "path": "examples/barrett/Cargo.toml",
    "content": "[package]\nname = \"barrett\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\nhax-lib.workspace = true\n"
  },
  {
    "path": "examples/barrett/Makefile",
    "content": ".PHONY: default fstar rust clean\n\ndefault: fstar rust\n\tmake -C proofs/fstar/extraction\n\nfstar:\n\tcargo hax into fstar\n\nrust:\n\tcargo hax into rust\n\tcd proofs/rust/extraction && cargo build\n\nclean:\n\trm -f proofs/fstar/extraction/.depend\n\trm -f proofs/fstar/extraction/*.fst\n\trm -f proofs/rust/extraction/*.rs\n"
  },
  {
    "path": "examples/barrett/proofs/fstar/extraction/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect:\n#  1. `fstar.exe` to be in PATH (alternatively, you can also set\n#     $FSTAR_HOME to be set to your F* repo/install directory)\n#\n#  2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH.\n#\n#  3. the extracted Cargo crate to have \"hax-lib\" as a dependency:\n#     `hax-lib = { version = \"0.1.0-pre.1\", git = \"https://github.com/hacspec/hax\"}`\n#\n# Optionally, you can set `HACL_HOME`.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHACL_HOME     ?= $(HOME)/.hax/hacl_home\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= .cache\nHINT_DIR      ?= .hints\n\nSHELL ?= /usr/bin/env bash\n\nEXECUTABLES = cargo cargo-hax jq\nK := $(foreach bin,$(EXECUTABLES),\\\n        $(if $(shell command -v $(bin) 2> /dev/null),,$(error 
\"No $(bin) in PATH\")))\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\nHAX_CLI = \"cargo hax into fstar --z3rlimit 100\"\n\n# If $HACL_HOME doesn't exist, clone it\n${HACL_HOME}:\n\tmkdir -p \"${HACL_HOME}\"\n\tgit clone --depth 1 https://github.com/hacl-star/hacl-star.git \"${HACL_HOME}\"\n\n# If no any F* file is detected, we run hax\nifeq \"$(wildcard *.fst *fsti)\" \"\"\n$(shell $(SHELL) -c $(HAX_CLI))\nendif\n\n# By default, we process all the files in the current directory\nROOTS = $(wildcard *.fst *fsti)\n\n# Regenerate F* files via hax when Rust sources change\n$(ROOTS): $(shell find ../../../src -type f -name '*.rs')\n\t$(shell $(SHELL) -c $(HAX_CLI))\n\n# The following is a bash script that discovers F* libraries\ndefine FINDLIBS\n    # Prints a path if and only if it exists. Takes one argument: the\n    # path.\n    function print_if_exists() {\n        if [ -d \"$$1\" ]; then\n            echo \"$$1\"\n        fi\n    }\n    # Asks Cargo all the dependencies for the current crate or workspace,\n    # and extract all \"root\" directories for each. Takes zero argument.\n    function dependencies() {\n        cargo metadata --format-version 1 |\n            jq -r '.packages | .[] | .manifest_path | split(\"/\") | .[:-1] | join(\"/\")'\n    }\n    # Find hax libraries *around* a given path. Takes one argument: the\n    # path.\n    function find_hax_libraries_at_path() {\n        path=\"$$1\"\n        # if there is a `proofs/fstar/extraction` subfolder, then that's a\n        # F* library\n        print_if_exists \"$$path/proofs/fstar/extraction\"\n        # Maybe the `proof-libs` folder of hax is around?\n        MAYBE_PROOF_LIBS=$$(realpath -q \"$$path/../proof-libs/fstar\")\n        if [ $$? 
-eq 0 ]; then\n            print_if_exists \"$$MAYBE_PROOF_LIBS/core\"\n            print_if_exists \"$$MAYBE_PROOF_LIBS/rust_primitives\"\n        fi\n    }\n    { while IFS= read path; do\n          find_hax_libraries_at_path \"$$path\"\n      done < <(dependencies)\n    } | sort -u\nendef\nexport FINDLIBS\n\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c \"$$FINDLIBS\")\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS)\n\n.depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n\trm *.fst\n"
  },
  {
    "path": "examples/barrett/proofs/lean/lakefile.toml",
    "content": "name = \"barrett\"\nversion = \"0.1.0\"\ndefaultTargets = [\"barrett\"]\n\n[[lean_lib]]\nname = \"barrett\"\nroots = [\"extraction.Barrett\"]\n\n[[require]]\nname = \"Hax\"\npath = \"../../../../proof-libs/lean\"\n"
  },
  {
    "path": "examples/barrett/proofs/lean/lean-toolchain",
    "content": "leanprover/lean4:v4.29.0-rc1"
  },
  {
    "path": "examples/barrett/proofs/rust/extraction/Cargo.toml",
    "content": "[package]\nname = \"extraction\"\nversion = \"0.1.0\"\nedition = \"2024\"\n\n[lib]\npath = \"barrett.rs\"\n\n[dependencies]\n\n[workspace]\n"
  },
  {
    "path": "examples/barrett/src/lib.rs",
    "content": "use hax_lib as hax;\n\n/// Values having this type hold a representative 'x' of the Kyber field.\n/// We use 'fe' as a shorthand for this type.\npub(crate) type FieldElement = i32;\n\nconst BARRETT_SHIFT: i64 = 26;\nconst BARRETT_R: i64 = 0x4000000; // 2^26\n\n/// This is calculated as ⌊(BARRETT_R / FIELD_MODULUS) + 1/2⌋\nconst BARRETT_MULTIPLIER: i64 = 20159;\n\npub(crate) const FIELD_MODULUS: i32 = 3329;\n\n/// Signed Barrett Reduction\n///\n/// Given an input `value`, `barrett_reduce` outputs a representative `result`\n/// such that:\n///\n/// - result ≡ value (mod FIELD_MODULUS)\n/// - the absolute value of `result` is bound as follows:\n///\n/// `|result| ≤ FIELD_MODULUS / 2 · (|value|/BARRETT_R + 1)\n///\n/// In particular, if `|value| < BARRETT_R`, then `|result| < FIELD_MODULUS`.\n#[hax_lib::fstar::options(\"--z3rlimit 100\")]\n#[hax::requires((i64::from(value) >= -BARRETT_R && i64::from(value) <= BARRETT_R))]\n#[hax::ensures(|result| result > -FIELD_MODULUS && result < FIELD_MODULUS &&\n                   result % FIELD_MODULUS == value % FIELD_MODULUS)]\npub fn barrett_reduce(value: FieldElement) -> FieldElement {\n    let t = i64::from(value) * BARRETT_MULTIPLIER;\n    // assert!(9223372036854775807 - (BARRETT_R >> 1) > t);\n    let t = t + (BARRETT_R >> 1);\n\n    let quotient = t >> BARRETT_SHIFT;\n    // assert!(quotient <= 2147483647_i64 || quotient >= -2147483648_i64);\n    let quotient = quotient as i32;\n\n    // assert!(((quotient as i64) * (FIELD_MODULUS as i64)) < 9223372036854775807);\n    let sub = quotient * FIELD_MODULUS;\n\n    hax::fstar!(r\"Math.Lemmas.cancel_mul_mod (v $quotient) 3329\");\n\n    value - sub\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn it_works() {\n        fn test(val: FieldElement, expected: FieldElement) {\n            let reduced = barrett_reduce(val);\n            assert_eq!(reduced, expected);\n        }\n\n        test(FIELD_MODULUS + 1, 1);\n        test(FIELD_MODULUS, 
0);\n        test(FIELD_MODULUS - 1, -1);\n\n        test(FIELD_MODULUS + (FIELD_MODULUS - 1), -1);\n        test(FIELD_MODULUS + (FIELD_MODULUS + 1), 1);\n\n        test(1234, 1234);\n        test(9876, -111);\n\n        test(4327, 4327 % FIELD_MODULUS)\n    }\n}\n"
  },
  {
    "path": "examples/chacha20/Cargo.toml",
    "content": "[package]\nname = \"chacha20\"\nversion = \"0.1.0\"\nauthors = [\"Franziskus Kiefer <franziskuskiefer@gmail.com>\"]\nedition = \"2021\"\n\n[dependencies]\nhax-lib.workspace = true\nhax-bounded-integers.workspace = true\n"
  },
  {
    "path": "examples/chacha20/Makefile",
    "content": ".PHONY: default clean\ndefault:\n\tmake -C proofs/fstar/extraction\n\nclean:\n\trm -f proofs/fstar/extraction/.depend\n\trm -f proofs/fstar/extraction/*.fst\n"
  },
  {
    "path": "examples/chacha20/proofs/coq/extraction/Chacha20.Hacspec_helper.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Hacspec Require Import Hacspec_Lib MachineIntegers.\nFrom Coq Require Import ZArith.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nRequire Import Super. (* as State *)\n\n(*Not implemented yet? todo(item)*)\n\nDefinition to_le_u32s_3_ (bytes : seq int8) : nseq int32 TODO: Int.to_string length :=\n  let out := (repeat (@repr WORDSIZE32 0) (@repr WORDSIZE32 3)) : nseq int32 TODO: Int.to_string length in\n  let out := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 3))) out (fun out i =>\n      update_at out i (impl__u32__from_le_bytes (impl__unwrap (f_try_into (bytes.[(Build_Range ((@repr WORDSIZE32 4).*i)(((@repr WORDSIZE32 4).*i).+(@repr WORDSIZE32 4)))])))))) : nseq int32 TODO: Int.to_string length in\n  out.\n\nDefinition to_le_u32s_8_ (bytes : seq int8) : nseq int32 TODO: Int.to_string length :=\n  let out := (repeat (@repr WORDSIZE32 0) (@repr WORDSIZE32 8)) : nseq int32 TODO: Int.to_string length in\n  let out := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 8))) out (fun out i =>\n      update_at out i (impl__u32__from_le_bytes (impl__unwrap (f_try_into (bytes.[(Build_Range ((@repr WORDSIZE32 4).*i)(((@repr WORDSIZE32 4).*i).+(@repr WORDSIZE32 4)))])))))) : nseq int32 TODO: Int.to_string length in\n  out.\n\nDefinition to_le_u32s_16_ (bytes : seq int8) : nseq int32 TODO: Int.to_string length :=\n  let out := (repeat (@repr WORDSIZE32 0) (@repr WORDSIZE32 16)) : nseq int32 TODO: Int.to_string length in\n  let out := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 16))) out (fun out i =>\n      update_at out i (impl__u32__from_le_bytes (impl__unwrap (f_try_into (bytes.[(Build_Range ((@repr WORDSIZE32 4).*i)(((@repr WORDSIZE32 4).*i).+(@repr WORDSIZE32 4)))])))))) : nseq int32 TODO: Int.to_string length in\n  out.\n\nDefinition u32s_to_le_bytes (state : nseq int32 TODO: Int.to_string length) : 
nseq int8 TODO: Int.to_string length :=\n  let out := (repeat (@repr WORDSIZE8 0) (@repr WORDSIZE32 64)) : nseq int8 TODO: Int.to_string length in\n  let out := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(impl__len (unsize state)))) out (fun out i =>\n      let tmp := (impl__u32__to_le_bytes (state.[i])) : nseq int8 TODO: Int.to_string length in\n      f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 4))) out (fun out j =>\n        update_at out ((i.*(@repr WORDSIZE32 4)).+j) (tmp.[j])))) : nseq int8 TODO: Int.to_string length in\n  out.\n\nDefinition xor_state (state : nseq int32 TODO: Int.to_string length) (other : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length :=\n  let state := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 16))) state (fun state i =>\n      update_at state i ((state.[i]).^(other.[i])))) : nseq int32 TODO: Int.to_string length in\n  state.\n\nDefinition add_state (state : nseq int32 TODO: Int.to_string length) (other : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length :=\n  let state := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 16))) state (fun state i =>\n      update_at state i (impl__u32__wrapping_add (state.[i]) (other.[i])))) : nseq int32 TODO: Int.to_string length in\n  state.\n\nDefinition update_array (array : nseq int8 TODO: Int.to_string length) (val : seq int8) : nseq int8 TODO: Int.to_string length :=\n  let _ := (if\n      not ((@repr WORDSIZE32 64)>=.?(impl__len val))\n    then\n      never_to_any (panic assertion failed: 64 >= val.len())\n    else\n      tt) : unit in\n  let array := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(impl__len val))) array (fun array i =>\n      update_at array i (val.[i]))) : nseq int8 TODO: Int.to_string length in\n  array.\n"
  },
  {
    "path": "examples/chacha20/proofs/coq/extraction/Chacha20.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Hacspec Require Import Hacspec_Lib MachineIntegers.\nFrom Coq Require Import ZArith.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\n(*Not implemented yet? todo(item)*)\n\n(*Not implemented yet? todo(item)*)\n\nRequire Import Hacspec_helper.\n\nRequire Import Hax_lib_macros. (* as hax *)\n\nNotation t_State_t := (nseq int32 TODO: Int.to_string length).\n\nNotation t_Block_t := (nseq int8 TODO: Int.to_string length).\n\nNotation t_ChaChaIV_t := (nseq int8 TODO: Int.to_string length).\n\nNotation t_ChaChaKey_t := (nseq int8 TODO: Int.to_string length).\n\nDefinition chacha20_line (a : uint_size) (b : uint_size) (d : uint_size) (s : int32) (m : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length :=\n  let state := (m) : nseq int32 TODO: Int.to_string length in\n  let state := (update_at state a (impl__u32__wrapping_add (state.[a]) (state.[b]))) : nseq int32 TODO: Int.to_string length in\n  let state := (update_at state d ((state.[d]).^(state.[a]))) : nseq int32 TODO: Int.to_string length in\n  let state := (update_at state d (impl__u32__rotate_left (state.[d]) s)) : nseq int32 TODO: Int.to_string length in\n  state.\n\nDefinition chacha20_quarter_round (a : uint_size) (b : uint_size) (c : uint_size) (d : uint_size) (state : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length :=\n  let state := (chacha20_line a b d (@repr WORDSIZE32 16) state) : nseq int32 TODO: Int.to_string length in\n  let state := (chacha20_line c d b (@repr WORDSIZE32 12) state) : nseq int32 TODO: Int.to_string length in\n  let state := (chacha20_line a b d (@repr WORDSIZE32 8) state) : nseq int32 TODO: Int.to_string length in\n  chacha20_line c d b (@repr WORDSIZE32 7) state.\n\nDefinition chacha20_double_round (state : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length :=\n  let state := (chacha20_quarter_round (@repr 
WORDSIZE32 0) (@repr WORDSIZE32 4) (@repr WORDSIZE32 8) (@repr WORDSIZE32 12) state) : nseq int32 TODO: Int.to_string length in\n  let state := (chacha20_quarter_round (@repr WORDSIZE32 1) (@repr WORDSIZE32 5) (@repr WORDSIZE32 9) (@repr WORDSIZE32 13) state) : nseq int32 TODO: Int.to_string length in\n  let state := (chacha20_quarter_round (@repr WORDSIZE32 2) (@repr WORDSIZE32 6) (@repr WORDSIZE32 10) (@repr WORDSIZE32 14) state) : nseq int32 TODO: Int.to_string length in\n  let state := (chacha20_quarter_round (@repr WORDSIZE32 3) (@repr WORDSIZE32 7) (@repr WORDSIZE32 11) (@repr WORDSIZE32 15) state) : nseq int32 TODO: Int.to_string length in\n  let state := (chacha20_quarter_round (@repr WORDSIZE32 0) (@repr WORDSIZE32 5) (@repr WORDSIZE32 10) (@repr WORDSIZE32 15) state) : nseq int32 TODO: Int.to_string length in\n  let state := (chacha20_quarter_round (@repr WORDSIZE32 1) (@repr WORDSIZE32 6) (@repr WORDSIZE32 11) (@repr WORDSIZE32 12) state) : nseq int32 TODO: Int.to_string length in\n  let state := (chacha20_quarter_round (@repr WORDSIZE32 2) (@repr WORDSIZE32 7) (@repr WORDSIZE32 8) (@repr WORDSIZE32 13) state) : nseq int32 TODO: Int.to_string length in\n  chacha20_quarter_round (@repr WORDSIZE32 3) (@repr WORDSIZE32 4) (@repr WORDSIZE32 9) (@repr WORDSIZE32 14) state.\n\nDefinition chacha20_rounds (state : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length :=\n  let st := (state) : nseq int32 TODO: Int.to_string length in\n  let st := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 10))) st (fun st _i =>\n      chacha20_double_round st)) : nseq int32 TODO: Int.to_string length in\n  st.\n\nDefinition chacha20_core (ctr : int32) (st0 : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length :=\n  let state := (st0) : nseq int32 TODO: Int.to_string length in\n  let state := (update_at state (@repr WORDSIZE32 12) (impl__u32__wrapping_add (state.[(@repr WORDSIZE32 12)]) ctr)) : nseq 
int32 TODO: Int.to_string length in\n  let k := (chacha20_rounds state) : nseq int32 TODO: Int.to_string length in\n  add_state state k.\n\nDefinition chacha20_init (key : nseq int8 TODO: Int.to_string length) (iv : nseq int8 TODO: Int.to_string length) (ctr : int32) : nseq int32 TODO: Int.to_string length :=\n  let key_u32 := (to_le_u32s_8_ (unsize key)) : nseq int32 TODO: Int.to_string length in\n  let iv_u32 := (to_le_u32s_3_ (unsize iv)) : nseq int32 TODO: Int.to_string length in\n  array_from_list [(@repr WORDSIZE32 1634760805);\n    (@repr WORDSIZE32 857760878);\n    (@repr WORDSIZE32 2036477234);\n    (@repr WORDSIZE32 1797285236);\n    key_u32.[(@repr WORDSIZE32 0)];\n    key_u32.[(@repr WORDSIZE32 1)];\n    key_u32.[(@repr WORDSIZE32 2)];\n    key_u32.[(@repr WORDSIZE32 3)];\n    key_u32.[(@repr WORDSIZE32 4)];\n    key_u32.[(@repr WORDSIZE32 5)];\n    key_u32.[(@repr WORDSIZE32 6)];\n    key_u32.[(@repr WORDSIZE32 7)];\n    ctr;\n    iv_u32.[(@repr WORDSIZE32 0)];\n    iv_u32.[(@repr WORDSIZE32 1)];\n    iv_u32.[(@repr WORDSIZE32 2)]].\n\nDefinition chacha20_key_block (state : nseq int32 TODO: Int.to_string length) : nseq int8 TODO: Int.to_string length :=\n  let state := (chacha20_core (@repr WORDSIZE32 0) state) : nseq int32 TODO: Int.to_string length in\n  u32s_to_le_bytes state.\n\nDefinition chacha20_key_block0 (key : nseq int8 TODO: Int.to_string length) (iv : nseq int8 TODO: Int.to_string length) : nseq int8 TODO: Int.to_string length :=\n  let state := (chacha20_init key iv (@repr WORDSIZE32 0)) : nseq int32 TODO: Int.to_string length in\n  chacha20_key_block state.\n\nDefinition chacha20_encrypt_block (st0 : nseq int32 TODO: Int.to_string length) (ctr : int32) (plain : nseq int8 TODO: Int.to_string length) : nseq int8 TODO: Int.to_string length :=\n  let st := (chacha20_core ctr st0) : nseq int32 TODO: Int.to_string length in\n  let pl := (to_le_u32s_16_ (unsize plain)) : nseq int32 TODO: Int.to_string length in\n  let encrypted := (xor_state st 
pl) : nseq int32 TODO: Int.to_string length in\n  u32s_to_le_bytes encrypted.\n\nDefinition chacha20_encrypt_last (st0 : nseq int32 TODO: Int.to_string length) (ctr : int32) (plain : seq int8) : t_Vec_t (int8) (t_Global_t) :=\n  let b := (repeat (@repr WORDSIZE8 0) (@repr WORDSIZE32 64)) : nseq int8 TODO: Int.to_string length in\n  let b := (update_array b plain) : nseq int8 TODO: Int.to_string length in\n  let b := (chacha20_encrypt_block st0 ctr b) : nseq int8 TODO: Int.to_string length in\n  impl__to_vec (b.[(Build_Range (@repr WORDSIZE32 0)(impl__len plain))]).\n\nDefinition chacha20_update (st0 : nseq int32 TODO: Int.to_string length) (m : seq int8) : t_Vec_t (int8) (t_Global_t) :=\n  let blocks_out := (impl__new) : t_Vec_t (int8) (t_Global_t) in\n  let num_blocks := ((impl__len m)./(@repr WORDSIZE32 64)) : uint_size in\n  let remainder_len := ((impl__len m).%(@repr WORDSIZE32 64)) : uint_size in\n  let blocks_out := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)num_blocks)) blocks_out (fun blocks_out i =>\n      let i := (i) : uint_size in\n      let b := (chacha20_encrypt_block st0 (cast i) (impl__unwrap (f_try_into (m.[(Build_Range ((@repr WORDSIZE32 64).*i)(((@repr WORDSIZE32 64).*i).+(@repr WORDSIZE32 64)))])))) : nseq int8 TODO: Int.to_string length in\n      let blocks_out := (impl_2__extend_from_slice blocks_out (unsize b)) : t_Vec_t (int8) (t_Global_t) in\n      blocks_out)) : t_Vec_t (int8) (t_Global_t) in\n  let blocks_out := (if\n      remainder_len<>(@repr WORDSIZE32 0)\n    then\n      let b := (chacha20_encrypt_last st0 (cast num_blocks) (m.[(Build_Range ((@repr WORDSIZE32 64).*num_blocks)(impl__len m))])) : t_Vec_t (int8) (t_Global_t) in\n      let blocks_out := (impl_2__extend_from_slice blocks_out (f_deref b)) : t_Vec_t (int8) (t_Global_t) in\n      blocks_out\n    else\n      blocks_out) : t_Vec_t (int8) (t_Global_t) in\n  blocks_out.\n\nDefinition chacha20 (m : seq int8) (key : nseq int8 TODO: Int.to_string length) (iv : nseq int8 
TODO: Int.to_string length) (ctr : int32) : t_Vec_t (int8) (t_Global_t) :=\n  let state := (chacha20_init key iv ctr) : nseq int32 TODO: Int.to_string length in\n  chacha20_update state m.\n"
  },
  {
    "path": "examples/chacha20/proofs/fstar/extraction/Chacha20.Hacspec_helper.fst",
    "content": "module Chacha20.Hacspec_helper\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 40\"\nopen FStar.Mul\nopen Core_models\n\nlet to_le_u32s_3_ (bytes: t_Slice u8) : t_Array u32 (mk_usize 3) =\n  let out:t_Array u32 (mk_usize 3) = Rust_primitives.Hax.repeat (mk_u32 0) (mk_usize 3) in\n  let out:t_Array u32 (mk_usize 3) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (mk_usize 3)\n      (fun out temp_1_ ->\n          let out:t_Array u32 (mk_usize 3) = out in\n          let _:usize = temp_1_ in\n          true)\n      out\n      (fun out i ->\n          let out:t_Array u32 (mk_usize 3) = out in\n          let i:usize = i in\n          Rust_primitives.Hax.Monomorphized_update_at.update_at_usize out\n            i\n            (Core_models.Num.impl_u32__from_le_bytes (Core_models.Result.impl__unwrap #(t_Array u8\n                        (mk_usize 4))\n                    #Core_models.Array.t_TryFromSliceError\n                    (Core_models.Convert.f_try_into #(t_Slice u8)\n                        #(t_Array u8 (mk_usize 4))\n                        #FStar.Tactics.Typeclasses.solve\n                        (bytes.[ {\n                              Core_models.Ops.Range.f_start = mk_usize 4 *! i <: usize;\n                              Core_models.Ops.Range.f_end\n                              =\n                              (mk_usize 4 *! i <: usize) +! 
mk_usize 4 <: usize\n                            }\n                            <:\n                            Core_models.Ops.Range.t_Range usize ]\n                          <:\n                          t_Slice u8)\n                      <:\n                      Core_models.Result.t_Result (t_Array u8 (mk_usize 4))\n                        Core_models.Array.t_TryFromSliceError)\n                  <:\n                  t_Array u8 (mk_usize 4))\n              <:\n              u32)\n          <:\n          t_Array u32 (mk_usize 3))\n  in\n  out\n\nlet to_le_u32s_8_ (bytes: t_Slice u8) : t_Array u32 (mk_usize 8) =\n  let out:t_Array u32 (mk_usize 8) = Rust_primitives.Hax.repeat (mk_u32 0) (mk_usize 8) in\n  let out:t_Array u32 (mk_usize 8) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (mk_usize 8)\n      (fun out temp_1_ ->\n          let out:t_Array u32 (mk_usize 8) = out in\n          let _:usize = temp_1_ in\n          true)\n      out\n      (fun out i ->\n          let out:t_Array u32 (mk_usize 8) = out in\n          let i:usize = i in\n          Rust_primitives.Hax.Monomorphized_update_at.update_at_usize out\n            i\n            (Core_models.Num.impl_u32__from_le_bytes (Core_models.Result.impl__unwrap #(t_Array u8\n                        (mk_usize 4))\n                    #Core_models.Array.t_TryFromSliceError\n                    (Core_models.Convert.f_try_into #(t_Slice u8)\n                        #(t_Array u8 (mk_usize 4))\n                        #FStar.Tactics.Typeclasses.solve\n                        (bytes.[ {\n                              Core_models.Ops.Range.f_start = mk_usize 4 *! i <: usize;\n                              Core_models.Ops.Range.f_end\n                              =\n                              (mk_usize 4 *! i <: usize) +! 
mk_usize 4 <: usize\n                            }\n                            <:\n                            Core_models.Ops.Range.t_Range usize ]\n                          <:\n                          t_Slice u8)\n                      <:\n                      Core_models.Result.t_Result (t_Array u8 (mk_usize 4))\n                        Core_models.Array.t_TryFromSliceError)\n                  <:\n                  t_Array u8 (mk_usize 4))\n              <:\n              u32)\n          <:\n          t_Array u32 (mk_usize 8))\n  in\n  out\n\nlet to_le_u32s_16_ (bytes: t_Slice u8) : t_Array u32 (mk_usize 16) =\n  let out:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.repeat (mk_u32 0) (mk_usize 16) in\n  let out:t_Array u32 (mk_usize 16) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (mk_usize 16)\n      (fun out temp_1_ ->\n          let out:t_Array u32 (mk_usize 16) = out in\n          let _:usize = temp_1_ in\n          true)\n      out\n      (fun out i ->\n          let out:t_Array u32 (mk_usize 16) = out in\n          let i:usize = i in\n          Rust_primitives.Hax.Monomorphized_update_at.update_at_usize out\n            i\n            (Core_models.Num.impl_u32__from_le_bytes (Core_models.Result.impl__unwrap #(t_Array u8\n                        (mk_usize 4))\n                    #Core_models.Array.t_TryFromSliceError\n                    (Core_models.Convert.f_try_into #(t_Slice u8)\n                        #(t_Array u8 (mk_usize 4))\n                        #FStar.Tactics.Typeclasses.solve\n                        (bytes.[ {\n                              Core_models.Ops.Range.f_start = mk_usize 4 *! i <: usize;\n                              Core_models.Ops.Range.f_end\n                              =\n                              (mk_usize 4 *! i <: usize) +! 
mk_usize 4 <: usize\n                            }\n                            <:\n                            Core_models.Ops.Range.t_Range usize ]\n                          <:\n                          t_Slice u8)\n                      <:\n                      Core_models.Result.t_Result (t_Array u8 (mk_usize 4))\n                        Core_models.Array.t_TryFromSliceError)\n                  <:\n                  t_Array u8 (mk_usize 4))\n              <:\n              u32)\n          <:\n          t_Array u32 (mk_usize 16))\n  in\n  out\n\nlet u32s_to_le_bytes (state: t_Array u32 (mk_usize 16)) : t_Array u8 (mk_usize 64) =\n  let out:t_Array u8 (mk_usize 64) = Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 64) in\n  let out:t_Array u8 (mk_usize 64) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (Core_models.Slice.impl__len #u32 (state <: t_Slice u32) <: usize)\n      (fun out temp_1_ ->\n          let out:t_Array u8 (mk_usize 64) = out in\n          let _:usize = temp_1_ in\n          true)\n      out\n      (fun out i ->\n          let out:t_Array u8 (mk_usize 64) = out in\n          let i:usize = i in\n          let tmp:t_Array u8 (mk_usize 4) =\n            Core_models.Num.impl_u32__to_le_bytes (state.[ i ] <: u32)\n          in\n          Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n            (mk_usize 4)\n            (fun out temp_1_ ->\n                let out:t_Array u8 (mk_usize 64) = out in\n                let _:usize = temp_1_ in\n                true)\n            out\n            (fun out j ->\n                let out:t_Array u8 (mk_usize 64) = out in\n                let j:usize = j in\n                Rust_primitives.Hax.Monomorphized_update_at.update_at_usize out\n                  ((i *! mk_usize 4 <: usize) +! 
j <: usize)\n                  (tmp.[ j ] <: u8)\n                <:\n                t_Array u8 (mk_usize 64)))\n  in\n  out\n\nlet xor_state (state other: t_Array u32 (mk_usize 16)) : t_Array u32 (mk_usize 16) =\n  let state:t_Array u32 (mk_usize 16) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (mk_usize 16)\n      (fun state temp_1_ ->\n          let state:t_Array u32 (mk_usize 16) = state in\n          let _:usize = temp_1_ in\n          true)\n      state\n      (fun state i ->\n          let state:t_Array u32 (mk_usize 16) = state in\n          let i:usize = i in\n          Rust_primitives.Hax.Monomorphized_update_at.update_at_usize state\n            i\n            ((state.[ i ] <: u32) ^. (other.[ i ] <: u32) <: u32)\n          <:\n          t_Array u32 (mk_usize 16))\n  in\n  state\n\nlet add_state (state other: t_Array u32 (mk_usize 16)) : t_Array u32 (mk_usize 16) =\n  let state:t_Array u32 (mk_usize 16) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (mk_usize 16)\n      (fun state temp_1_ ->\n          let state:t_Array u32 (mk_usize 16) = state in\n          let _:usize = temp_1_ in\n          true)\n      state\n      (fun state i ->\n          let state:t_Array u32 (mk_usize 16) = state in\n          let i:usize = i in\n          Rust_primitives.Hax.Monomorphized_update_at.update_at_usize state\n            i\n            (Core_models.Num.impl_u32__wrapping_add (state.[ i ] <: u32) (other.[ i ] <: u32) <: u32\n            )\n          <:\n          t_Array u32 (mk_usize 16))\n  in\n  state\n\nlet update_array (array: t_Array u8 (mk_usize 64)) (v_val: t_Slice u8) : t_Array u8 (mk_usize 64) =\n  let _:Prims.unit =\n    Hax_lib.v_assert (mk_usize 64 >=. 
(Core_models.Slice.impl__len #u8 v_val <: usize) <: bool)\n  in\n  let array:t_Array u8 (mk_usize 64) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (Core_models.Slice.impl__len #u8 v_val <: usize)\n      (fun array temp_1_ ->\n          let array:t_Array u8 (mk_usize 64) = array in\n          let _:usize = temp_1_ in\n          true)\n      array\n      (fun array i ->\n          let array:t_Array u8 (mk_usize 64) = array in\n          let i:usize = i in\n          Rust_primitives.Hax.Monomorphized_update_at.update_at_usize array i (v_val.[ i ] <: u8)\n          <:\n          t_Array u8 (mk_usize 64))\n  in\n  array\n"
  },
  {
    "path": "examples/chacha20/proofs/fstar/extraction/Chacha20.fst",
    "content": "module Chacha20\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 40\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Hax_bounded_integers in\n  ()\n\nlet chacha20_line\n      (a b d: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (s: u32)\n      (m: t_Array u32 (mk_usize 16))\n    : t_Array u32 (mk_usize 16) =\n  let state:t_Array u32 (mk_usize 16) = m in\n  let state:t_Array u32 (mk_usize 16) =\n    Rust_primitives.Hax.update_at state\n      a\n      (Core_models.Num.impl_u32__wrapping_add (state.[ a ] <: u32) (state.[ b ] <: u32) <: u32)\n  in\n  let state:t_Array u32 (mk_usize 16) =\n    Rust_primitives.Hax.update_at state d ((state.[ d ] <: u32) ^. (state.[ a ] <: u32) <: u32)\n  in\n  let state:t_Array u32 (mk_usize 16) =\n    Rust_primitives.Hax.update_at state\n      d\n      (Core_models.Num.impl_u32__rotate_left (state.[ d ] <: u32) s <: u32)\n  in\n  state\n\nlet chacha20_quarter_round\n      (a b c d: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (state: t_Array u32 (mk_usize 16))\n    : t_Array u32 (mk_usize 16) =\n  let state:t_Array u32 (mk_usize 16) = chacha20_line a b d (mk_u32 16) state in\n  let state:t_Array u32 (mk_usize 16) = chacha20_line c d b (mk_u32 12) state in\n  let state:t_Array u32 (mk_usize 16) = chacha20_line a b d (mk_u32 8) state in\n  chacha20_line c d b (mk_u32 7) state\n\nlet chacha20_double_round (state: t_Array u32 (mk_usize 16)) : t_Array u32 (mk_usize 16) =\n  let state:t_Array u32 (mk_usize 16) =\n    chacha20_quarter_round (mk_usize 0\n        <:\n        Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 4 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 8 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 12 <: 
Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      state\n  in\n  let state:t_Array u32 (mk_usize 16) =\n    chacha20_quarter_round (mk_usize 1\n        <:\n        Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 5 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 9 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 13 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      state\n  in\n  let state:t_Array u32 (mk_usize 16) =\n    chacha20_quarter_round (mk_usize 2\n        <:\n        Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 6 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 10 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 14 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      state\n  in\n  let state:t_Array u32 (mk_usize 16) =\n    chacha20_quarter_round (mk_usize 3\n        <:\n        Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 7 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 11 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 15 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      state\n  in\n  let state:t_Array u32 (mk_usize 16) =\n    chacha20_quarter_round (mk_usize 0\n        <:\n        Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 5 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 10 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 15 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      state\n  in\n  let state:t_Array u32 (mk_usize 16) =\n    chacha20_quarter_round (mk_usize 1\n        <:\n        Hax_bounded_integers.t_BoundedUsize 
(mk_usize 0) (mk_usize 15))\n      (mk_usize 6 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 11 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 12 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      state\n  in\n  let state:t_Array u32 (mk_usize 16) =\n    chacha20_quarter_round (mk_usize 2\n        <:\n        Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 7 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 8 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      (mk_usize 13 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n      state\n  in\n  chacha20_quarter_round (mk_usize 3\n      <:\n      Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n    (mk_usize 4 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n    (mk_usize 9 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n    (mk_usize 14 <: Hax_bounded_integers.t_BoundedUsize (mk_usize 0) (mk_usize 15))\n    state\n\nlet chacha20_rounds (state: t_Array u32 (mk_usize 16)) : t_Array u32 (mk_usize 16) =\n  let st:t_Array u32 (mk_usize 16) = state in\n  let st:t_Array u32 (mk_usize 16) =\n    Rust_primitives.Hax.Folds.fold_range (mk_i32 0)\n      (mk_i32 10)\n      (fun st temp_1_ ->\n          let st:t_Array u32 (mk_usize 16) = st in\n          let _:i32 = temp_1_ in\n          true)\n      st\n      (fun st e_i ->\n          let st:t_Array u32 (mk_usize 16) = st in\n          let e_i:i32 = e_i in\n          chacha20_double_round st <: t_Array u32 (mk_usize 16))\n  in\n  st\n\nlet chacha20_core (ctr: u32) (st0: t_Array u32 (mk_usize 16)) : t_Array u32 (mk_usize 16) =\n  let state:t_Array u32 (mk_usize 16) = st0 in\n  let state:t_Array u32 (mk_usize 16) =\n    Rust_primitives.Hax.Monomorphized_update_at.update_at_usize state\n      (mk_usize 12)\n      
(Core_models.Num.impl_u32__wrapping_add (state.[ mk_usize 12 ] <: u32) ctr <: u32)\n  in\n  let k:t_Array u32 (mk_usize 16) = chacha20_rounds state in\n  Chacha20.Hacspec_helper.add_state state k\n\nlet chacha20_init (key: t_Array u8 (mk_usize 32)) (iv: t_Array u8 (mk_usize 12)) (ctr: u32)\n    : t_Array u32 (mk_usize 16) =\n  let (key_u32: t_Array u32 (mk_usize 8)):t_Array u32 (mk_usize 8) =\n    Chacha20.Hacspec_helper.to_le_u32s_8_ (key <: t_Slice u8)\n  in\n  let (iv_u32: t_Array u32 (mk_usize 3)):t_Array u32 (mk_usize 3) =\n    Chacha20.Hacspec_helper.to_le_u32s_3_ (iv <: t_Slice u8)\n  in\n  let list =\n    [\n      mk_u32 1634760805; mk_u32 857760878; mk_u32 2036477234; mk_u32 1797285236;\n      key_u32.[ mk_usize 0 ]; key_u32.[ mk_usize 1 ]; key_u32.[ mk_usize 2 ]; key_u32.[ mk_usize 3 ];\n      key_u32.[ mk_usize 4 ]; key_u32.[ mk_usize 5 ]; key_u32.[ mk_usize 6 ]; key_u32.[ mk_usize 7 ];\n      ctr; iv_u32.[ mk_usize 0 ]; iv_u32.[ mk_usize 1 ]; iv_u32.[ mk_usize 2 ]\n    ]\n  in\n  FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 16);\n  Rust_primitives.Hax.array_of_list 16 list\n\nlet chacha20_key_block (state: t_Array u32 (mk_usize 16)) : t_Array u8 (mk_usize 64) =\n  let state:t_Array u32 (mk_usize 16) = chacha20_core (mk_u32 0) state in\n  Chacha20.Hacspec_helper.u32s_to_le_bytes state\n\nlet chacha20_key_block0 (key: t_Array u8 (mk_usize 32)) (iv: t_Array u8 (mk_usize 12))\n    : t_Array u8 (mk_usize 64) =\n  let state:t_Array u32 (mk_usize 16) = chacha20_init key iv (mk_u32 0) in\n  chacha20_key_block state\n\nlet chacha20_encrypt_block\n      (st0: t_Array u32 (mk_usize 16))\n      (ctr: u32)\n      (plain: t_Array u8 (mk_usize 64))\n    : t_Array u8 (mk_usize 64) =\n  let st:t_Array u32 (mk_usize 16) = chacha20_core ctr st0 in\n  let (pl: t_Array u32 (mk_usize 16)):t_Array u32 (mk_usize 16) =\n    Chacha20.Hacspec_helper.to_le_u32s_16_ (plain <: t_Slice u8)\n  in\n  let encrypted:t_Array u32 (mk_usize 16) = 
Chacha20.Hacspec_helper.xor_state st pl in\n  Chacha20.Hacspec_helper.u32s_to_le_bytes encrypted\n\nlet chacha20_encrypt_last (st0: t_Array u32 (mk_usize 16)) (ctr: u32) (plain: t_Slice u8)\n    : Prims.Pure (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n      (requires (Core_models.Slice.impl__len #u8 plain <: usize) <=. mk_usize 64)\n      (fun _ -> Prims.l_True) =\n  let (b: t_Array u8 (mk_usize 64)):t_Array u8 (mk_usize 64) =\n    Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 64)\n  in\n  let b:t_Array u8 (mk_usize 64) = Chacha20.Hacspec_helper.update_array b plain in\n  let b:t_Array u8 (mk_usize 64) = chacha20_encrypt_block st0 ctr b in\n  Alloc.Slice.impl__to_vec #u8\n    (b.[ {\n          Core_models.Ops.Range.f_start = mk_usize 0;\n          Core_models.Ops.Range.f_end = Core_models.Slice.impl__len #u8 plain <: usize\n        }\n        <:\n        Core_models.Ops.Range.t_Range usize ]\n      <:\n      t_Slice u8)\n\nlet chacha20_update (st0: t_Array u32 (mk_usize 16)) (m: t_Slice u8)\n    : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n  let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Vec.impl__new #u8 () in\n  let num_blocks:usize = (Core_models.Slice.impl__len #u8 m <: usize) /! mk_usize 64 in\n  let remainder_len:usize = (Core_models.Slice.impl__len #u8 m <: usize) %! 
mk_usize 64 in\n  let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      num_blocks\n      (fun blocks_out temp_1_ ->\n          let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = blocks_out in\n          let _:usize = temp_1_ in\n          true)\n      blocks_out\n      (fun blocks_out i ->\n          let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = blocks_out in\n          let i:usize = i in\n          let b:t_Array u8 (mk_usize 64) =\n            chacha20_encrypt_block st0\n              (cast (i <: usize) <: u32)\n              (Core_models.Result.impl__unwrap #(t_Array u8 (mk_usize 64))\n                  #Core_models.Array.t_TryFromSliceError\n                  (Core_models.Convert.f_try_into #(t_Slice u8)\n                      #(t_Array u8 (mk_usize 64))\n                      #FStar.Tactics.Typeclasses.solve\n                      (m.[ {\n                            Core_models.Ops.Range.f_start = mk_usize 64 *! i <: usize;\n                            Core_models.Ops.Range.f_end\n                            =\n                            (mk_usize 64 *! i <: usize) +! mk_usize 64 <: usize\n                          }\n                          <:\n                          Core_models.Ops.Range.t_Range usize ]\n                        <:\n                        t_Slice u8)\n                    <:\n                    Core_models.Result.t_Result (t_Array u8 (mk_usize 64))\n                      Core_models.Array.t_TryFromSliceError)\n                <:\n                t_Array u8 (mk_usize 64))\n          in\n          let _:Prims.unit =\n            Hax_lib.v_assume (b2t\n                ((Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global blocks_out <: usize) =.\n                  (i *! 
mk_usize 64 <: usize)\n                  <:\n                  bool))\n          in\n          let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n            Alloc.Vec.impl_2__extend_from_slice #u8\n              #Alloc.Alloc.t_Global\n              blocks_out\n              (b <: t_Slice u8)\n          in\n          blocks_out)\n  in\n  let _:Prims.unit =\n    Hax_lib.v_assume (b2t\n        ((Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global blocks_out <: usize) =.\n          (num_blocks *! mk_usize 64 <: usize)\n          <:\n          bool))\n  in\n  let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    if remainder_len <>. mk_usize 0\n    then\n      let b:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n        chacha20_encrypt_last st0\n          (cast (num_blocks <: usize) <: u32)\n          (m.[ {\n                Core_models.Ops.Range.f_start = mk_usize 64 *! num_blocks <: usize;\n                Core_models.Ops.Range.f_end = Core_models.Slice.impl__len #u8 m <: usize\n              }\n              <:\n              Core_models.Ops.Range.t_Range usize ]\n            <:\n            t_Slice u8)\n      in\n      let blocks_out:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n        Alloc.Vec.impl_2__extend_from_slice #u8\n          #Alloc.Alloc.t_Global\n          blocks_out\n          (Core_models.Ops.Deref.f_deref #(Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n              #FStar.Tactics.Typeclasses.solve\n              b\n            <:\n            t_Slice u8)\n      in\n      blocks_out\n    else blocks_out\n  in\n  blocks_out\n\nlet chacha20\n      (m: t_Slice u8)\n      (key: t_Array u8 (mk_usize 32))\n      (iv: t_Array u8 (mk_usize 12))\n      (ctr: u32)\n    : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n  let state:t_Array u32 (mk_usize 16) = chacha20_init key iv ctr in\n  chacha20_update state m\n"
  },
  {
    "path": "examples/chacha20/proofs/fstar/extraction/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect:\n#  1. `fstar.exe` to be in PATH (alternatively, you can also set\n#     $FSTAR_HOME to be set to your F* repo/install directory)\n#\n#  2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH.\n#\n#  3. the extracted Cargo crate to have \"hax-lib\" as a dependency:\n#     `hax-lib = { version = \"0.1.0-pre.1\", git = \"https://github.com/hacspec/hax\"}`\n#\n# Optionally, you can set `HACL_HOME`.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHACL_HOME     ?= $(HOME)/.hax/hacl_home\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= .cache\nHINT_DIR      ?= .hints\n\nSHELL ?= /usr/bin/env bash\n\nEXECUTABLES = cargo cargo-hax jq\nK := $(foreach bin,$(EXECUTABLES),\\\n        $(if $(shell command -v $(bin) 2> /dev/null),,$(error 
\"No $(bin) in PATH\")))\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\n# Default hax invocation\nHAX_CLI = \"cargo hax into fstar --z3rlimit 40\"\n\n# If $HACL_HOME doesn't exist, clone it\n${HACL_HOME}:\n\tmkdir -p \"${HACL_HOME}\"\n\tgit clone --depth 1 https://github.com/hacl-star/hacl-star.git \"${HACL_HOME}\"\n\n# If no any F* file is detected, we run hax\nifeq \"$(wildcard *.fst *fsti)\" \"\"\n$(shell $(SHELL) -c $(HAX_CLI))\nendif\n\n# By default, we process all the files in the current directory\nROOTS = $(wildcard *.fst *fsti)\n\n# Regenerate F* files via hax when Rust sources change\n$(ROOTS): $(shell find ../../../src -type f -name '*.rs')\n\t$(shell $(SHELL) -c $(HAX_CLI))\n\n# The following is a bash script that discovers F* libraries\ndefine FINDLIBS\n    # Prints a path if and only if it exists. Takes one argument: the\n    # path.\n    function print_if_exists() {\n        if [ -d \"$$1\" ]; then\n            echo \"$$1\"\n        fi\n    }\n    # Asks Cargo all the dependencies for the current crate or workspace,\n    # and extract all \"root\" directories for each. Takes zero argument.\n    function dependencies() {\n        cargo metadata --format-version 1 |\n            jq -r '.packages | .[] | .manifest_path | split(\"/\") | .[:-1] | join(\"/\")'\n    }\n    # Find hax libraries *around* a given path. Takes one argument: the\n    # path.\n    function find_hax_libraries_at_path() {\n        path=\"$$1\"\n        # if there is a `proofs/fstar/extraction` subfolder, then that's a\n        # F* library\n        print_if_exists \"$$path/proofs/fstar/extraction\"\n        # Maybe the `proof-libs` folder of hax is around?\n        MAYBE_PROOF_LIBS=$$(realpath -q \"$$path/../proof-libs/fstar\")\n        if [ $$? 
-eq 0 ]; then\n            print_if_exists \"$$MAYBE_PROOF_LIBS/core\"\n            print_if_exists \"$$MAYBE_PROOF_LIBS/rust_primitives\"\n        fi\n    }\n    { while IFS= read path; do\n          find_hax_libraries_at_path \"$$path\"\n      done < <(dependencies)\n    } | sort -u\nendef\nexport FINDLIBS\n\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c \"$$FINDLIBS\")\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS)\n\n.depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n\trm *.fst\n\n# Special rule for `Chacha20.Hacspec_helper`\n$(CACHE_DIR)/Chacha20.Hacspec_helper.fst.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) --lax $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n"
  },
  {
    "path": "examples/chacha20/src/hacspec_helper.rs",
    "content": "use super::State;\n\n// pub(super) fn to_le_u32s<const L: usize>(bytes: &[u8]) -> [u32; L] {\n//     assert_eq!(L, bytes.len() / 4);\n//     let mut out = [0; L];\n//     for (i, block) in bytes.chunks(4).enumerate() {\n//         out[i] = u32::from_le_bytes(block.try_into().unwrap());\n//     }\n//     out\n// }\n\nmacro_rules! to_le_u32s_impl {\n    ($name:ident,$l:literal) => {\n        pub(super) fn $name(bytes: &[u8]) -> [u32; $l] {\n            // assert_eq!($l, bytes.len() / 4);\n            let mut out = [0; $l];\n            // for (i, block) in bytes.chunks(4).enumerate() {\n            for i in 0..$l {\n                out[i] = u32::from_le_bytes(bytes[4 * i..4 * i + 4].try_into().unwrap());\n            }\n            out\n        }\n    };\n}\nto_le_u32s_impl!(to_le_u32s_3, 3);\nto_le_u32s_impl!(to_le_u32s_8, 8);\nto_le_u32s_impl!(to_le_u32s_16, 16);\n\npub(super) fn u32s_to_le_bytes(state: &[u32; 16]) -> [u8; 64] {\n    // <const L: usize>\n    let mut out = [0; 64];\n    for i in 0..state.len() {\n        let tmp = state[i].to_le_bytes();\n        for j in 0..4 {\n            out[i * 4 + j] = tmp[j];\n        }\n    }\n    out\n}\n\npub(super) fn xor_state(mut state: State, other: State) -> State {\n    for i in 0..16 {\n        state[i] = state[i] ^ other[i];\n    }\n    state\n}\n\npub(super) fn add_state(mut state: State, other: State) -> State {\n    for i in 0..16 {\n        state[i] = state[i].wrapping_add(other[i]);\n    }\n    state\n}\n\npub(super) fn update_array(mut array: [u8; 64], val: &[u8]) -> [u8; 64] {\n    // <const L: usize>\n    assert!(64 >= val.len());\n    for i in 0..val.len() {\n        array[i] = val[i];\n    }\n    array\n}\n"
  },
  {
    "path": "examples/chacha20/src/lib.rs",
    "content": "mod hacspec_helper;\nuse hacspec_helper::*;\n\nuse hax_lib as hax;\n\ntype State = [u32; 16];\ntype Block = [u8; 64];\ntype ChaChaIV = [u8; 12];\ntype ChaChaKey = [u8; 32];\n\ntype StateIdx = hax_bounded_integers::BoundedUsize<0, 15>;\n\nfn chacha20_line(a: StateIdx, b: StateIdx, d: StateIdx, s: u32, m: State) -> State {\n    let mut state = m;\n    state[a] = state[a].wrapping_add(state[b]);\n    state[d] = state[d] ^ state[a];\n    state[d] = state[d].rotate_left(s);\n    state\n}\n\npub fn chacha20_quarter_round(\n    a: StateIdx,\n    b: StateIdx,\n    c: StateIdx,\n    d: StateIdx,\n    state: State,\n) -> State {\n    let state = chacha20_line(a, b, d, 16, state);\n    let state = chacha20_line(c, d, b, 12, state);\n    let state = chacha20_line(a, b, d, 8, state);\n    chacha20_line(c, d, b, 7, state)\n}\n\nuse hax_lib::*;\n\nfn chacha20_double_round(state: State) -> State {\n    let state = chacha20_quarter_round(\n        0.into_checked(),\n        4.into_checked(),\n        8.into_checked(),\n        12.into_checked(),\n        state,\n    );\n    let state = chacha20_quarter_round(\n        1.into_checked(),\n        5.into_checked(),\n        9.into_checked(),\n        13.into_checked(),\n        state,\n    );\n    let state = chacha20_quarter_round(\n        2.into_checked(),\n        6.into_checked(),\n        10.into_checked(),\n        14.into_checked(),\n        state,\n    );\n    let state = chacha20_quarter_round(\n        3.into_checked(),\n        7.into_checked(),\n        11.into_checked(),\n        15.into_checked(),\n        state,\n    );\n\n    let state = chacha20_quarter_round(\n        0.into_checked(),\n        5.into_checked(),\n        10.into_checked(),\n        15.into_checked(),\n        state,\n    );\n    let state = chacha20_quarter_round(\n        1.into_checked(),\n        6.into_checked(),\n        11.into_checked(),\n        12.into_checked(),\n        state,\n    );\n    let state = 
chacha20_quarter_round(\n        2.into_checked(),\n        7.into_checked(),\n        8.into_checked(),\n        13.into_checked(),\n        state,\n    );\n    chacha20_quarter_round(\n        3.into_checked(),\n        4.into_checked(),\n        9.into_checked(),\n        14.into_checked(),\n        state,\n    )\n}\n\npub fn chacha20_rounds(state: State) -> State {\n    let mut st = state;\n    for _i in 0..10 {\n        st = chacha20_double_round(st);\n    }\n    st\n}\n\npub fn chacha20_core(ctr: u32, st0: State) -> State {\n    let mut state = st0;\n    state[12] = state[12].wrapping_add(ctr);\n    let k = chacha20_rounds(state);\n    add_state(state, k)\n}\n\npub fn chacha20_init(key: &ChaChaKey, iv: &ChaChaIV, ctr: u32) -> State {\n    let key_u32: [u32; 8] = to_le_u32s_8(key);\n    let iv_u32: [u32; 3] = to_le_u32s_3(iv);\n    [\n        0x6170_7865,\n        0x3320_646e,\n        0x7962_2d32,\n        0x6b20_6574,\n        key_u32[0],\n        key_u32[1],\n        key_u32[2],\n        key_u32[3],\n        key_u32[4],\n        key_u32[5],\n        key_u32[6],\n        key_u32[7],\n        ctr,\n        iv_u32[0],\n        iv_u32[1],\n        iv_u32[2],\n    ]\n}\n\npub fn chacha20_key_block(state: State) -> Block {\n    let state = chacha20_core(0u32, state);\n    u32s_to_le_bytes(&state)\n}\n\npub fn chacha20_key_block0(key: &ChaChaKey, iv: &ChaChaIV) -> Block {\n    let state = chacha20_init(key, iv, 0u32);\n    chacha20_key_block(state)\n}\n\npub fn chacha20_encrypt_block(st0: State, ctr: u32, plain: &Block) -> Block {\n    let st = chacha20_core(ctr, st0);\n    let pl: State = to_le_u32s_16(plain);\n    let encrypted = xor_state(st, pl);\n    u32s_to_le_bytes(&encrypted)\n}\n\n#[hax::requires(plain.len() <= 64)]\npub fn chacha20_encrypt_last(st0: State, ctr: u32, plain: &[u8]) -> Vec<u8> {\n    let mut b: Block = [0; 64];\n    b = update_array(b, plain);\n    b = chacha20_encrypt_block(st0, ctr, &b);\n    b[0..plain.len()].to_vec()\n}\n\npub fn 
chacha20_update(st0: State, m: &[u8]) -> Vec<u8> {\n    let mut blocks_out = Vec::new();\n    let num_blocks = m.len() / 64;\n    let remainder_len = m.len() % 64;\n    for i in 0..num_blocks {\n        // Full block\n        let b =\n            chacha20_encrypt_block(st0, i as u32, &m[64 * i..(64 * i + 64)].try_into().unwrap());\n        hax_lib::assume!(blocks_out.len() == i * 64);\n        blocks_out.extend_from_slice(&b);\n    }\n    hax_lib::assume!(blocks_out.len() == num_blocks * 64);\n    if remainder_len != 0 {\n        // Last block\n        let b = chacha20_encrypt_last(st0, num_blocks as u32, &m[64 * num_blocks..m.len()]);\n        blocks_out.extend_from_slice(&b);\n    }\n    blocks_out\n}\n\npub fn chacha20(m: &[u8], key: &ChaChaKey, iv: &ChaChaIV, ctr: u32) -> Vec<u8> {\n    let state = chacha20_init(key, iv, ctr);\n    chacha20_update(state, m)\n}\n"
  },
  {
    "path": "examples/chacha20/tests/kat.rs",
    "content": "use chacha20::chacha20;\n\npub type ChaChaIV = [u8; 12];\npub type ChaChaKey = [u8; 32];\n\nfn kat_test(m: Vec<u8>, key: ChaChaKey, iv: ChaChaIV, exp_cipher: Vec<u8>) {\n    let out = chacha20(&m, &key, &iv, 1u32);\n    assert_eq!(exp_cipher, out);\n\n    let decrypted = chacha20(&out, &key, &iv, 1u32);\n    assert_eq!(m, decrypted);\n}\n\n#[test]\nfn test_kat() {\n    let key = [\n        0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e,\n        0x8f, 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d,\n        0x9e, 0x9f,\n    ];\n    let iv = [\n        0x07, 0x00, 0x00, 0x00, 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,\n    ];\n    let m = vec![\n        0x4c, 0x61, 0x64, 0x69, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x47, 0x65, 0x6e, 0x74,\n        0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x20, 0x6f, 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6c,\n        0x61, 0x73, 0x73, 0x20, 0x6f, 0x66, 0x20, 0x27, 0x39, 0x39, 0x3a, 0x20, 0x49, 0x66, 0x20,\n        0x49, 0x20, 0x63, 0x6f, 0x75, 0x6c, 0x64, 0x20, 0x6f, 0x66, 0x66, 0x65, 0x72, 0x20, 0x79,\n        0x6f, 0x75, 0x20, 0x6f, 0x6e, 0x6c, 0x79, 0x20, 0x6f, 0x6e, 0x65, 0x20, 0x74, 0x69, 0x70,\n        0x20, 0x66, 0x6f, 0x72, 0x20, 0x74, 0x68, 0x65, 0x20, 0x66, 0x75, 0x74, 0x75, 0x72, 0x65,\n        0x2c, 0x20, 0x73, 0x75, 0x6e, 0x73, 0x63, 0x72, 0x65, 0x65, 0x6e, 0x20, 0x77, 0x6f, 0x75,\n        0x6c, 0x64, 0x20, 0x62, 0x65, 0x20, 0x69, 0x74, 0x2e,\n    ];\n    let exp_cipher = vec![\n        0xd3, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb, 0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e,\n        0xc2, 0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe, 0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee,\n        0x62, 0xd6, 0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12, 0x82, 0xfa, 0xfb, 0x69, 0xda,\n        0x92, 0x72, 0x8b, 0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29, 0x05, 0xd6, 0xa5, 0xb6,\n        0x7e, 0xcd, 0x3b, 0x36, 0x92, 0xdd, 0xbd, 0x7f, 
0x2d, 0x77, 0x8b, 0x8c, 0x98, 0x03, 0xae,\n        0xe3, 0x28, 0x09, 0x1b, 0x58, 0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94, 0x55, 0x85,\n        0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc, 0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d, 0xe5,\n        0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b, 0x61, 0x16,\n    ];\n    kat_test(m, key, iv, exp_cipher);\n\n    let key = [\n        0x8c, 0x4e, 0xfa, 0x63, 0x37, 0x96, 0x89, 0xd5, 0x90, 0xa8, 0xcb, 0xcf, 0xe1, 0x59, 0x42,\n        0xf8, 0xc1, 0xce, 0xe5, 0xaf, 0xa5, 0xf7, 0x52, 0xf7, 0xc3, 0xf0, 0x92, 0xa8, 0x41, 0x93,\n        0xa6, 0x89,\n    ];\n    let iv = [\n        0xbc, 0xf, 0x85, 0xee, 0x55, 0xa, 0x45, 0x6f, 0x16, 0xa7, 0x35, 0xb6,\n    ];\n    let m = vec![\n        204, 17, 211, 86, 205, 3, 143, 149, 232, 65, 249, 176, 134, 19, 51, 245, 33, 247, 187, 39,\n        120, 111, 226, 96, 68, 224, 250, 140, 18, 23, 174, 109, 149, 193, 10, 5, 167, 22, 19, 129,\n        17, 172, 51, 202, 186, 21, 6, 141, 39, 108, 186, 72, 39, 100, 193, 30, 104, 79, 48, 185,\n        169, 209, 200, 3, 13, 163, 231, 189, 171, 136, 188, 95, 55, 49, 109, 64, 186, 116, 233,\n        184, 56, 190, 71, 41, 250, 237, 235, 86, 23, 123, 226, 228, 35, 127, 176, 10, 49, 230, 129,\n        226, 237, 144, 29, 197, 161, 96, 129, 200, 66, 205, 187, 155, 34, 133, 250, 84, 14, 51,\n        242, 189, 46, 228, 61, 170, 192, 93, 214, 35, 206, 224, 157, 14, 249, 97, 40, 134, 103,\n        194, 168, 191, 159, 249, 127, 85, 83, 223, 166, 145, 98, 60, 85, 129, 209, 67, 119, 189,\n        67, 56, 55, 106, 48, 255, 198, 76, 192, 233, 56, 236, 98, 228, 219, 213, 206, 185, 25, 125,\n        189, 112, 160, 113, 183, 90, 71, 15, 80, 46, 143, 110, 112, 234, 214, 218, 24, 232, 196,\n        229, 62, 176, 17, 61, 92, 172, 224, 29, 225, 151, 141, 143, 126, 235, 195, 179, 186, 244,\n        250, 165, 106, 17, 255, 145, 27, 166, 250, 29, 149, 212, 55, 214, 158, 104, 82, 74, 246,\n        167, 216, 132, 218, 121, 4, 167, 73, 67, 145, 173, 245, 40, 158, 72, 71, 167, 53, 176, 
27,\n        136, 165, 222, 115, 63, 241, 144, 198, 3, 81, 22, 58, 128, 38, 198, 100, 40, 36, 136, 194,\n        216, 150, 182, 94, 194, 235, 97, 212, 195, 112, 255, 158, 243, 154, 102, 56, 105, 25, 72,\n        80, 106, 123, 84, 162, 102, 161, 8, 58, 194, 160, 111, 247, 22, 129, 212, 140, 111, 80,\n        168, 203, 126, 222, 231, 82, 98, 63, 194, 253, 127, 127, 25, 208, 14, 252, 199, 107, 88,\n        38, 82, 57, 67, 13, 173, 208, 75, 182, 222, 89, 70, 27, 28, 21, 17, 97, 122, 184, 122, 27,\n        230, 219, 56, 135, 146, 204, 36, 211, 92, 113, 196, 121, 14, 212, 64, 211, 3, 122, 47, 217,\n        186, 209, 254, 221, 126, 172, 235, 198, 198, 52, 118, 19, 192, 150, 148, 224, 250, 173,\n        139, 121, 101, 231, 13, 101, 22, 168, 223, 118, 254, 161, 216, 110, 246, 67, 64, 249, 252,\n        55, 54, 95, 52, 72, 206, 68, 35, 36, 120, 52, 126, 233, 38, 3, 27, 11, 89, 242, 17, 168,\n        32, 197, 21, 121, 187, 77, 193, 107, 204, 151, 76, 187, 196, 162, 149, 93, 43, 39, 165,\n        171, 45, 154, 186, 89, 170, 11, 60, 119, 30, 183, 0, 29, 154, 114, 227, 77, 207, 140, 232,\n        18, 117, 3, 49, 229, 150, 125, 201, 100, 191, 44, 20, 35, 142, 216, 219, 38, 133, 166, 247,\n        26, 129, 69, 90, 140, 20, 70, 97, 49, 143, 7, 214, 61, 2, 65, 133, 36, 116, 140, 78, 68,\n        29, 138, 89, 83, 162, 117, 48, 52, 247, 108, 118, 183, 48, 125, 45, 53, 192, 235, 198, 30,\n        159, 113, 131, 182, 22, 185, 47, 174, 155, 179, 39, 235, 248, 188, 117, 181, 233, 8, 153,\n        224, 107, 115, 226, 77, 22, 38, 190, 143, 50, 151, 171, 80, 137, 229, 209, 131, 130, 232,\n        147, 142, 227, 225, 86, 56, 230, 12, 236, 180, 121, 119, 89, 55, 231, 158, 222, 131, 173,\n        255, 24, 41, 49, 196, 145, 137, 240, 71, 244, 165, 16, 84, 19, 218, 103, 26, 212, 221, 140,\n        154, 59, 87, 86, 254, 200, 81, 20, 250, 20, 173, 95, 33, 185, 106, 170, 39, 55, 249, 33,\n        192, 79, 5, 27, 92, 126, 245, 10, 215, 11, 43, 240, 120, 16, 167, 251, 80, 79, 16, 215,\n        154, 
28, 131, 8, 121, 124, 189, 178, 190, 194, 246, 196, 35, 155, 36, 74, 175, 231, 78,\n        230, 212, 130, 13, 240, 137, 255, 103, 224, 163, 209, 164, 252, 7, 16, 205, 198, 155, 107,\n        255, 9, 26, 176, 69, 47, 58, 17, 198, 134, 241, 242, 2, 98, 48, 131, 58, 52, 122, 10, 96,\n        45, 39, 231, 146, 89, 207, 187, 96, 84, 207, 157, 89, 166, 169, 236, 140, 165, 205, 87,\n        111, 142, 142, 49, 12, 18, 218, 196, 168, 239, 111, 86, 192, 199, 237, 65, 91, 177, 113,\n        206, 133, 165, 51, 177, 49, 55, 127, 47, 14, 121, 250, 30, 107, 243, 99, 109, 195, 110, 62,\n        20, 112, 100, 205, 220, 51, 69, 151, 206, 114, 186, 6, 1, 243,\n    ];\n    let exp_cipher = vec![\n        223, 158, 69, 247, 207, 28, 32, 247, 233, 67, 87, 239, 80, 204, 82, 219, 90, 49, 36, 247,\n        188, 12, 201, 188, 19, 16, 249, 172, 149, 48, 185, 193, 205, 81, 162, 184, 194, 29, 198,\n        129, 72, 30, 148, 5, 127, 254, 175, 179, 229, 228, 26, 157, 127, 67, 88, 85, 240, 197, 250,\n        135, 43, 230, 0, 140, 178, 229, 204, 62, 247, 160, 98, 24, 192, 253, 194, 86, 162, 196,\n        216, 177, 7, 32, 220, 97, 252, 127, 236, 194, 131, 230, 229, 37, 222, 145, 142, 96, 87, 99,\n        206, 218, 149, 223, 164, 92, 65, 178, 73, 240, 146, 227, 168, 244, 163, 11, 237, 205, 132,\n        236, 150, 253, 140, 20, 232, 68, 177, 232, 224, 19, 254, 63, 58, 105, 53, 146, 164, 5, 151,\n        188, 55, 7, 39, 137, 12, 169, 49, 209, 20, 80, 199, 134, 31, 170, 254, 177, 67, 119, 216,\n        57, 170, 76, 37, 226, 93, 9, 65, 61, 62, 169, 67, 230, 241, 209, 164, 240, 81, 100, 13,\n        228, 24, 212, 86, 69, 48, 182, 160, 106, 151, 144, 173, 173, 173, 91, 155, 9, 156, 138,\n        182, 6, 211, 221, 221, 250, 148, 175, 189, 222, 79, 142, 31, 198, 146, 194, 4, 250, 19, 8,\n        5, 28, 15, 9, 95, 66, 122, 234, 138, 205, 107, 0, 202, 236, 143, 197, 126, 164, 77, 159,\n        172, 180, 144, 68, 211, 76, 146, 83, 92, 56, 68, 164, 0, 145, 243, 106, 71, 233, 182, 118,\n        138, 193, 179, 
0, 249, 162, 210, 56, 157, 210, 161, 158, 129, 112, 82, 253, 98, 148, 70,\n        247, 93, 234, 218, 200, 137, 245, 191, 196, 157, 204, 239, 148, 253, 103, 98, 99, 28, 131,\n        78, 194, 122, 201, 171, 106, 20, 16, 150, 80, 138, 202, 29, 171, 173, 57, 98, 55, 49, 211,\n        99, 74, 160, 255, 83, 213, 55, 141, 22, 58, 121, 220, 86, 159, 148, 178, 220, 245, 243, 36,\n        53, 126, 31, 224, 188, 220, 133, 63, 99, 108, 103, 93, 134, 210, 57, 114, 228, 127, 226,\n        182, 106, 98, 113, 107, 131, 15, 222, 1, 129, 21, 169, 179, 168, 102, 42, 156, 92, 6, 237,\n        16, 13, 213, 75, 44, 155, 15, 86, 12, 148, 236, 168, 124, 131, 127, 59, 212, 145, 224, 46,\n        226, 170, 254, 210, 8, 237, 7, 247, 57, 146, 170, 220, 97, 24, 172, 34, 128, 233, 62, 238,\n        90, 249, 38, 244, 211, 95, 80, 57, 199, 75, 123, 130, 240, 89, 251, 58, 223, 205, 116, 105,\n        153, 116, 120, 165, 121, 140, 9, 191, 13, 247, 10, 236, 51, 65, 210, 243, 255, 234, 90,\n        254, 111, 15, 50, 91, 143, 221, 78, 248, 40, 232, 43, 255, 5, 160, 65, 77, 57, 36, 91, 77,\n        124, 50, 93, 70, 105, 195, 8, 248, 184, 204, 56, 173, 5, 131, 177, 162, 222, 103, 218, 194,\n        42, 227, 64, 3, 105, 74, 86, 47, 26, 164, 31, 71, 26, 234, 83, 51, 230, 27, 214, 78, 117,\n        248, 75, 226, 140, 181, 144, 74, 161, 201, 178, 243, 210, 157, 121, 176, 23, 156, 192, 126,\n        100, 121, 79, 0, 24, 100, 36, 116, 127, 226, 233, 240, 84, 197, 88, 107, 151, 105, 106, 64,\n        195, 79, 52, 168, 185, 30, 19, 98, 90, 213, 9, 202, 106, 46, 114, 58, 229, 84, 220, 105,\n        225, 65, 139, 175, 0, 250, 31, 76, 73, 33, 17, 245, 23, 213, 74, 255, 52, 148, 242, 217,\n        98, 67, 243, 59, 117, 103, 133, 119, 164, 4, 231, 1, 26, 6, 22, 157, 134, 104, 137, 49,\n        172, 89, 95, 137, 37, 141, 91, 14, 132, 109, 215, 214, 136, 224, 175, 40, 11, 31, 128, 67,\n        255, 29, 78, 70, 204, 36, 194, 16, 218, 200, 77, 150, 72, 167, 22, 47, 72, 199, 88, 109,\n        151, 177, 11, 114, 11, 
176, 174, 20, 65, 182, 80, 28, 131, 39, 234, 181, 226, 169, 9, 154,\n        97, 248, 57, 91, 133, 52, 0, 75, 211, 171, 106, 84, 179, 14, 134, 13, 238, 157, 123, 36,\n        190, 69, 232, 85, 4, 125, 86, 69, 64, 230, 162, 139, 187, 189, 210, 58, 212, 74, 74, 122,\n        52, 61, 237, 152, 75, 38, 92, 222, 119, 138, 202, 98, 36, 20, 35, 50, 22, 234, 40, 206,\n        147, 146, 243, 204, 19, 44, 227, 82, 169, 91, 238, 179, 118, 18, 70, 166, 163, 30, 156,\n        191, 179, 183, 83, 252, 252, 238, 207, 70, 81, 219, 48, 86, 226, 243, 94, 179, 251, 22,\n        107, 146, 133, 165, 37, 138,\n    ];\n    kat_test(m, key, iv, exp_cipher);\n}\n"
  },
  {
    "path": "examples/commonArgs.nix",
    "content": "{\n  craneLib,\n  lib,\n}:\nlet\n  matches = re: path: !builtins.isNull (builtins.match re path);\nin\n{\n  version = \"0.0.1\";\n  src = lib.cleanSourceWith {\n    src = craneLib.path ./..;\n    filter = path: type:\n      # We include only certain files. FStar files under the example\n      # directory are listed out. Same for proverif (*.pvl) files.\n      (   matches \".*(Makefile|.*[.](rs|toml|lock|diff|fsti?|pv))$\" path\n          && !matches \".*examples/.*[.]fsti?$\" path\n      ) || (\"directory\" == type);\n  };\n  doCheck = false;\n  cargoVendorDir = craneLib.vendorMultipleCargoDeps {\n    cargoLockList = [\n      ./Cargo.lock\n      ../Cargo.lock\n    ];\n  };\n}\n"
  },
  {
    "path": "examples/coq-example/Cargo.toml",
    "content": "[package]\nname = \"coq-example\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = {path = \"../../hax-lib\" }\n"
  },
  {
    "path": "examples/coq-example/README.md",
    "content": "# Stack example\nThis example is a simple interpreter for a stack.\n\n## How to build\n```sh\ncargo hax into coq\n```\n\n## Coq\nNow we have the file `proofs/coq/extraction/Coq_example.v`.\nTo run the files we first need to install some dependencies.\n\n### Dependencies for Coq\nThe coq backend depends on `coq-record-update` to implement Rust record updates. \nThis can be installed by\n```sh\nopam install coq-record-update\n```\nor alternatively the import lines \n```coq\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n```\ncan be commented out.\n\n## Library required to run\nAs Rust implicitly imports the `Core` library for a lot of the basic functionality, we will also require a core library for Coq. For this small example, we build a dummy library with the required definitions, to run the example. As a hack to get this to run we add\n```\nmod dummy_core_lib;\nuse dummy_core_lib::*;\n```\nto the Rust example file `src/lib.rs`. The definitions of the library are put into `proofs/coq/extraction/dummy_core_lib.v` to match this import.\n\n## Running the code and doing proofs\nWe can set up a Coq project by making a `_CoqProject` file in `proofs/coq/extraction/`.\n```\n-R ./ Coq_example\n-arg -w\n-arg all\n\n./dummy_core_lib.v\n./Coq_example.v\n./Coq_proofs.v\n```\nWe then build a makefile from the project file by\n```sh\ncoq_makefile -f _CoqProject -o Makefile\n```\nand run `make` to build. Any tests and proofs, we put into a seperate file `proofs/coq/extraction/Coq_proofs.v`. which imports the generated file, such that we can update and regenerate the file, without overwriting the proofs.\n"
  },
  {
    "path": "examples/coq-example/proofs/coq/extraction/Coq_example.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n\n\n(* NotImplementedYet *)\n\nFrom Coq_example Require Import dummy_core_lib.\nExport dummy_core_lib.\n\nInductive t_Instruction : Type :=\n| Instruction_Push : t_isize -> _\n| Instruction_Pop\n| Instruction_Add\n| Instruction_Sub\n| Instruction_Mul\n| Instruction_Not\n| Instruction_Dup.\nArguments Instruction_Push.\nArguments Instruction_Pop.\nArguments Instruction_Add.\nArguments Instruction_Sub.\nArguments Instruction_Mul.\nArguments Instruction_Not.\nArguments Instruction_Dup.\n\nDefinition impl_Instruction__interpret (self : t_Instruction) (stack : t_Vec ((t_isize)) ((t_Global))) : t_Vec ((t_isize)) ((t_Global)) :=\n  let stack := match self with\n  | Instruction_Push (v) =>\n    impl_1__push (stack) (v)\n  | Instruction_Pop =>\n    let (tmp0,out) := impl_1__pop (stack) in\n    let stack := tmp0 in\n    let _ := out in\n    stack\n  | Instruction_Add =>\n    let (tmp0,out) := impl_1__pop (stack) in\n    let stack := tmp0 in\n    let hoist2 := out in\n    let (tmp0,out) := impl_1__pop (stack) in\n    let stack := tmp0 in\n    let hoist1 := out in\n    let hoist3 := (hoist2,hoist1) in\n    match hoist3 with\n    | (Option_Some (a),Option_Some (b)) =>\n      impl_1__push (stack) (f_add (b) (a))\n    | _ =>\n      stack\n    end\n  | Instruction_Sub =>\n    let (tmp0,out) := impl_1__pop (stack) in\n    let stack := tmp0 in\n    let hoist5 := out in\n    let (tmp0,out) := impl_1__pop (stack) in\n    let stack := tmp0 in\n    let hoist4 := out in\n    let hoist6 := (hoist5,hoist4) in\n    match hoist6 with\n    | (Option_Some (a),Option_Some (b)) =>\n      impl_1__push (stack) (f_sub (b) 
(a))\n    | _ =>\n      stack\n    end\n  | Instruction_Mul =>\n    let (tmp0,out) := impl_1__pop (stack) in\n    let stack := tmp0 in\n    let hoist8 := out in\n    let (tmp0,out) := impl_1__pop (stack) in\n    let stack := tmp0 in\n    let hoist7 := out in\n    let hoist9 := (hoist8,hoist7) in\n    match hoist9 with\n    | (Option_Some (a),Option_Some (b)) =>\n      impl_1__push (stack) (f_mul (b) (a))\n    | _ =>\n      stack\n    end\n  | Instruction_Not =>\n    let (tmp0,out) := impl_1__pop (stack) in\n    let stack := tmp0 in\n    let hoist10 := out in\n    match hoist10 with\n    | Option_Some (a) =>\n      impl_1__push (stack) (if\n        f_eq (a) ((0 : t_isize))\n      then\n        (1 : t_isize)\n      else\n        (0 : t_isize))\n    | _ =>\n      stack\n    end\n  | Instruction_Dup =>\n    let (tmp0,out) := impl_1__pop (stack) in\n    let stack := tmp0 in\n    let hoist11 := out in\n    match hoist11 with\n    | Option_Some (a) =>\n      let stack := impl_1__push (stack) (a) in\n      let stack := impl_1__push (stack) (a) in\n      stack\n    | _ =>\n      stack\n    end\n  end in\n  stack.\n\nDefinition example '(_ : unit) : t_Vec ((t_isize)) ((t_Global)) :=\n  let stk := impl__new (tt) in\n  let stk := f_fold (f_into_iter ([Instruction_Push ((1 : t_isize)); Instruction_Push ((1 : t_isize)); Instruction_Add; Instruction_Push ((1 : t_isize)); Instruction_Push ((1 : t_isize)); Instruction_Push ((1 : t_isize)); Instruction_Add; Instruction_Add; Instruction_Dup; Instruction_Mul; Instruction_Sub])) (stk) (fun stk cmd =>\n    impl_Instruction__interpret (cmd) (stk)) in\n  stk.\n"
  },
  {
    "path": "examples/coq-example/proofs/coq/extraction/Coq_example_Dummy_core_lib.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "examples/coq-example/proofs/coq/extraction/Coq_proofs.v",
    "content": "(* Handwritten Proofs *)\n\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\n\nFrom Coq_example Require Import Coq_example.\n\n\n(* Check example *)\nExample is_example_correct : example tt = [-7]. Proof. reflexivity. Qed.\n\n(* Proof composite operations *)\nTheorem dup_mul_is_square : forall x,\n    impl__Instruction__interpret Instruction_Mul (\n        impl__Instruction__interpret Instruction_Dup [x])\n    = [Z.pow x 2].\nProof.\n  intros.\n  cbn.\n  rewrite Z.mul_1_r.\n  reflexivity.\nQed.\n\nTheorem push_pop_cancel : forall l x,\n    impl__Instruction__interpret Instruction_Pop (\n        impl__Instruction__interpret (Instruction_Push x) l)\n    = l.\nProof.\n  intros.\n  cbn.\n  reflexivity.\nQed.\n"
  },
  {
    "path": "examples/coq-example/proofs/coq/extraction/Makefile",
    "content": "##########################################################################\n##         #   The Coq Proof Assistant / The Coq Development Team       ##\n##  v      #         Copyright INRIA, CNRS and contributors             ##\n## <O___,, # (see version control and CREDITS file for authors & dates) ##\n##   \\VV/  ###############################################################\n##    //   #    This file is distributed under the terms of the         ##\n##         #     GNU Lesser General Public License Version 2.1          ##\n##         #     (see LICENSE file for the text of the license)         ##\n##########################################################################\n## GNUMakefile for Coq 8.18.0\n\n# For debugging purposes (must stay here, don't move below)\nINITIAL_VARS := $(.VARIABLES)\n# To implement recursion we save the name of the main Makefile\nSELF := $(lastword $(MAKEFILE_LIST))\nPARENT := $(firstword $(MAKEFILE_LIST))\n\n# This file is generated by coq_makefile and contains many variable\n# definitions, like the list of .v files or the path to Coq\ninclude Makefile.conf\n\n# Put in place old names\nVFILES            := $(COQMF_VFILES)\nMLIFILES          := $(COQMF_MLIFILES)\nMLFILES           := $(COQMF_MLFILES)\nMLGFILES          := $(COQMF_MLGFILES)\nMLPACKFILES       := $(COQMF_MLPACKFILES)\nMLLIBFILES        := $(COQMF_MLLIBFILES)\nMETAFILE          := $(COQMF_METAFILE)\nCMDLINE_VFILES    := $(COQMF_CMDLINE_VFILES)\nINSTALLCOQDOCROOT := $(COQMF_INSTALLCOQDOCROOT)\nOTHERFLAGS        := $(COQMF_OTHERFLAGS)\nCOQCORE_SRC_SUBDIRS := $(COQMF_COQ_SRC_SUBDIRS)\nOCAMLLIBS         := $(COQMF_OCAMLLIBS)\nSRC_SUBDIRS       := $(COQMF_SRC_SUBDIRS)\nCOQLIBS           := $(COQMF_COQLIBS)\nCOQLIBS_NOML      := $(COQMF_COQLIBS_NOML)\nCMDLINE_COQLIBS   := $(COQMF_CMDLINE_COQLIBS)\nCOQLIB            := $(COQMF_COQLIB)\nCOQCORELIB        := $(COQMF_COQCORELIB)\nDOCDIR            := $(COQMF_DOCDIR)\nOCAMLFIND         := 
$(COQMF_OCAMLFIND)\nCAMLFLAGS         := $(COQMF_CAMLFLAGS)\nHASNATDYNLINK     := $(COQMF_HASNATDYNLINK)\nOCAMLWARN         := $(COQMF_WARN)\n\nMakefile.conf: _CoqProject\n\tcoq_makefile -f _CoqProject -o Makefile\n\n# This file can be created by the user to hook into double colon rules or\n# add any other Makefile code he may need\n-include Makefile.local\n\n# Parameters ##################################################################\n#\n# Parameters are make variable assignments.\n# They can be passed to (each call to) make on the command line.\n# They can also be put in Makefile.local once and for all.\n# For retro-compatibility reasons they can be put in the _CoqProject, but this\n# practice is discouraged since _CoqProject better not contain make specific\n# code (be nice to user interfaces).\n\n# Set KEEP_ERROR to have make keep files produced by failing rules.\n# By default, KEEP_ERROR is empty. So for instance if coqc creates a .vo but\n# then fails to native compile, the .vo will be deleted.\n# May confuse make so use only for debugging.\nKEEP_ERROR?=\nifeq (,$(KEEP_ERROR))\n.DELETE_ON_ERROR:\nendif\n\n# Print shell commands (set to non empty)\nVERBOSE ?=\n\n# Time the Coq process (set to non empty), and how (see default value)\nTIMED?=\nTIMECMD?=\n# Use command time on linux, gtime on Mac OS\nTIMEFMT?=\"$(if $(findstring undefined, $(flavor 1)),$@,$(1)) (real: %e, user: %U, sys: %S, mem: %M ko)\"\nifneq (,$(TIMED))\nifeq (0,$(shell command time -f \"\" true >/dev/null 2>/dev/null; echo $$?))\nSTDTIME?=command time -f $(TIMEFMT)\nelse\nifeq (0,$(shell gtime -f \"\" true >/dev/null 2>/dev/null; echo $$?))\nSTDTIME?=gtime -f $(TIMEFMT)\nelse\nSTDTIME?=command time\nendif\nendif\nelse\nSTDTIME?=command time -f $(TIMEFMT)\nendif\n\nCOQBIN?=\nifneq (,$(COQBIN))\n# add an ending /\nCOQBIN:=$(COQBIN)/\nendif\n\n# Coq binaries\nCOQC     ?= \"$(COQBIN)coqc\"\nCOQTOP   ?= \"$(COQBIN)coqtop\"\nCOQCHK   ?= \"$(COQBIN)coqchk\"\nCOQNATIVE ?= 
\"$(COQBIN)coqnative\"\nCOQDEP   ?= \"$(COQBIN)coqdep\"\nCOQDOC   ?= \"$(COQBIN)coqdoc\"\nCOQPP    ?= \"$(COQBIN)coqpp\"\nCOQMKFILE ?= \"$(COQBIN)coq_makefile\"\nOCAMLLIBDEP ?= \"$(COQBIN)ocamllibdep\"\n\n# Timing scripts\nCOQMAKE_ONE_TIME_FILE ?= \"$(COQCORELIB)/tools/make-one-time-file.py\"\nCOQMAKE_BOTH_TIME_FILES ?= \"$(COQCORELIB)/tools/make-both-time-files.py\"\nCOQMAKE_BOTH_SINGLE_TIMING_FILES ?= \"$(COQCORELIB)/tools/make-both-single-timing-files.py\"\nBEFORE ?=\nAFTER ?=\n\n# OCaml binaries\nCAMLC       ?= \"$(OCAMLFIND)\" ocamlc   -c\nCAMLOPTC    ?= \"$(OCAMLFIND)\" opt      -c\nCAMLLINK    ?= \"$(OCAMLFIND)\" ocamlc   -linkall\nCAMLOPTLINK ?= \"$(OCAMLFIND)\" opt      -linkall\nCAMLDOC     ?= \"$(OCAMLFIND)\" ocamldoc\nCAMLDEP     ?= \"$(OCAMLFIND)\" ocamldep -slash -ml-synonym .mlpack\n\n# DESTDIR is prepended to all installation paths\nDESTDIR ?=\n\n# Debug builds, typically -g to OCaml, -debug to Coq.\nCAMLDEBUG ?=\nCOQDEBUG ?=\n\n# Extra packages to be linked in (as in findlib -package)\nCAMLPKGS ?=\nFINDLIBPKGS = -package coq-core.plugins.ltac $(CAMLPKGS)\n\n# Option for making timing files\nTIMING?=\n# Option for changing sorting of timing output file\nTIMING_SORT_BY ?= auto\n# Option for changing the fuzz parameter on the output file\nTIMING_FUZZ ?= 0\n# Option for changing whether to use real or user time for timing tables\nTIMING_REAL?=\n# Option for including the memory column(s)\nTIMING_INCLUDE_MEM?=\n# Option for sorting by the memory column\nTIMING_SORT_BY_MEM?=\n# Output file names for timed builds\nTIME_OF_BUILD_FILE               ?= time-of-build.log\nTIME_OF_BUILD_BEFORE_FILE        ?= time-of-build-before.log\nTIME_OF_BUILD_AFTER_FILE         ?= time-of-build-after.log\nTIME_OF_PRETTY_BUILD_FILE        ?= time-of-build-pretty.log\nTIME_OF_PRETTY_BOTH_BUILD_FILE   ?= time-of-build-both.log\nTIME_OF_PRETTY_BUILD_EXTRA_FILES ?= - # also output to the command line\n\nTGTS ?=\n\n# Retro compatibility (DESTDIR is standard on Unix, DSTROOT is 
not)\nifdef DSTROOT\nDESTDIR := $(DSTROOT)\nendif\n\n# Substitution of the path by appending $(DESTDIR) if needed.\n# The variable $(COQMF_WINDRIVE) can be needed for Cygwin environments.\nwindrive_path = $(if $(COQMF_WINDRIVE),$(subst $(COQMF_WINDRIVE),/,$(1)),$(1))\ndestination_path = $(if $(DESTDIR),$(DESTDIR)/$(call windrive_path,$(1)),$(1))\n\n# Installation paths of libraries and documentation.\nCOQLIBINSTALL ?= $(call destination_path,$(COQLIB)/user-contrib)\nCOQDOCINSTALL ?= $(call destination_path,$(DOCDIR)/coq/user-contrib)\nCOQPLUGININSTALL ?= $(call destination_path,$(COQCORELIB)/..)\nCOQTOPINSTALL ?= $(call destination_path,$(COQLIB)/toploop) # FIXME: Unused variable?\n\n# findlib files installation\nFINDLIBPREINST= mkdir -p \"$(COQPLUGININSTALL)/\"\nFINDLIBDESTDIR= -destdir \"$(COQPLUGININSTALL)/\"\n\n# we need to move out of sight $(METAFILE) otherwise findlib thinks the\n# package is already installed\nfindlib_install = \\\n\t$(HIDE)if [ \"$(METAFILE)\" ]; then \\\n\t  $(FINDLIBPREINST) && \\\n\t  mv \"$(METAFILE)\" \"$(METAFILE).skip\" ; \\\n\t  \"$(OCAMLFIND)\" install $(2) $(FINDLIBDESTDIR) $(FINDLIBPACKAGE) $(1); \\\n\t  rc=$$?; \\\n\t  mv \"$(METAFILE).skip\" \"$(METAFILE)\"; \\\n\t  exit $$rc; \\\n\tfi\nfindlib_remove = \\\n\t$(HIDE)if [ ! -z \"$(METAFILE)\" ]; then\\\n\t  \"$(OCAMLFIND)\" remove $(FINDLIBDESTDIR) $(FINDLIBPACKAGE); \\\n\tfi\n\n\n########## End of parameters ##################################################\n# What follows may be relevant to you only if you need to\n# extend this Makefile.  If so, look for 'Extension point' here and\n# put in Makefile.local double colon rules accordingly.\n# E.g. 
to perform some work after the all target completes you can write\n#\n# post-all::\n# \techo \"All done!\"\n#\n# in Makefile.local\n#\n###############################################################################\n\n\n\n\n# Flags #######################################################################\n#\n# We define a bunch of variables combining the parameters.\n# To add additional flags to coq, coqchk or coqdoc, set the\n# {COQ,COQCHK,COQDOC}EXTRAFLAGS variable to whatever you want to add.\n# To overwrite the default choice and set your own flags entirely, set the\n# {COQ,COQCHK,COQDOC}FLAGS variable.\n\nSHOW := $(if $(VERBOSE),@true \"\",@echo \"\")\nHIDE := $(if $(VERBOSE),,@)\n\nTIMER=$(if $(TIMED), $(STDTIME), $(TIMECMD))\n\nOPT?=\n\n# The DYNOBJ and DYNLIB variables are used by \"coqdep -dyndep var\" in .v.d\nifeq '$(OPT)' '-byte'\nUSEBYTE:=true\nDYNOBJ:=.cma\nDYNLIB:=.cma\nelse\nUSEBYTE:=\nDYNOBJ:=.cmxs\nDYNLIB:=.cmxs\nendif\n\n# these variables are meant to be overridden if you want to add *extra* flags\nCOQEXTRAFLAGS?=\nCOQCHKEXTRAFLAGS?=\nCOQDOCEXTRAFLAGS?=\n\n# Find the last argument of the form \"-native-compiler FLAG\"\nCOQUSERNATIVEFLAG:=$(strip \\\n$(subst -native-compiler-,,\\\n$(lastword \\\n$(filter -native-compiler-%,\\\n$(subst -native-compiler ,-native-compiler-,\\\n$(strip $(COQEXTRAFLAGS)))))))\n\nCOQFILTEREDEXTRAFLAGS:=$(strip \\\n$(filter-out -native-compiler-%,\\\n$(subst -native-compiler ,-native-compiler-,\\\n$(strip $(COQEXTRAFLAGS)))))\n\nCOQACTUALNATIVEFLAG:=$(lastword $(COQMF_COQ_NATIVE_COMPILER_DEFAULT) $(COQMF_COQPROJECTNATIVEFLAG) $(COQUSERNATIVEFLAG))\n\nifeq '$(COQACTUALNATIVEFLAG)' 'yes'\n  COQNATIVEFLAG=\"-w\" \"-deprecated-native-compiler-option\" \"-native-compiler\" \"ondemand\"\n  COQDONATIVE=\"yes\"\nelse\nifeq '$(COQACTUALNATIVEFLAG)' 'ondemand'\n  COQNATIVEFLAG=\"-w\" \"-deprecated-native-compiler-option\" \"-native-compiler\" \"ondemand\"\n  COQDONATIVE=\"no\"\nelse\n  COQNATIVEFLAG=\"-w\" 
\"-deprecated-native-compiler-option\" \"-native-compiler\" \"no\"\n  COQDONATIVE=\"no\"\nendif\nendif\n\n# these flags do NOT contain the libraries, to make them easier to overwrite\nCOQFLAGS?=-q $(OTHERFLAGS) $(COQFILTEREDEXTRAFLAGS) $(COQNATIVEFLAG)\nCOQCHKFLAGS?=-silent -o $(COQCHKEXTRAFLAGS)\nCOQDOCFLAGS?=-interpolate -utf8 $(COQDOCEXTRAFLAGS)\n\nCOQDOCLIBS?=$(COQLIBS_NOML)\n\n# The version of Coq being run and the version of coq_makefile that\n# generated this makefile\nCOQ_VERSION:=$(shell $(COQC) --print-version | cut -d \" \" -f 1)\nCOQMAKEFILE_VERSION:=8.18.0\n\n# COQ_SRC_SUBDIRS is for user-overriding, usually to add\n# `user-contrib/Foo` to the includes, we keep COQCORE_SRC_SUBDIRS for\n# Coq's own core libraries, which should be replaced by ocamlfind\n# options at some point.\nCOQ_SRC_SUBDIRS?=\nCOQSRCLIBS?= $(foreach d,$(COQ_SRC_SUBDIRS), -I \"$(COQLIB)/$(d)\")\n\nCAMLFLAGS+=$(OCAMLLIBS) $(COQSRCLIBS)\n# ocamldoc fails with unknown argument otherwise\nCAMLDOCFLAGS:=$(filter-out -annot, $(filter-out -bin-annot, $(CAMLFLAGS)))\nCAMLFLAGS+=$(OCAMLWARN)\n\nifneq (,$(TIMING))\n  ifeq (after,$(TIMING))\n    TIMING_EXT=after-timing\n  else\n    ifeq (before,$(TIMING))\n      TIMING_EXT=before-timing\n    else\n      TIMING_EXT=timing\n    endif\n  endif\n  TIMING_ARG=-time-file $<.$(TIMING_EXT)\nelse\n  TIMING_ARG=\nendif\n\n# Files #######################################################################\n#\n# We here define a bunch of variables about the files being part of the\n# Coq project in order to ease the writing of build target and build rules\n\nVDFILE := .Makefile.d\n\nALLSRCFILES := \\\n\t$(MLGFILES) \\\n\t$(MLFILES) \\\n\t$(MLPACKFILES) \\\n\t$(MLLIBFILES) \\\n\t$(MLIFILES)\n\n# helpers\nvo_to_obj = $(addsuffix .o,\\\n  $(filter-out Warning: Error:,\\\n  $(shell $(COQTOP) -q -noinit -batch -quiet -print-mod-uid $(1))))\nstrip_dotslash = $(patsubst ./%,%,$(1))\n\n# without this we get undefined variables in the expansion for the\n# targets of the 
[deprecated,use-mllib-or-mlpack] rule\nwith_undef = $(if $(filter-out undefined, $(origin $(1))),$($(1)))\n\nVO = vo\nVOS = vos\n\nVOFILES = $(VFILES:.v=.$(VO))\nGLOBFILES = $(VFILES:.v=.glob)\nHTMLFILES = $(VFILES:.v=.html)\nGHTMLFILES = $(VFILES:.v=.g.html)\nBEAUTYFILES = $(addsuffix .beautified,$(VFILES))\nTEXFILES = $(VFILES:.v=.tex)\nGTEXFILES = $(VFILES:.v=.g.tex)\nCMOFILES = \\\n\t$(MLGFILES:.mlg=.cmo) \\\n\t$(MLFILES:.ml=.cmo) \\\n\t$(MLPACKFILES:.mlpack=.cmo)\nCMXFILES = $(CMOFILES:.cmo=.cmx)\nOFILES = $(CMXFILES:.cmx=.o)\nCMAFILES = $(MLLIBFILES:.mllib=.cma) $(MLPACKFILES:.mlpack=.cma)\nCMXAFILES = $(CMAFILES:.cma=.cmxa)\nCMIFILES = \\\n\t$(CMOFILES:.cmo=.cmi) \\\n\t$(MLIFILES:.mli=.cmi)\n# the /if/ is because old _CoqProject did not list a .ml(pack|lib) but just\n# a .mlg file\nCMXSFILES = \\\n\t$(MLPACKFILES:.mlpack=.cmxs) \\\n\t$(CMXAFILES:.cmxa=.cmxs) \\\n\t$(if $(MLPACKFILES)$(CMXAFILES),,\\\n\t\t$(MLGFILES:.mlg=.cmxs) $(MLFILES:.ml=.cmxs))\n\n# files that are packed into a plugin (no extension)\nPACKEDFILES = \\\n\t$(call strip_dotslash, \\\n\t  $(foreach lib, \\\n\t    $(call strip_dotslash, \\\n\t       $(MLPACKFILES:.mlpack=_MLPACK_DEPENDENCIES)),$(call with_undef,$(lib))))\n# files that are archived into a .cma (mllib)\nLIBEDFILES = \\\n\t$(call strip_dotslash, \\\n\t  $(foreach lib, \\\n\t    $(call strip_dotslash, \\\n\t       $(MLLIBFILES:.mllib=_MLLIB_DEPENDENCIES)),$(call with_undef,$(lib))))\nCMIFILESTOINSTALL = $(filter-out $(addsuffix .cmi,$(PACKEDFILES)),$(CMIFILES))\nCMOFILESTOINSTALL = $(filter-out $(addsuffix .cmo,$(PACKEDFILES)),$(CMOFILES))\nOBJFILES = $(call vo_to_obj,$(VOFILES))\nALLNATIVEFILES = \\\n\t$(OBJFILES:.o=.cmi) \\\n\t$(OBJFILES:.o=.cmx) \\\n\t$(OBJFILES:.o=.cmxs)\nFINDLIBPACKAGE=$(patsubst .%,%,$(suffix $(METAFILE)))\n\n# trick: wildcard filters out non-existing files, so that `install` doesn't show\n# warnings and `clean` doesn't pass to rm a list of files that is too long for\n# the shell.\nNATIVEFILES = $(wildcard 
$(ALLNATIVEFILES))\nFILESTOINSTALL = \\\n\t$(VOFILES) \\\n\t$(VFILES) \\\n\t$(GLOBFILES) \\\n\t$(NATIVEFILES) \\\n\t$(CMXSFILES)\t\t# to be removed when we remove legacy loading\nFINDLIBFILESTOINSTALL = \\\n\t$(CMIFILESTOINSTALL)\nifeq '$(HASNATDYNLINK)' 'true'\nDO_NATDYNLINK = yes\nFINDLIBFILESTOINSTALL += $(CMXSFILES) $(CMXAFILES) $(CMOFILESTOINSTALL:.cmo=.cmx)\nelse\nDO_NATDYNLINK =\nendif\n\nALLDFILES = $(addsuffix .d,$(ALLSRCFILES)) $(VDFILE)\n\n# Compilation targets #########################################################\n\nall:\n\t$(HIDE)$(MAKE) --no-print-directory -f \"$(SELF)\" pre-all\n\t$(HIDE)$(MAKE) --no-print-directory -f \"$(SELF)\" real-all\n\t$(HIDE)$(MAKE) --no-print-directory -f \"$(SELF)\" post-all\n.PHONY: all\n\nall.timing.diff:\n\t$(HIDE)$(MAKE) --no-print-directory -f \"$(SELF)\" pre-all\n\t$(HIDE)$(MAKE) --no-print-directory -f \"$(SELF)\" real-all.timing.diff TIME_OF_PRETTY_BUILD_EXTRA_FILES=\"\"\n\t$(HIDE)$(MAKE) --no-print-directory -f \"$(SELF)\" post-all\n.PHONY: all.timing.diff\n\nifeq (0,$(TIMING_REAL))\nTIMING_REAL_ARG :=\nTIMING_USER_ARG := --user\nelse\nifeq (1,$(TIMING_REAL))\nTIMING_REAL_ARG := --real\nTIMING_USER_ARG :=\nelse\nTIMING_REAL_ARG :=\nTIMING_USER_ARG :=\nendif\nendif\n\nifeq (0,$(TIMING_INCLUDE_MEM))\nTIMING_INCLUDE_MEM_ARG := --no-include-mem\nelse\nTIMING_INCLUDE_MEM_ARG :=\nendif\n\nifeq (1,$(TIMING_SORT_BY_MEM))\nTIMING_SORT_BY_MEM_ARG := --sort-by-mem\nelse\nTIMING_SORT_BY_MEM_ARG :=\nendif\n\nmake-pretty-timed-before:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_BEFORE_FILE)\nmake-pretty-timed-after:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_AFTER_FILE)\nmake-pretty-timed make-pretty-timed-before make-pretty-timed-after::\n\t$(HIDE)rm -f pretty-timed-success.ok\n\t$(HIDE)($(MAKE) --no-print-directory -f \"$(PARENT)\" $(TGTS) TIMED=1 2>&1 && touch pretty-timed-success.ok) | tee -a $(TIME_OF_BUILD_FILE)\n\t$(HIDE)rm pretty-timed-success.ok # must not be -f; must fail if the touch 
failed\nprint-pretty-timed::\n\t$(HIDE)$(COQMAKE_ONE_TIME_FILE) $(TIMING_INCLUDE_MEM_ARG) $(TIMING_SORT_BY_MEM_ARG) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES)\nprint-pretty-timed-diff::\n\t$(HIDE)$(COQMAKE_BOTH_TIME_FILES) --sort-by=$(TIMING_SORT_BY) $(TIMING_INCLUDE_MEM_ARG) $(TIMING_SORT_BY_MEM_ARG) $(TIMING_REAL_ARG) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES)\nifeq (,$(BEFORE))\nprint-pretty-single-time-diff::\n\t@echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing'\n\t$(HIDE)false\nelse\nifeq (,$(AFTER))\nprint-pretty-single-time-diff::\n\t@echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing'\n\t$(HIDE)false\nelse\nprint-pretty-single-time-diff::\n\t$(HIDE)$(COQMAKE_BOTH_SINGLE_TIMING_FILES) --fuzz=$(TIMING_FUZZ) --sort-by=$(TIMING_SORT_BY) $(TIMING_USER_ARG) $(AFTER) $(BEFORE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES)\nendif\nendif\npretty-timed:\n\t$(HIDE)$(MAKE) --no-print-directory -f \"$(PARENT)\" make-pretty-timed\n\t$(HIDE)$(MAKE) --no-print-directory -f \"$(SELF)\" print-pretty-timed\n.PHONY: pretty-timed make-pretty-timed make-pretty-timed-before make-pretty-timed-after print-pretty-timed print-pretty-timed-diff print-pretty-single-time-diff\n\n# Extension points for actions to be performed before/after the all target\npre-all::\n\t@# Extension point\n\t$(HIDE)if [ \"$(COQMAKEFILE_VERSION)\" != \"$(COQ_VERSION)\" ]; then\\\n\t  echo \"W: This Makefile was generated by Coq $(COQMAKEFILE_VERSION)\";\\\n\t  echo \"W: while the current Coq version is $(COQ_VERSION)\";\\\n\tfi\n.PHONY: pre-all\n\npost-all::\n\t@# Extension point\n.PHONY: post-all\n\nreal-all: $(VOFILES) $(if $(USEBYTE),bytefiles,optfiles)\n.PHONY: 
real-all\n\nreal-all.timing.diff: $(VOFILES:.vo=.v.timing.diff)\n.PHONY: real-all.timing.diff\n\nbytefiles: $(CMOFILES) $(CMAFILES)\n.PHONY: bytefiles\n\noptfiles: $(if $(DO_NATDYNLINK),$(CMXSFILES))\n.PHONY: optfiles\n\n# FIXME, see Ralf's bugreport\n# quick is deprecated, now renamed vio\nvio: $(VOFILES:.vo=.vio)\n.PHONY: vio\nquick: vio\n\t$(warning \"'make quick' is deprecated, use 'make vio' or consider using 'vos' files\")\n.PHONY: quick\n\nvio2vo:\n\t$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) \\\n\t\t-schedule-vio2vo $(J) $(VOFILES:%.vo=%.vio)\n.PHONY: vio2vo\n\n# quick2vo is undocumented\nquick2vo:\n\t$(HIDE)make -j $(J) vio\n\t$(HIDE)VIOFILES=$$(for vofile in $(VOFILES); do \\\n\t  viofile=\"$$(echo \"$$vofile\" | sed \"s/\\.vo$$/.vio/\")\"; \\\n\t  if [ \"$$vofile\" -ot \"$$viofile\" -o ! -e \"$$vofile\" ]; then printf \"$$viofile \"; fi; \\\n\tdone); \\\n\techo \"VIO2VO: $$VIOFILES\"; \\\n\tif [ -n \"$$VIOFILES\" ]; then \\\n\t  $(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) -schedule-vio2vo $(J) $$VIOFILES; \\\n\tfi\n.PHONY: quick2vo\n\ncheckproofs:\n\t$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) \\\n\t\t-schedule-vio-checking $(J) $(VOFILES:%.vo=%.vio)\n.PHONY: checkproofs\n\nvos: $(VOFILES:%.vo=%.vos)\n.PHONY: vos\n\nvok: $(VOFILES:%.vo=%.vok)\n.PHONY: vok\n\nvalidate: $(VOFILES)\n\t$(TIMER) $(COQCHK) $(COQCHKFLAGS) $(COQLIBS_NOML) $^\n.PHONY: validate\n\nonly: $(TGTS)\n.PHONY: only\n\n# Documentation targets #######################################################\n\nhtml: $(GLOBFILES) $(VFILES)\n\t$(SHOW)'COQDOC -d html $(GAL)'\n\t$(HIDE)mkdir -p html\n\t$(HIDE)$(COQDOC) \\\n\t\t-toc $(COQDOCFLAGS) -html $(GAL) $(COQDOCLIBS) -d html $(VFILES)\n\nmlihtml: $(MLIFILES:.mli=.cmi)\n\t$(SHOW)'CAMLDOC -d $@'\n\t$(HIDE)mkdir $@ || rm -rf $@/*\n\t$(HIDE)$(CAMLDOC) -html \\\n\t\t-d $@ -m A $(CAMLDEBUG) $(CAMLDOCFLAGS) $(MLIFILES) $(FINDLIBPKGS)\n\nall-mli.tex: $(MLIFILES:.mli=.cmi)\n\t$(SHOW)'CAMLDOC -latex $@'\n\t$(HIDE)$(CAMLDOC) -latex 
\\\n\t\t-o $@ -m A $(CAMLDEBUG) $(CAMLDOCFLAGS) $(MLIFILES) $(FINDLIBPKGS)\n\nall.ps: $(VFILES)\n\t$(SHOW)'COQDOC -ps $(GAL)'\n\t$(HIDE)$(COQDOC) \\\n\t\t-toc $(COQDOCFLAGS) -ps $(GAL) $(COQDOCLIBS) \\\n\t\t-o $@ `$(COQDEP) -sort $(VFILES)`\n\nall.pdf: $(VFILES)\n\t$(SHOW)'COQDOC -pdf $(GAL)'\n\t$(HIDE)$(COQDOC) \\\n\t\t-toc $(COQDOCFLAGS) -pdf $(GAL) $(COQDOCLIBS) \\\n\t\t-o $@ `$(COQDEP) -sort $(VFILES)`\n\n# FIXME: not quite right, since the output name is different\ngallinahtml: GAL=-g\ngallinahtml: html\n\nall-gal.ps: GAL=-g\nall-gal.ps: all.ps\n\nall-gal.pdf: GAL=-g\nall-gal.pdf: all.pdf\n\n# ?\nbeautify: $(BEAUTYFILES)\n\tfor file in $^; do mv $${file%.beautified} $${file%beautified}old && mv $${file} $${file%.beautified}; done\n\t@echo 'Do not do \"make clean\" until you are sure that everything went well!'\n\t@echo 'If there were a problem, execute \"for file in $$(find . -name \\*.v.old -print); do mv $${file} $${file%.old}; done\" in your shell/'\n.PHONY: beautify\n\n# Installation targets ########################################################\n#\n# There rules can be extended in Makefile.local\n# Extensions can't assume when they run.\n\n# We use $(file) to avoid generating a very long command string to pass to the shell\n# (cf https://coq.zulipchat.com/#narrow/stream/250632-Coq-Platform-devs-.26-users/topic/Strange.20command.20length.20limit.20on.20Linux)\n# However Apple ships old make which doesn't have $(file) so we need a fallback\n$(file >.hasfile,1)\nHASFILE:=$(shell if [ -e .hasfile ]; then echo 1; rm .hasfile; fi)\n\nMKFILESTOINSTALL= $(if $(HASFILE),$(file >.filestoinstall,$(FILESTOINSTALL)),\\\n  $(shell rm -f .filestoinstall) \\\n  $(foreach x,$(FILESTOINSTALL),$(shell printf '%s\\n' \"$x\" >> .filestoinstall)))\n\n# findlib needs the package to not be installed, so we remove it before\n# installing it (see the call to findlib_remove)\ninstall: META\n\t@$(MKFILESTOINSTALL)\n\t$(HIDE)code=0; for f in $$(cat .filestoinstall); do\\\n\t if ! 
[ -f \"$$f\" ]; then >&2 echo $$f does not exist; code=1; fi \\\n\tdone; exit $$code\n\t$(HIDE)for f in $$(cat .filestoinstall); do\\\n\t df=\"`$(COQMKFILE) -destination-of \"$$f\" $(COQLIBS)`\";\\\n\t if [ \"$$?\" != \"0\" -o -z \"$$df\" ]; then\\\n\t   echo SKIP \"$$f\" since it has no logical path;\\\n\t else\\\n\t   install -d \"$(COQLIBINSTALL)/$$df\" &&\\\n\t   install -m 0644 \"$$f\" \"$(COQLIBINSTALL)/$$df\" &&\\\n\t   echo INSTALL \"$$f\" \"$(COQLIBINSTALL)/$$df\";\\\n\t fi;\\\n\tdone\n\t$(call findlib_remove)\n\t$(call findlib_install, META $(FINDLIBFILESTOINSTALL))\n\t$(HIDE)$(MAKE) install-extra -f \"$(SELF)\"\n\t@rm -f .filestoinstall\ninstall-extra::\n\t@# Extension point\n.PHONY: install install-extra\n\nMETA: $(METAFILE)\n\t$(HIDE)if [ \"$(METAFILE)\" ]; then \\\n\t\tcat \"$(METAFILE)\" | grep -v 'directory.*=.*' > META; \\\n\tfi\n\ninstall-byte:\n\t$(call findlib_install, $(CMAFILES) $(CMOFILESTOINSTALL), -add)\n\ninstall-doc:: html mlihtml\n\t@# Extension point\n\t$(HIDE)install -d \"$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html\"\n\t$(HIDE)for i in html/*; do \\\n\t dest=\"$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/$$i\";\\\n\t install -m 0644 \"$$i\" \"$$dest\";\\\n\t echo INSTALL \"$$i\" \"$$dest\";\\\n\tdone\n\t$(HIDE)install -d \\\n\t\t\"$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml\"\n\t$(HIDE)for i in mlihtml/*; do \\\n\t dest=\"$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/$$i\";\\\n\t install -m 0644 \"$$i\" \"$$dest\";\\\n\t echo INSTALL \"$$i\" \"$$dest\";\\\n\tdone\n.PHONY: install-doc\n\nuninstall::\n\t@# Extension point\n\t@$(MKFILESTOINSTALL)\n\t$(call findlib_remove)\n\t$(HIDE)for f in $$(cat .filestoinstall); do \\\n\t df=\"`$(COQMKFILE) -destination-of \"$$f\" $(COQLIBS)`\" &&\\\n\t instf=\"$(COQLIBINSTALL)/$$df/`basename $$f`\" &&\\\n\t rm -f \"$$instf\" &&\\\n\t echo RM \"$$instf\" ;\\\n\tdone\n\t$(HIDE)for f in $$(cat .filestoinstall); do \\\n\t df=\"`$(COQMKFILE) -destination-of \"$$f\" $(COQLIBS)`\" &&\\\n\t echo RMDIR 
\"$(COQLIBINSTALL)/$$df/\" &&\\\n\t (rmdir \"$(COQLIBINSTALL)/$$df/\" 2>/dev/null || true); \\\n\tdone\n\t@rm -f .filestoinstall\n\n.PHONY: uninstall\n\nuninstall-doc::\n\t@# Extension point\n\t$(SHOW)'RM $(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html'\n\t$(HIDE)rm -rf \"$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html\"\n\t$(SHOW)'RM $(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml'\n\t$(HIDE)rm -rf \"$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml\"\n\t$(HIDE) rmdir \"$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/\" || true\n.PHONY: uninstall-doc\n\n# Cleaning ####################################################################\n#\n# There rules can be extended in Makefile.local\n# Extensions can't assume when they run.\n\nclean::\n\t@# Extension point\n\t$(SHOW)'CLEAN'\n\t$(HIDE)rm -f $(CMOFILES)\n\t$(HIDE)rm -f $(CMIFILES)\n\t$(HIDE)rm -f $(CMAFILES)\n\t$(HIDE)rm -f $(CMXFILES)\n\t$(HIDE)rm -f $(CMXAFILES)\n\t$(HIDE)rm -f $(CMXSFILES)\n\t$(HIDE)rm -f $(OFILES)\n\t$(HIDE)rm -f $(CMXAFILES:.cmxa=.a)\n\t$(HIDE)rm -f $(MLGFILES:.mlg=.ml)\n\t$(HIDE)rm -f $(CMXFILES:.cmx=.cmt)\n\t$(HIDE)rm -f $(MLIFILES:.mli=.cmti)\n\t$(HIDE)rm -f $(ALLDFILES)\n\t$(HIDE)rm -f $(NATIVEFILES)\n\t$(HIDE)find . 
-name .coq-native -type d -empty -delete\n\t$(HIDE)rm -f $(VOFILES)\n\t$(HIDE)rm -f $(VOFILES:.vo=.vio)\n\t$(HIDE)rm -f $(VOFILES:.vo=.vos)\n\t$(HIDE)rm -f $(VOFILES:.vo=.vok)\n\t$(HIDE)rm -f $(BEAUTYFILES) $(VFILES:=.old)\n\t$(HIDE)rm -f all.ps all-gal.ps all.pdf all-gal.pdf all.glob all-mli.tex\n\t$(HIDE)rm -f $(VFILES:.v=.glob)\n\t$(HIDE)rm -f $(VFILES:.v=.tex)\n\t$(HIDE)rm -f $(VFILES:.v=.g.tex)\n\t$(HIDE)rm -f pretty-timed-success.ok\n\t$(HIDE)rm -f META\n\t$(HIDE)rm -rf html mlihtml\n.PHONY: clean\n\ncleanall:: clean\n\t@# Extension point\n\t$(SHOW)'CLEAN *.aux *.timing'\n\t$(HIDE)rm -f $(foreach f,$(VFILES:.v=),$(dir $(f)).$(notdir $(f)).aux)\n\t$(HIDE)rm -f $(TIME_OF_BUILD_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE)\n\t$(HIDE)rm -f $(VOFILES:.vo=.v.timing)\n\t$(HIDE)rm -f $(VOFILES:.vo=.v.before-timing)\n\t$(HIDE)rm -f $(VOFILES:.vo=.v.after-timing)\n\t$(HIDE)rm -f $(VOFILES:.vo=.v.timing.diff)\n\t$(HIDE)rm -f .lia.cache .nia.cache\n.PHONY: cleanall\n\narchclean::\n\t@# Extension point\n\t$(SHOW)'CLEAN *.cmx *.o'\n\t$(HIDE)rm -f $(NATIVEFILES)\n\t$(HIDE)rm -f $(CMOFILES:%.cmo=%.cmx)\n.PHONY: archclean\n\n\n# Compilation rules ###########################################################\n\n$(MLIFILES:.mli=.cmi): %.cmi: %.mli\n\t$(SHOW)'CAMLC -c $<'\n\t$(HIDE)$(TIMER) $(CAMLC) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) $<\n\n$(MLGFILES:.mlg=.ml): %.ml: %.mlg\n\t$(SHOW)'COQPP $<'\n\t$(HIDE)$(COQPP) $<\n\n# Stupid hack around a deficient syntax: we cannot concatenate two expansions\n$(filter %.cmo, $(MLFILES:.ml=.cmo) $(MLGFILES:.mlg=.cmo)): %.cmo: %.ml\n\t$(SHOW)'CAMLC -c $<'\n\t$(HIDE)$(TIMER) $(CAMLC) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) $<\n\n# Same hack\n$(filter %.cmx, $(MLFILES:.ml=.cmx) $(MLGFILES:.mlg=.cmx)): %.cmx: %.ml\n\t$(SHOW)'CAMLOPT -c $(FOR_PACK) $<'\n\t$(HIDE)$(TIMER) $(CAMLOPTC) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) $(FOR_PACK) 
$<\n\n\n$(MLLIBFILES:.mllib=.cmxs): %.cmxs: %.cmxa\n\t$(SHOW)'CAMLOPT -shared -o $@'\n\t$(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) \\\n\t\t-shared -o $@ $<\n\n$(MLLIBFILES:.mllib=.cma): %.cma: | %.mllib\n\t$(SHOW)'CAMLC -a -o $@'\n\t$(HIDE)$(TIMER) $(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -a -o $@ $^\n\n$(MLLIBFILES:.mllib=.cmxa): %.cmxa: | %.mllib\n\t$(SHOW)'CAMLOPT -a -o $@'\n\t$(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -a -o $@ $^\n\n\n$(MLPACKFILES:.mlpack=.cmxs): %.cmxs: %.cmxa\n\t$(SHOW)'CAMLOPT -shared -o $@'\n\t$(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) \\\n\t\t-shared -o $@ $<\n\n$(MLPACKFILES:.mlpack=.cmxa): %.cmxa: %.cmx | %.mlpack\n\t$(SHOW)'CAMLOPT -a -o $@'\n\t$(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -a -o $@ $<\n\n$(MLPACKFILES:.mlpack=.cma): %.cma: %.cmo | %.mlpack\n\t$(SHOW)'CAMLC -a -o $@'\n\t$(HIDE)$(TIMER) $(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -a -o $@ $^\n\n$(MLPACKFILES:.mlpack=.cmo): %.cmo: | %.mlpack\n\t$(SHOW)'CAMLC -pack -o $@'\n\t$(HIDE)$(TIMER) $(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -pack -o $@ $^\n\n$(MLPACKFILES:.mlpack=.cmx): %.cmx: | %.mlpack\n\t$(SHOW)'CAMLOPT -pack -o $@'\n\t$(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) -pack -o $@ $^\n\n# This rule is for _CoqProject with no .mllib nor .mlpack\n$(filter-out $(MLLIBFILES:.mllib=.cmxs) $(MLPACKFILES:.mlpack=.cmxs) $(addsuffix .cmxs,$(PACKEDFILES)) $(addsuffix .cmxs,$(LIBEDFILES)),$(MLFILES:.ml=.cmxs) $(MLGFILES:.mlg=.cmxs)): %.cmxs: %.cmx\n\t$(SHOW)'[deprecated,use-mllib-or-mlpack] CAMLOPT -shared -o $@'\n\t$(HIDE)$(TIMER) $(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(FINDLIBPKGS) \\\n\t\t-shared -o $@ $<\n\n# can't make\n# https://www.gnu.org/software/make/manual/make.html#Static-Pattern\n# work with multiple target rules\n# so use eval in a loop instead\n# with grouped targets 
https://www.gnu.org/software/make/manual/make.html#Multiple-Targets\n# if available (GNU Make >= 4.3)\nifneq (,$(filter grouped-target,$(.FEATURES)))\ndefine globvorule=\n\n# take care to $$ variables using $< etc\n  $(1).vo $(1).glob &: $(1).v | $(VDFILE)\n\t$(SHOW)COQC $(1).v\n\t$(HIDE)$$(TIMER) $(COQC) $(COQDEBUG) $$(TIMING_ARG) $(COQFLAGS) $(COQLIBS) $(1).v\nifeq ($(COQDONATIVE), \"yes\")\n\t$(SHOW)COQNATIVE $(1).vo\n\t$(HIDE)$(call TIMER,$(1).vo.native) $(COQNATIVE) $(COQLIBS) $(1).vo\nendif\n\nendef\nelse\n\n$(VOFILES): %.vo: %.v | $(VDFILE)\n\t$(SHOW)COQC $<\n\t$(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(TIMING_ARG) $(COQFLAGS) $(COQLIBS) $<\nifeq ($(COQDONATIVE), \"yes\")\n\t$(SHOW)COQNATIVE $@\n\t$(HIDE)$(call TIMER,$@.native) $(COQNATIVE) $(COQLIBS) $@\nendif\n\n# this is broken :( todo fix if we ever find a solution that doesn't need grouped targets\n$(GLOBFILES): %.glob: %.v\n\t$(SHOW)'COQC $< (for .glob)'\n\t$(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $<\n\nendif\n\n$(foreach vfile,$(VFILES:.v=),$(eval $(call globvorule,$(vfile))))\n\n$(VFILES:.v=.vio): %.vio: %.v\n\t$(SHOW)COQC -vio $<\n\t$(HIDE)$(TIMER) $(COQC) -vio $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $<\n\n$(VFILES:.v=.vos): %.vos: %.v\n\t$(SHOW)COQC -vos $<\n\t$(HIDE)$(TIMER) $(COQC) -vos $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $<\n\n$(VFILES:.v=.vok): %.vok: %.v\n\t$(SHOW)COQC -vok $<\n\t$(HIDE)$(TIMER) $(COQC) -vok $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $<\n\n$(addsuffix .timing.diff,$(VFILES)): %.timing.diff : %.before-timing %.after-timing\n\t$(SHOW)PYTHON TIMING-DIFF $*.{before,after}-timing\n\t$(HIDE)$(MAKE) --no-print-directory -f \"$(SELF)\" print-pretty-single-time-diff BEFORE=$*.before-timing AFTER=$*.after-timing TIME_OF_PRETTY_BUILD_FILE=\"$@\"\n\n$(BEAUTYFILES): %.v.beautified: %.v\n\t$(SHOW)'BEAUTIFY $<'\n\t$(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) -beautify $<\n\n$(TEXFILES): %.tex: %.v\n\t$(SHOW)'COQDOC -latex $<'\n\t$(HIDE)$(COQDOC) $(COQDOCFLAGS) -latex $< -o 
$@\n\n$(GTEXFILES): %.g.tex: %.v\n\t$(SHOW)'COQDOC -latex -g $<'\n\t$(HIDE)$(COQDOC) $(COQDOCFLAGS) -latex -g $< -o $@\n\n$(HTMLFILES): %.html: %.v %.glob\n\t$(SHOW)'COQDOC -html $<'\n\t$(HIDE)$(COQDOC) $(COQDOCFLAGS) -html $< -o $@\n\n$(GHTMLFILES): %.g.html: %.v %.glob\n\t$(SHOW)'COQDOC -html -g $<'\n\t$(HIDE)$(COQDOC) $(COQDOCFLAGS)  -html -g $< -o $@\n\n# Dependency files ############################################################\n\nifndef MAKECMDGOALS\n  -include $(ALLDFILES)\nelse\n  ifneq ($(filter-out archclean clean cleanall printenv make-pretty-timed make-pretty-timed-before make-pretty-timed-after print-pretty-timed print-pretty-timed-diff print-pretty-single-time-diff,$(MAKECMDGOALS)),)\n   -include $(ALLDFILES)\n endif\nendif\n\n.SECONDARY: $(ALLDFILES)\n\nredir_if_ok = > \"$@\" || ( RV=$$?; rm -f \"$@\"; exit $$RV )\n\nGENMLFILES:=$(MLGFILES:.mlg=.ml)\n$(addsuffix .d,$(ALLSRCFILES)): $(GENMLFILES)\n\n$(addsuffix .d,$(MLIFILES)): %.mli.d: %.mli\n\t$(SHOW)'CAMLDEP $<'\n\t$(HIDE)$(CAMLDEP) $(OCAMLLIBS) \"$<\" $(redir_if_ok)\n\n$(addsuffix .d,$(MLGFILES)): %.mlg.d: %.ml\n\t$(SHOW)'CAMLDEP $<'\n\t$(HIDE)$(CAMLDEP) $(OCAMLLIBS) \"$<\" $(redir_if_ok)\n\n$(addsuffix .d,$(MLFILES)): %.ml.d: %.ml\n\t$(SHOW)'CAMLDEP $<'\n\t$(HIDE)$(CAMLDEP) $(OCAMLLIBS) \"$<\" $(redir_if_ok)\n\n$(addsuffix .d,$(MLLIBFILES)): %.mllib.d: %.mllib\n\t$(SHOW)'OCAMLLIBDEP $<'\n\t$(HIDE)$(OCAMLLIBDEP) -c $(OCAMLLIBS) \"$<\" $(redir_if_ok)\n\n$(addsuffix .d,$(MLPACKFILES)): %.mlpack.d: %.mlpack\n\t$(SHOW)'OCAMLLIBDEP $<'\n\t$(HIDE)$(OCAMLLIBDEP) -c $(OCAMLLIBS) \"$<\" $(redir_if_ok)\n\n# If this makefile is created using a _CoqProject we have coqdep get\n# options from it. This avoids argument length limits for pathological\n# projects. 
Note that extra options might be on the command line.\nVDFILE_FLAGS:=$(if _CoqProject,-f _CoqProject,) $(CMDLINE_COQLIBS) $(CMDLINE_VFILES)\n\n$(VDFILE): _CoqProject $(VFILES)\n\t$(SHOW)'COQDEP VFILES'\n\t$(HIDE)$(COQDEP) $(if $(strip $(METAFILE)),-m \"$(METAFILE)\") -vos -dyndep var $(VDFILE_FLAGS) $(redir_if_ok)\n\n# Misc ########################################################################\n\nbyte:\n\t$(HIDE)$(MAKE) all \"OPT:=-byte\" -f \"$(SELF)\"\n.PHONY: byte\n\nopt:\n\t$(HIDE)$(MAKE) all \"OPT:=-opt\" -f \"$(SELF)\"\n.PHONY:\topt\n\n# This is deprecated.  To extend this makefile use\n# extension points and Makefile.local\nprintenv::\n\t$(warning printenv is deprecated)\n\t$(warning write extensions in Makefile.local or include Makefile.conf)\n\t@echo 'COQLIB = $(COQLIB)'\n\t@echo 'COQCORELIB = $(COQCORELIB)'\n\t@echo 'DOCDIR = $(DOCDIR)'\n\t@echo 'OCAMLFIND = $(OCAMLFIND)'\n\t@echo 'HASNATDYNLINK = $(HASNATDYNLINK)'\n\t@echo 'SRC_SUBDIRS = $(SRC_SUBDIRS)'\n\t@echo 'COQ_SRC_SUBDIRS = $(COQ_SRC_SUBDIRS)'\n\t@echo 'COQCORE_SRC_SUBDIRS = $(COQCORE_SRC_SUBDIRS)'\n\t@echo 'OCAMLFIND = $(OCAMLFIND)'\n\t@echo 'PP = $(PP)'\n\t@echo 'COQFLAGS = $(COQFLAGS)'\n\t@echo 'COQLIB = $(COQLIBS)'\n\t@echo 'COQLIBINSTALL = $(COQLIBINSTALL)'\n\t@echo 'COQDOCINSTALL = $(COQDOCINSTALL)'\n.PHONY:\tprintenv\n\n# Generate a .merlin file.  
If you need to append directives to this\n# file you can extend the merlin-hook target in Makefile.local\n.merlin:\n\t$(SHOW)'FILL .merlin'\n\t$(HIDE)echo 'FLG $(COQMF_CAMLFLAGS)' > .merlin\n\t$(HIDE)echo 'B $(COQCORELIB)' >> .merlin\n\t$(HIDE)echo 'S $(COQCORELIB)' >> .merlin\n\t$(HIDE)$(foreach d,$(COQCORE_SRC_SUBDIRS), \\\n\t\techo 'B $(COQCORELIB)$(d)' >> .merlin;)\n\t$(HIDE)$(foreach d,$(COQ_SRC_SUBDIRS), \\\n\t\techo 'S $(COQLIB)$(d)' >> .merlin;)\n\t$(HIDE)$(foreach d,$(SRC_SUBDIRS), echo 'B $(d)' >> .merlin;)\n\t$(HIDE)$(foreach d,$(SRC_SUBDIRS), echo 'S $(d)' >> .merlin;)\n\t$(HIDE)$(MAKE) merlin-hook -f \"$(SELF)\"\n.PHONY: merlin\n\nmerlin-hook::\n\t@# Extension point\n.PHONY: merlin-hook\n\n# prints all variables\ndebug:\n\t$(foreach v,\\\n\t\t$(sort $(filter-out $(INITIAL_VARS) INITIAL_VARS,\\\n\t       \t\t$(.VARIABLES))),\\\n\t       \t$(info $(v) = $($(v))))\n.PHONY: debug\n\n.DEFAULT_GOAL := all\n\n# Users can create Makefile.local-late to hook into double-colon rules\n# or add other needed Makefile code, using defined\n# variables if necessary.\n-include Makefile.local-late\n\n# Local Variables:\n# mode: makefile-gmake\n# End:\n"
  },
  {
    "path": "examples/coq-example/proofs/coq/extraction/dummy_core_lib.v",
    "content": "From Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\n\n(* LIBRARY CODE *)\nDefinition t_isize := Z.\nNotation \"'t_Vec' T '((t_Global))'\" := (list T).\nDefinition impl_1__push {A} (l : list A) (a : A) : list A := cons a l.\nDefinition impl_1__pop {A} (l : list A) : list A * option A :=\n  match l with\n  | [] => ([], None)\n  | (x :: xs) => (xs, Some x)\n  end.\nDefinition impl__unwrap {A} (x : option A) `{H : x <> None} : A :=\n  match x as k return k <> None -> _ with\n  | None => fun H => False_rect _ (H eq_refl)\n  | Some a => fun _ => a\n  end H.\nDefinition t_Add_f_add := (fun x y => x + y).\nDefinition t_Mul_f_mul := (fun x y => x * y).\nDefinition t_PartialEq_f_eq := (fun x y => x =? y).\nDefinition impl__isize__rem_euclid := fun x y => x mod y.\nDefinition cast := fun (x : Z) => x.\nDefinition ne := fun x y => negb (x =? y).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition t_PartialOrd_f_lt := fun x y => x <? y.\nNotation \"'Option_Some'\" := Some.\nNotation \"'Option_None'\" := None.\nDefinition sub := fun x y => x - y.\nDefinition impl__new {A} (tt : unit) : list A := [].\nDefinition f_fold {A B} (l : list A) (i : B) (f : B -> A -> B) : B := List.fold_left f l i.\nDefinition f_into_iter {A} := @id A.\n(* /LIBRARY CODE *)\n"
  },
  {
    "path": "examples/coq-example/src/dummy_core_lib.rs",
    "content": "\n"
  },
  {
    "path": "examples/coq-example/src/lib.rs",
    "content": "mod dummy_core_lib;\nuse dummy_core_lib::*;\n\nenum Instruction {\n    Push(isize),\n    Pop,\n    Add,\n    Sub,\n    Mul,\n    Not,\n    Dup,\n}\n\nimpl Instruction {\n    pub fn interpret(self, stack: &mut Vec<isize>) {\n        match self {\n            Instruction::Push(v) => stack.push(v),\n            Instruction::Pop => {\n                stack.pop();\n            }\n            Instruction::Add => match (stack.pop(), stack.pop()) {\n                (Some(a), Some(b)) => stack.push(b + a),\n                _ => (),\n            },\n            Instruction::Sub => match (stack.pop(), stack.pop()) {\n                (Some(a), Some(b)) => stack.push(b - a),\n                _ => (),\n            },\n            Instruction::Mul => match (stack.pop(), stack.pop()) {\n                (Some(a), Some(b)) => stack.push(b * a),\n                _ => (),\n            },\n            Instruction::Not => match stack.pop() {\n                Some(a) => stack.push(if a == 0 { 1 } else { 0 }),\n                _ => (),\n            },\n            Instruction::Dup => match stack.pop() {\n                Some(a) => {\n                    stack.push(a);\n                    stack.push(a);\n                }\n                _ => (),\n            },\n        }\n    }\n}\n\nfn example() -> Vec<isize> {\n    let mut stk = Vec::new();\n    for cmd in [\n        Instruction::Push(1),\n        Instruction::Push(1),\n        Instruction::Add,\n        Instruction::Push(1),\n        Instruction::Push(1),\n        Instruction::Push(1),\n        Instruction::Add,\n        Instruction::Add,\n        Instruction::Dup,\n        Instruction::Mul,\n        Instruction::Sub,\n    ] {\n        cmd.interpret(&mut stk)\n    }\n    stk\n}\n// Push 1: 1\n// Push 1: 1, 1\n//    Add: 2\n// Push 1: 2, 1\n// Push 1: 2, 1, 1\n// Push 1: 2, 1, 1, 1\n//    Add: 2, 1, 2\n//    Add: 2, 3\n//    Dup: 2, 3, 3\n//    Mul: 2, 9\n//    Sub: -7\n"
  },
  {
    "path": "examples/coverage/Cargo.toml",
    "content": "[package]\nname = \"coverage\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n"
  },
  {
    "path": "examples/coverage/default.nix",
    "content": "{\n  stdenv,\n  lib,\n  hax,\n  coqPackages,\n  craneLib,\n  bat,\n  coqGeneratedCore ? import ../../proof-libs/coq/coq {inherit stdenv coqPackages;},\n}:\nlet\n  commonArgs = import ../commonArgs.nix {inherit craneLib lib;};\n  cargoArtifacts = craneLib.buildDepsOnly commonArgs;\nin\n  craneLib.mkCargoDerivation (commonArgs // {\n      inherit cargoArtifacts;\n      pname = \"coverage\";\n      doCheck = false;\n      buildPhaseCargoCommand = ''\n        (\n          cd examples/coverage/\n          cargo hax into coq\n          cd proofs/coq/extraction/\n          echo -e \"-R ${coqGeneratedCore}/lib/coq/user-contrib/Core Core\\n$(cat _CoqProject)\" > _CoqProject\n          coq_makefile -f _CoqProject -o Makefile\n          make\n        )\n      '';\n      cargoToml = ./Cargo.toml;\n      buildInputs = [\n        hax\n        coqPackages.coq-record-update\n        coqPackages.coq\n      ];\n    })"
  },
  {
    "path": "examples/coverage/src/lib.rs",
    "content": "// https://doc.rust-lang.org/reference/types.html\nmod test_primitives;\n// mod test_sequence;\n\nmod test_enum;\n// mod test_struct;\n\n// mod test_closures;\n// mod test_functions;\n\n// mod test_instance;\n\n// mod test_trait;\n\n// mod test_arrays;\n"
  },
  {
    "path": "examples/coverage/src/test_arrays.rs",
    "content": "// // This function borrows a slice.\n// fn analyze_slice(slice: &[i32]) {\n//     let _ = slice[0];\n//     let _ = slice.len();\n// }\n\n// fn test(){\n//     // Fixed-size array (type signature is superfluous).\n//     let xs: [i32; 5] = [1, 2, 3, 4, 5];\n\n//     // All elements can be initialized to the same value.\n//     let ys: [i32; 500] = [0; 500];\n\n//     // Indexing starts at 0.\n//     let _ = xs[0];\n//     let _ = xs[1];\n\n//     // `len` returns the count of elements in the array.\n//     let _ = xs.len();\n\n//     // Arrays can be automatically borrowed as slices.\n//     analyze_slice(&xs);\n\n//     // Slices can point to a section of an array.\n//     // They are of the form [starting_index..ending_index].\n//     // `starting_index` is the first position in the slice.\n//     // `ending_index` is one more than the last position in the slice.\n//     analyze_slice(&ys[1 .. 4]);\n\n//     // Example of empty slice `&[]`:\n//     let empty_array: [u32; 0] = [];\n//     assert_eq!(&empty_array, &[]);\n//     assert_eq!(&empty_array, &[][..]); // Same but more verbose\n// }\n"
  },
  {
    "path": "examples/coverage/src/test_closures.rs",
    "content": "// TODO:\n// fn test() {\n//     let add : fn(i32, i32) -> i32 = |x, y| x + y;\n//     let _ = (|x : &u8| { x + x })(&2);\n\n//     fn f<F : FnOnce() -> u8> (g: F) -> u8 {\n//         g() + 2\n//     }\n\n//     f(|| {\n//         23\n//     });\n//     // Prints \"foobar\".\n// }\n"
  },
  {
    "path": "examples/coverage/src/test_enum.rs",
    "content": "fn test() {\n    {\n        enum Foo<'a, T, const N: usize> {\n            Bar(u8),\n            Baz,\n            Qux { x: &'a T, y: [T; N], z: u8 },\n        }\n\n        let x: Foo<u8, 12> = Foo::Baz;\n    }\n\n    {\n        enum AnimalA {\n            Dog,\n            Cat,\n        }\n\n        let mut a: AnimalA = AnimalA::Dog;\n        a = AnimalA::Cat;\n    }\n\n    {\n        enum AnimalB {\n            Dog(String, f64),\n            Cat { name: String, weight: f64 },\n        }\n\n        let mut a: AnimalB = AnimalB::Dog(\"Cocoa\".to_string(), 37.2);\n        a = AnimalB::Cat {\n            name: \"Spotty\".to_string(),\n            weight: 2.7,\n        };\n    }\n    {\n        enum Examples {\n            UnitLike,\n            TupleLike(i32),\n            StructLike { value: i32 },\n        }\n\n        // use Examples::*; // Creates aliases to all variants.\n        let x = Examples::UnitLike; // Path expression of the const item.\n        let x = Examples::UnitLike {}; // Struct expression.\n        let y = Examples::TupleLike(123); // Call expression.\n        let y = Examples::TupleLike { 0: 123 }; // Struct expression using integer field names.\n        let z = Examples::StructLike { value: 123 }; // Struct expression.\n    }\n    {\n        #[repr(u8)]\n        enum Enum {\n            Unit = 3,\n            Tuple(u16),\n            Struct { a: u8, b: u16 } = 1,\n        }\n    }\n}\n"
  },
  {
    "path": "examples/coverage/src/test_functions.rs",
    "content": "fn first<A, B>((value, _): (A, i32), y: B) -> A\nwhere\n    B: Clone,\n{\n    value\n}\n\n// foo is generic over A and B\n\nfn foo1<A, B>(x: A, y: B) {}\n\nfn foo2<T>(x: &[T], y: &[T; 1])\nwhere\n    T: Clone,\n{\n    // details elided\n}\n\nfn test() {\n    let x = [1u8];\n    foo2(&x, &x);\n    foo2(&[1, 2], &x);\n}\n\nextern \"Rust\" fn foo3() {}\n\n// async fn regular_example() { } // TODO: Not yet supported\n\n// Requires std::fmt;\n// fn documented() {\n//     #![doc = \"Example\"]\n// }\n"
  },
  {
    "path": "examples/coverage/src/test_instance.rs",
    "content": "// enum SomeEnum<T> {\n//     None,\n//     Some(T),\n// }\n\n// trait SomeTrait {\n//     fn some_fun(&self) -> Self;\n// }\n\n// impl<T> SomeTrait for SomeEnum<T>\n// where\n//     T: SomeTrait,\n// {\n//     #[inline]\n//     fn some_fun(&self) -> Self {\n//         match self {\n//             SomeEnum::Some(x) => SomeEnum::Some(x.some_fun()),\n//             SomeEnum::None => SomeEnum::None,\n//         }\n//     }\n// }\n"
  },
  {
    "path": "examples/coverage/src/test_primitives.rs",
    "content": "fn test_primtives() {\n    // bool\n    let _: bool = false;\n    let _: bool = true;\n\n    // Numerics\n    let _: u8 = 12u8;\n    let _: u16 = 123u16;\n    let _: u32 = 1234u32;\n    let _: u64 = 12345u64;\n    let _: u128 = 123456u128;\n    let _: usize = 32usize;\n\n    let _: i8 = -12i8;\n    let _: i16 = 123i16;\n    let _: i32 = -1234i32;\n    let _: i64 = 12345i64;\n    let _: i128 = 123456i128;\n    let _: isize = -32isize;\n\n    let _: f32 = 1.2f32;\n    let _: f64 = -1.23f64;\n\n    // Textual\n    let _: char = 'c';\n    let _: &str = \"hello world\";\n\n    // Never\n    // cannot be built\n}\n"
  },
  {
    "path": "examples/coverage/src/test_sequence.rs",
    "content": "fn test() {\n    // Tuple\n    let _: () = ();\n    let _: (u8, u16, i8) = (1, 2, 3);\n    let _: u8 = (1, 2).0;\n    let _: u8 = (1,).0;\n    let _: u8 = (1, 2, 3, 4, 5).3;\n\n    // Array\n    let _: [u8; 0] = [];\n    let _: [&str; 3] = [\"23\", \"a\", \"hllo\"];\n    let _: [u8; 14] = [2; 14];\n\n    // Slice\n    let _: &[u8] = &[1, 2, 3, 4];\n    let _: &[&str] = &[];\n}\n"
  },
  {
    "path": "examples/coverage/src/test_struct.rs",
    "content": "struct foo<'a, T, const N: usize> {\n    bar: &'a T,\n    baz: [T; N],\n    qux: u8,\n}\n\n// Point {x: 10.0, y: 20.0};\n// NothingInMe {};\n// TuplePoint(10.0, 20.0);\n// TuplePoint { 0: 10.0, 1: 20.0 }; // Results in the same value as the above line\n// let u = game::User {name: \"Joe\", age: 35, score: 100_000};\n// some_fn::<Cookie>(Cookie);\n\nfn test() {\n    {\n        struct Gamma;\n        let a = Gamma; // Gamma unit value.\n        let b = Gamma {}; // Exact same value as `a`.\n    }\n    {\n        struct Position(i32, i32, i32);\n        Position(0, 0, 0); // Typical way of creating a tuple struct.\n        let c = Position; // `c` is a function that takes 3 arguments.\n        let pos = c(8, 6, 7); // Creates a `Position` value.\n    }\n    // {\n    //     struct Color(u8, u8, u8);\n    //     let c1 = Color(0, 0, 0); // Typical way of creating a tuple struct.\n    //     let c2 = Color {\n    //         0: 255,\n    //         1: 127,\n    //         2: 0,\n    //     }; // Specifying fields by index.\n    //     let c3 = Color { 1: 0, ..c2 }; // Fill out all other fields using a base struct.\n    // }\n    {\n        struct PointA {\n            x: i32,\n            y: i32,\n        }\n        let p = PointA { x: 10, y: 11 };\n        let px: i32 = p.x;\n\n        let mut p2 = PointA { x: 10, y: 11 };\n        p2.x = 10;\n        p2.y = 14;\n    }\n    {\n        struct PointB(i32, i32);\n        let p = PointB(10, 11);\n        let px: i32 = match p {\n            PointB(x, _) => x,\n        };\n    }\n    {\n        struct CookieA;\n        let c = [CookieA, CookieA {}, CookieA, CookieA {}];\n    }\n    {\n        struct Cookie {}\n        const Cookie: Cookie = Cookie {};\n        let c = [Cookie, Cookie {}, Cookie, Cookie {}];\n    }\n}\n"
  },
  {
    "path": "examples/coverage/src/test_trait.rs",
    "content": "// Broken..\n\n// // Co-inductive trait\n// trait TraitA {\n//     type B : TraitB;\n// }\n\n// trait TraitB {\n//     fn test<U>(other : U) -> U\n//     where U: TraitA;\n// }\n"
  },
  {
    "path": "examples/default.nix",
    "content": "{\n  craneLib,\n  stdenv,\n  lib,\n  hax,\n  fstar,\n  hacl-star,\n  hax-env,\n  jq,\n  proverif,\n  lean4,\n}: let\n  commonArgs = import ./commonArgs.nix {inherit craneLib lib;};\n  cargoArtifacts = craneLib.buildDepsOnly commonArgs;\nin\n  craneLib.mkCargoDerivation (commonArgs\n    // {\n      inherit cargoArtifacts;\n      pname = \"hax-examples\";\n      doCheck = false;\n      buildPhaseCargoCommand = ''\n        cd examples\n        eval $(hax-env)\n        export CACHE_DIR=$(mktemp -d)\n        export HINT_DIR=$(mktemp -d)\n        export SHELL=${stdenv.shell}\n        make clean # Should be a no-op (see `filter` above)\n        # Need to inject `HAX_VANILLA_RUSTC=never` because of #472\n        sed -i \"s/make -C limited-order-book/HAX_VANILLA_RUSTC=never make -C limited-order-book/g\" Makefile\n        make\n      '';\n      buildInputs = [\n        hax hax-env fstar jq lean4\n        (proverif.overrideDerivation (_: {\n          patches = [ ./proverif-psk/pv_div_by_zero_fix.diff ];\n        }))\n      ];\n    })\n"
  },
  {
    "path": "examples/hax.fst.config.json",
    "content": "{\n    \"fstar_exe\": \"fstar.exe\",\n    \"options\": [\n        \"--cmi\",\n        \"--warn_error\",\n        \"-331\",\n        \"--cache_checked_modules\",\n        \"--cache_dir\",\n        \"${HAX_HOME}/proof-libs/fstar/.cache\",\n        \"--already_cached\",\n        \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\",\n        \"--query_stats\",\n        \"--split_queries\",\n        \"always\"\n    ],\n    \"include_dirs\": [\n        \".\",\n        \"${HACL_HOME}/lib\",\n        \"${HAX_HOME}/proof-libs/fstar/rust_primitives\",\n        \"${HAX_HOME}/proof-libs/fstar/core\",\n        \"${HAX_HOME}/proof-libs/fstar/hax_lib\"\n    ]\n}\n"
  },
  {
    "path": "examples/kyber_compress/Cargo.toml",
    "content": "[package]\nname = \"kyber_compress\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\nhax-lib.workspace = true\n"
  },
  {
    "path": "examples/kyber_compress/Makefile",
    "content": ".PHONY: default clean\ndefault:\n\tmake -C proofs/fstar/extraction\n\nclean:\n\trm -f proofs/fstar/extraction/.depend\n\trm -f proofs/fstar/extraction/*.fst\n"
  },
  {
    "path": "examples/kyber_compress/proofs/fstar/extraction/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect:\n#  1. `fstar.exe` to be in PATH (alternatively, you can also set\n#     $FSTAR_HOME to be set to your F* repo/install directory)\n#\n#  2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH.\n#\n#  3. the extracted Cargo crate to have \"hax-lib\" as a dependency:\n#     `hax-lib = { version = \"0.1.0-pre.1\", git = \"https://github.com/hacspec/hax\"}`\n#\n# Optionally, you can set `HACL_HOME`.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHACL_HOME     ?= $(HOME)/.hax/hacl_home\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= .cache\nHINT_DIR      ?= .hints\n\nSHELL ?= /usr/bin/env bash\n\nEXECUTABLES = cargo cargo-hax jq\nK := $(foreach bin,$(EXECUTABLES),\\\n        $(if $(shell command -v $(bin) 2> /dev/null),,$(error 
\"No $(bin) in PATH\")))\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\nHAX_CLI = \"cargo hax into fstar --z3rlimit 500\"\n\n# If $HACL_HOME doesn't exist, clone it\n${HACL_HOME}:\n\tmkdir -p \"${HACL_HOME}\"\n\tgit clone --depth 1 https://github.com/hacl-star/hacl-star.git \"${HACL_HOME}\"\n\n# If no any F* file is detected, we run hax\nifeq \"$(wildcard *.fst *fsti)\" \"\"\n$(shell $(SHELL) -c $(HAX_CLI))\nendif\n\n# By default, we process all the files in the current directory\nROOTS = $(wildcard *.fst *fsti)\n\n# Regenerate F* files via hax when Rust sources change\n$(ROOTS): $(shell find ../../../src -type f -name '*.rs')\n\t$(shell $(SHELL) -c $(HAX_CLI))\n\n# The following is a bash script that discovers F* libraries\ndefine FINDLIBS\n    # Prints a path if and only if it exists. Takes one argument: the\n    # path.\n    function print_if_exists() {\n        if [ -d \"$$1\" ]; then\n            echo \"$$1\"\n        fi\n    }\n    # Asks Cargo all the dependencies for the current crate or workspace,\n    # and extract all \"root\" directories for each. Takes zero argument.\n    function dependencies() {\n        cargo metadata --format-version 1 |\n            jq -r '.packages | .[] | .manifest_path | split(\"/\") | .[:-1] | join(\"/\")'\n    }\n    # Find hax libraries *around* a given path. Takes one argument: the\n    # path.\n    function find_hax_libraries_at_path() {\n        path=\"$$1\"\n        # if there is a `proofs/fstar/extraction` subfolder, then that's a\n        # F* library\n        print_if_exists \"$$path/proofs/fstar/extraction\"\n        # Maybe the `proof-libs` folder of hax is around?\n        MAYBE_PROOF_LIBS=$$(realpath -q \"$$path/../proof-libs/fstar\")\n        if [ $$? 
-eq 0 ]; then\n            print_if_exists \"$$MAYBE_PROOF_LIBS/core\"\n            print_if_exists \"$$MAYBE_PROOF_LIBS/rust_primitives\"\n        fi\n    }\n    { while IFS= read path; do\n          find_hax_libraries_at_path \"$$path\"\n      done < <(dependencies)\n    } | sort -u\nendef\nexport FINDLIBS\n\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c \"$$FINDLIBS\")\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  --ext context_pruning \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS)\n\n.depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n\trm *.fst\n"
  },
  {
    "path": "examples/kyber_compress/src/lib.rs",
    "content": "use hax_lib::{ensures, fstar, requires};\n\nconst FIELD_MODULUS: i32 = 3329;\nconst UNSIGNED_FIELD_MODULUS: u32 = FIELD_MODULUS as u32;\n\n#[requires(n == 4 || n == 5 || n == 10 || n == 11 || n == 16)]\n#[ensures(|result| result < 2u32.pow(n as u32))]\nfn get_n_least_significant_bits(n: u8, value: u32) -> u32 {\n    let nth_bit = 1 << n;\n    let mask = nth_bit - 1;\n    fstar!(\"Rust_primitives.Integers.logand_mask_lemma $value (v $n)\");\n    value & mask\n}\n\n#[\n  requires(\n        (coefficient_bits == 4 ||\n         coefficient_bits == 5 ||\n         coefficient_bits == 10 ||\n         coefficient_bits == 11) &&\n         fe < (FIELD_MODULUS as u16))]\n#[\n  ensures(|result| result < 1 << coefficient_bits)]\npub fn compress_unsafe(coefficient_bits: u8, fe: u16) -> i32 {\n    let mut compressed = (fe as u32) << (coefficient_bits + 1);\n    compressed += UNSIGNED_FIELD_MODULUS;\n    compressed /= UNSIGNED_FIELD_MODULUS << 1;\n    compressed &= (1 << coefficient_bits) - 1;\n    fstar!(\"Rust_primitives.Integers.logand_mask_lemma $compressed (v $coefficient_bits)\");\n    get_n_least_significant_bits(coefficient_bits, compressed) as i32\n}\n\n#[\n  requires(\n        (coefficient_bits == 4 ||\n         coefficient_bits == 5 ||\n         coefficient_bits == 10 ||\n         coefficient_bits == 11) &&\n         fe < (FIELD_MODULUS as u16))]\n#[\n  ensures(|result| result < 1 << coefficient_bits)]\npub fn compress(coefficient_bits: u8, fe: u16) -> i32 {\n    let mut compressed = (fe as u64) << coefficient_bits;\n    compressed += 1664 as u64;\n    compressed *= 10_321_340;\n    compressed >>= 35;\n    compressed &= (1 << coefficient_bits) - 1;\n    fstar!(\"Rust_primitives.Integers.logand_mask_lemma $compressed (v $coefficient_bits)\");\n    let compressed = compressed as u32;\n    get_n_least_significant_bits(coefficient_bits, compressed) as i32\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn it_works() {\n        fn 
test(coefficient_bits: u8, fe: u16) {\n            let c1 = compress_unsafe(coefficient_bits, fe);\n            let c2 = compress(coefficient_bits, fe);\n            assert_eq!(c1, c2);\n        }\n\n        for bits in [4u8, 5, 10, 11] {\n            for fe in 0..3329 {\n                test(bits, fe);\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "examples/lean_adc/Cargo.toml",
    "content": "[package]\nname = \"lean_adc\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib.workspace = true\n"
  },
  {
    "path": "examples/lean_adc/Makefile",
    "content": ".PHONY: default clean\ndefault:\n\tcargo hax into lean\n\t(cd proofs/lean && \\\n   elan default v4.29.0-rc1 && \\\n   lake build)\n\nclean:\n\t-rm -f proofs/lean/extraction/lean_adc.lean\n\t-cd proofs/lean && lake clean\n"
  },
  {
    "path": "examples/lean_adc/proofs/lean/lake-manifest.json",
    "content": "{\"version\": \"1.1.0\",\n \"packagesDir\": \".lake/packages\",\n \"packages\":\n [{\"type\": \"path\",\n   \"scope\": \"\",\n   \"name\": \"Hax\",\n   \"manifestFile\": \"lake-manifest.json\",\n   \"inherited\": false,\n   \"dir\": \"../../../../hax-lib/proof-libs/lean\",\n   \"configFile\": \"lakefile.toml\"},\n  {\"url\": \"https://github.com/leanprover-community/quote4\",\n   \"type\": \"git\",\n   \"subDir\": null,\n   \"scope\": \"\",\n   \"rev\": \"23324752757bf28124a518ec284044c8db79fee5\",\n   \"name\": \"Qq\",\n   \"manifestFile\": \"lake-manifest.json\",\n   \"inputRev\": \"v4.29.0-rc1\",\n   \"inherited\": true,\n   \"configFile\": \"lakefile.toml\"}],\n \"name\": \"lean_adc\",\n \"lakeDir\": \".lake\"}\n"
  },
  {
    "path": "examples/lean_adc/proofs/lean/lakefile.toml",
    "content": "name = \"lean_adc\"\nversion = \"0.1.0\"\ndefaultTargets = [\"lean_adc\"]\n\n[[lean_lib]]\nname = \"lean_adc\"\nroots = [\"extraction.lean_adc\"]\n\n[[require]]\nname = \"Hax\"\npath = \"../../../../hax-lib/proof-libs/lean\"\n"
  },
  {
    "path": "examples/lean_adc/proofs/lean/lean-toolchain",
    "content": "leanprover/lean4:v4.29.0-rc1\n"
  },
  {
    "path": "examples/lean_adc/src/lib.rs",
    "content": "//! # 32-bit Addition with Carry (ADC)\n//!\n//! This example demonstrates formal verification of a 32-bit\n//! addition-with-carry (ADC) operation using the hax toolchain and\n//! Lean 4's `bv_decide` bit-vector decision procedure.\n//!\n//! ## What is ADC?\n//!\n//! Addition with carry (ADC) is a fundamental building block in\n//! multi-precision (bignum) arithmetic. When adding large numbers\n//! represented as arrays of 32-bit \"limbs\", each limb addition may\n//! overflow. The ADC operation captures this overflow as a carry-out\n//! bit, which feeds into the next limb addition.\n//!\n//! For example, to add two 128-bit numbers stored as four 32-bit limbs:\n//!\n//! ```text\n//!   (sum0, c0) = adc(a[0], b[0], 0)\n//!   (sum1, c1) = adc(a[1], b[1], c0)\n//!   (sum2, c2) = adc(a[2], b[2], c1)\n//!   (sum3, c3) = adc(a[3], b[3], c2)\n//! ```\n//!\n//! ## Verification approach\n//!\n//! The precondition and postcondition are expressed as plain Rust\n//! functions (`adc_precondition`, `adc_postcondition`) for documentation.\n//! A correctness theorem is embedded via `#[hax_lib::lean::after(...)]`\n//! using a Hoare triple with pure Lean propositions (not the monadic\n//! Rust functions), since `bv_decide` requires pure BitVec goals.\n//!\n//! The proof is fully automated using the tactics from Hax:\n//!\n//!   1. `hax_mvcgen` — generates pure verification conditions from\n//!      the monadic function body using the `bv` specset lemmas.\n//!   2. `bv_decide` — Lean's bit-blasting decision procedure\n//!      automatically verifies the remaining BitVec goals.\n//!\n//! The key property verified:\n//!\n//! ```text\n//!   a + b + carry_in == sum + carry_out * 2^32\n//! ```\n//!\n//! 
where the left-hand side is computed in `u64` to avoid overflow.\n\n/// Precondition: the input carry must be 0 or 1 (a single bit).\n///\n/// This function documents the precondition and is extracted to Lean,\n/// but the proof theorem states the precondition as a pure Lean\n/// proposition (`carry_in ≤ 1`) rather than using this monadic function.\nfn adc_precondition(carry_in: u32) -> bool {\n    carry_in <= 1\n}\n\n/// Postcondition: the 64-bit sum `a + b + carry_in` is correctly\n/// represented as `sum + carry_out * 2^32`.\n///\n/// We verify two properties:\n///   1. `carry_out` is 0 or 1 (it is a single bit).\n///   2. The full equation holds: the wide sum equals the split result.\n///\n/// Like `adc_precondition`, this documents the postcondition but the\n/// proof uses pure Lean propositions instead of this monadic function.\nfn adc_postcondition(a: u32, b: u32, carry_in: u32, sum: u32, carry_out: u32) -> bool {\n    carry_out <= 1\n        && (a as u64 + b as u64 + carry_in as u64)\n            == (sum as u64 + ((carry_out as u64) << 32u64))\n}\n\n/// 32-bit addition with carry.\n///\n/// Computes `a + b + carry_in` where `carry_in` is 0 or 1.\n/// Returns `(sum, carry_out)` where:\n///   - `sum` is the lower 32 bits of the result\n///   - `carry_out` is 1 if the addition overflowed, 0 otherwise\n///\n/// The computation widens operands to `u64` to avoid overflow, then\n/// splits the 64-bit result back into 32-bit values.\n///\n/// # Verification\n///\n/// The `#[hax_lib::lean::after(...)]` attribute embeds a Lean 4\n/// theorem directly after the extracted function definition. This\n/// theorem states: given the precondition (carry_in is 0 or 1),\n/// the function satisfies the postcondition (the full sum equation\n/// holds and carry_out is 0 or 1).\n///\n/// The proof uses:\n///   1. `hax_mvcgen` — to generate pure verification conditions\n///      from the monadic function body.\n///   2. 
`bv_decide` — Lean's bit-blasting procedure to\n///      automatically verify the remaining BitVec goals.\n#[hax_lib::lean::after(\n    // The specification is stated with pure Lean propositions (not through the\n    // monadic adc_precondition/adc_postcondition Rust functions), so that\n    // bv_decide can reason about the BitVec properties directly.\n    \"\nset_option maxHeartbeats 1000000 in\nset_option hax_mvcgen.specset \\\"bv\\\" in\ntheorem adc_u32_spec (a b carry_in : u32) :\n  ⦃ ⌜ carry_in ≤ 1 ⌝ ⦄\n  lean_adc.adc_u32 a b carry_in\n  ⦃ ⇓ ⟨sum, carry_out⟩ =>\n    ⌜ carry_out ≤ 1 ∧\n      UInt32.toUInt64 a + UInt32.toUInt64 b + UInt32.toUInt64 carry_in =\n        UInt32.toUInt64 sum + (UInt32.toUInt64 carry_out <<< (32 : UInt64)) ⌝ ⦄\n:= by\n  hax_mvcgen [lean_adc.adc_u32]\n    <;> bv_decide (timeout := 90)\n\"\n)]\npub fn adc_u32(a: u32, b: u32, carry_in: u32) -> (u32, u32) {\n    // Widen to u64 so the addition cannot overflow.\n    let wide: u64 = a as u64 + b as u64 + carry_in as u64;\n    // Extract the lower 32 bits as the sum.\n    let sum: u32 = wide as u32;\n    // Extract bit 32 as the carry-out (0 or 1).\n    let carry_out: u32 = (wide >> 32u64) as u32;\n    (sum, carry_out)\n}\n"
  },
  {
    "path": "examples/lean_barrett/Cargo.toml",
    "content": "[package]\nname = \"lean_barrett\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\nhax-lib.workspace = true\n"
  },
  {
    "path": "examples/lean_barrett/Makefile",
    "content": ".PHONY: default clean\ndefault:\n\tcargo hax into lean\n\t(cd proofs/lean && \\\n   elan default v4.29.0-rc1 && \\\n   lake build)\n\nclean:\n\t-rm -f proofs/lean/extraction/Lean_barrett.lean\n\t-cd proofs/lean && lake clean\n"
  },
  {
    "path": "examples/lean_barrett/proofs/lean/lake-manifest.json",
    "content": "{\"version\": \"1.1.0\",\n \"packagesDir\": \".lake/packages\",\n \"packages\":\n [{\"type\": \"path\",\n   \"scope\": \"\",\n   \"name\": \"Hax\",\n   \"manifestFile\": \"lake-manifest.json\",\n   \"inherited\": false,\n   \"dir\": \"../../../../hax-lib/proof-libs/lean\",\n   \"configFile\": \"lakefile.toml\"},\n  {\"url\": \"https://github.com/leanprover-community/quote4\",\n   \"type\": \"git\",\n   \"subDir\": null,\n   \"scope\": \"\",\n   \"rev\": \"bd58c9efe2086d56ca361807014141a860ddbf8c\",\n   \"name\": \"Qq\",\n   \"manifestFile\": \"lake-manifest.json\",\n   \"inputRev\": \"v4.27.0\",\n   \"inherited\": true,\n   \"configFile\": \"lakefile.toml\"}],\n \"name\": \"Lean_barrett\",\n \"lakeDir\": \".lake\"}\n"
  },
  {
    "path": "examples/lean_barrett/proofs/lean/lakefile.toml",
    "content": "name = \"lean_barrett\"\nversion = \"0.1.0\"\ndefaultTargets = [\"lean_barrett\"]\n\n[[lean_lib]]\nname = \"lean_barrett\"\nroots = [\"extraction.lean_barrett\"]\n\n[[require]]\nname = \"Hax\"\npath = \"../../../../hax-lib/proof-libs/lean\"\n"
  },
  {
    "path": "examples/lean_barrett/proofs/lean/lean-toolchain",
    "content": "leanprover/lean4:v4.29.0-rc1 "
  },
  {
    "path": "examples/lean_barrett/src/lib.rs",
    "content": "/// Values having this type hold a representative 'x' of the Kyber field.\n/// We use 'fe' as a shorthand for this type.\npub(crate) type FieldElement = i32;\n\nconst BARRETT_R: i64 = 0x400000; // is 0x4000000 in the normal barrett example\n\nconst BARRETT_SHIFT: i64 = 26;\n\nconst BARRETT_MULTIPLIER: i64 = 20159;\n\npub(crate) const FIELD_MODULUS: i32 = 3329;\n\n// Signed Barrett Reduction\n//\n// Given an input `value`, `barrett_reduce` outputs a representative `result`\n// such that:\n//\n// - result ≡ value (mod FIELD_MODULUS)\n// - the absolute value of `result` is bound as follows:\n//\n// `|result| ≤ FIELD_MODULUS / 2 · (|value|/BARRETT_R + 1)\n//\n// In particular, if `|value| < BARRETT_R`, then `|result| < FIELD_MODULUS`.\n\nfn barrett_reduce_precondition(value: FieldElement) -> bool {\n    i64::from(value) >= -BARRETT_R && i64::from(value) <= BARRETT_R\n}\n\nfn barrett_reduce_postcondition(value: FieldElement, result: FieldElement) -> bool {\n    let valid_result = value % FIELD_MODULUS;\n    result > -FIELD_MODULUS\n        && result < FIELD_MODULUS\n        && (result == valid_result\n            || result == valid_result + FIELD_MODULUS\n            || result == valid_result - FIELD_MODULUS)\n}\n\npub fn barrett_reduce(value: FieldElement) -> FieldElement {\n    let t = i64::from(value) * BARRETT_MULTIPLIER;\n    let t = t + (BARRETT_R >> 1);\n    let quotient = t >> BARRETT_SHIFT;\n    let quotient = quotient as i32;\n    let sub = quotient * FIELD_MODULUS;\n    value - sub\n}\n\n// A theorem stating that Barrett meets its post-condition, given its pre-condition.\n// In the next iteration, this theorem would be auto-generated, with a sorry proof.\n#[hax_lib::lean::replace(\n    \"\nset_option maxHeartbeats 1000000 in\n-- quite computation intensive\ntheorem barrett_spec (value: i32) :\n  ⦃ ⌜ barrett_reduce_precondition (value) = pure true ⌝ ⦄\n  barrett_reduce value\n  ⦃ ⇓ r => ⌜ barrett_reduce_postcondition value r = pure true⌝ ⦄\n:= 
by\n  -- Unfold all auxiliary functions:\n  unfold\n    barrett_reduce barrett_reduce_precondition\n    barrett_reduce_postcondition\n    FIELD_MODULUS BARRETT_R\n    BARRETT_MULTIPLIER BARRETT_SHIFT at *\n  -- Invoke bit blasting:\n  hax_bv_decide (timeout := 90)\n\"\n)]\npub fn theorem() {}\n"
  },
  {
    "path": "examples/lean_chacha20/Cargo.toml",
    "content": "[package]\nname = \"lean_chacha20\"\nversion = \"0.1.0\"\nauthors = [\"Clement Blaudeau <clement@cryspen.com>\"]\nedition = \"2021\"\n\n[dependencies]\nhax-lib.workspace = true\nhax-bounded-integers.workspace = true\n"
  },
  {
    "path": "examples/lean_chacha20/Makefile",
    "content": ".PHONY: default clean\ndefault:\n\tcargo hax into lean\n\t(cd proofs/lean && \\\n   elan default v4.29.0-rc1 && \\\n   lake build)\n\nclean:\n\t-rm -f proofs/lean/extraction/\n\t-cd proofs/lean && lake clean\n"
  },
  {
    "path": "examples/lean_chacha20/proofs/lean/lake-manifest.json",
    "content": "{\"version\": \"1.1.0\",\n \"packagesDir\": \".lake/packages\",\n \"packages\":\n [{\"type\": \"path\",\n   \"scope\": \"\",\n   \"name\": \"Hax\",\n   \"manifestFile\": \"lake-manifest.json\",\n   \"inherited\": false,\n   \"dir\": \"../../../../hax-lib/proof-libs/lean\",\n   \"configFile\": \"lakefile.toml\"},\n  {\"url\": \"https://github.com/leanprover-community/quote4\",\n   \"type\": \"git\",\n   \"subDir\": null,\n   \"scope\": \"\",\n   \"rev\": \"bd58c9efe2086d56ca361807014141a860ddbf8c\",\n   \"name\": \"Qq\",\n   \"manifestFile\": \"lake-manifest.json\",\n   \"inputRev\": \"v4.27.0\",\n   \"inherited\": true,\n   \"configFile\": \"lakefile.toml\"}],\n \"name\": \"Lean_chacha20\",\n \"lakeDir\": \".lake\"}\n"
  },
  {
    "path": "examples/lean_chacha20/proofs/lean/lakefile.toml",
    "content": "name = \"lean_chacha20\"\nversion = \"0.1.0\"\ndefaultTargets = [\"lean_chacha20\"]\n\n[[lean_lib]]\nname = \"lean_chacha20\"\nroots = [\"extraction.lean_chacha20\"]\n\n[[require]]\nname = \"Hax\"\npath = \"../../../../hax-lib/proof-libs/lean\"\n"
  },
  {
    "path": "examples/lean_chacha20/proofs/lean/lean-toolchain",
    "content": "leanprover/lean4:v4.29.0-rc1 "
  },
  {
    "path": "examples/lean_chacha20/src/hacspec_helper.rs",
    "content": "use super::State;\n\n#[hax_lib::requires(bytes.len() == 12)]\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof_method::grind]\npub(super) fn to_le_u32s_3(bytes: &[u8]) -> [u32; 3] {\n    // assert_eq!($l, bytes.len() / 4);\n    let mut out = [0; 3];\n    // for (i, block) in bytes.chunks(4).enumerate() {\n    for i in 0..3 {\n        out[i] = u32::from_le_bytes(bytes[4 * i..4 * i + 4].try_into().unwrap());\n    }\n    out\n}\n\n#[hax_lib::requires(bytes.len() == 32)]\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof_method::grind]\npub(super) fn to_le_u32s_8(bytes: &[u8]) -> [u32; 8] {\n    // assert_eq!(8, bytes.len() / 4);\n    let mut out = [0; 8];\n    // for (i, block) in bytes.chunks(4).enumerate() {\n    for i in 0..8 {\n        out[i] = u32::from_le_bytes(bytes[4 * i..4 * i + 4].try_into().unwrap());\n    }\n    out\n}\n\n#[hax_lib::requires(bytes.len() == 64)]\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof_method::grind]\npub(super) fn to_le_u32s_16(bytes: &[u8]) -> [u32; 16] {\n    // assert_eq!(16, bytes.len() / 4);\n    let mut out = [0; 16];\n    // for (i, block) in bytes.chunks(4).enumerate() {\n    for i in 0..16 {\n        out[i] = u32::from_le_bytes(bytes[4 * i..4 * i + 4].try_into().unwrap());\n    }\n    out\n}\n\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof_method::grind]\npub(super) fn u32s_to_le_bytes(state: &[u32; 16]) -> [u8; 64] {\n    // <const L: usize>\n    let mut out = [0; 64];\n    for i in 0..state.len() {\n        let tmp = state[i].to_le_bytes();\n        for j in 0..4 {\n            out[i * 4 + j] = tmp[j];\n        }\n    }\n    out\n}\n\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof_method::grind]\npub(super) fn xor_state(mut state: State, other: State) -> State {\n    for i in 0..16 {\n        state[i] = state[i] ^ other[i];\n    }\n    state\n}\n\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof_method::grind]\npub(super) fn add_state(mut state: State, other: State) -> 
State {\n    for i in 0..16 {\n        state[i] = state[i].wrapping_add(other[i]);\n    }\n    state\n}\n\n#[hax_lib::requires(val.len() <= 64)]\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof_method::grind]\npub(super) fn update_array(mut array: [u8; 64], val: &[u8]) -> [u8; 64] {\n    // <const L: usize>\n    assert!(64 >= val.len());\n    for i in 0..val.len() {\n        array[i] = val[i];\n    }\n    array\n}\n"
  },
  {
    "path": "examples/lean_chacha20/src/lib.rs",
    "content": "mod hacspec_helper;\nuse hacspec_helper::*;\n\nuse hax_lib as hax;\n\nuse hax_lib::int::ToInt;\n\ntype State = [u32; 16];\ntype Block = [u8; 64];\ntype ChaChaIV = [u8; 12];\ntype ChaChaKey = [u8; 32];\n\ntype StateIdx = usize;\n\n#[hax_lib::requires(a <= 15 && b <= 15 && d <= 15)]\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof_method::grind]\nfn chacha20_line(a: StateIdx, b: StateIdx, d: StateIdx, s: u32, m: State) -> State {\n    let mut state = m;\n    state[a] = state[a].wrapping_add(state[b]);\n    state[d] = state[d] ^ state[a];\n    state[d] = state[d].rotate_left(s);\n    state\n}\n\n#[hax_lib::requires(a <= 15 && b <= 15 && c <= 15 && d <= 15)]\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof_method::grind]\npub fn chacha20_quarter_round(\n    a: StateIdx,\n    b: StateIdx,\n    c: StateIdx,\n    d: StateIdx,\n    state: State,\n) -> State {\n    let state = chacha20_line(a, b, d, 16, state);\n    let state = chacha20_line(c, d, b, 12, state);\n    let state = chacha20_line(a, b, d, 8, state);\n    chacha20_line(c, d, b, 7, state)\n}\n\nuse hax_lib::*;\n\nfn chacha20_double_round(state: State) -> State {\n    let state = chacha20_quarter_round(0, 4, 8, 12, state);\n    let state = chacha20_quarter_round(1, 5, 9, 13, state);\n    let state = chacha20_quarter_round(2, 6, 10, 14, state);\n    let state = chacha20_quarter_round(3, 7, 11, 15, state);\n\n    let state = chacha20_quarter_round(0, 5, 10, 15, state);\n    let state = chacha20_quarter_round(1, 6, 11, 12, state);\n    let state = chacha20_quarter_round(2, 7, 8, 13, state);\n    chacha20_quarter_round(3, 4, 9, 14, state)\n}\n\npub fn chacha20_rounds(state: State) -> State {\n    let mut st = state;\n    let e: usize = 10;\n    for _i in 0..e {\n        st = chacha20_double_round(st);\n    }\n    st\n}\n\npub fn chacha20_core(ctr: u32, st0: State) -> State {\n    let mut state = st0;\n    state[12] = state[12].wrapping_add(ctr);\n    let k = chacha20_rounds(state);\n    
add_state(state, k)\n}\n\npub fn chacha20_init(key: &ChaChaKey, iv: &ChaChaIV, ctr: u32) -> State {\n    let key_u32: [u32; 8] = to_le_u32s_8(key);\n    let iv_u32: [u32; 3] = to_le_u32s_3(iv);\n    [\n        0x6170_7865,\n        0x3320_646e,\n        0x7962_2d32,\n        0x6b20_6574,\n        key_u32[0],\n        key_u32[1],\n        key_u32[2],\n        key_u32[3],\n        key_u32[4],\n        key_u32[5],\n        key_u32[6],\n        key_u32[7],\n        ctr,\n        iv_u32[0],\n        iv_u32[1],\n        iv_u32[2],\n    ]\n}\n\npub fn chacha20_key_block(state: State) -> Block {\n    let state = chacha20_core(0u32, state);\n    u32s_to_le_bytes(&state)\n}\n\npub fn chacha20_key_block0(key: &ChaChaKey, iv: &ChaChaIV) -> Block {\n    let state = chacha20_init(key, iv, 0u32);\n    chacha20_key_block(state)\n}\n\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof_method::grind]\npub fn chacha20_encrypt_block(st0: State, ctr: u32, plain: &Block) -> Block {\n    let st = chacha20_core(ctr, st0);\n    let pl: State = to_le_u32s_16(plain);\n    let encrypted = xor_state(st, pl);\n    u32s_to_le_bytes(&encrypted)\n}\n\n#[hax_lib::requires(plain.len() <= 64)]\n#[hax_lib::ensures(|res| res.len() == plain.len())]\n#[hax_lib::lean::proof_method::grind]\npub fn chacha20_encrypt_last(st0: State, ctr: u32, plain: &[u8]) -> Vec<u8> {\n    let mut b: Block = [0; 64];\n    b = update_array(b, plain);\n    b = chacha20_encrypt_block(st0, ctr, &b);\n    b[0..plain.len()].to_vec()\n}\n\n#[hax_lib::lean::proof(\n    \"by\n    hax_mvcgen [chacha20_update]\n      <;> try grind [USize64.toNat_add, Array.append_eq_append]\n    · expose_names\n      have : 64 * i.toNat + 64 ≤ m.val.size := by grind\n      grind\"\n)]\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof_method::grind]\npub fn chacha20_update(st0: State, m: &[u8]) -> Vec<u8> {\n    let mut blocks_out = Vec::new();\n    let num_blocks = m.len() / 64;\n    let remainder_len = m.len() % 64;\n    for i in 
0..num_blocks {\n        hax_lib::loop_invariant!(\n            |i: usize| blocks_out.len().to_int() == i.to_int() * 64usize.to_int()\n        );\n        // Full block\n        let b =\n            chacha20_encrypt_block(st0, i as u32, &m[64 * i..(64 * i + 64)].try_into().unwrap());\n        hax_lib::assume!(blocks_out.len() == i * 64);\n        blocks_out.extend_from_slice(&b);\n    }\n    hax_lib::assume!(blocks_out.len() == num_blocks * 64);\n    if remainder_len != 0 {\n        // Last block\n        let b = chacha20_encrypt_last(st0, num_blocks as u32, &m[64 * num_blocks..m.len()]);\n        blocks_out.extend_from_slice(&b);\n    }\n    blocks_out\n}\n\n#[hax_lib::ensures(|_| true)]\n#[hax_lib::lean::proof_method::grind]\npub fn chacha20(m: &[u8], key: &ChaChaKey, iv: &ChaChaIV, ctr: u32) -> Vec<u8> {\n    let state = chacha20_init(key, iv, ctr);\n    chacha20_update(state, m)\n}\n"
  },
  {
    "path": "examples/lean_tutorial/Cargo.toml",
    "content": "[package]\nname = \"lean_tutorial\"\nversion = \"0.1.0\"\n\n[dependencies]\nhax-lib.workspace = true\n"
  },
  {
    "path": "examples/lean_tutorial/Makefile",
    "content": ".PHONY: default clean\ndefault:\n\tcargo hax into lean\n\t(cd proofs/lean && \\\n   elan default v4.29.0-rc1 && \\\n   lake build)\n\nclean:\n\t-rm -f proofs/lean/extraction/lean_tutorial.lean\n\t-cd proofs/lean && lake clean\n"
  },
  {
    "path": "examples/lean_tutorial/proofs/lean/lake-manifest.json",
    "content": "{\"version\": \"1.1.0\",\n \"packagesDir\": \".lake/packages\",\n \"packages\":\n [{\"type\": \"path\",\n   \"scope\": \"\",\n   \"name\": \"Hax\",\n   \"manifestFile\": \"lake-manifest.json\",\n   \"inherited\": false,\n   \"dir\": \"../../../../hax-lib/proof-libs/lean\",\n   \"configFile\": \"lakefile.toml\"}],\n \"name\": \"Lean_tutorial\",\n \"lakeDir\": \".lake\"}\n"
  },
  {
    "path": "examples/lean_tutorial/proofs/lean/lakefile.toml",
    "content": "name = \"Lean_tutorial\"\nversion = \"0.1.0\"\ndefaultTargets = [\"Lean_tutorial\"]\n\n[[lean_lib]]\nname = \"Lean_tutorial\"\nroots = [\"extraction.Lean_tutorial\"]\n\n[[require]]\nname = \"Hax\"\npath = \"../../../../hax-lib/proof-libs/lean\"\n"
  },
  {
    "path": "examples/lean_tutorial/proofs/lean/lean-toolchain",
    "content": "leanprover/lean4:v4.29.0-rc1 "
  },
  {
    "path": "examples/lean_tutorial/src/lib.rs",
    "content": "#[hax_lib::requires(x < 16)]\n#[hax_lib::ensures(|res| res >= x)]\n#[hax_lib::lean::proof(\"by unfold square; hax_bv_decide\")]\nfn square(x: u8) -> u8 {\n    x * x\n}\n"
  },
  {
    "path": "examples/limited-order-book/Cargo.toml",
    "content": "[package]\nname = \"lob_backend\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[lib]\ncrate-type = [\"cdylib\", \"lib\"]\n\n[dependencies]\ncandid = \"0.9.6\"\nic-cdk = \"0.10.0\"\nic-cdk-macros = \"0.8.1\"\nhax-lib.workspace = true\nserde = { version = \"1.0\" }\n"
  },
  {
    "path": "examples/limited-order-book/Makefile",
    "content": ".PHONY: default\ndefault:\n\tmake -C proofs/fstar/extraction\n\nclean:\n\trm -f proofs/fstar/extraction/.depend\n\trm -f proofs/fstar/extraction/*.fst\n"
  },
  {
    "path": "examples/limited-order-book/README.md",
    "content": "This crate comes from https://github.com/oggy-dfin/lob\n"
  },
  {
    "path": "examples/limited-order-book/lob_backend.did",
    "content": "type GetBookResult = record { asks : vec Order; bids : vec Order };\ntype Match = record {\n  ask_id : nat64;\n  quantity : nat64;\n  price : nat64;\n  bid_id : nat64;\n};\ntype Order = record {\n  id : nat64;\n  side : Side;\n  quantity : nat64;\n  price : nat64;\n};\ntype Side = variant { Buy; Sell };\nservice : (opt principal) -> {\n  add_order : (Order) -> (vec Match);\n  get_book : () -> (GetBookResult) query;\n}"
  },
  {
    "path": "examples/limited-order-book/proofs/coq/extraction/Lob_backend.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Hacspec Require Import Hacspec_Lib MachineIntegers.\nFrom Coq Require Import ZArith.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nInductive t_Side : Type :=\n| Side_Buyt_Side\n| Side_Sellt_Side.\n\nRecord t_Order : Type :={\n  f_quantity : int64;\n  f_price : int64;\n  f_side : t_Side_t;\n  f_id : int64;\n}.\n\nRecord t_Match : Type :={\n  f_quantity : int64;\n  f_price : int64;\n  f_ask_id : int64;\n  f_bid_id : int64;\n}.\n\nDefinition is_match (order : t_Order_t) (other : t_Order_t) : bool :=\n  andb (andb (andb ((f_quantity order)>.?(@repr WORDSIZE64 0)) ((f_quantity other)>.?(@repr WORDSIZE64 0))) ((f_side order)<>(f_side other))) (orb (andb ((f_side order)=.?Side_Buyt_Side_t) ((f_price order)>=.?(f_price other))) (andb ((f_side order)=.?Side_Sellt_Side_t) ((f_price order)<=.?(f_price other)))).\n\nDefinition impl__Order__try_match (self : t_Order_t) (other : t_Order_t) : t_Option_t t_Match_t :=\n  if\n    is_match self other\n  then\n    let quantity := (min (f_quantity self) (f_quantity other)) : int64 in\n    let '(bid_id,ask_id) := (if\n        (f_side self)=.?Side_Buyt_Side_t\n      then\n        (f_id self,f_id other)\n      else\n        (f_id other,f_id self)) : (int64 × int64) in\n    Option_Some (Build_Match bid_idask_id(f_price self)quantity)\n  else\n    Option_Nonet_Option_t t_Match_t.\n\nDefinition process_order (order : t_Order_t) (other_side : t_BinaryHeap_t T) : (t_BinaryHeap_t T × (t_Vec_t (t_Match_t) (t_Global_t) × t_Option_t t_Order_t)) :=\n  let matches := (impl__new) : t_Vec_t (t_Match_t) (t_Global_t) in\n  let done := (false) : bool in\n  let '(done,matches,order,other_side) := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 1)(impl_10__len other_side))) (done,matches,order,other_side) (fun '(done,matches,order,other_side) _i =>\n      if\n        not done\n      then\n        match impl__and_then (impl_10__peek other_side) (fun other 
=>\n            impl__Order__try_match (f_into (f_clone other)) order) with\n        | Option_Some m => let order := (Build_t_Order ((f_quantity order).-(f_quantity m))) : t_Order_t in\n          let '(tmp0,out) := (impl_9__pop other_side) : (t_BinaryHeap_t T × t_Option_t T) in\n          let other_side := (tmp0) : t_BinaryHeap_t T in\n          let hoist1 := (out) : t_Option_t T in\n          let hoist2 := (impl__unwrap hoist1) : T in\n          let other := (f_into hoist2) : t_Order_t in\n          let other := (Build_t_Order ((f_quantity other).-(f_quantity m))) : t_Order_t in\n          let other_side := (if\n              (f_quantity other)>.?(@repr WORDSIZE64 0)\n            then\n              let other_side := (impl_9__push other_side (f_from (f_clone other))) : t_BinaryHeap_t T in\n              other_side\n            else\n              other_side) : t_BinaryHeap_t T in\n          let matches := (impl_1__push matches m) : t_Vec_t (t_Match_t) (t_Global_t) in\n          (done,matches,order,other_side)\n        | _ => let done := (true) : bool in\n          (done,matches,order,other_side)\n        end\n      else\n        (done,matches,order,other_side))) : (bool × t_Vec_t (t_Match_t) (t_Global_t) × t_Order_t × t_BinaryHeap_t T) in\n  let output := ((matches,if\n        (f_quantity order)>.?(@repr WORDSIZE64 0)\n      then\n        Option_Some order\n      else\n        Option_Nonet_Option_t t_Order_t)) : (t_Vec_t (t_Match_t) (t_Global_t) × t_Option_t t_Order_t) in\n  (other_side,output).\n"
  },
  {
    "path": "examples/limited-order-book/proofs/fstar/extraction/Lob_backend.fst",
    "content": "module Lob_backend\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Side =\n  | Side_Buy : t_Side\n  | Side_Sell : t_Side\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_7': Core_models.Cmp.t_PartialEq t_Side t_Side\n\nunfold\nlet impl_7 = impl_7'\n\ntype t_Order = {\n  f_id:u64;\n  f_side:t_Side;\n  f_price:u64;\n  f_quantity:u64\n}\n\nlet impl_14: Core_models.Clone.t_Clone t_Order =\n  { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) }\n\ntype t_Match = {\n  f_bid_id:u64;\n  f_ask_id:u64;\n  f_price:u64;\n  f_quantity:u64\n}\n\nlet is_match (order other: t_Order) : bool =\n  order.f_quantity >. mk_u64 0 && other.f_quantity >. mk_u64 0 && order.f_side <>. other.f_side &&\n  (order.f_side =. (Side_Buy <: t_Side) && order.f_price >=. other.f_price ||\n  order.f_side =. (Side_Sell <: t_Side) && order.f_price <=. other.f_price)\n\nlet impl_Order__try_match (self other: t_Order) : Core_models.Option.t_Option t_Match =\n  if is_match self other\n  then\n    let quantity:u64 = Core_models.Cmp.min #u64 self.f_quantity other.f_quantity in\n    let bid_id, ask_id:(u64 & u64) =\n      if self.f_side =. 
(Side_Buy <: t_Side)\n      then self.f_id, other.f_id <: (u64 & u64)\n      else other.f_id, self.f_id <: (u64 & u64)\n    in\n    Core_models.Option.Option_Some\n    ({ f_bid_id = bid_id; f_ask_id = ask_id; f_price = self.f_price; f_quantity = quantity }\n      <:\n      t_Match)\n    <:\n    Core_models.Option.t_Option t_Match\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option t_Match\n\nlet process_order\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into v_T t_Order)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Convert.t_From v_T t_Order)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i2: Core_models.Cmp.t_Ord v_T)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i3: Core_models.Clone.t_Clone v_T)\n      (order: t_Order)\n      (other_side: Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global)\n    : (Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global &\n      (Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & Core_models.Option.t_Option t_Order)) =\n  let matches:Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global = Alloc.Vec.impl__new #t_Match () in\n  let done:bool = false in\n  let done, matches, order, other_side:(bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global &\n    t_Order &\n    Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 1)\n      (Alloc.Collections.Binary_heap.impl_11__len #v_T #Alloc.Alloc.t_Global other_side <: usize)\n      (fun temp_0_ temp_1_ ->\n          let done, matches, order, other_side:(bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global &\n            t_Order &\n            Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global) =\n            temp_0_\n          in\n          let _:usize = temp_1_ in\n          true)\n      (done, matches, order, other_side\n        <:\n        (bool & Alloc.Vec.t_Vec t_Match 
Alloc.Alloc.t_Global & t_Order &\n          Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global))\n      (fun temp_0_ e_i ->\n          let done, matches, order, other_side:(bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global &\n            t_Order &\n            Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global) =\n            temp_0_\n          in\n          let e_i:usize = e_i in\n          if ~.done <: bool\n          then\n            match\n              Core_models.Option.impl__and_then #v_T\n                #t_Match\n                (Alloc.Collections.Binary_heap.impl_11__peek #v_T #Alloc.Alloc.t_Global other_side\n                  <:\n                  Core_models.Option.t_Option v_T)\n                (fun other ->\n                    let other:v_T = other in\n                    impl_Order__try_match (Core_models.Convert.f_into #v_T\n                          #t_Order\n                          #FStar.Tactics.Typeclasses.solve\n                          (Core_models.Clone.f_clone #v_T #FStar.Tactics.Typeclasses.solve other\n                            <:\n                            v_T)\n                        <:\n                        t_Order)\n                      order\n                    <:\n                    Core_models.Option.t_Option t_Match)\n              <:\n              Core_models.Option.t_Option t_Match\n            with\n            | Core_models.Option.Option_Some m ->\n              let order:t_Order =\n                { order with f_quantity = order.f_quantity -! 
m.f_quantity } <: t_Order\n              in\n              let tmp0, out:(Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global &\n                Core_models.Option.t_Option v_T) =\n                Alloc.Collections.Binary_heap.impl_10__pop #v_T #Alloc.Alloc.t_Global other_side\n              in\n              let other_side:Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global =\n                tmp0\n              in\n              let (other: t_Order):t_Order =\n                Core_models.Convert.f_into #v_T\n                  #t_Order\n                  #FStar.Tactics.Typeclasses.solve\n                  (Core_models.Option.impl__unwrap #v_T out <: v_T)\n              in\n              let other:t_Order =\n                { other with f_quantity = other.f_quantity -! m.f_quantity } <: t_Order\n              in\n              let other_side:Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global =\n                if other.f_quantity >. mk_u64 0\n                then\n                  let _:Prims.unit =\n                    Hax_lib.v_assume (b2t\n                        ((Alloc.Collections.Binary_heap.impl_11__len #v_T\n                              #Alloc.Alloc.t_Global\n                              other_side\n                            <:\n                            usize) <.\n                          Core_models.Num.impl_usize__MAX\n                          <:\n                          bool))\n                  in\n                  let other_side:Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global\n                  =\n                    Alloc.Collections.Binary_heap.impl_10__push #v_T\n                      #Alloc.Alloc.t_Global\n                      other_side\n                      (Core_models.Convert.f_from #v_T\n                          #t_Order\n                          #FStar.Tactics.Typeclasses.solve\n                          (Core_models.Clone.f_clone #t_Order 
#FStar.Tactics.Typeclasses.solve other\n                            <:\n                            t_Order)\n                        <:\n                        v_T)\n                  in\n                  other_side\n                else other_side\n              in\n              let _:Prims.unit =\n                Hax_lib.v_assume (b2t\n                    ((Alloc.Vec.impl_1__len #t_Match #Alloc.Alloc.t_Global matches <: usize) <.\n                      Core_models.Num.impl_usize__MAX\n                      <:\n                      bool))\n              in\n              let matches:Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global =\n                Alloc.Vec.impl_1__push #t_Match #Alloc.Alloc.t_Global matches m\n              in\n              done, matches, order, other_side\n              <:\n              (bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & t_Order &\n                Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global)\n            | _ ->\n              let done:bool = true in\n              done, matches, order, other_side\n              <:\n              (bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & t_Order &\n                Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global)\n          else\n            done, matches, order, other_side\n            <:\n            (bool & Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & t_Order &\n              Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global))\n  in\n  let hax_temp_output:(Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global &\n    Core_models.Option.t_Option t_Order) =\n    matches,\n    (if order.f_quantity >. 
mk_u64 0\n      then Core_models.Option.Option_Some order <: Core_models.Option.t_Option t_Order\n      else Core_models.Option.Option_None <: Core_models.Option.t_Option t_Order)\n    <:\n    (Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & Core_models.Option.t_Option t_Order)\n  in\n  other_side, hax_temp_output\n  <:\n  (Alloc.Collections.Binary_heap.t_BinaryHeap v_T Alloc.Alloc.t_Global &\n    (Alloc.Vec.t_Vec t_Match Alloc.Alloc.t_Global & Core_models.Option.t_Option t_Order))\n"
  },
  {
    "path": "examples/limited-order-book/proofs/fstar/extraction/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect:\n#  1. `fstar.exe` to be in PATH (alternatively, you can also set\n#     $FSTAR_HOME to be set to your F* repo/install directory)\n#\n#  2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH.\n#\n#  3. the extracted Cargo crate to have \"hax-lib\" as a dependency:\n#     `hax-lib = { version = \"0.1.0-pre.1\", git = \"https://github.com/hacspec/hax\"}`\n#\n# Optionally, you can set `HACL_HOME`.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHACL_HOME     ?= $(HOME)/.hax/hacl_home\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= .cache\nHINT_DIR      ?= .hints\n\nSHELL ?= /usr/bin/env bash\n\nEXECUTABLES = cargo cargo-hax jq\nK := $(foreach bin,$(EXECUTABLES),\\\n        $(if $(shell command -v $(bin) 2> /dev/null),,$(error 
\"No $(bin) in PATH\")))\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\nHAX_CLI = \"cargo hax into -i '-** +**::process_order' fstar\"\n\n# If $HACL_HOME doesn't exist, clone it\n${HACL_HOME}:\n\tmkdir -p \"${HACL_HOME}\"\n\tgit clone --depth 1 https://github.com/hacl-star/hacl-star.git \"${HACL_HOME}\"\n\n# If no any F* file is detected, we run hax\nifeq \"$(wildcard *.fst *fsti)\" \"\"\n$(shell $(SHELL) -c $(HAX_CLI))\nendif\n\n# By default, we process all the files in the current directory\nROOTS = $(wildcard *.fst *fsti)\n\n# Regenerate F* files via hax when Rust sources change\n$(ROOTS): $(shell find ../../../src -type f -name '*.rs')\n\t$(shell $(SHELL) -c $(HAX_CLI))\n\n# The following is a bash script that discovers F* libraries\ndefine FINDLIBS\n    # Prints a path if and only if it exists. Takes one argument: the\n    # path.\n    function print_if_exists() {\n        if [ -d \"$$1\" ]; then\n            echo \"$$1\"\n        fi\n    }\n    # Asks Cargo all the dependencies for the current crate or workspace,\n    # and extract all \"root\" directories for each. Takes zero argument.\n    function dependencies() {\n        cargo metadata --format-version 1 |\n            jq -r '.packages | .[] | .manifest_path | split(\"/\") | .[:-1] | join(\"/\")'\n    }\n    # Find hax libraries *around* a given path. Takes one argument: the\n    # path.\n    function find_hax_libraries_at_path() {\n        path=\"$$1\"\n        # if there is a `proofs/fstar/extraction` subfolder, then that's a\n        # F* library\n        print_if_exists \"$$path/proofs/fstar/extraction\"\n        # Maybe the `proof-libs` folder of hax is around?\n        MAYBE_PROOF_LIBS=$$(realpath -q \"$$path/../proof-libs/fstar\")\n        if [ $$? 
-eq 0 ]; then\n            print_if_exists \"$$MAYBE_PROOF_LIBS/core\"\n            print_if_exists \"$$MAYBE_PROOF_LIBS/rust_primitives\"\n        fi\n    }\n    { while IFS= read path; do\n          find_hax_libraries_at_path \"$$path\"\n      done < <(dependencies)\n    } | sort -u\nendef\nexport FINDLIBS\n\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c \"$$FINDLIBS\")\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS)\n\n.depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n\trm *.fst\n"
  },
  {
    "path": "examples/limited-order-book/src/canister.rs",
    "content": "use candid::CandidType;\nuse candid::Principal;\nuse ic_cdk::caller;\nuse ic_cdk_macros::{export_candid, init, query, update};\nuse std::cell::RefCell;\n\nuse crate::{Match, Order, OrderBook};\n\nthread_local! {\n    static ORDER_ADMIN: RefCell<Option<Principal>> = RefCell::default();\n    static ORDER_BOOK: RefCell<Option<OrderBook>> = RefCell::default();\n}\n\n#[init]\nfn init(order_admin: Option<Principal>) {\n    ORDER_ADMIN.with(|oa| {\n        *oa.borrow_mut() = order_admin;\n    });\n    ORDER_BOOK.with(|ob| {\n        ob.borrow_mut().replace(OrderBook::new());\n    });\n}\n\n#[update]\npub fn add_order(order: Order) -> Vec<Match> {\n    assert!(order.quantity > 0, \"Order quantity must be positive\");\n    ORDER_ADMIN.with(|oa| {\n        let oa = oa.borrow();\n        oa.as_ref()\n            .map(|admin| assert!(admin == &caller(), \"Only order admin can add orders\"));\n    });\n    ORDER_BOOK.with(|ob| {\n        ob.borrow_mut()\n            .as_mut()\n            .expect(\"Order book not initialized\")\n            .add_order(order)\n    })\n}\n\n#[derive(CandidType)]\npub struct GetBookResult {\n    pub bids: Vec<Order>,\n    pub asks: Vec<Order>,\n}\n\n#[query]\npub fn get_book() -> GetBookResult {\n    ORDER_BOOK.with(|ob| {\n        let ob = ob.borrow();\n        GetBookResult {\n            bids: ob.as_ref().expect(\"Order book not initialized\").list_bids(),\n            asks: ob.as_ref().expect(\"Order book not initialized\").list_asks(),\n        }\n    })\n}\n\nexport_candid!();\n"
  },
  {
    "path": "examples/limited-order-book/src/lib.rs",
    "content": "use candid::{CandidType, Deserialize};\nuse std::{cmp::Reverse, collections::BinaryHeap};\n\npub type OrderId = u64;\n\n#[derive(PartialEq, Eq, Clone, CandidType, Deserialize)]\npub enum Side {\n    Buy,\n    Sell,\n}\n\npub type Price = u64;\npub type Quantity = u64;\n\n#[derive(PartialEq, Eq, Clone, CandidType, Deserialize)]\npub struct Order {\n    pub id: OrderId,\n    pub side: Side,\n    pub price: Price,\n    pub quantity: Quantity,\n}\n\n#[derive(CandidType, Deserialize)]\npub struct Match {\n    pub bid_id: OrderId,\n    pub ask_id: OrderId,\n    pub price: Price,\n    pub quantity: Quantity,\n}\n\nfn is_match(order: &Order, other: &Order) -> bool {\n    order.quantity > 0\n        && other.quantity > 0\n        && order.side != other.side\n        && ((order.side == Side::Buy && order.price >= other.price)\n            || (order.side == Side::Sell && order.price <= other.price))\n}\n\nimpl Order {\n    pub fn try_match(&self, other: &Self) -> Option<Match> {\n        if is_match(self, other) {\n            let quantity = std::cmp::min(self.quantity, other.quantity);\n            let (bid_id, ask_id) = if self.side == Side::Buy {\n                (self.id, other.id)\n            } else {\n                (other.id, self.id)\n            };\n            Some(Match {\n                bid_id,\n                ask_id,\n                // If there's a match, we could use any price between the two orders.\n                // Here we use self.price.\n                price: self.price,\n                quantity,\n            })\n        } else {\n            None\n        }\n    }\n}\n\nimpl PartialOrd for Order {\n    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n        Some(self.cmp(other))\n    }\n}\n\nimpl Ord for Order {\n    fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n        self.price.cmp(&other.price).then(self.id.cmp(&other.id))\n    }\n}\n\nimpl From<Reverse<Order>> for Order {\n    fn from(other: 
Reverse<Order>) -> Self {\n        other.0\n    }\n}\n\nimpl From<Order> for Reverse<Order> {\n    fn from(value: Order) -> Self {\n        Self(value)\n    }\n}\n\npub struct OrderBook {\n    bids: BinaryHeap<Order>,\n    asks: BinaryHeap<Reverse<Order>>,\n}\n\nimpl OrderBook {\n    pub fn new() -> Self {\n        Self {\n            bids: BinaryHeap::new(),\n            asks: BinaryHeap::new(),\n        }\n    }\n\n    /// Add an order to the order book; if it crosses with existing orders, return the match(es).\n    /// Fill as much of the order as possible, and just keep the remainder on the order book.\n    pub fn add_order(&mut self, order: Order) -> Vec<Match> {\n        assert!(order.quantity > 0);\n        assert!(order.price > 0);\n        match order.side {\n            Side::Buy => {\n                let (matches, opt_remaining_bid) = process_order(order, &mut self.asks);\n                if let Some(remaining_bid) = opt_remaining_bid {\n                    self.bids.push(remaining_bid);\n                }\n                matches\n            }\n            Side::Sell => {\n                let (matches, opt_remaining_ask) = process_order(order, &mut self.bids);\n                if let Some(remaining_ask) = opt_remaining_ask {\n                    self.asks.push(Reverse(remaining_ask));\n                }\n                matches\n            }\n        }\n    }\n\n    pub fn list_bids(&self) -> Vec<Order> {\n        self.bids.iter().cloned().collect()\n    }\n\n    pub fn list_asks(&self) -> Vec<Order> {\n        self.asks\n            .iter()\n            .cloned()\n            .map(|Reverse(order)| order)\n            .collect()\n    }\n}\n\nfn process_order<T>(mut order: Order, other_side: &mut BinaryHeap<T>) -> (Vec<Match>, Option<Order>)\nwhere\n    T: Into<Order> + From<Order> + Ord + Clone,\n{\n    let mut matches = Vec::new();\n    let mut done = false;\n    for _i in 1..other_side.len() {\n        if !done {\n            if let Some(m) = 
other_side\n                .peek()\n                .and_then(|other| Into::into(other.clone()).try_match(&order))\n            {\n                // Goal 1: prove `order.quantity` does not underflow\n                order.quantity -= m.quantity;\n                // Goal 2: prove this `unwrap()` does not panic\n                let mut other: Order = Into::into(other_side.pop().unwrap());\n                // Goal 3: prove `other.quantity` does not underflow\n                other.quantity -= m.quantity;\n                if other.quantity > 0 {\n                    hax_lib::assume!(other_side.len() < usize::MAX);\n                    other_side.push(From::from(other.clone()));\n                }\n                hax_lib::assume!(matches.len() < usize::MAX);\n                matches.push(m);\n            } else {\n                done = true;\n            }\n        }\n    }\n    (\n        matches,\n        if order.quantity > 0 {\n            Some(order)\n        } else {\n            None\n        },\n    )\n}\n\npub mod canister;\n"
  },
  {
    "path": "examples/proverif-psk/Cargo.toml",
    "content": "[package]\nname = \"proverif-psk\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\nhax-lib.workspace = true\nlibcrux = \"=0.0.2-pre.2\"\n\n[dev-dependencies]\nrand = { version = \"0.8\" }\n"
  },
  {
    "path": "examples/proverif-psk/Makefile",
    "content": "check: ./proofs/proverif/extraction/lib.pvl\n\ttimeout 30 proverif -lib ./proofs/proverif/extraction/lib.pvl ./proofs/proverif/analysis.pv\n\nproofs/proverif/extraction/lib.pvl:\n\tcargo hax into pro-verif\n\nclean:\n\trm -f proofs/proverif/extraction/lib.pvl\n"
  },
  {
    "path": "examples/proverif-psk/Readme.md",
    "content": "# A hax ProVerif example\n\nThis crate demonstrates an example of ProVerif extraction using hax.\n\nThe crate provides functions for implementing a simplistic pre-shared-key (PSK) based protocol\nbetween an initiator and receiver, which is defined as follows:\n```\nInitiator(psk: AEADKey): \n    let response_key = AEAD.KeyGen()\n    let message = AEAD.Encrypt(psk, response_key)\n\nInitiator -> Responder: message\n\nResponder(psk: AEADKey, payload: &[u8]):\n    let response_key = AEAD.Decrypt(psk, message)\n    let response = AEAD.Encrypt(response_key, payload)\n           \nResponder -> Initiator: response\n\nInitiator(response_key, response): \n    let output = AEAD.Decrypt(response_key, response)\n    return output\n```\n\nThe crate does not implement message transport, only the initiator and\nresponder protocol logic.\n\nA handwritten ProVerif model of this protocol is included in `psk.pv` for comparison.\n\n### On the use of `proverif::replace()`\nSince ProVerif operates in a symbolic world, certain operations have\nto be represented abstractly, in in symbolic terms. In this case, we\ngive symbolic replacements for serialization and deserialization, as\nwell as cryptographic operations such as encryption and\ndecryption. They are thus treated as ideal implementations of their\nrespective functionality in ProVerif's analysis of the protocol. 
To\nobtain assurance that these operations are correct and implemented\nsecurely, one of hax' other backends can be used.\n\n\n## Extracting into ProVerif\nTo obtain a ProVerif model of the protocol logic functions, run\n```\ncargo hax into pro-verif\n```\nThis will generate a file `./proofs/proverif/extraction/lib.pvl`.\n\n## Running a Basic Analysis on the Model\nWe have provided a handwritten file\n`./proofs/proverif/analysis.pv`, which models the protocol\nusing the extracted functions in `lib.pvl` and uses ProVerif to verify\n\n- that initiator and receiver can both complete the protocol, as well as\n- confidentiality of the pre-shared key and the protocol payload\n\nTo let ProVerif perform the analysis, from the crate root, run:\n\n```\nproverif -lib ./proofs/proverif/extraction/lib.pvl ./proofs/proverif/analysis.pv\n```\n\nThe expected final output is\n```\n--------------------------------------------------------------\nVerification summary:\n\nQuery not event(InitiatorFinished(initiator_result)) is false.\n\nQuery not event(ResponderFinished(responder_result)) is false.\n\nQuery not attacker(PSK[]) is true.\n\nQuery not attacker(SECRET_PAYLOAD[]) is true.\n\n--------------------------------------------------------------\n```\n\n"
  },
  {
    "path": "examples/proverif-psk/proofs/proverif/analysis.pv",
    "content": "(*****************************************)\n(* Top-level processes *)\n(*****************************************)\n\nevent InitiatorFinished(bitstring).\nevent ResponderFinished(bitstring). \n\nfree PSK: proverif_psk__t_KeyIv [private].\nfree SECRET_PAYLOAD: bitstring [private].\n\nquery initiator_result: bitstring; event(InitiatorFinished(initiator_result)).\nquery responder_result: bitstring; event(ResponderFinished(responder_result)).  \n\nquery attacker(PSK).\nquery attacker(SECRET_PAYLOAD).\n\nlet Initiator(psk: proverif_psk__t_KeyIv) =\n    new ikm: bitstring;\n    let (initiator_message: proverif_psk__t_Message, response_key: proverif_psk__t_KeyIv) = proverif_psk__initiate(ikm, psk) in\n    out(c, initiator_message);\n    in(c, response_message: proverif_psk__t_Message);\n    let response = proverif_psk__finish(response_message, response_key) in\n    event InitiatorFinished(response).\n\nlet Responder(psk: proverif_psk__t_KeyIv, payload: bitstring) =\n    in(c, initiator_message: proverif_psk__t_Message);\n    let response_message = proverif_psk__respond(\n         psk,\n         payload,\n         initiator_message\n       ) in\n    event ResponderFinished(payload);\n    out(c, response_message).\n\nprocess\n    Initiator(PSK) | Responder(PSK, SECRET_PAYLOAD)\n\n\n"
  },
  {
    "path": "examples/proverif-psk/psk.pv",
    "content": "free c: channel.\n\ntype key.\n\nfun senc(bitstring, key): bitstring.\nreduc forall m: bitstring, k: key; sdec(senc(m,k), k) = m.\n\nfun key_to_bitstring(key): bitstring.\nreduc forall k: key; bitstring_to_key(key_to_bitstring(k)) = k.\n\nevent InitiatorFinished(bitstring).\nevent ResponderFinished(bitstring). \n\nfree PSK: key [private].\nfree SECRET_PAYLOAD: bitstring [private].\n\nquery initiator_result: bitstring; event(InitiatorFinished(initiator_result)).\nquery responder_result: bitstring; event(ResponderFinished(responder_result)).  \n\nquery attacker(PSK).\nquery attacker(SECRET_PAYLOAD).\n\nlet Initiator(psk: key) =\n    new response_key: key;\n    let initiator_message = senc(key_to_bitstring(response_key), psk) in\n    out(c, initiator_message);\n    in(c, response_message: bitstring);\n    let response = sdec(response_message, response_key) in\n    event InitiatorFinished(response).\n\nlet Responder(psk: key, payload: bitstring) =\n    in(c, initiator_message: bitstring);\n    let response_key = sdec(initiator_message, psk) in\n    let response_message = senc(payload, bitstring_to_key(response_key)) in\n    event ResponderFinished(payload);\n    out(c, response_message).\n\nprocess\n    Initiator(PSK) | Responder(PSK, SECRET_PAYLOAD)\n"
  },
  {
    "path": "examples/proverif-psk/pv_div_by_zero_fix.diff",
    "content": "diff proverif2.05/src/display.ml proverif2.05/src/display.ml\nindex c43785ec..2763d907 100644\n--- proverif/src/display.ml\n+++ proverif/src/display.ml\n@@ -49,7 +49,7 @@ let dynamic_display str =\n   then display_whitespace (!record_cursor_line - size);\n   (* If we cannot determine the number of columns, we just assume that the statistics\n      will fit on one line (the statistics will not be active by default) *)\n-  let lines = if columns = -1 then 0 else ((max (!record_cursor_line) size) - 1) / columns in\n+  let lines = if columns <= 0 then 0 else ((max (!record_cursor_line) size) - 1) / columns in\n   (* Go to the beginning of the line *)\n   print_string \"\\r\";\n   if lines > 0 then\n"
  },
  {
    "path": "examples/proverif-psk/src/lib.rs",
    "content": "use hax_lib as hax;\nuse libcrux::aead::{self, Algorithm};\n\nconst AEAD_KEY_NONCE: usize = Algorithm::key_size(Algorithm::Chacha20Poly1305)\n    + Algorithm::nonce_size(Algorithm::Chacha20Poly1305);\n\nconst AEAD_KEY_LENGTH: usize = Algorithm::key_size(Algorithm::Chacha20Poly1305);\n\nconst EMPTY_AAD: &[u8; 0] = b\"\";\nconst RESPONSE_KEY_CONTEXT: &[u8; 12] = b\"response-key\";\n\n/* Type definitions */\n#[derive(Debug)]\npub enum Error {\n    CryptoError,\n    OtherError,\n}\n\nimpl From<libcrux::aead::Error> for Error {\n    fn from(_value: libcrux::aead::Error) -> Error {\n        Error::CryptoError\n    }\n}\n\nimpl From<libcrux::hkdf::Error> for Error {\n    fn from(_value: libcrux::hkdf::Error) -> Error {\n        Error::CryptoError\n    }\n}\n\nimpl From<std::array::TryFromSliceError> for Error {\n    fn from(_value: std::array::TryFromSliceError) -> Error {\n        Error::OtherError\n    }\n}\n\n#[hax::opaque]\npub struct Message(aead::Tag, Vec<u8>);\n\n#[hax::opaque]\npub struct KeyIv(libcrux::aead::Key, libcrux::aead::Iv);\n\n/* Wire formats */\n#[hax::pv_constructor]\nfn serialize_key_iv(key_iv: &KeyIv) -> Vec<u8> {\n    let mut result = Vec::new();\n    result.extend_from_slice(key_iv.1 .0.as_ref());\n    match &key_iv.0 {\n        aead::Key::Chacha20Poly1305(k) => result.extend_from_slice(k.0.as_ref()),\n        _ => unimplemented!(),\n    }\n    result\n}\n\n#[hax::proverif::replace(\n    \"reduc forall k: $:{KeyIv}; ${deserialize_key_iv}(${serialize_key_iv}(k)) = k.\"\n)]\nfn deserialize_key_iv(bytes: &[u8]) -> Result<KeyIv, Error> {\n    let iv = aead::Iv::new(&bytes[..12])?;\n    let key = aead::Key::from_slice(Algorithm::Chacha20Poly1305, &bytes[12..])?;\n    Ok(KeyIv(key, iv))\n}\n\n/* Cryptographic functions */\n#[hax::pv_constructor]\nfn derive_key_iv(ikm: &[u8], info: &[u8]) -> Result<KeyIv, Error> {\n    let key_iv_bytes =\n        libcrux::hkdf::expand(libcrux::hkdf::Algorithm::Sha256, ikm, info, AEAD_KEY_NONCE)?;\n\n    
let (key_bytes, iv_bytes) = key_iv_bytes.split_at(AEAD_KEY_LENGTH);\n    let key =\n        libcrux::aead::Key::from_slice(libcrux::aead::Algorithm::Chacha20Poly1305, key_bytes)?;\n\n    let iv = libcrux::aead::Iv(iv_bytes.try_into()?);\n    Ok(KeyIv(key, iv))\n}\n\n#[hax::proverif::replace(\"fun ${encrypt} ($:{KeyIv}, bitstring): $:{Message}.\")]\npub fn encrypt(key_iv: &KeyIv, message: &[u8]) -> Result<Message, Error> {\n    let (tag, ctxt) =\n        libcrux::aead::encrypt_detached(&key_iv.0, message, aead::Iv(key_iv.1 .0), EMPTY_AAD)?;\n    Ok(Message(tag, ctxt))\n}\n\n#[hax::proverif::replace(\n    \"reduc forall m: bitstring, k: $:{KeyIv}; ${decrypt}(k, ${encrypt}(k, m)) = m.\"\n)]\nfn decrypt(key_iv: &KeyIv, message: Message) -> Result<Vec<u8>, Error> {\n    libcrux::aead::decrypt_detached(\n        &key_iv.0,\n        message.1,\n        aead::Iv(key_iv.1 .0),\n        EMPTY_AAD,\n        &message.0,\n    )\n    .map_err(|_| Error::CryptoError)\n}\n\n/* Protocol */\npub fn initiate(ikm: &[u8], psk: &KeyIv) -> Result<(Message, KeyIv), Error> {\n    let response_key_iv = derive_key_iv(ikm, RESPONSE_KEY_CONTEXT)?;\n\n    let serialized_responder_key = serialize_key_iv(&response_key_iv);\n\n    let initiator_message = encrypt(psk, &serialized_responder_key)?;\n\n    Ok((initiator_message, response_key_iv))\n}\n\npub fn respond(psk: &KeyIv, payload: &[u8], message: Message) -> Result<Message, Error> {\n    let response_key_bytes = decrypt(psk, message)?;\n\n    let response_key_iv = deserialize_key_iv(&response_key_bytes)?;\n\n    let responder_message = encrypt(&response_key_iv, payload)?;\n\n    Ok(responder_message)\n}\n\npub fn finish(message: Message, response_key_iv: &KeyIv) -> Result<Vec<u8>, Error> {\n    let response_bytes = decrypt(response_key_iv, message)?;\n\n    Ok(response_bytes)\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn it_works() {\n        use rand::{rngs::OsRng, RngCore};\n\n        fn random_array<const L: 
usize>() -> [u8; L] {\n            let mut rng = OsRng;\n            let mut seed = [0; L];\n            rng.try_fill_bytes(&mut seed).unwrap();\n            seed\n        }\n        let payload = b\"SECRET\";\n        let ikm_psk = random_array::<32>();\n        let ikm_responder_key = random_array::<32>();\n\n        let psk = derive_key_iv(&ikm_psk, b\"pre-shared-key\")\n            .map_err(|_| Error::CryptoError)\n            .unwrap();\n\n        let (initiator_message, response_key) = initiate(&ikm_responder_key, &psk).unwrap();\n        let responder_message = respond(&psk, payload, initiator_message).unwrap();\n        let initiator_finish = finish(responder_message, &response_key).unwrap();\n        assert_eq!(payload.to_vec(), initiator_finish);\n    }\n}\n"
  },
  {
    "path": "examples/sha256/.gitignore",
    "content": "target/\nCargo.lock\n"
  },
  {
    "path": "examples/sha256/Cargo.toml",
    "content": "[package]\nname = \"sha256\"\nversion = \"0.1.0\"\nauthors = [\"Franziskus Kiefer <franziskuskiefer@gmail.com>\"]\nedition = \"2021\"\n\n[lib]\npath = \"src/sha256.rs\"\n\n[dependencies]\nhax-lib.workspace = true\n"
  },
  {
    "path": "examples/sha256/Makefile",
    "content": ".PHONY: default\ndefault:\n\tmake -C proofs/fstar/extraction\n\nclean:\n\trm -f proofs/fstar/extraction/.depend\n\trm -f proofs/fstar/extraction/*.fst\n"
  },
  {
    "path": "examples/sha256/proofs/coq/extraction/Sha256.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Hacspec Require Import Hacspec_Lib MachineIntegers.\nFrom Coq Require Import ZArith.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\n(*Not implemented yet? todo(item)*)\n\nRequire Import Hax_lib_macros. (* as hax *)\n\nRequire Import Std. (* as TryInto *)\n\nDefinition v_BLOCK_SIZE : uint_size :=\n  (@repr WORDSIZE32 64).\n\nDefinition v_LEN_SIZE : uint_size :=\n  (@repr WORDSIZE32 8).\n\nDefinition v_K_SIZE : uint_size :=\n  (@repr WORDSIZE32 64).\n\nDefinition v_HASH_SIZE : uint_size :=\n  (@repr WORDSIZE32 256)./(@repr WORDSIZE32 8).\n\nNotation t_Block_t := (nseq int8 TODO: Int.to_string length).\n\nNotation t_OpTableType_t := (nseq int8 TODO: Int.to_string length).\n\nNotation t_Sha256Digest_t := (nseq int8 TODO: Int.to_string length).\n\nNotation t_RoundConstantsTable_t := (nseq int32 TODO: Int.to_string length).\n\nNotation t_Hash_t := (nseq int32 TODO: Int.to_string length).\n\nDefinition ch (x : int32) (y : int32) (z : int32) : int32 :=\n  (x.&y).^((not x).&z).\n\nDefinition maj (x : int32) (y : int32) (z : int32) : int32 :=\n  (x.&y).^((x.&z).^(y.&z)).\n\nDefinition v_OP_TABLE : nseq int8 TODO: Int.to_string length :=\n  array_from_list [(@repr WORDSIZE8 2);\n    (@repr WORDSIZE8 13);\n    (@repr WORDSIZE8 22);\n    (@repr WORDSIZE8 6);\n    (@repr WORDSIZE8 11);\n    (@repr WORDSIZE8 25);\n    (@repr WORDSIZE8 7);\n    (@repr WORDSIZE8 18);\n    (@repr WORDSIZE8 3);\n    (@repr WORDSIZE8 17);\n    (@repr WORDSIZE8 19);\n    (@repr WORDSIZE8 10)].\n\nDefinition v_K_TABLE : nseq int32 TODO: Int.to_string length :=\n  array_from_list [(@repr WORDSIZE32 1116352408);\n    (@repr WORDSIZE32 1899447441);\n    (@repr WORDSIZE32 3049323471);\n    (@repr WORDSIZE32 3921009573);\n    (@repr WORDSIZE32 961987163);\n    (@repr WORDSIZE32 1508970993);\n    (@repr WORDSIZE32 2453635748);\n    (@repr WORDSIZE32 2870763221);\n    (@repr WORDSIZE32 3624381080);\n    (@repr 
WORDSIZE32 310598401);\n    (@repr WORDSIZE32 607225278);\n    (@repr WORDSIZE32 1426881987);\n    (@repr WORDSIZE32 1925078388);\n    (@repr WORDSIZE32 2162078206);\n    (@repr WORDSIZE32 2614888103);\n    (@repr WORDSIZE32 3248222580);\n    (@repr WORDSIZE32 3835390401);\n    (@repr WORDSIZE32 4022224774);\n    (@repr WORDSIZE32 264347078);\n    (@repr WORDSIZE32 604807628);\n    (@repr WORDSIZE32 770255983);\n    (@repr WORDSIZE32 1249150122);\n    (@repr WORDSIZE32 1555081692);\n    (@repr WORDSIZE32 1996064986);\n    (@repr WORDSIZE32 2554220882);\n    (@repr WORDSIZE32 2821834349);\n    (@repr WORDSIZE32 2952996808);\n    (@repr WORDSIZE32 3210313671);\n    (@repr WORDSIZE32 3336571891);\n    (@repr WORDSIZE32 3584528711);\n    (@repr WORDSIZE32 113926993);\n    (@repr WORDSIZE32 338241895);\n    (@repr WORDSIZE32 666307205);\n    (@repr WORDSIZE32 773529912);\n    (@repr WORDSIZE32 1294757372);\n    (@repr WORDSIZE32 1396182291);\n    (@repr WORDSIZE32 1695183700);\n    (@repr WORDSIZE32 1986661051);\n    (@repr WORDSIZE32 2177026350);\n    (@repr WORDSIZE32 2456956037);\n    (@repr WORDSIZE32 2730485921);\n    (@repr WORDSIZE32 2820302411);\n    (@repr WORDSIZE32 3259730800);\n    (@repr WORDSIZE32 3345764771);\n    (@repr WORDSIZE32 3516065817);\n    (@repr WORDSIZE32 3600352804);\n    (@repr WORDSIZE32 4094571909);\n    (@repr WORDSIZE32 275423344);\n    (@repr WORDSIZE32 430227734);\n    (@repr WORDSIZE32 506948616);\n    (@repr WORDSIZE32 659060556);\n    (@repr WORDSIZE32 883997877);\n    (@repr WORDSIZE32 958139571);\n    (@repr WORDSIZE32 1322822218);\n    (@repr WORDSIZE32 1537002063);\n    (@repr WORDSIZE32 1747873779);\n    (@repr WORDSIZE32 1955562222);\n    (@repr WORDSIZE32 2024104815);\n    (@repr WORDSIZE32 2227730452);\n    (@repr WORDSIZE32 2361852424);\n    (@repr WORDSIZE32 2428436474);\n    (@repr WORDSIZE32 2756734187);\n    (@repr WORDSIZE32 3204031479);\n    (@repr WORDSIZE32 3329325298)].\n\nDefinition v_HASH_INIT : nseq int32 TODO: 
Int.to_string length :=\n  array_from_list [(@repr WORDSIZE32 1779033703);\n    (@repr WORDSIZE32 3144134277);\n    (@repr WORDSIZE32 1013904242);\n    (@repr WORDSIZE32 2773480762);\n    (@repr WORDSIZE32 1359893119);\n    (@repr WORDSIZE32 2600822924);\n    (@repr WORDSIZE32 528734635);\n    (@repr WORDSIZE32 1541459225)].\n\nDefinition sigma (x : int32) (i : uint_size) (op : uint_size) : int32 :=\n  let tmp := (impl__u32__rotate_right x (f_into (v_OP_TABLE.[(((@repr WORDSIZE32 3).*i).+(@repr WORDSIZE32 2))]))) : int32 in\n  let tmp := (if\n      op=.?(@repr WORDSIZE32 0)\n    then\n      x shift_right (v_OP_TABLE.[(((@repr WORDSIZE32 3).*i).+(@repr WORDSIZE32 2))])\n    else\n      tmp) : int32 in\n  let rot_val_1 := (f_into (v_OP_TABLE.[((@repr WORDSIZE32 3).*i)])) : int32 in\n  let rot_val_2 := (f_into (v_OP_TABLE.[(((@repr WORDSIZE32 3).*i).+(@repr WORDSIZE32 1))])) : int32 in\n  ((impl__u32__rotate_right x rot_val_1).^(impl__u32__rotate_right x rot_val_2)).^tmp.\n\nDefinition to_be_u32s (block : nseq int8 TODO: Int.to_string length) : t_Vec_t (int32) (t_Global_t) :=\n  let out := (impl__with_capacity (v_BLOCK_SIZE./(@repr WORDSIZE32 4))) : t_Vec_t (int32) (t_Global_t) in\n  let out := (f_fold (f_into_iter (impl__chunks_exact (unsize block) (@repr WORDSIZE32 4))) out (fun out block_chunk =>\n      let block_chunk_array := (impl__u32__from_be_bytes (impl__unwrap (f_try_into block_chunk))) : int32 in\n      let out := (impl_1__push out block_chunk_array) : t_Vec_t (int32) (t_Global_t) in\n      out)) : t_Vec_t (int32) (t_Global_t) in\n  out.\n\nDefinition schedule (block : nseq int8 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length :=\n  let b := (to_be_u32s block) : t_Vec_t (int32) (t_Global_t) in\n  let s := (repeat (@repr WORDSIZE32 0) (@repr WORDSIZE32 64)) : nseq int32 TODO: Int.to_string length in\n  let s := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)v_K_SIZE)) s (fun s i =>\n      if\n        i<.?(@repr WORDSIZE32 16)\n      
then\n        let s := (update_at s i (b.[i])) : nseq int32 TODO: Int.to_string length in\n        s\n      else\n        let t16 := (s.[(i.-(@repr WORDSIZE32 16))]) : int32 in\n        let t15 := (s.[(i.-(@repr WORDSIZE32 15))]) : int32 in\n        let t7 := (s.[(i.-(@repr WORDSIZE32 7))]) : int32 in\n        let t2 := (s.[(i.-(@repr WORDSIZE32 2))]) : int32 in\n        let s1 := (sigma t2 (@repr WORDSIZE32 3) (@repr WORDSIZE32 0)) : int32 in\n        let s0 := (sigma t15 (@repr WORDSIZE32 2) (@repr WORDSIZE32 0)) : int32 in\n        let s := (update_at s i (impl__u32__wrapping_add (impl__u32__wrapping_add (impl__u32__wrapping_add s1 t7) s0) t16)) : nseq int32 TODO: Int.to_string length in\n        s)) : nseq int32 TODO: Int.to_string length in\n  s.\n\nDefinition shuffle (ws : nseq int32 TODO: Int.to_string length) (hashi : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length :=\n  let h := (hashi) : nseq int32 TODO: Int.to_string length in\n  let h := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)v_K_SIZE)) h (fun h i =>\n      let a0 := (h.[(@repr WORDSIZE32 0)]) : int32 in\n      let b0 := (h.[(@repr WORDSIZE32 1)]) : int32 in\n      let c0 := (h.[(@repr WORDSIZE32 2)]) : int32 in\n      let d0 := (h.[(@repr WORDSIZE32 3)]) : int32 in\n      let e0 := (h.[(@repr WORDSIZE32 4)]) : int32 in\n      let f0 := (h.[(@repr WORDSIZE32 5)]) : int32 in\n      let g0 := (h.[(@repr WORDSIZE32 6)]) : int32 in\n      let h0 := (h.[(@repr WORDSIZE32 7)]) : int32 in\n      let t1 := (impl__u32__wrapping_add (impl__u32__wrapping_add (impl__u32__wrapping_add (impl__u32__wrapping_add h0 (sigma e0 (@repr WORDSIZE32 1) (@repr WORDSIZE32 1))) (ch e0 f0 g0)) (v_K_TABLE.[i])) (ws.[i])) : int32 in\n      let t2 := (impl__u32__wrapping_add (sigma a0 (@repr WORDSIZE32 0) (@repr WORDSIZE32 1)) (maj a0 b0 c0)) : int32 in\n      let h := (update_at h (@repr WORDSIZE32 0) (impl__u32__wrapping_add t1 t2)) : nseq int32 TODO: Int.to_string length in\n      let 
h := (update_at h (@repr WORDSIZE32 1) a0) : nseq int32 TODO: Int.to_string length in\n      let h := (update_at h (@repr WORDSIZE32 2) b0) : nseq int32 TODO: Int.to_string length in\n      let h := (update_at h (@repr WORDSIZE32 3) c0) : nseq int32 TODO: Int.to_string length in\n      let h := (update_at h (@repr WORDSIZE32 4) (impl__u32__wrapping_add d0 t1)) : nseq int32 TODO: Int.to_string length in\n      let h := (update_at h (@repr WORDSIZE32 5) e0) : nseq int32 TODO: Int.to_string length in\n      let h := (update_at h (@repr WORDSIZE32 6) f0) : nseq int32 TODO: Int.to_string length in\n      let h := (update_at h (@repr WORDSIZE32 7) g0) : nseq int32 TODO: Int.to_string length in\n      h)) : nseq int32 TODO: Int.to_string length in\n  h.\n\nDefinition compress (block : nseq int8 TODO: Int.to_string length) (h_in : nseq int32 TODO: Int.to_string length) : nseq int32 TODO: Int.to_string length :=\n  let s := (schedule block) : nseq int32 TODO: Int.to_string length in\n  let h := (shuffle s h_in) : nseq int32 TODO: Int.to_string length in\n  let h := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 8))) h (fun h i =>\n      update_at h i (impl__u32__wrapping_add (h.[i]) (h_in.[i])))) : nseq int32 TODO: Int.to_string length in\n  h.\n\nDefinition u32s_to_be_bytes (state : nseq int32 TODO: Int.to_string length) : nseq int8 TODO: Int.to_string length :=\n  let out := (repeat (@repr WORDSIZE8 0) (@repr WORDSIZE32 32)) : nseq int8 TODO: Int.to_string length in\n  let out := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)v_LEN_SIZE)) out (fun out i =>\n      let tmp := (state.[i]) : int32 in\n      let tmp := (impl__u32__to_be_bytes tmp) : nseq int8 TODO: Int.to_string length in\n      f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(@repr WORDSIZE32 4))) out (fun out j =>\n        update_at out ((i.*(@repr WORDSIZE32 4)).+j) (tmp.[j])))) : nseq int8 TODO: Int.to_string length in\n  out.\n\nDefinition hash (msg : seq int8) : nseq 
int8 TODO: Int.to_string length :=\n  let h := (v_HASH_INIT) : nseq int32 TODO: Int.to_string length in\n  let last_block := (repeat (@repr WORDSIZE8 0) (@repr WORDSIZE32 64)) : nseq int8 TODO: Int.to_string length in\n  let last_block_len := ((@repr WORDSIZE32 0)) : uint_size in\n  let '(h,last_block,last_block_len) := (f_fold (f_into_iter (impl__chunks msg v_BLOCK_SIZE)) (h,last_block,last_block_len) (fun '(h,last_block,last_block_len) block =>\n      if\n        (impl__len block)<.?v_BLOCK_SIZE\n      then\n        let last_block := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)(impl__len block))) last_block (fun last_block i =>\n            update_at last_block i (block.[i]))) : nseq int8 TODO: Int.to_string length in\n        let last_block_len := (impl__len block) : uint_size in\n        (h,last_block,last_block_len)\n      else\n        let h := (compress (impl__unwrap (f_try_into block)) h) : nseq int32 TODO: Int.to_string length in\n        (h,last_block,last_block_len))) : (nseq int32 TODO: Int.to_string length × nseq int8 TODO: Int.to_string length × uint_size) in\n  let last_block := (update_at last_block last_block_len (@repr WORDSIZE8 128)) : nseq int8 TODO: Int.to_string length in\n  let len_bist := (cast ((impl__len msg).*(@repr WORDSIZE32 8))) : int64 in\n  let len_bist_bytes := (impl__u64__to_be_bytes len_bist) : nseq int8 TODO: Int.to_string length in\n  let '(h,last_block) := (if\n      last_block_len<.?(v_BLOCK_SIZE.-v_LEN_SIZE)\n    then\n      let last_block := (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)v_LEN_SIZE)) last_block (fun last_block i =>\n          update_at last_block ((v_BLOCK_SIZE.-v_LEN_SIZE).+i) (len_bist_bytes.[i]))) : nseq int8 TODO: Int.to_string length in\n      let h := (compress last_block h) : nseq int32 TODO: Int.to_string length in\n      (h,last_block)\n    else\n      let pad_block := (repeat (@repr WORDSIZE8 0) (@repr WORDSIZE32 64)) : nseq int8 TODO: Int.to_string length in\n      let pad_block 
:= (f_fold (f_into_iter (Build_Range (@repr WORDSIZE32 0)v_LEN_SIZE)) pad_block (fun pad_block i =>\n          update_at pad_block ((v_BLOCK_SIZE.-v_LEN_SIZE).+i) (len_bist_bytes.[i]))) : nseq int8 TODO: Int.to_string length in\n      let h := (compress last_block h) : nseq int32 TODO: Int.to_string length in\n      let h := (compress pad_block h) : nseq int32 TODO: Int.to_string length in\n      (h,last_block)) : (nseq int32 TODO: Int.to_string length × nseq int8 TODO: Int.to_string length) in\n  u32s_to_be_bytes h.\n\nDefinition sha256 (msg : seq int8) : nseq int8 TODO: Int.to_string length :=\n  hash msg.\n"
  },
  {
    "path": "examples/sha256/proofs/fstar/extraction/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect:\n#  1. `fstar.exe` to be in PATH (alternatively, you can also set\n#     $FSTAR_HOME to be set to your F* repo/install directory)\n#\n#  2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH.\n#\n#  3. the extracted Cargo crate to have \"hax-lib\" as a dependency:\n#     `hax-lib = { version = \"0.1.0-pre.1\", git = \"https://github.com/hacspec/hax\"}`\n#\n# Optionally, you can set `HACL_HOME`.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHACL_HOME     ?= $(HOME)/.hax/hacl_home\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= .cache\nHINT_DIR      ?= .hints\n\nSHELL ?= /usr/bin/env bash\n\nEXECUTABLES = cargo cargo-hax jq\nK := $(foreach bin,$(EXECUTABLES),\\\n        $(if $(shell command -v $(bin) 2> /dev/null),,$(error 
\"No $(bin) in PATH\")))\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\n# Default hax invocation\nHAX_CLI = \"cargo hax into fstar\"\n\n# If $HACL_HOME doesn't exist, clone it\n${HACL_HOME}:\n\tmkdir -p \"${HACL_HOME}\"\n\tgit clone --depth 1 https://github.com/hacl-star/hacl-star.git \"${HACL_HOME}\"\n\n# If no any F* file is detected, we run hax\nifeq \"$(wildcard *.fst *fsti)\" \"\"\n$(shell $(SHELL) -c $(HAX_CLI))\nendif\n\n# By default, we process all the files in the current directory\nROOTS = $(wildcard *.fst *fsti)\n\n# Regenerate F* files via hax when Rust sources change\n$(ROOTS): $(shell find ../../../src -type f -name '*.rs')\n\t$(shell $(SHELL) -c $(HAX_CLI))\n\n# The following is a bash script that discovers F* libraries\ndefine FINDLIBS\n    # Prints a path if and only if it exists. Takes one argument: the\n    # path.\n    function print_if_exists() {\n        if [ -d \"$$1\" ]; then\n            echo \"$$1\"\n        fi\n    }\n    # Asks Cargo all the dependencies for the current crate or workspace,\n    # and extract all \"root\" directories for each. Takes zero argument.\n    function dependencies() {\n        cargo metadata --format-version 1 |\n            jq -r '.packages | .[] | .manifest_path | split(\"/\") | .[:-1] | join(\"/\")'\n    }\n    # Find hax libraries *around* a given path. Takes one argument: the\n    # path.\n    function find_hax_libraries_at_path() {\n        path=\"$$1\"\n        # if there is a `proofs/fstar/extraction` subfolder, then that's a\n        # F* library\n        print_if_exists \"$$path/proofs/fstar/extraction\"\n        # Maybe the `proof-libs` folder of hax is around?\n        MAYBE_PROOF_LIBS=$$(realpath -q \"$$path/../proof-libs/fstar\")\n        if [ $$? 
-eq 0 ]; then\n            print_if_exists \"$$MAYBE_PROOF_LIBS/core\"\n            print_if_exists \"$$MAYBE_PROOF_LIBS/rust_primitives\"\n        fi\n    }\n    { while IFS= read path; do\n          find_hax_libraries_at_path \"$$path\"\n      done < <(dependencies)\n    } | sort -u\nendef\nexport FINDLIBS\n\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c \"$$FINDLIBS\")\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS)\n\n.depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n\trm *.fst\n"
  },
  {
    "path": "examples/sha256/proofs/fstar/extraction/Sha256.fst",
    "content": "module Sha256\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet v_BLOCK_SIZE: usize = mk_usize 64\n\nlet v_LEN_SIZE: usize = mk_usize 8\n\nlet v_K_SIZE: usize = mk_usize 64\n\nlet v_HASH_SIZE: usize = mk_usize 256 /! mk_usize 8\n\nlet ch (x y z: u32) : u32 = (x &. y <: u32) ^. ((~.x <: u32) &. z <: u32)\n\nlet maj (x y z: u32) : u32 = (x &. y <: u32) ^. ((x &. z <: u32) ^. (y &. z <: u32) <: u32)\n\nlet v_OP_TABLE: t_Array u8 (mk_usize 12) =\n  let list =\n    [\n      mk_u8 2; mk_u8 13; mk_u8 22; mk_u8 6; mk_u8 11; mk_u8 25; mk_u8 7; mk_u8 18; mk_u8 3; mk_u8 17;\n      mk_u8 19; mk_u8 10\n    ]\n  in\n  FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 12);\n  Rust_primitives.Hax.array_of_list 12 list\n\nlet v_K_TABLE: t_Array u32 (mk_usize 64) =\n  let list =\n    [\n      mk_u32 1116352408; mk_u32 1899447441; mk_u32 3049323471; mk_u32 3921009573; mk_u32 961987163;\n      mk_u32 1508970993; mk_u32 2453635748; mk_u32 2870763221; mk_u32 3624381080; mk_u32 310598401;\n      mk_u32 607225278; mk_u32 1426881987; mk_u32 1925078388; mk_u32 2162078206; mk_u32 2614888103;\n      mk_u32 3248222580; mk_u32 3835390401; mk_u32 4022224774; mk_u32 264347078; mk_u32 604807628;\n      mk_u32 770255983; mk_u32 1249150122; mk_u32 1555081692; mk_u32 1996064986; mk_u32 2554220882;\n      mk_u32 2821834349; mk_u32 2952996808; mk_u32 3210313671; mk_u32 3336571891; mk_u32 3584528711;\n      mk_u32 113926993; mk_u32 338241895; mk_u32 666307205; mk_u32 773529912; mk_u32 1294757372;\n      mk_u32 1396182291; mk_u32 1695183700; mk_u32 1986661051; mk_u32 2177026350; mk_u32 2456956037;\n      mk_u32 2730485921; mk_u32 2820302411; mk_u32 3259730800; mk_u32 3345764771; mk_u32 3516065817;\n      mk_u32 3600352804; mk_u32 4094571909; mk_u32 275423344; mk_u32 430227734; mk_u32 506948616;\n      mk_u32 659060556; mk_u32 883997877; mk_u32 958139571; mk_u32 1322822218; mk_u32 1537002063;\n      mk_u32 1747873779; mk_u32 
1955562222; mk_u32 2024104815; mk_u32 2227730452; mk_u32 2361852424;\n      mk_u32 2428436474; mk_u32 2756734187; mk_u32 3204031479; mk_u32 3329325298\n    ]\n  in\n  FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 64);\n  Rust_primitives.Hax.array_of_list 64 list\n\nlet v_HASH_INIT: t_Array u32 (mk_usize 8) =\n  let list =\n    [\n      mk_u32 1779033703;\n      mk_u32 3144134277;\n      mk_u32 1013904242;\n      mk_u32 2773480762;\n      mk_u32 1359893119;\n      mk_u32 2600822924;\n      mk_u32 528734635;\n      mk_u32 1541459225\n    ]\n  in\n  FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 8);\n  Rust_primitives.Hax.array_of_list 8 list\n\nlet sigma (x: u32) (i op: usize) : Prims.Pure u32 (requires i <. mk_usize 4) (fun _ -> Prims.l_True) =\n  let (tmp: u32):u32 =\n    Core_models.Num.impl_u32__rotate_right x\n      (Core_models.Convert.f_into #u8\n          #u32\n          #FStar.Tactics.Typeclasses.solve\n          (v_OP_TABLE.[ (mk_usize 3 *! i <: usize) +! mk_usize 2 <: usize ] <: u8)\n        <:\n        u32)\n  in\n  let tmp:u32 =\n    if op =. mk_usize 0\n    then x >>! (v_OP_TABLE.[ (mk_usize 3 *! i <: usize) +! mk_usize 2 <: usize ] <: u8)\n    else tmp\n  in\n  let rot_val_1_:u32 =\n    Core_models.Convert.f_into #u8\n      #u32\n      #FStar.Tactics.Typeclasses.solve\n      (v_OP_TABLE.[ mk_usize 3 *! i <: usize ] <: u8)\n  in\n  let rot_val_2_:u32 =\n    Core_models.Convert.f_into #u8\n      #u32\n      #FStar.Tactics.Typeclasses.solve\n      (v_OP_TABLE.[ (mk_usize 3 *! i <: usize) +! 
mk_usize 1 <: usize ] <: u8)\n  in\n  ((Core_models.Num.impl_u32__rotate_right x rot_val_1_ <: u32) ^.\n    (Core_models.Num.impl_u32__rotate_right x rot_val_2_ <: u32)\n    <:\n    u32) ^.\n  tmp\n\nlet to_be_u32s (block: t_Array u8 (mk_usize 64))\n    : Prims.Pure (t_Array u32 (mk_usize 16))\n      Prims.l_True\n      (ensures\n        fun result ->\n          let result:t_Array u32 (mk_usize 16) = result in\n          (Core_models.Slice.impl__len #u32 (result <: t_Slice u32) <: usize) =. mk_usize 16) =\n  let out:t_Array u32 (mk_usize 16) = Rust_primitives.Hax.repeat (mk_u32 0) (mk_usize 16) in\n  let out:t_Array u32 (mk_usize 16) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (mk_usize 16)\n      (fun out temp_1_ ->\n          let out:t_Array u32 (mk_usize 16) = out in\n          let _:usize = temp_1_ in\n          true)\n      out\n      (fun out i ->\n          let out:t_Array u32 (mk_usize 16) = out in\n          let i:usize = i in\n          let block_chunk_array:u32 =\n            Core_models.Num.impl_u32__from_be_bytes (Core_models.Result.impl__unwrap #(t_Array u8\n                      (mk_usize 4))\n                  #Core_models.Array.t_TryFromSliceError\n                  (Core_models.Convert.f_try_into #(t_Slice u8)\n                      #(t_Array u8 (mk_usize 4))\n                      #FStar.Tactics.Typeclasses.solve\n                      (block.[ {\n                            Core_models.Ops.Range.f_start = i *! mk_usize 4 <: usize;\n                            Core_models.Ops.Range.f_end\n                            =\n                            (i +! mk_usize 1 <: usize) *! 
mk_usize 4 <: usize\n                          }\n                          <:\n                          Core_models.Ops.Range.t_Range usize ]\n                        <:\n                        t_Slice u8)\n                    <:\n                    Core_models.Result.t_Result (t_Array u8 (mk_usize 4))\n                      Core_models.Array.t_TryFromSliceError)\n                <:\n                t_Array u8 (mk_usize 4))\n          in\n          let out:t_Array u32 (mk_usize 16) =\n            Rust_primitives.Hax.Monomorphized_update_at.update_at_usize out i block_chunk_array\n          in\n          out)\n  in\n  out\n\nlet schedule (block: t_Array u8 (mk_usize 64)) : t_Array u32 (mk_usize 64) =\n  let b:t_Array u32 (mk_usize 16) = to_be_u32s block in\n  let s:t_Array u32 (mk_usize 64) = Rust_primitives.Hax.repeat (mk_u32 0) (mk_usize 64) in\n  let s:t_Array u32 (mk_usize 64) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      v_K_SIZE\n      (fun s i ->\n          let s:t_Array u32 (mk_usize 64) = s in\n          let i:usize = i in\n          (Core_models.Slice.impl__len #u32 (b <: t_Slice u32) <: usize) =. mk_usize 16 <: bool)\n      s\n      (fun s i ->\n          let s:t_Array u32 (mk_usize 64) = s in\n          let i:usize = i in\n          if i <. mk_usize 16 <: bool\n          then\n            let s:t_Array u32 (mk_usize 64) =\n              Rust_primitives.Hax.Monomorphized_update_at.update_at_usize s i (b.[ i ] <: u32)\n            in\n            s\n          else\n            let t16:u32 = s.[ i -! mk_usize 16 <: usize ] in\n            let t15:u32 = s.[ i -! mk_usize 15 <: usize ] in\n            let t7:u32 = s.[ i -! mk_usize 7 <: usize ] in\n            let t2:u32 = s.[ i -! 
mk_usize 2 <: usize ] in\n            let s1:u32 = sigma t2 (mk_usize 3) (mk_usize 0) in\n            let s0:u32 = sigma t15 (mk_usize 2) (mk_usize 0) in\n            let s:t_Array u32 (mk_usize 64) =\n              Rust_primitives.Hax.Monomorphized_update_at.update_at_usize s\n                i\n                (Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add\n                            s1\n                            t7\n                          <:\n                          u32)\n                        s0\n                      <:\n                      u32)\n                    t16\n                  <:\n                  u32)\n            in\n            s)\n  in\n  s\n\nlet shuffle (ws: t_Array u32 (mk_usize 64)) (hash: t_Array u32 (mk_usize 8))\n    : t_Array u32 (mk_usize 8) =\n  let hash:t_Array u32 (mk_usize 8) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      v_K_SIZE\n      (fun hash temp_1_ ->\n          let hash:t_Array u32 (mk_usize 8) = hash in\n          let _:usize = temp_1_ in\n          true)\n      hash\n      (fun hash i ->\n          let hash:t_Array u32 (mk_usize 8) = hash in\n          let i:usize = i in\n          let a0:u32 = hash.[ mk_usize 0 ] in\n          let b0:u32 = hash.[ mk_usize 1 ] in\n          let c0:u32 = hash.[ mk_usize 2 ] in\n          let d0:u32 = hash.[ mk_usize 3 ] in\n          let e0:u32 = hash.[ mk_usize 4 ] in\n          let f0:u32 = hash.[ mk_usize 5 ] in\n          let g0:u32 = hash.[ mk_usize 6 ] in\n          let (h0: u32):u32 = hash.[ mk_usize 7 ] in\n          let t1:u32 =\n            Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add\n                      (Core_models.Num.impl_u32__wrapping_add h0\n                          (sigma e0 (mk_usize 1) (mk_usize 1) <: u32)\n                        <:\n                        u32)\n                      (ch e0 
f0 g0 <: u32)\n                    <:\n                    u32)\n                  (v_K_TABLE.[ i ] <: u32)\n                <:\n                u32)\n              (ws.[ i ] <: u32)\n          in\n          let t2:u32 =\n            Core_models.Num.impl_u32__wrapping_add (sigma a0 (mk_usize 0) (mk_usize 1) <: u32)\n              (maj a0 b0 c0 <: u32)\n          in\n          let hash:t_Array u32 (mk_usize 8) =\n            Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash\n              (mk_usize 0)\n              (Core_models.Num.impl_u32__wrapping_add t1 t2 <: u32)\n          in\n          let hash:t_Array u32 (mk_usize 8) =\n            Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 1) a0\n          in\n          let hash:t_Array u32 (mk_usize 8) =\n            Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 2) b0\n          in\n          let hash:t_Array u32 (mk_usize 8) =\n            Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 3) c0\n          in\n          let hash:t_Array u32 (mk_usize 8) =\n            Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash\n              (mk_usize 4)\n              (Core_models.Num.impl_u32__wrapping_add d0 t1 <: u32)\n          in\n          let hash:t_Array u32 (mk_usize 8) =\n            Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 5) e0\n          in\n          let hash:t_Array u32 (mk_usize 8) =\n            Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 6) f0\n          in\n          let hash:t_Array u32 (mk_usize 8) =\n            Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash (mk_usize 7) g0\n          in\n          hash)\n  in\n  hash\n\nlet compress (block: t_Array u8 (mk_usize 64)) (hash: t_Array u32 (mk_usize 8))\n    : t_Array u32 (mk_usize 8) =\n  let s:t_Array u32 (mk_usize 64) = schedule block in\n  let h_in:t_Array u32 
(mk_usize 8) =\n    Core_models.Clone.f_clone #(t_Array u32 (mk_usize 8)) #FStar.Tactics.Typeclasses.solve hash\n  in\n  let hash:t_Array u32 (mk_usize 8) = shuffle s hash in\n  let hash:t_Array u32 (mk_usize 8) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (mk_usize 8)\n      (fun hash temp_1_ ->\n          let hash:t_Array u32 (mk_usize 8) = hash in\n          let _:usize = temp_1_ in\n          true)\n      hash\n      (fun hash i ->\n          let hash:t_Array u32 (mk_usize 8) = hash in\n          let i:usize = i in\n          Rust_primitives.Hax.Monomorphized_update_at.update_at_usize hash\n            i\n            (Core_models.Num.impl_u32__wrapping_add (hash.[ i ] <: u32) (h_in.[ i ] <: u32) <: u32)\n          <:\n          t_Array u32 (mk_usize 8))\n  in\n  hash\n\nlet u32s_to_be_bytes (state: t_Array u32 (mk_usize 8)) : t_Array u8 (mk_usize 32) =\n  let (out: t_Array u8 (mk_usize 32)):t_Array u8 (mk_usize 32) =\n    Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 32)\n  in\n  let out:t_Array u8 (mk_usize 32) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      v_LEN_SIZE\n      (fun out temp_1_ ->\n          let out:t_Array u8 (mk_usize 32) = out in\n          let _:usize = temp_1_ in\n          true)\n      out\n      (fun out i ->\n          let out:t_Array u8 (mk_usize 32) = out in\n          let i:usize = i in\n          let tmp:u32 = state.[ i ] in\n          let tmp:t_Array u8 (mk_usize 4) = Core_models.Num.impl_u32__to_be_bytes tmp in\n          Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n            (mk_usize 4)\n            (fun out temp_1_ ->\n                let out:t_Array u8 (mk_usize 32) = out in\n                let _:usize = temp_1_ in\n                true)\n            out\n            (fun out j ->\n                let out:t_Array u8 (mk_usize 32) = out in\n                let j:usize = j in\n                Rust_primitives.Hax.Monomorphized_update_at.update_at_usize out\n                  ((i *! 
mk_usize 4 <: usize) +! j <: usize)\n                  (tmp.[ j ] <: u8)\n                <:\n                t_Array u8 (mk_usize 32)))\n  in\n  out\n\nlet hash (msg: t_Slice u8)\n    : Prims.Pure (t_Array u8 (mk_usize 32))\n      (requires\n        (cast (Core_models.Slice.impl__len #u8 msg <: usize) <: u64) <. mk_u64 2305843009213693951)\n      (fun _ -> Prims.l_True) =\n  let h:t_Array u32 (mk_usize 8) = v_HASH_INIT in\n  let blocks:usize = (Core_models.Slice.impl__len #u8 msg <: usize) /! v_BLOCK_SIZE in\n  let h:t_Array u32 (mk_usize 8) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      blocks\n      (fun h temp_1_ ->\n          let h:t_Array u32 (mk_usize 8) = h in\n          let _:usize = temp_1_ in\n          true)\n      h\n      (fun h i ->\n          let h:t_Array u32 (mk_usize 8) = h in\n          let i:usize = i in\n          compress (Core_models.Result.impl__unwrap #(t_Array u8 (mk_usize 64))\n                #Core_models.Array.t_TryFromSliceError\n                (Core_models.Convert.f_try_into #(t_Slice u8)\n                    #(t_Array u8 (mk_usize 64))\n                    #FStar.Tactics.Typeclasses.solve\n                    (msg.[ {\n                          Core_models.Ops.Range.f_start = i *! v_BLOCK_SIZE <: usize;\n                          Core_models.Ops.Range.f_end\n                          =\n                          (i +! mk_usize 1 <: usize) *! v_BLOCK_SIZE <: usize\n                        }\n                        <:\n                        Core_models.Ops.Range.t_Range usize ]\n                      <:\n                      t_Slice u8)\n                  <:\n                  Core_models.Result.t_Result (t_Array u8 (mk_usize 64))\n                    Core_models.Array.t_TryFromSliceError)\n              <:\n              t_Array u8 (mk_usize 64))\n            h\n          <:\n          t_Array u32 (mk_usize 8))\n  in\n  let last_block_len:usize = (Core_models.Slice.impl__len #u8 msg <: usize) %! 
v_BLOCK_SIZE in\n  let (last_block: t_Array u8 (mk_usize 64)):t_Array u8 (mk_usize 64) =\n    Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 64)\n  in\n  let last_block:t_Array u8 (mk_usize 64) =\n    Rust_primitives.Hax.Monomorphized_update_at.update_at_range last_block\n      ({ Core_models.Ops.Range.f_start = mk_usize 0; Core_models.Ops.Range.f_end = last_block_len }\n        <:\n        Core_models.Ops.Range.t_Range usize)\n      (Core_models.Slice.impl__copy_from_slice #u8\n          (last_block.[ {\n                Core_models.Ops.Range.f_start = mk_usize 0;\n                Core_models.Ops.Range.f_end = last_block_len\n              }\n              <:\n              Core_models.Ops.Range.t_Range usize ]\n            <:\n            t_Slice u8)\n          (msg.[ { Core_models.Ops.Range.f_start = blocks *! v_BLOCK_SIZE <: usize }\n              <:\n              Core_models.Ops.Range.t_RangeFrom usize ]\n            <:\n            t_Slice u8)\n        <:\n        t_Slice u8)\n  in\n  let last_block:t_Array u8 (mk_usize 64) =\n    Rust_primitives.Hax.Monomorphized_update_at.update_at_usize last_block\n      last_block_len\n      (mk_u8 128)\n  in\n  let _:Prims.unit = assert (Seq.length msg * 8 < pow2 64) in\n  let len_bist:u64 = (cast (Core_models.Slice.impl__len #u8 msg <: usize) <: u64) *! mk_u64 8 in\n  let len_bist_bytes:t_Array u8 (mk_usize 8) = Core_models.Num.impl_u64__to_be_bytes len_bist in\n  let h, last_block:(t_Array u32 (mk_usize 8) & t_Array u8 (mk_usize 64)) =\n    if last_block_len <. (v_BLOCK_SIZE -! 
v_LEN_SIZE <: usize)\n    then\n      let last_block:t_Array u8 (mk_usize 64) =\n        Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n          v_LEN_SIZE\n          (fun last_block temp_1_ ->\n              let last_block:t_Array u8 (mk_usize 64) = last_block in\n              let _:usize = temp_1_ in\n              true)\n          last_block\n          (fun last_block i ->\n              let last_block:t_Array u8 (mk_usize 64) = last_block in\n              let i:usize = i in\n              Rust_primitives.Hax.Monomorphized_update_at.update_at_usize last_block\n                ((v_BLOCK_SIZE -! v_LEN_SIZE <: usize) +! i <: usize)\n                (len_bist_bytes.[ i ] <: u8)\n              <:\n              t_Array u8 (mk_usize 64))\n      in\n      let h:t_Array u32 (mk_usize 8) = compress last_block h in\n      h, last_block <: (t_Array u32 (mk_usize 8) & t_Array u8 (mk_usize 64))\n    else\n      let (pad_block: t_Array u8 (mk_usize 64)):t_Array u8 (mk_usize 64) =\n        Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 64)\n      in\n      let pad_block:t_Array u8 (mk_usize 64) =\n        Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n          v_LEN_SIZE\n          (fun pad_block temp_1_ ->\n              let pad_block:t_Array u8 (mk_usize 64) = pad_block in\n              let _:usize = temp_1_ in\n              true)\n          pad_block\n          (fun pad_block i ->\n              let pad_block:t_Array u8 (mk_usize 64) = pad_block in\n              let i:usize = i in\n              Rust_primitives.Hax.Monomorphized_update_at.update_at_usize pad_block\n                ((v_BLOCK_SIZE -! v_LEN_SIZE <: usize) +! 
i <: usize)\n                (len_bist_bytes.[ i ] <: u8)\n              <:\n              t_Array u8 (mk_usize 64))\n      in\n      let h:t_Array u32 (mk_usize 8) = compress last_block h in\n      let h:t_Array u32 (mk_usize 8) = compress pad_block h in\n      h, last_block <: (t_Array u32 (mk_usize 8) & t_Array u8 (mk_usize 64))\n  in\n  u32s_to_be_bytes h\n\nlet sha256 (msg: t_Slice u8)\n    : Prims.Pure (t_Array u8 (mk_usize 32))\n      (requires\n        (cast (Core_models.Slice.impl__len #u8 msg <: usize) <: u64) <. mk_u64 2305843009213693951)\n      (fun _ -> Prims.l_True) = hash msg\n"
  },
  {
    "path": "examples/sha256/src/sha256.rs",
    "content": "use std::convert::TryInto;\n\nconst BLOCK_SIZE: usize = 64;\nconst LEN_SIZE: usize = 8;\npub const K_SIZE: usize = 64;\npub const HASH_SIZE: usize = 256 / 8;\n\npub type Block = [u8; BLOCK_SIZE];\npub type OpTableType = [u8; 12];\npub type Sha256Digest = [u8; HASH_SIZE];\npub type RoundConstantsTable = [u32; K_SIZE];\npub type Hash = [u32; LEN_SIZE];\n\npub fn ch(x: u32, y: u32, z: u32) -> u32 {\n    (x & y) ^ ((!x) & z)\n}\n\npub fn maj(x: u32, y: u32, z: u32) -> u32 {\n    (x & y) ^ ((x & z) ^ (y & z))\n}\n\nconst OP_TABLE: OpTableType = [2, 13, 22, 6, 11, 25, 7, 18, 3, 17, 19, 10];\n\n#[rustfmt::skip]\nconst K_TABLE: RoundConstantsTable = [\n        0x428a_2f98u32, 0x7137_4491u32, 0xb5c0_fbcfu32, 0xe9b5_dba5u32, 0x3956_c25bu32,\n        0x59f1_11f1u32, 0x923f_82a4u32, 0xab1c_5ed5u32, 0xd807_aa98u32, 0x1283_5b01u32,\n        0x2431_85beu32, 0x550c_7dc3u32, 0x72be_5d74u32, 0x80de_b1feu32, 0x9bdc_06a7u32,\n        0xc19b_f174u32, 0xe49b_69c1u32, 0xefbe_4786u32, 0x0fc1_9dc6u32, 0x240c_a1ccu32,\n        0x2de9_2c6fu32, 0x4a74_84aau32, 0x5cb0_a9dcu32, 0x76f9_88dau32, 0x983e_5152u32,\n        0xa831_c66du32, 0xb003_27c8u32, 0xbf59_7fc7u32, 0xc6e0_0bf3u32, 0xd5a7_9147u32,\n        0x06ca_6351u32, 0x1429_2967u32, 0x27b7_0a85u32, 0x2e1b_2138u32, 0x4d2c_6dfcu32,\n        0x5338_0d13u32, 0x650a_7354u32, 0x766a_0abbu32, 0x81c2_c92eu32, 0x9272_2c85u32,\n        0xa2bf_e8a1u32, 0xa81a_664bu32, 0xc24b_8b70u32, 0xc76c_51a3u32, 0xd192_e819u32,\n        0xd699_0624u32, 0xf40e_3585u32, 0x106a_a070u32, 0x19a4_c116u32, 0x1e37_6c08u32,\n        0x2748_774cu32, 0x34b0_bcb5u32, 0x391c_0cb3u32, 0x4ed8_aa4au32, 0x5b9c_ca4fu32,\n        0x682e_6ff3u32, 0x748f_82eeu32, 0x78a5_636fu32, 0x84c8_7814u32, 0x8cc7_0208u32,\n        0x90be_fffau32, 0xa450_6cebu32, 0xbef9_a3f7u32, 0xc671_78f2u32\n    ];\n\nconst HASH_INIT: Hash = [\n    0x6a09e667u32,\n    0xbb67ae85u32,\n    0x3c6ef372u32,\n    0xa54ff53au32,\n    0x510e527fu32,\n    0x9b05688cu32,\n    0x1f83d9abu32,\n    
0x5be0cd19u32,\n];\n\n#[hax_lib::requires(i < 4)]\npub fn sigma(x: u32, i: usize, op: usize) -> u32 {\n    let mut tmp: u32 = x.rotate_right(OP_TABLE[3 * i + 2].into());\n    if op == 0 {\n        tmp = x >> OP_TABLE[3 * i + 2]\n    }\n    let rot_val_1 = OP_TABLE[3 * i].into();\n    let rot_val_2 = OP_TABLE[3 * i + 1].into();\n    x.rotate_right(rot_val_1) ^ x.rotate_right(rot_val_2) ^ tmp\n}\n\n#[hax_lib::ensures(|result| result.len() == 16)]\nfn to_be_u32s(block: Block) -> [u32; 16] {\n    let mut out = [0u32; 16];\n    for i in 0..16 {\n        let block_chunk_array = u32::from_be_bytes(block[i * 4..(i + 1) * 4].try_into().unwrap());\n        out[i] = block_chunk_array;\n    }\n    out\n}\n\npub fn schedule(block: Block) -> RoundConstantsTable {\n    let b = to_be_u32s(block);\n    let mut s = [0; K_SIZE];\n    for i in 0..K_SIZE {\n        hax_lib::loop_invariant!(|i: usize| b.len() == 16);\n        if i < 16 {\n            s[i] = b[i];\n        } else {\n            let t16 = s[i - 16];\n            let t15 = s[i - 15];\n            let t7 = s[i - 7];\n            let t2 = s[i - 2];\n            let s1 = sigma(t2, 3, 0);\n            let s0 = sigma(t15, 2, 0);\n            s[i] = s1.wrapping_add(t7).wrapping_add(s0).wrapping_add(t16);\n        }\n    }\n    s\n}\n\npub fn shuffle(ws: RoundConstantsTable, hash: &mut Hash) {\n    for i in 0..K_SIZE {\n        let a0 = hash[0];\n        let b0 = hash[1];\n        let c0 = hash[2];\n        let d0 = hash[3];\n        let e0 = hash[4];\n        let f0 = hash[5];\n        let g0 = hash[6];\n        let h0: u32 = hash[7];\n\n        let t1 = h0\n            .wrapping_add(sigma(e0, 1, 1))\n            .wrapping_add(ch(e0, f0, g0))\n            .wrapping_add(K_TABLE[i])\n            .wrapping_add(ws[i]);\n        let t2 = sigma(a0, 0, 1).wrapping_add(maj(a0, b0, c0));\n\n        hash[0] = t1.wrapping_add(t2);\n        hash[1] = a0;\n        hash[2] = b0;\n        hash[3] = c0;\n        hash[4] = d0.wrapping_add(t1);\n 
       hash[5] = e0;\n        hash[6] = f0;\n        hash[7] = g0;\n    }\n}\n\npub fn compress(block: Block, hash: &mut Hash) {\n    let s = schedule(block);\n    let h_in = hash.clone();\n    shuffle(s, hash);\n    for i in 0..8 {\n        hash[i] = hash[i].wrapping_add(h_in[i]);\n    }\n}\n\nfn u32s_to_be_bytes(state: Hash) -> Sha256Digest {\n    let mut out: Sha256Digest = [0u8; HASH_SIZE];\n    for i in 0..LEN_SIZE {\n        let tmp = state[i];\n        let tmp = tmp.to_be_bytes();\n        for j in 0..4 {\n            out[i * 4 + j] = tmp[j];\n        }\n    }\n    out\n}\n\n#[hax_lib::requires((msg.len() as u64) < 0x1fffffffffffffff)]\npub fn hash(msg: &[u8]) -> Sha256Digest {\n    let mut h = HASH_INIT;\n    let blocks = msg.len() / BLOCK_SIZE;\n    for i in 0..blocks {\n        compress(\n            msg[i * BLOCK_SIZE..(i + 1) * BLOCK_SIZE]\n                .try_into()\n                .unwrap(),\n            &mut h,\n        );\n    }\n\n    let last_block_len = msg.len() % BLOCK_SIZE;\n    let mut last_block: Block = [0; BLOCK_SIZE];\n    last_block[0..last_block_len].copy_from_slice(&msg[blocks * BLOCK_SIZE..]);\n    last_block[last_block_len] = 0x80;\n    hax_lib::fstar!(\"assert(Seq.length msg * 8 < pow2 64)\");\n    let len_bist = msg.len() as u64 * 8;\n    let len_bist_bytes = len_bist.to_be_bytes();\n    if last_block_len < BLOCK_SIZE - LEN_SIZE {\n        for i in 0..LEN_SIZE {\n            last_block[BLOCK_SIZE - LEN_SIZE + i] = len_bist_bytes[i];\n        }\n        compress(last_block, &mut h);\n    } else {\n        let mut pad_block: Block = [0; BLOCK_SIZE];\n        for i in 0..LEN_SIZE {\n            pad_block[BLOCK_SIZE - LEN_SIZE + i] = len_bist_bytes[i];\n        }\n        compress(last_block, &mut h);\n        compress(pad_block, &mut h);\n    }\n\n    u32s_to_be_bytes(h)\n}\n\n#[hax_lib::requires((msg.len() as u64) < 0x1fffffffffffffff)]\npub fn sha256(msg: &[u8]) -> Sha256Digest {\n    hash(msg)\n}\n"
  },
  {
    "path": "examples/sha256/tests/test_sha256.rs",
    "content": "use std::num::ParseIntError;\n\nuse sha256::*;\n\nfn hex_string_to_vec(s: &str) -> Vec<u8> {\n    debug_assert!(s.len() % core::mem::size_of::<u8>() == 0);\n    let b: Result<Vec<u8>, ParseIntError> = (0..s.len())\n        .step_by(2)\n        .map(|i| u8::from_str_radix(&s[i..i + 2], 16).map(<u8>::from))\n        .collect();\n    b.expect(\"Error parsing hex string\")\n}\n\n#[test]\nfn test_sha256_kat() {\n    let msg = hex_string_to_vec(\"686163737065632072756c6573\");\n    let expected_256 =\n        hex_string_to_vec(\"b37db5ed72c97da3b2579537afbc3261ed3d5a56f57b3d8e5c1019ae35929964\");\n    let digest = hash(&msg);\n    println!(\"{:?}\", expected_256);\n    println!(\"{:x?}\", digest);\n    assert_eq!(expected_256, digest);\n\n    let msg = hex_string_to_vec(\"6861637370656320697320612070726f706f73616c20666f722061206e65772073706563696669636174696f6e206c616e677561676520666f722063727970746f207072696d69746976657320746861742069732073756363696e63742c2074686174206973206561737920746f207265616420616e6420696d706c656d656e742c20616e642074686174206c656e647320697473656c6620746f20666f726d616c20766572696669636174696f6e2e\");\n    let expected_256 =\n        hex_string_to_vec(\"348ef044446d56e05210361af5a258588ad31765f446bf4cb3b67125a187a64a\");\n    let digest = hash(&msg);\n    println!(\"{:?}\", expected_256);\n    println!(\"{:x?}\", digest);\n    assert_eq!(expected_256, digest);\n}\n\n#[test]\nfn empty_input() {\n    const SHA256_EMPTY: Sha256Digest = [\n        0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14, 0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9,\n        0x24, 0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c, 0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52,\n        0xb8, 0x55,\n    ];\n    assert_eq!(hash(&vec![]), SHA256_EMPTY);\n}\n"
  },
  {
    "path": "flake.nix",
    "content": "{\n  inputs = {\n    nixpkgs.url = \"github:nixos/nixpkgs/nixos-25.05\";\n    flake-utils.url = \"github:numtide/flake-utils\";\n    crane = { url = \"github:ipetkov/crane\"; };\n    rust-overlay = {\n      url = \"github:oxalica/rust-overlay\";\n      inputs.nixpkgs.follows = \"nixpkgs\";\n    };\n    fstar.url = \"github:FStarLang/FStar/v2025.10.06\";\n    hacl-star = {\n      url = \"github:hacl-star/hacl-star\";\n      flake = false;\n    };\n    rust-by-examples = {\n      url = \"github:rust-lang/rust-by-example\";\n      flake = false;\n    };\n  };\n\n  outputs =\n    { flake-utils, nixpkgs, rust-overlay, crane, hacl-star, ... }@inputs:\n    flake-utils.lib.eachDefaultSystem (system:\n      let\n        pkgs = import nixpkgs {\n          inherit system;\n          overlays = [ rust-overlay.overlays.default ];\n        };\n        toolchain =\n          (fromTOML (pkgs.lib.readFile ./rust-toolchain.toml)).toolchain;\n        rustc = pkgs.rust-bin.fromRustupToolchain toolchain;\n        rustc-docs = (let\n          # Only x86 linux has the component rustc-docs, see https://github.com/nix-community/fenix/issues/51\n          # system = \"x86_64-linux\";\n          n = toolchain // {\n            components = toolchain.components ++ [ \"rustc-docs\" ];\n          };\n          rustc = builtins.trace n.components\n            ((pkgs.rust-bin.fromRustupToolchain n).override {\n              targets = [ \"x86_64-unknown-linux-gnu\" ];\n            });\n        in rustc);\n        craneLib = (crane.mkLib pkgs).overrideToolchain rustc;\n        rustfmt = pkgs.rustfmt;\n        fstar = inputs.fstar.packages.${system}.default;\n        hax-env-file = pkgs.writeText \"hax-env-file\" ''\n          HAX_PROOF_LIBS_HOME=\"${./proof-libs/fstar}\"\n          HAX_LIBS_HOME=\"${./hax-lib}\"/proofs/fstar/extraction\n          HACL_HOME=\"${hacl-star}\"\n        '';\n        hax-env = pkgs.writeScriptBin \"hax-env\" ''\n          if [[ \"$1\" == \"no-export\" ]]; 
then\n            cat \"${hax-env-file}\"\n          else\n            cat \"${hax-env-file}\" | xargs -I{} echo \"export {}\"\n          fi\n        '';\n        ocamlPackages = pkgs.ocamlPackages;\n        ocamlformat = ocamlPackages.ocamlformat_0_27_0;\n        proverif = pkgs.proverif.overrideDerivation\n          (_: { patches = [ examples/proverif-psk/pv_div_by_zero_fix.diff ]; });\n      in rec {\n        packages = {\n          inherit rustc ocamlformat rustfmt fstar hax-env rustc-docs proverif;\n          docs = pkgs.python312Packages.callPackage ./docs {\n            hax-frontend-docs = packages.hax-rust-frontend.docs;\n          };\n          hax-engine = pkgs.callPackage ./engine {\n            hax-rust-frontend = packages.hax-rust-frontend.unwrapped;\n            # `hax-engine-names-extract` extracts Rust names but also\n            # some informations about `impl`s when names are `impl`\n            # blocks. That includes some span information, which\n            # includes full paths to Rust sources. Sometimes those\n            # Rust sources happens to be in the Nix store. 
That\n            # creates useless dependencies, this wrapper below takes\n            # care of removing those extra depenedencies.\n            hax-engine-names-extract =\n              pkgs.writeScriptBin \"hax-engine-names-extract\" ''\n                #!${pkgs.stdenv.shell}\n                ${packages.hax-rust-frontend.hax-engine-names-extract}/bin/hax-engine-names-extract | sed 's|/nix/store/\\(.\\{6\\}\\)|/nix_store/\\1-|g'\n              '';\n            inherit rustc ocamlPackages;\n          };\n          hax-rust-frontend = pkgs.callPackage ./cli {\n            inherit rustc craneLib rustc-docs;\n            inherit (packages) hax-engine;\n          };\n          hax = packages.hax-rust-frontend;\n          default = packages.hax;\n\n          check-toolchain = checks.toolchain;\n          check-examples = checks.examples;\n          check-coq-coverage = checks.coverage;\n          check-readme-coherency = checks.readme-coherency;\n\n          rust-by-example-hax-extraction = pkgs.stdenv.mkDerivation {\n            name = \"rust-by-example-hax-extraction\";\n            phases = [ \"installPhase\" ];\n            buildInputs = [ packages.hax pkgs.cargo ];\n            installPhase = ''\n              cp --no-preserve=mode -rf ${inputs.rust-by-examples} workdir\n              cd workdir\n              ${pkgs.nodejs}/bin/node ${./.utils/rust-by-example.js}\n              mv rust-by-examples-crate/proofs $out\n            '';\n          };\n\n          # The commit that corresponds to our nightly pin, helpful when updating rusrc.\n          toolchain_commit = pkgs.runCommand \"hax-toolchain-commit\" { } ''\n            # This is sad but I don't know a better way.\n            cat ${rustc}/share/doc/rust/html/version_info.html \\\n              | grep 'github.com' \\\n              | sed 's#.*\"https://github.com/rust-lang/rust/commit/\\([^\"]*\\)\".*#\\1#' \\\n              > $out\n          '';\n        };\n        checks = {\n          toolchain = 
packages.hax.tests;\n          examples = pkgs.callPackage ./examples {\n            inherit (packages) hax;\n            inherit craneLib fstar hacl-star hax-env;\n          };\n          coverage = pkgs.callPackage ./examples/coverage {\n            inherit (packages) hax;\n            inherit craneLib;\n            coqPackages = pkgs.coqPackages_8_19;\n          };\n          readme-coherency =\n            let src = pkgs.lib.sourceFilesBySuffices ./. [ \".md\" ];\n            in pkgs.stdenv.mkDerivation {\n              name = \"readme-coherency\";\n              inherit src;\n              buildPhase = ''\n                ${apps.replace-fstar-versions-md.program}\n                diff -r . ${src}\n              '';\n              installPhase = \"touch $out\";\n            };\n        };\n        apps = {\n          replace-fstar-versions-md = {\n            type = \"app\";\n            program = \"${pkgs.writeScript \"replace-fstar-versions-md\" ''\n              #!${pkgs.bash}/bin/bash\n              FSTAR_VERSION=$(cat ${\n                ./flake.lock\n              } | ${pkgs.jq}/bin/jq '.nodes.fstar.original.ref' -r)\n              ${pkgs.fd}/bin/fd \\\n                 -X ${pkgs.sd}/bin/sd '`.*?`(<!---FSTAR_VERSION-->)' '`'\"$FSTAR_VERSION\"'`$1' **/*.md \\\n                 \";\" --glob '*.md'\n            ''}\";\n          };\n          serve-rustc-docs = {\n            type = \"app\";\n            program = \"${pkgs.writeScript \"serve-rustc-docs\" ''\n              #!${pkgs.bash}/bin/bash\n              cd ${rustc-docs}/share/doc/rust/html/rustc\n              ${pkgs.python3}/bin/python -m http.server \"$@\"\n            ''}\";\n          };\n          serve-docs = {\n            type = \"app\";\n            program = \"${pkgs.writeScript \"serve-docs\" ''\n              #!${pkgs.bash}/bin/bash\n              cd ${packages.docs}\n              ${pkgs.python3}/bin/python -m http.server \"$@\"\n            ''}\";\n          };\n        };\n        
devShells = let\n          inputsFrom = [\n            packages.hax-rust-frontend.unwrapped\n            # `hax-engine`'s build requires `hax-rust-frontend` and\n            # `hax-engine-names-extract`, but in a dev environment,\n            # those two packages are supposed to be built locally,\n            # thus we kill them here\n            (packages.hax-engine.override {\n              hax-rust-frontend = pkgs.hello;\n              hax-engine-names-extract = pkgs.hello;\n            })\n            packages.docs\n          ];\n          utils = pkgs.stdenv.mkDerivation {\n            name = \"hax-dev-scripts\";\n            phases = [ \"installPhase\" ];\n            installPhase = ''\n              mkdir -p $out/bin\n              cp ${./.utils/rebuild.sh} $out/bin/rebuild\n            '';\n          };\n          defaultPackages = [\n            ocamlformat\n            ocamlPackages.ocaml-lsp\n            ocamlPackages.ocamlformat-rpc-lib\n            ocamlPackages.ocaml-print-intf\n            ocamlPackages.odoc\n            ocamlPackages.utop\n\n            pkgs.just\n            pkgs.cargo-expand\n            pkgs.cargo-release\n            pkgs.cargo-insta\n            pkgs.openssl.dev\n            pkgs.libz.dev\n            pkgs.pkg-config\n            pkgs.rust-analyzer\n            pkgs.toml2json\n            rustfmt\n            utils\n\n            pkgs.go-grip\n          ];\n          LIBCLANG_PATH = \"${pkgs.llvmPackages.libclang.lib}/lib\";\n          DYLD_LIBRARY_PATH = pkgs.lib.makeLibraryPath [ pkgs.libz rustc ];\n        in {\n          examples = pkgs.mkShell {\n            inherit inputsFrom LIBCLANG_PATH DYLD_LIBRARY_PATH;\n            HACL_HOME = \"${hacl-star}\";\n            shellHook = ''\n              HAX_ROOT=$(git rev-parse --show-toplevel)\n              export HAX_PROOF_LIBS_HOME=\"$HAX_ROOT/proof-libs/fstar\"\n              export HAX_LIBS_HOME=\"$HAX_ROOT/hax-lib\"\n            '';\n            packages = defaultPackages ++ 
[ fstar pkgs.proverif ];\n          };\n          ci-examples = pkgs.mkShell {\n            shellHook = ''\n              eval $(hax-env)\n              export CACHE_DIR=$(mktemp -d)\n              export HINT_DIR=$(mktemp -d)\n              export SHELL=${pkgs.bash}/bin/bash\n            '';\n            packages = [\n              packages.hax\n              packages.hax-env\n              packages.fstar\n              packages.proverif\n              pkgs.jq\n              pkgs.elan\n            ];\n          };\n          default = pkgs.mkShell {\n            inherit inputsFrom LIBCLANG_PATH DYLD_LIBRARY_PATH;\n            packages = defaultPackages;\n          };\n          fstar = pkgs.mkShell {\n            inherit inputsFrom LIBCLANG_PATH DYLD_LIBRARY_PATH;\n            shellHook = ''\n              export HAX_HOME=$(git rev-parse --show-toplevel)\n              export FSTAR_HOME=\"${fstar}\"\n            '';\n            packages = defaultPackages ++ [ fstar ];\n          };\n        };\n      });\n}\n"
  },
  {
    "path": "frontend/exporter/Cargo.toml",
    "content": "[package]\nname = \"hax-frontend-exporter\"\nedition = \"2024\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nrepository.workspace = true\nreadme.workspace = true\ndescription = \"Provides mirrors of the algebraic data types used in the Rust compilers, removing indirections and inlining various pieces of information.\"\n\n[package.metadata.rust-analyzer]\nrustc_private=true\n\n[dependencies]\nhax-adt-into.workspace = true\nserde.workspace = true\nserde_json.workspace = true\nschemars.workspace = true\nitertools.workspace = true\nhax-frontend-exporter-options.workspace = true\ntracing.workspace = true\npaste = \"1.0.11\"\nextension-traits = \"1.0.1\"\nlazy_static = \"1.4.0\"\n\n[features]\ndefault = [\"rustc\"]\nextract_names_mode = []\n# Enables the conversion bridges from rustc types (and AST) to the\n# ones defined in this crate. Enabling `rustc` adds a dependency to\n# `librustc_driver`.\nrustc = []\n"
  },
  {
    "path": "frontend/exporter/README.md",
    "content": "# Special core extraction mode\nFor now, the frontend is sensible to the `HAX_CORE_EXTRACTION_MODE`\nvariable environment that enables a special mode.\n"
  },
  {
    "path": "frontend/exporter/adt-into/.gitignore",
    "content": "/target\n/Cargo.lock\n"
  },
  {
    "path": "frontend/exporter/adt-into/Cargo.toml",
    "content": "[package]\nname = \"hax-adt-into\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\ndescription = \"Provides the `adt_into` procedural macro, allowing for mirroring data types with small variations.\"\n\n[lib]\nproc-macro = true\n\n[dependencies]\nitertools.workspace = true\nsyn.workspace = true\nproc-macro2 = \"1.0\"\nquote = \"1.0\"\n\n[dev-dependencies]\ntracing.workspace = true\n"
  },
  {
    "path": "frontend/exporter/adt-into/README.md",
    "content": "# hax adt into\n\nThis crate provides the `adt_into` procedural macro, allowing for\nmirroring data types with small variations.\n\nThis crate is used by the frontend of hax, where we need to mirror a\nbig part of the data types defined by the Rust compiler. While the\nabstract syntax trees (ASTs) from the Rust compiler expose a lot of\nindirections (identifiers one should lookup, additional informations\nreachable only via interactive queries), hax exposes the same ASTs,\nremoving indirections and inlining additional informations.\n\nThe `adt_into` derive macro can be used on `struct`s and `enum`s. `adt_into` then looks for another `#[args(<GENERICS>, from: FROM_TYPE, state: STATE_TYPE as SOME_NAME)]` attribute. Such an attribute means that the `struct` or `enum` mirrors the type `FROM_TYPE`, and that the transformation is carried along with a state of type `STATE_TYPE` that will be accessible via the name `SOME_NAME`.\n\nAn example is available in the `tests` folder.\n"
  },
  {
    "path": "frontend/exporter/adt-into/src/lib.rs",
    "content": "use quote::quote;\nuse quote::quote_spanned;\nuse syn::Token;\nuse syn::parse::ParseStream;\nuse syn::{Data, DeriveInput, Generics, parse_macro_input};\nuse syn::{PathArguments, PathSegment, spanned::Spanned};\n\nfn strip_parenthesis(tokens: proc_macro::TokenStream) -> Option<proc_macro::TokenStream> {\n    match tokens.into_iter().collect::<Vec<_>>().as_slice() {\n        [proc_macro::TokenTree::Group(token)] => Some(token.stream()),\n        _ => None,\n    }\n}\n\n#[derive(Debug)]\nstruct Options {\n    generics: Generics,\n    from: syn::TypePath,\n    state: syn::Ident,\n    state_type: syn::Type,\n    where_clause: Option<syn::WhereClause>,\n}\nmod option_parse {\n    use super::*;\n    mod kw {\n        syn::custom_keyword!(from);\n        syn::custom_keyword!(state);\n    }\n    impl syn::parse::Parse for Options {\n        fn parse(input: ParseStream) -> syn::Result<Self> {\n            let generics = input.parse()?;\n            input.parse::<Token![,]>()?;\n\n            input.parse::<kw::from>()?;\n            input.parse::<Token![:]>()?;\n            let from = input.parse()?;\n            input.parse::<Token![,]>()?;\n\n            input.parse::<kw::state>()?;\n            input.parse::<Token![:]>()?;\n            let state_type = input.parse()?;\n            input.parse::<Token![as]>()?;\n            let state = input.parse()?;\n\n            let mut where_clause = None;\n            if input.peek(Token![,]) && input.peek2(Token![where]) {\n                input.parse::<Token![,]>()?;\n                where_clause = Some(input.parse()?);\n            }\n\n            Ok(Options {\n                generics,\n                from,\n                state,\n                state_type,\n                where_clause,\n            })\n        }\n    }\n}\n\n/// Returns the token stream corresponding to an attribute (if it\n/// exists), stripping parenthesis already.\nfn tokens_of_attrs<'a>(\n    attr_name: &'a str,\n    attrs: &'a 
Vec<syn::Attribute>,\n) -> impl Iterator<Item = proc_macro2::TokenStream> + 'a {\n    attrs\n        .iter()\n        .filter(|attr| attr.path.is_ident(attr_name))\n        .map(|attr| attr.clone().tokens.into())\n        .flat_map(strip_parenthesis)\n        .map(|x| x.into())\n}\n\nfn parse_attrs<'a, T: syn::parse::Parse>(\n    attr_name: &'a str,\n    attrs: &'a Vec<syn::Attribute>,\n) -> impl Iterator<Item = T> + 'a {\n    tokens_of_attrs(attr_name, attrs).map(move |x| {\n        syn::parse::<T>(x.clone().into())\n            .expect(format!(\"expected attribtue {}\", attr_name).as_str())\n    })\n}\n\n/// Parse an attribute as a T if it exists.\nfn parse_attr<T: syn::parse::Parse>(attr_name: &str, attrs: &Vec<syn::Attribute>) -> Option<T> {\n    parse_attrs(attr_name, attrs).next()\n}\n\n/*\nTODO: add `ensure_no_attr` calls to forbid meaningless attributes\nfn ensure_no_attr(context: &str, attr: &str, attrs: &Vec<syn::Attribute>) {\n    if attrs.iter().any(|a| a.path.is_ident(attr)) {\n        panic!(\"Illegal attribute {} {}\", attr, context)\n    }\n}\n*/\n\n/// Create a match arm that corresponds to a given set of fields.\n/// This can be used for named fields as well as unnamed ones.\nfn fields_to_arm(\n    from_record_name: proc_macro2::TokenStream,\n    to_record_name: proc_macro2::TokenStream,\n    fields: Vec<syn::Field>,\n    full_span: proc_macro2::Span,\n    prepend: proc_macro2::TokenStream,\n    used_fields: Vec<syn::Ident>,\n    state: syn::Ident,\n) -> proc_macro2::TokenStream {\n    if fields.is_empty() {\n        return quote_spanned! 
{full_span=> #from_record_name => #to_record_name, };\n    }\n\n    let is_struct = fields.iter().any(|f| f.ident.is_some());\n    let is_tuple = fields.iter().any(|f| f.ident.is_none());\n    if is_tuple && is_struct {\n        panic!(\"Impossibe: variant with both named and unamed fields\")\n    }\n\n    let data = fields.iter().enumerate().map(|(i, field)| {\n        let attrs = &field.attrs;\n        let name_destination = field.ident.clone().unwrap_or(syn::Ident::new(\n            format!(\"value_{}\", i).as_str(),\n            field.span(),\n        ));\n        let span = field.span();\n        let field_name_span = field.clone().ident.map(|x| x.span()).unwrap_or(span);\n        let name_source =\n            parse_attr::<syn::Ident>(\"from\", attrs).unwrap_or(name_destination.clone());\n        let value = parse_attr::<syn::Expr>(\"value\", attrs);\n        let not_in_source =\n            value.is_some() ||\n            attrs.iter().any(|attr| attr.path.is_ident(\"not_in_source\"));\n        let typ = &field.ty;\n        let point = syn::Ident::new(\"x\", field_name_span);\n\n        let translation = parse_attr::<syn::Expr>(\"map\", attrs).or(value).unwrap_or(\n            syn::parse::<syn::Expr>((quote_spanned! {typ.span()=> #point.sinto(#state)}).into())\n                .expect(\"Could not default [translation]\")\n        );\n        let mapped_value = if not_in_source {\n            quote_spanned! {span=> {#translation}}\n        } else {\n            quote_spanned! {span=> {#[allow(unused_variables)] let #point = #name_source; #translation}}\n        };\n\n        let prefix = if is_struct {\n            quote_spanned! {field_name_span=> #name_destination:}\n        } else {\n            quote! {}\n        };\n        (\n            if not_in_source {\n                quote! {}\n            } else {\n                quote_spanned! {span=> #name_source, }\n            },\n            quote_spanned! 
{span=> #prefix #mapped_value, },\n        )\n    });\n\n    let bindings: proc_macro2::TokenStream = data\n        .clone()\n        .map(|(x, _)| x)\n        .chain(used_fields.iter().map(|f| quote! {#f,}))\n        .collect();\n    let fields: proc_macro2::TokenStream = data.clone().map(|(_, x)| x).collect();\n\n    if is_struct {\n        quote_spanned! {full_span=> #from_record_name { #bindings .. } => {#prepend #to_record_name { #fields }}, }\n    } else {\n        quote_spanned! {full_span=> #from_record_name ( #bindings ) => {#prepend #to_record_name ( #fields )}, }\n    }\n}\n\n/// Extracts a vector of Field out of a Fields.\n/// This function discard the Unnamed / Named variants.\nfn field_vec_of_fields(fields: syn::Fields) -> Vec<syn::Field> {\n    match fields {\n        syn::Fields::Unit => vec![],\n        syn::Fields::Named(syn::FieldsNamed { named: fields, .. })\n        | syn::Fields::Unnamed(syn::FieldsUnnamed {\n            unnamed: fields, ..\n        }) => fields.into_iter().collect(),\n    }\n}\n\n/// Given a variant, produce a match arm.\nfn variant_to_arm(\n    typ_from: proc_macro2::TokenStream,\n    typ_to: proc_macro2::TokenStream,\n    variant: syn::Variant,\n    state: syn::Ident,\n) -> proc_macro2::TokenStream {\n    let attrs = &variant.attrs;\n    let to_variant = variant.clone().ident;\n    if attrs.iter().any(|attr| attr.path.is_ident(\"todo\")) {\n        return quote!();\n    }\n\n    let disable_mapping = attrs\n        .iter()\n        .any(|attr| attr.path.is_ident(\"disable_mapping\"));\n    let custom_arm = tokens_of_attrs(\"custom_arm\", attrs).next();\n    // TODO: either complete map or drop it\n    let map = parse_attr::<syn::Expr>(\"map\", attrs);\n    // ensure_no_attr(\n    //     format!(\"on the variant {}::{}\", typ_to, to_variant).as_str(),\n    //     \"map\",\n    //     attrs,\n    // );\n    let from_variant = parse_attr::<syn::Ident>(\"from\", attrs);\n\n    if disable_mapping && (map.is_some() || 
custom_arm.is_some() || from_variant.is_some()) {\n        println!(\"Warning: `disable_mapping` makes `map`, `custom_arm` and `from_variant` inert\")\n    }\n    if custom_arm.is_some() && (map.is_some() || from_variant.is_some()) {\n        println!(\"Warning: `custom_arm` makes `map` and `from` inert\")\n    }\n\n    if disable_mapping {\n        return quote! {};\n    }\n    if let Some(custom_arm) = custom_arm {\n        return custom_arm.into();\n    }\n\n    let from_variant = from_variant.unwrap_or(to_variant.clone());\n\n    let to_variant = quote! { #typ_to::#to_variant };\n    let from_variant = quote! { #typ_from::#from_variant };\n\n    let fields = field_vec_of_fields(variant.clone().fields);\n\n    if let Some(map) = map {\n        let names: proc_macro2::TokenStream = fields\n            .iter()\n            .filter(|f| {\n                let attrs = &f.attrs;\n                !(parse_attr::<syn::Expr>(\"value\", attrs).is_some()\n                    || attrs.iter().any(|attr| attr.path.is_ident(\"not_in_source\")))\n            })\n            .enumerate()\n            .map(|(nth, f)| {\n                f.clone()\n                    .ident\n                    .unwrap_or(syn::Ident::new(format!(\"x{}\", nth).as_str(), f.span()))\n            })\n            .map(|name| quote! {#name, })\n            .collect();\n        if fields.iter().any(|f| f.ident.is_some()) {\n            quote_spanned!(variant.span()=> #from_variant {#names ..} => #map,)\n        } else {\n            quote_spanned!(variant.span()=> #from_variant (#names) => #map,)\n        }\n    } else {\n        fields_to_arm(\n            from_variant,\n            to_variant,\n            fields,\n            variant.span(),\n            tokens_of_attrs(\"prepend\", attrs).collect(),\n            parse_attrs(\"use_field\", attrs).collect(),\n            state,\n        )\n    }\n}\n\n/// [`AdtInto`] derives a\n/// [`SInto`](../hax_frontend_exporter/trait.SInto.html)\n/// instance. 
This helps at transporting a algebraic data type `A` to\n/// another ADT `B` when `A` and `B` shares a lot of structure.\n#[proc_macro_derive(\n    AdtInto,\n    attributes(\n        map,\n        from,\n        custom_arm,\n        disable_mapping,\n        use_field,\n        prepend,\n        append,\n        args,\n        todo,\n        not_in_source,\n        value,\n    )\n)]\npub fn adt_into(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n    let dinput = {\n        let input = input.clone();\n        parse_macro_input!(input as DeriveInput)\n    };\n    let attrs = &dinput.attrs;\n    let span = dinput.clone().span().clone();\n    let to = dinput.ident;\n    let to_generics = dinput.generics;\n\n    let Options {\n        generics,\n        from: from_with_generics,\n        state,\n        state_type,\n        where_clause,\n    } = parse_attr(\"args\", attrs).expect(\"An [args] attribute was expected\");\n\n    let generics = {\n        let mut generics = generics;\n        generics.params = merge_generic_params(\n            to_generics.params.clone().into_iter(),\n            generics.params.into_iter(),\n        )\n        .collect();\n        generics\n    };\n\n    trait DropBounds {\n        fn drop_bounds(&mut self);\n    }\n\n    impl DropBounds for syn::GenericParam {\n        fn drop_bounds(&mut self) {\n            use syn::GenericParam::*;\n            match self {\n                Lifetime(lf) => {\n                    lf.colon_token = None;\n                    lf.bounds.clear()\n                }\n                Type(t) => {\n                    t.colon_token = None;\n                    t.bounds.clear();\n                    t.eq_token = None;\n                    t.default = None;\n                }\n                Const(c) => {\n                    c.eq_token = None;\n                    c.default = None;\n                }\n            }\n        }\n    }\n    impl DropBounds for syn::Generics {\n        fn 
drop_bounds(&mut self) {\n            self.params.iter_mut().for_each(DropBounds::drop_bounds);\n        }\n    }\n    let to_generics = {\n        let mut to_generics = to_generics;\n        to_generics.drop_bounds();\n        to_generics\n    };\n\n    let from = drop_generics(from_with_generics.clone());\n\n    let append: proc_macro2::TokenStream = tokens_of_attrs(\"append\", &dinput.attrs)\n        .next()\n        .unwrap_or((quote! {}).into())\n        .into();\n\n    let body = match &dinput.data {\n        Data::Union(..) => panic!(\"Union types are not supported\"),\n        Data::Struct(syn::DataStruct { fields, .. }) => {\n            let arm = fields_to_arm(\n                quote! {#from},\n                quote! {#to},\n                field_vec_of_fields(fields.clone()),\n                span,\n                tokens_of_attrs(\"prepend\", attrs).collect(),\n                parse_attrs(\"use_field\", attrs).collect(),\n                state.clone(),\n            );\n            quote! { match self { #arm #append } }\n        }\n        Data::Enum(syn::DataEnum { variants, .. }) => {\n            let arms: proc_macro2::TokenStream = variants\n                .iter()\n                .cloned()\n                .map(|variant| variant_to_arm(quote! {#from}, quote! {#to}, variant, state.clone()))\n                .collect();\n            let todo = variants.iter().find_map(|variant| {\n                let attrs = &variant.attrs;\n                let to_variant = variant.clone().ident;\n                if attrs.iter().any(|attr| attr.path.is_ident(\"todo\")) {\n                    Some (quote_spanned! {variant.span()=> x => TO_TYPE::#to_variant(format!(\"{:?}\", x)),})\n                } else {\n                    None\n                }\n            }).unwrap_or(quote!{});\n            let append = quote! {\n                #append\n                #todo\n            };\n            quote! { match self { #arms #append } }\n        }\n    };\n\n    quote! 
{\n        #[cfg(feature = \"rustc\")]\n        const _ : () = {\n            use #from as FROM_TYPE;\n            use #to as TO_TYPE;\n            impl #generics SInto<#state_type, #to #to_generics> for #from_with_generics #where_clause {\n                #[tracing::instrument(level = \"trace\", skip(#state))]\n                fn sinto(&self, #state: &#state_type) -> #to #to_generics {\n                    tracing::trace!(\"Enters sinto ({})\", stringify!(#from_with_generics));\n                    #body\n                }\n            }\n        };\n    }\n    .into()\n}\n\n/// Merge two collections of generic params, with params from [a]\n/// before the ones from [b]. This function ensures lifetimes\n/// appear before anything else.\nfn merge_generic_params(\n    a: impl Iterator<Item = syn::GenericParam>,\n    b: impl Iterator<Item = syn::GenericParam>,\n) -> impl Iterator<Item = syn::GenericParam> {\n    fn partition(\n        a: impl Iterator<Item = syn::GenericParam>,\n    ) -> (Vec<syn::GenericParam>, Vec<syn::GenericParam>) {\n        a.partition(|g| matches!(g, syn::GenericParam::Lifetime(_)))\n    }\n    let (a_lt, a_others) = partition(a);\n    let (b_lt, b_others) = partition(b);\n    let h = |x: Vec<_>, y: Vec<_>| x.into_iter().chain(y.into_iter());\n    h(a_lt, b_lt).chain(h(a_others, b_others))\n}\n\nfn drop_generics(type_path: syn::TypePath) -> syn::TypePath {\n    syn::TypePath {\n        path: syn::Path {\n            segments: type_path\n                .path\n                .segments\n                .into_iter()\n                .map(|s| PathSegment {\n                    ident: s.ident,\n                    arguments: match s.arguments {\n                        PathArguments::AngleBracketed(_) => PathArguments::None,\n                        _ => s.arguments,\n                    },\n                })\n                .collect(),\n            ..type_path.path\n        },\n        ..type_path\n    }\n}\n\n/// A proc macro unrelated to 
`adt-into`: it is useful in hax\n/// and we don't want a whole crate only for that helper.\n///\n/// This proc macro defines some groups of derive clauses that\n/// we reuse all the time.\n#[proc_macro_attribute]\npub fn derive_group(\n    attr: proc_macro::TokenStream,\n    item: proc_macro::TokenStream,\n) -> proc_macro::TokenStream {\n    let item: proc_macro2::TokenStream = item.into();\n    let groups = format!(\"{attr}\");\n    let groups = groups.split(\",\").map(|s| s.trim());\n    let mut errors = vec![];\n    let result: proc_macro2::TokenStream = groups\n        .map(|group| match group {\n            \"Serializers\" => quote! {\n                #[derive(::serde::Serialize, ::serde::Deserialize)]\n            },\n            _ => {\n                errors.push(quote! {\n                    const _: () = compile_error!(concat!(\n                        \"derive_group: `\",\n                        stringify!(#group),\n                        \"` is not a recognized group name\"\n                    ));\n                });\n                quote! {}\n            }\n        })\n        .collect();\n    quote! {#(#errors)* #result #item}.into()\n}\n"
  },
  {
    "path": "frontend/exporter/adt-into/tests/lib.rs",
    "content": "/// For the example, let's assume we are working with `Literal`, an\n/// ADT that represents literal values. Suppose strings are\n/// represented via an identifier stored in a state `State`.\npub mod source {\n    use std::collections::HashMap;\n    #[derive(Clone, Debug)]\n    pub struct State(pub HashMap<StringId, String>);\n\n    #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]\n    pub struct StringId(u32);\n\n    #[derive(Clone, Debug)]\n    pub enum Literal {\n        Integer(u32),\n        String(StringId),\n    }\n}\n\n/// Here, we mirror the same data type `Literal`, but with a small\n/// difference: there is no `StringId` any longer: we define a `impl`\n/// of `SInto` specifically for `StringId`, that ships with a stateful\n/// lookup. Magically, everytime a mirrored datatype annotated with\n/// `AdtInto` will have a field or a variant of type String while the\n/// original type was `StringId`, the lookup will be done\n/// automatically.\nmod mirrored {\n    use super::{sinto::*, source};\n    use hax_adt_into::*;\n\n    #[derive(AdtInto)]\n    #[args(<>, from: source::Literal, state: source::State as s)]\n    pub enum Literal {\n        Integer(u32),\n        String(String),\n    }\n\n    impl SInto<source::State, String> for source::StringId {\n        fn sinto(&self, s: &source::State) -> String {\n            s.0.get(self).unwrap().clone()\n        }\n    }\n}\n\n/// Definition of the `sinto` trait used by the `AdtInto` macro\npub mod sinto {\n    pub trait SInto<S, To> {\n        fn sinto(&self, s: &S) -> To;\n    }\n\n    /// Default implementation for type implementing Copy\n    impl<S, T: Copy> SInto<S, T> for T {\n        fn sinto(&self, _s: &S) -> T {\n            *self\n        }\n    }\n}\n"
  },
  {
    "path": "frontend/exporter/default.nix",
    "content": "{\n  craneLib,\n  stdenv,\n  makeWrapper,\n  lib,\n  rustc,\n  gcc,\n}: let\n  commonArgs = {\n    version = \"0.0.1\";\n    src = craneLib.cleanCargoSource ./.;\n  };\n  pname = \"hax-rust-frontend\";\n  cargoArtifacts = craneLib.buildDepsOnly (commonArgs\n    // {\n      pname = \"${pname}-deps\";\n    });\nin\n  craneLib.buildPackage (commonArgs\n    // {\n      inherit cargoArtifacts pname;\n    })\n# hax // {\n#   passthru = hax.passthru or {} // {\n#     wrapped = hax-engine: stdenv.mkDerivation {\n#       name = \"hax\";\n#       buildInputs = [ makeWrapper ];\n#       phases = [\"installPhase\"];\n#       installPhase = ''\n#       mkdir -p $out/bin\n#       makeWrapper ${hax}/bin/cargo-hax $out/bin/cargo-hax \\\n#         --prefix PATH : ${\n#           lib.makeBinPath [\n#             hax\n#             hax-engine\n#             rustc gcc\n#           ]\n#         }\n#     '';\n#       meta.mainProgram = \"cargo-hax\";\n#     };\n#   };\n# }\n\n"
  },
  {
    "path": "frontend/exporter/options/Cargo.toml",
    "content": "[package]\nname = \"hax-frontend-exporter-options\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\ndescription = \"The options the `hax-frontend-exporter` crate is sensible to.\"\n\n[dependencies]\nserde.workspace = true\nserde_json.workspace = true\nschemars.workspace = true\nhax-adt-into.workspace = true\n"
  },
  {
    "path": "frontend/exporter/options/src/lib.rs",
    "content": "use hax_adt_into::derive_group;\nuse schemars::JsonSchema;\n\n#[derive_group(Serializers)]\n#[derive(Debug, Clone, JsonSchema)]\npub enum Glob {\n    One,  // *\n    Many, // **\n}\n\nimpl ToString for Glob {\n    fn to_string(&self) -> String {\n        match self {\n            Self::One => \"*\",\n            Self::Many => \"**\",\n        }\n        .to_string()\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(Debug, Clone, JsonSchema)]\npub enum NamespaceChunk {\n    Glob(Glob),\n    Exact(String),\n}\n\nimpl ToString for NamespaceChunk {\n    fn to_string(&self) -> String {\n        match self {\n            Self::Glob(glob) => glob.to_string(),\n            Self::Exact(string) => string.to_string(),\n        }\n    }\n}\n\nimpl std::convert::From<&str> for NamespaceChunk {\n    fn from(s: &str) -> Self {\n        match s {\n            \"*\" => NamespaceChunk::Glob(Glob::One),\n            \"**\" => NamespaceChunk::Glob(Glob::Many),\n            _ => NamespaceChunk::Exact(String::from(s)),\n        }\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(Debug, Clone, JsonSchema)]\npub struct Namespace {\n    pub chunks: Vec<NamespaceChunk>,\n}\n\nimpl ToString for Namespace {\n    fn to_string(&self) -> String {\n        self.chunks\n            .iter()\n            .map(NamespaceChunk::to_string)\n            .collect::<Vec<_>>()\n            .join(\"::\")\n            .to_string()\n    }\n}\n\nimpl std::convert::From<String> for Namespace {\n    fn from(s: String) -> Self {\n        Namespace {\n            chunks: s\n                .split(\"::\")\n                .filter(|s| !s.is_empty())\n                .map(NamespaceChunk::from)\n                .collect(),\n        }\n    }\n}\n\nimpl Namespace {\n    pub fn matches(&self, path: &Vec<String>) -> bool {\n        fn aux(pattern: &[NamespaceChunk], path: &[String]) -> bool {\n            match (pattern, path) {\n                ([], []) => true,\n                
([NamespaceChunk::Exact(x), pattern @ ..], [y, path @ ..]) => {\n                    x == y && aux(pattern, path)\n                }\n                ([NamespaceChunk::Glob(Glob::One), pattern @ ..], [_, path @ ..]) => {\n                    aux(pattern, path)\n                }\n                ([NamespaceChunk::Glob(Glob::Many), pattern @ ..], []) => aux(pattern, path),\n                ([NamespaceChunk::Glob(Glob::Many), pattern_tl @ ..], [_path_hd, path_tl @ ..]) => {\n                    aux(pattern_tl, path) || aux(pattern, path_tl)\n                }\n                _ => false,\n            }\n        }\n        aux(self.chunks.as_slice(), path.as_slice())\n    }\n}\n\n#[derive(Debug, Clone)]\npub struct Options {\n    /// Whether we should evaluate and inline the value of anonymous constants (inline `const {}`\n    /// blocks or advanced constant expressions as in `[T; N+1]`), or refer to them as\n    /// `GlobalName`s.\n    pub inline_anon_consts: bool,\n    /// Options related to bounds.\n    pub bounds_options: BoundsOptions,\n    /// Resolve definition identifiers to their concrete impl counterpart when possible in `ItemRef::translate`.\n    pub item_ref_use_concrete_impl: bool,\n}\n\n#[derive(Debug, Clone, Copy)]\npub struct BoundsOptions {\n    /// Add `T: Destruct` bounds to every type generic, so that we can build `ImplExpr`s to know\n    /// what code is run on drop.\n    pub resolve_destruct: bool,\n    /// Prune `T: Sized` and `T: MetaSized` predicates.\n    pub prune_sized: bool,\n}\n"
  },
  {
    "path": "frontend/exporter/src/body.rs",
    "content": "pub use module::*;\n\n#[cfg(not(feature = \"rustc\"))]\nmod module {\n    pub trait IsBody: Sized + Clone + 'static {}\n    impl<T: Sized + Clone + 'static> IsBody for T {}\n}\n\n#[cfg(feature = \"rustc\")]\nmod module {\n    pub use crate::prelude::*;\n    pub use rustc_hir::{\n        def_id::{DefId as RDefId, LocalDefId as RLocalDefId},\n        hir_id::OwnerId as ROwnerId,\n    };\n    use rustc_middle::ty;\n\n    mod store {\n        //! This module helps at store bodies to avoid stealing.\n        //! `rustc_data_structures::steal::Steal` is a box for which the content can be stolen, for performance reasons.\n        //! The query system of Rust creates and steal such boxes, resulting in hax trying to borrow the value of a Steal while some query stole it already.\n        //! This module provides an ad-hoc global cache and query overrides to deal with this issue.\n        use rustc_hir::def_id::LocalDefId;\n        use rustc_middle::mir::Body;\n        use rustc_middle::query::plumbing::IntoQueryParam;\n        use rustc_middle::thir::{ExprId, Thir};\n        use std::cell::RefCell;\n        use std::collections::HashMap;\n        use std::rc::Rc;\n\n        thread_local! 
{\n            static THIR_BODY: RefCell<HashMap<LocalDefId, (Rc<Thir<'static>>, ExprId)>> = RefCell::new(HashMap::new());\n            static MIR_BUILT: RefCell<HashMap<LocalDefId, Rc<Body<'static>>>> = RefCell::new(HashMap::new());\n        }\n\n        /// Register overrides for rustc queries.\n        /// This will clone and store bodies for THIR and MIR (built) in an ad-hoc global cache.\n        pub fn override_queries_store_body(providers: &mut rustc_middle::query::Providers) {\n            providers.thir_body = |tcx, def_id| {\n                let (steal, expr_id) =\n                    (rustc_interface::DEFAULT_QUERY_PROVIDERS.thir_body)(tcx, def_id)?;\n                let body = steal.borrow().clone();\n                let body: Thir<'static> = unsafe { std::mem::transmute(body) };\n                THIR_BODY.with(|map| map.borrow_mut().insert(def_id, (Rc::new(body), expr_id)));\n                Ok((steal, expr_id))\n            };\n            providers.mir_built = |tcx, def_id| {\n                let steal = (rustc_interface::DEFAULT_QUERY_PROVIDERS.mir_built)(tcx, def_id);\n                let body = steal.borrow().clone();\n                let body: Body<'static> = unsafe { std::mem::transmute(body) };\n                MIR_BUILT.with(|map| map.borrow_mut().insert(def_id, Rc::new(body)));\n                steal\n            };\n        }\n\n        /// Extension trait that provides non-stealing variants of `thir_body` and `mir_built`.\n        /// Those methods requires rustc queries to be overriden with the helper function `register` above.\n        #[extension_traits::extension(pub trait SafeTyCtxtBodies)]\n        impl<'tcx> rustc_middle::ty::TyCtxt<'tcx> {\n            fn thir_body_safe(\n                &self,\n                key: impl IntoQueryParam<rustc_span::def_id::LocalDefId>,\n            ) -> Result<(Rc<Thir<'tcx>>, ExprId), rustc_span::ErrorGuaranteed> {\n                let key = key.into_query_param();\n                if 
!THIR_BODY.with(|map| map.borrow().contains_key(&key)) {\n                    // Compute a body, which will insert a body in `THIR_BODIES`.\n                    let _ = self.thir_body(key);\n                }\n                THIR_BODY.with(|map| {\n                    let (body, expr) = map\n                        .borrow_mut()\n                        .get(&key)\n                        .expect(\"Did we forgot to call `register`?\")\n                        .clone();\n                    let body: Rc<Thir<'tcx>> = unsafe { std::mem::transmute(body) };\n                    Ok((body, expr))\n                })\n            }\n            fn mir_built_safe(\n                &self,\n                key: impl IntoQueryParam<rustc_span::def_id::LocalDefId>,\n            ) -> Rc<Body<'tcx>> {\n                let key = key.into_query_param();\n                if !MIR_BUILT.with(|map| map.borrow().contains_key(&key)) {\n                    // Compute a body, which will insert a body in `MIR_BODIES`.\n                    let _ = self.mir_built(key);\n                }\n                MIR_BUILT.with(|map| {\n                    let body = map\n                        .borrow_mut()\n                        .get(&key)\n                        .expect(\"Did we forgot to call `register`?\")\n                        .clone();\n                    unsafe { std::mem::transmute(body) }\n                })\n            }\n        }\n    }\n    pub use store::*;\n\n    pub fn get_thir<'tcx, S: BaseState<'tcx>>(\n        did: RLocalDefId,\n        s: &S,\n    ) -> (\n        Rc<rustc_middle::thir::Thir<'tcx>>,\n        rustc_middle::thir::ExprId,\n    ) {\n        let tcx = s.base().tcx;\n\n        // The `type_of` anon constants isn't available directly, it needs to be fed by some\n        // other query. 
This hack ensures this happens, otherwise `thir_body` returns an error.\n        // See https://rust-lang.zulipchat.com/#narrow/channel/182449-t-compiler.2Fhelp/topic/Change.20in.20THIR.20of.20anonymous.20constants.3F/near/509764021 .\n        let hir_id = tcx.local_def_id_to_hir_id(did);\n        for (parent_id, parent) in tcx.hir_parent_iter(hir_id) {\n            if let rustc_hir::Node::Item(..) = parent {\n                let _ = tcx.check_well_formed(parent_id.owner.def_id);\n                break;\n            }\n        }\n\n        let msg = |_| fatal!(s[tcx.def_span(did)], \"THIR not found for {:?}\", did);\n        tcx.thir_body_safe(did).as_ref().unwrap_or_else(msg).clone()\n    }\n\n    pub trait IsBody:\n        Sized + std::fmt::Debug + Clone + std::any::Any + Send + Sync + 'static\n    {\n        fn body<'tcx, S: UnderOwnerState<'tcx>>(\n            s: &S,\n            did: RDefId,\n            instantiate: Option<ty::GenericArgsRef<'tcx>>,\n        ) -> Option<Self>;\n\n        /// Reuse a MIR body we already got. 
Panic if that's impossible.\n        fn from_mir<'tcx, S: UnderOwnerState<'tcx>>(\n            _s: &S,\n            _body: rustc_middle::mir::Body<'tcx>,\n        ) -> Option<Self> {\n            None\n        }\n    }\n\n    pub fn make_fn_def<'tcx, Body: IsBody, S: BaseState<'tcx>>(\n        fn_sig: &rustc_hir::FnSig,\n        body_id: &rustc_hir::BodyId,\n        s: &S,\n    ) -> FnDef<Body> {\n        let hir_id = body_id.hir_id;\n        let ldid = hir_id.owner.def_id;\n\n        let (thir, expr_entrypoint) = get_thir(ldid, s);\n        let s = &s.with_owner_id(ldid.to_def_id()).with_thir(thir.clone());\n        FnDef {\n            params: thir.params.raw.sinto(s),\n            ret: thir.exprs[expr_entrypoint].ty.sinto(s),\n            body: Body::body(s, ldid.to_def_id(), None).s_unwrap(s),\n            sig_span: fn_sig.span.sinto(s),\n            header: fn_sig.header.sinto(s),\n        }\n    }\n\n    pub fn body_from_id<'tcx, Body: IsBody, S: UnderOwnerState<'tcx>>(\n        id: rustc_hir::BodyId,\n        s: &S,\n    ) -> Body {\n        // **Important:**\n        // We need a local id here, and we get it from the owner id, which must\n        // be local. 
It is safe to do so, because if we have access to HIR objects,\n        // it necessarily means we are exploring a local item (we don't have\n        // access to the HIR of external objects, only their MIR).\n        Body::body(s, s.base().tcx.hir_body_owner_def_id(id).to_def_id(), None).s_unwrap(s)\n    }\n\n    mod implementations {\n        use super::*;\n        impl IsBody for () {\n            fn body<'tcx, S: UnderOwnerState<'tcx>>(\n                _s: &S,\n                _did: RDefId,\n                _instantiate: Option<ty::GenericArgsRef<'tcx>>,\n            ) -> Option<Self> {\n                Some(())\n            }\n            fn from_mir<'tcx, S: UnderOwnerState<'tcx>>(\n                _s: &S,\n                _body: rustc_middle::mir::Body<'tcx>,\n            ) -> Option<Self> {\n                Some(())\n            }\n        }\n        impl IsBody for ThirBody {\n            fn body<'tcx, S: BaseState<'tcx>>(\n                s: &S,\n                did: RDefId,\n                instantiate: Option<ty::GenericArgsRef<'tcx>>,\n            ) -> Option<Self> {\n                let did = did.as_local()?;\n                // The following returns `None` if did refers to something that has no body (avoids a crash in the call to `thir_body`)\n                s.base().tcx.hir_maybe_body_owned_by(did)?;\n                let (thir, expr) = get_thir(did, s);\n                assert!(instantiate.is_none(), \"monomorphized thir isn't supported\");\n                let s = &s.with_owner_id(did.to_def_id()).with_thir(thir.clone());\n                let params = thir.params.raw.sinto(s);\n                let expr = if *CORE_EXTRACTION_MODE {\n                    let expr = &thir.exprs[expr];\n                    Decorated {\n                        contents: Box::new(ExprKind::Tuple { fields: vec![] }),\n                        hir_id: None,\n                        attributes: vec![],\n                        ty: expr.ty.sinto(s),\n                        
span: expr.span.sinto(s),\n                    }\n                } else {\n                    expr.sinto(&s.with_thir(thir))\n                };\n                Some(Self { expr, params })\n            }\n        }\n\n        impl<A: IsBody, B: IsBody> IsBody for (A, B) {\n            fn body<'tcx, S: UnderOwnerState<'tcx>>(\n                s: &S,\n                did: RDefId,\n                instantiate: Option<ty::GenericArgsRef<'tcx>>,\n            ) -> Option<Self> {\n                Some((A::body(s, did, instantiate)?, B::body(s, did, instantiate)?))\n            }\n        }\n\n        impl<MirKind: IsMirKind + Clone + 'static> IsBody for MirBody<MirKind> {\n            fn body<'tcx, S: UnderOwnerState<'tcx>>(\n                s: &S,\n                did: RDefId,\n                instantiate: Option<ty::GenericArgsRef<'tcx>>,\n            ) -> Option<Self> {\n                let tcx = s.base().tcx;\n                let typing_env = s.typing_env();\n                MirKind::get_mir(tcx, did, |body| {\n                    let body = substitute(tcx, typing_env, instantiate, body.clone());\n                    let body = Rc::new(body);\n                    body.sinto(&s.with_mir(body.clone()))\n                })\n            }\n            fn from_mir<'tcx, S: UnderOwnerState<'tcx>>(\n                s: &S,\n                body: rustc_middle::mir::Body<'tcx>,\n            ) -> Option<Self> {\n                let body = Rc::new(body.clone());\n                let s = &s.with_mir(body.clone());\n                Some(body.sinto(s))\n            }\n        }\n    }\n\n    impl<'tcx, S: UnderOwnerState<'tcx>, Body: IsBody> SInto<S, Body> for rustc_hir::BodyId {\n        fn sinto(&self, s: &S) -> Body {\n            body_from_id::<Body, _>(*self, s)\n        }\n    }\n}\n"
  },
  {
    "path": "frontend/exporter/src/comments.rs",
    "content": "use crate::prelude::*;\nuse rustc_lexer::TokenKind;\nuse std::fs;\n\n/// Returns a list of (spanned) comments found in file `path`, or an\n/// error if the file at `path` could not be open.\npub fn comments_of_file(path: PathBuf) -> std::io::Result<Vec<(Span, String)>> {\n    fn clean_comment(comment: &str) -> &str {\n        let comment = if let Some(comment) = comment.strip_prefix(\"/*\") {\n            comment\n                .strip_suffix(\"*/\")\n                .expect(\"A comment that starts with `/*` should always ends with `*/`\")\n        } else {\n            comment\n                .strip_prefix(\"//\")\n                .expect(\"A comment has to start with `//` or `/*`\")\n        };\n        comment.strip_prefix(\"!\").unwrap_or(comment)\n    }\n    let source = &fs::read_to_string(&path)?;\n\n    let mut comments = vec![];\n    let (mut pos, mut line, mut col) = (0, 0, 0);\n    for token in rustc_lexer::tokenize(source, rustc_lexer::FrontmatterAllowed::Yes) {\n        let len = token.len as usize;\n        let sub = &source[pos..(pos + len)];\n        let lo = Loc { line, col };\n        line += sub.chars().filter(|c| matches!(c, '\\n')).count();\n        pos += len;\n        if lo.line != line {\n            col = sub.chars().rev().take_while(|c| !matches!(c, '\\n')).count();\n        } else {\n            col += len;\n        }\n\n        if let TokenKind::LineComment { .. } | TokenKind::BlockComment { .. } = token.kind {\n            if !sub.starts_with(\"///\") && !sub.starts_with(\"/**\") {\n                let span = Span {\n                    lo,\n                    hi: Loc { line, col },\n                    filename: FileName::Real(RealFileName::LocalPath(path.clone())),\n                    rust_span_data: None,\n                };\n                comments.push((span, clean_comment(sub).to_string()));\n            }\n        }\n    }\n    Ok(comments)\n}\n"
  },
  {
    "path": "frontend/exporter/src/constant_utils/uneval.rs",
    "content": "//! Reconstruct structured expressions from rustc's various constant representations.\nuse super::*;\nuse rustc_const_eval::interpret::{InterpResult, interp_ok};\nuse rustc_middle::mir::interpret;\nuse rustc_middle::{mir, ty};\n\nimpl ConstantLiteral {\n    /// Rustc always represents string constants as `&[u8]`, but this\n    /// is not nice to consume. This associated function interpret\n    /// bytes as an unicode string, and as a byte string otherwise.\n    fn byte_str(bytes: Vec<u8>) -> Self {\n        match String::from_utf8(bytes.clone()) {\n            Ok(s) => Self::Str(s),\n            Err(_) => Self::ByteStr(bytes),\n        }\n    }\n}\n\n#[tracing::instrument(level = \"trace\", skip(s))]\npub(crate) fn scalar_int_to_constant_literal<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    x: rustc_middle::ty::ScalarInt,\n    ty: rustc_middle::ty::Ty<'tcx>,\n) -> ConstantLiteral {\n    match ty.kind() {\n        ty::Char => ConstantLiteral::Char(\n            char::try_from(x).s_expect(s, \"scalar_int_to_constant_literal: expected a char\"),\n        ),\n        ty::Bool => ConstantLiteral::Bool(\n            x.try_to_bool()\n                .s_expect(s, \"scalar_int_to_constant_literal: expected a bool\"),\n        ),\n        ty::Int(kind) => {\n            let v = x.to_int(x.size());\n            ConstantLiteral::Int(ConstantInt::Int(v, kind.sinto(s)))\n        }\n        ty::Uint(kind) => {\n            let v = x.to_uint(x.size());\n            ConstantLiteral::Int(ConstantInt::Uint(v, kind.sinto(s)))\n        }\n        ty::Float(kind) => {\n            let v = x.to_bits_unchecked();\n            bits_and_type_to_float_constant_literal(v, kind.sinto(s))\n        }\n        _ => {\n            let ty_sinto: Ty = ty.sinto(s);\n            supposely_unreachable_fatal!(\n                s,\n                \"scalar_int_to_constant_literal_ExpectedLiteralType\";\n                { ty, ty_sinto, x }\n            )\n        }\n    }\n}\n\n/// 
Converts a bit-representation of a float of type `ty` to a constant literal\nfn bits_and_type_to_float_constant_literal(bits: u128, ty: FloatTy) -> ConstantLiteral {\n    use rustc_apfloat::{Float, ieee};\n    let string = match &ty {\n        FloatTy::F16 => ieee::Half::from_bits(bits).to_string(),\n        FloatTy::F32 => ieee::Single::from_bits(bits).to_string(),\n        FloatTy::F64 => ieee::Double::from_bits(bits).to_string(),\n        FloatTy::F128 => ieee::Quad::from_bits(bits).to_string(),\n    };\n    ConstantLiteral::Float(string, ty)\n}\n\nimpl ConstantExprKind {\n    pub fn decorate(self, ty: Ty, span: Span) -> Decorated<Self> {\n        Decorated {\n            contents: Box::new(self),\n            hir_id: None,\n            attributes: vec![],\n            ty,\n            span,\n        }\n    }\n}\n\n/// Whether a `DefId` is a `AnonConst`. An anonymous constant is\n/// generated by Rustc, hoisting every constat bits from items as\n/// separate top-level items. This AnonConst mechanism is internal to\n/// Rustc; we don't want to reflect that, instead we prefer inlining\n/// those. `is_anon_const` is used to detect such AnonConst so that we\n/// can evaluate and inline them.\npub(crate) fn is_anon_const(\n    did: rustc_span::def_id::DefId,\n    tcx: rustc_middle::ty::TyCtxt<'_>,\n) -> bool {\n    matches!(\n        tcx.def_kind(did),\n        rustc_hir::def::DefKind::AnonConst | rustc_hir::def::DefKind::InlineConst\n    )\n}\n\n/// Attempts to translate a `ty::UnevaluatedConst` into a constant expression. This handles cases\n/// of references to top-level or associated constants. 
Returns `None` if the input was not a named\n/// constant.\npub fn translate_constant_reference<'tcx>(\n    s: &impl UnderOwnerState<'tcx>,\n    span: rustc_span::Span,\n    ucv: rustc_middle::ty::UnevaluatedConst<'tcx>,\n) -> Option<ConstantExpr> {\n    let tcx = s.base().tcx;\n    if s.base().options.inline_anon_consts && is_anon_const(ucv.def, tcx) {\n        return None;\n    }\n    let typing_env = s.typing_env();\n    let ty = s.base().tcx.type_of(ucv.def).instantiate(tcx, ucv.args);\n    let ty = tcx\n        .try_normalize_erasing_regions(typing_env, ty)\n        .unwrap_or(ty);\n    let kind = if let Some(assoc) = s.base().tcx.opt_associated_item(ucv.def)\n        && matches!(\n            assoc.container,\n            ty::AssocContainer::Trait | ty::AssocContainer::TraitImpl(..)\n        ) {\n        // This is an associated constant in a trait.\n        let name = assoc.name().to_string();\n        let impl_expr = self_clause_for_item(s, ucv.def, ucv.args).unwrap();\n        ConstantExprKind::TraitConst { impl_expr, name }\n    } else {\n        let item = translate_item_ref(s, ucv.def, ucv.args);\n        ConstantExprKind::GlobalName(item)\n    };\n    let cv = kind.decorate(ty.sinto(s), span.sinto(s));\n    Some(cv)\n}\n\n/// Evaluate a `ty::Const`.\npub fn eval_ty_constant<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    uv: rustc_middle::ty::UnevaluatedConst<'tcx>,\n) -> Option<ty::Const<'tcx>> {\n    use ty::TypeVisitableExt;\n    let tcx = s.base().tcx;\n    let typing_env = s.typing_env();\n    if uv.has_non_region_param() {\n        return None;\n    }\n    let span = tcx.def_span(uv.def);\n    let erased_uv = tcx.erase_and_anonymize_regions(uv);\n    let val = tcx\n        .const_eval_resolve_for_typeck(typing_env, erased_uv, span)\n        .ok()?\n        .ok()?;\n    let ty = tcx.type_of(uv.def).instantiate(tcx, uv.args);\n    Some(ty::Const::new_value(tcx, val, ty))\n}\n\n/// Evaluate a `mir::Const`.\npub fn eval_mir_constant<'tcx, S: 
UnderOwnerState<'tcx>>(\n    s: &S,\n    c: mir::Const<'tcx>,\n) -> Option<mir::Const<'tcx>> {\n    let evaluated = c\n        .eval(s.base().tcx, s.typing_env(), rustc_span::DUMMY_SP)\n        .ok()?;\n    let evaluated = mir::Const::Val(evaluated, c.ty());\n    (evaluated != c).then_some(evaluated)\n}\n\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, ConstantExpr> for ty::Const<'tcx> {\n    #[tracing::instrument(level = \"trace\", skip(s))]\n    fn sinto(&self, s: &S) -> ConstantExpr {\n        use rustc_middle::query::Key;\n        let span = self.default_span(s.base().tcx);\n        match self.kind() {\n            ty::ConstKind::Param(p) => {\n                let ty = p.find_const_ty_from_env(s.param_env());\n                let kind = ConstantExprKind::ConstRef { id: p.sinto(s) };\n                kind.decorate(ty.sinto(s), span.sinto(s))\n            }\n            ty::ConstKind::Infer(..) => {\n                fatal!(s[span], \"ty::ConstKind::Infer node? {:#?}\", self)\n            }\n\n            ty::ConstKind::Unevaluated(ucv) => match translate_constant_reference(s, span, ucv) {\n                Some(val) => val,\n                None => match eval_ty_constant(s, ucv) {\n                    Some(val) => val.sinto(s),\n                    // TODO: This is triggered when compiling using `generic_const_exprs`\n                    None => supposely_unreachable_fatal!(s, \"TranslateUneval\"; {self, ucv}),\n                },\n            },\n\n            ty::ConstKind::Value(val) => valtree_to_constant_expr(s, val.valtree, val.ty, span),\n            ty::ConstKind::Error(_) => fatal!(s[span], \"ty::ConstKind::Error\"),\n            ty::ConstKind::Expr(e) => fatal!(s[span], \"ty::ConstKind::Expr {:#?}\", e),\n\n            ty::ConstKind::Bound(i, bound) => {\n                supposely_unreachable_fatal!(s[span], \"ty::ConstKind::Bound\"; {i, bound})\n            }\n            _ => fatal!(s[span], \"unexpected case\"),\n        }\n    }\n}\n\nimpl<'tcx, S: 
UnderOwnerState<'tcx>> SInto<S, ConstantExpr> for ty::Value<'tcx> {\n    #[tracing::instrument(level = \"trace\", skip(s))]\n    fn sinto(&self, s: &S) -> ConstantExpr {\n        valtree_to_constant_expr(s, self.valtree, self.ty, rustc_span::DUMMY_SP)\n    }\n}\n\n#[tracing::instrument(level = \"trace\", skip(s))]\npub(crate) fn valtree_to_constant_expr<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    valtree: rustc_middle::ty::ValTree<'tcx>,\n    ty: rustc_middle::ty::Ty<'tcx>,\n    span: rustc_span::Span,\n) -> ConstantExpr {\n    let kind = match (&*valtree, ty.kind()) {\n        (_, ty::Ref(_, inner_ty, _)) => {\n            ConstantExprKind::Borrow(valtree_to_constant_expr(s, valtree, *inner_ty, span))\n        }\n        (ty::ValTreeKind::Branch(valtrees), ty::Str) => {\n            let bytes = valtrees\n                .iter()\n                .map(|x| match &***x {\n                    ty::ValTreeKind::Leaf(leaf) => leaf.to_u8(),\n                    _ => fatal!(\n                        s[span],\n                        \"Expected a flat list of leaves while translating \\\n                            a str literal, got a arbitrary valtree.\"\n                    ),\n                })\n                .collect();\n            ConstantExprKind::Literal(ConstantLiteral::byte_str(bytes))\n        }\n        (\n            ty::ValTreeKind::Branch(_),\n            ty::Array(..) | ty::Slice(..) | ty::Tuple(..) 
| ty::Adt(..),\n        ) => {\n            let tcx = s.base().tcx;\n            let contents: rustc_middle::ty::DestructuredConst =\n                tcx.destructure_const(ty::Const::new_value(s.base().tcx, valtree, ty));\n            let fields = contents.fields.iter().copied();\n            match ty.kind() {\n                ty::Slice(inner_ty) => {\n                    let array_ty = {\n                        let size = rustc_middle::ty::ScalarInt::try_from_target_usize(\n                            fields.len() as u128,\n                            tcx,\n                        )\n                        .s_unwrap(s);\n                        let valtree = rustc_middle::ty::ValTree::from_scalar_int(tcx, size);\n                        let value = rustc_middle::ty::Value {\n                            ty: tcx.types.usize,\n                            valtree,\n                        };\n                        let len = tcx.mk_ct_from_kind(rustc_middle::ty::ConstKind::Value(value));\n                        tcx.mk_ty_from_kind(rustc_middle::ty::TyKind::Array(*inner_ty, len))\n                    };\n                    let array = ConstantExprKind::Array {\n                        fields: fields.map(|field| field.sinto(s)).collect(),\n                    }\n                    .decorate(array_ty.sinto(s), span.sinto(s));\n                    ConstantExprKind::Borrow(array)\n                }\n                ty::Array(_, _) => ConstantExprKind::Array {\n                    fields: fields.map(|field| field.sinto(s)).collect(),\n                },\n                ty::Tuple(_) => ConstantExprKind::Tuple {\n                    fields: fields.map(|field| field.sinto(s)).collect(),\n                },\n                ty::Adt(def, _) => {\n                    let variant_idx = contents\n                        .variant\n                        .s_expect(s, \"destructed const of adt without variant idx\");\n                    let variant_def = 
&def.variant(variant_idx);\n\n                    ConstantExprKind::Adt {\n                        info: get_variant_information(def, variant_idx, s),\n                        fields: fields\n                            .into_iter()\n                            .zip(&variant_def.fields)\n                            .map(|(value, field)| ConstantFieldExpr {\n                                field: field.did.sinto(s),\n                                value: value.sinto(s),\n                            })\n                            .collect(),\n                    }\n                }\n                _ => unreachable!(),\n            }\n        }\n        (ty::ValTreeKind::Leaf(x), ty::RawPtr(_, _)) => {\n            use crate::rustc_type_ir::inherent::Ty;\n            let raw_address = x.to_bits_unchecked();\n            let uint_ty = UintTy::Usize;\n            let usize_ty = rustc_middle::ty::Ty::new_usize(s.base().tcx).sinto(s);\n            let lit = ConstantLiteral::Int(ConstantInt::Uint(raw_address, uint_ty));\n            ConstantExprKind::Cast {\n                source: ConstantExprKind::Literal(lit).decorate(usize_ty, span.sinto(s)),\n            }\n        }\n        (ty::ValTreeKind::Leaf(x), _) => {\n            ConstantExprKind::Literal(scalar_int_to_constant_literal(s, *x, ty))\n        }\n        _ => supposely_unreachable_fatal!(\n            s[span], \"valtree_to_expr\";\n            {valtree, ty}\n        ),\n    };\n    kind.decorate(ty.sinto(s), span.sinto(s))\n}\n\n/// Use the const-eval interpreter to convert an evaluated operand back to a structured\n/// constant expression.\nfn op_to_const<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    span: rustc_span::Span,\n    ecx: &rustc_const_eval::const_eval::CompileTimeInterpCx<'tcx>,\n    op: rustc_const_eval::interpret::OpTy<'tcx>,\n) -> InterpResult<'tcx, ConstantExpr> {\n    use crate::rustc_const_eval::interpret::Projectable;\n    // Code inspired from 
`try_destructure_mir_constant_for_user_output` and\n    // `const_eval::eval_queries::op_to_const`.\n    let tcx = s.base().tcx;\n    let ty = op.layout.ty;\n    // Helper for struct-likes.\n    let read_fields = |of: rustc_const_eval::interpret::OpTy<'tcx>, field_count| {\n        (0..field_count).map(move |i| {\n            let field_op = ecx.project_field(&of, rustc_abi::FieldIdx::from_usize(i))?;\n            op_to_const(s, span, &ecx, field_op)\n        })\n    };\n    let kind = match ty.kind() {\n        // Detect statics\n        _ if let Some(place) = op.as_mplace_or_imm().left()\n            && let ptr = place.ptr()\n            && let (alloc_id, _, _) = ecx.ptr_get_alloc_id(ptr, 0)?\n            && let interpret::GlobalAlloc::Static(did) = tcx.global_alloc(alloc_id) =>\n        {\n            let item = translate_item_ref(s, did, ty::GenericArgsRef::default());\n            ConstantExprKind::GlobalName(item)\n        }\n        ty::Char | ty::Bool | ty::Uint(_) | ty::Int(_) | ty::Float(_) => {\n            let scalar = ecx.read_scalar(&op)?;\n            let scalar_int = scalar.try_to_scalar_int().unwrap();\n            let lit = scalar_int_to_constant_literal(s, scalar_int, ty);\n            ConstantExprKind::Literal(lit)\n        }\n        ty::Adt(adt_def, ..) if adt_def.is_union() => {\n            ConstantExprKind::Todo(\"Cannot translate constant of union type\".into())\n        }\n        ty::Adt(adt_def, ..) 
=> {\n            let variant = ecx.read_discriminant(&op)?;\n            let down = ecx.project_downcast(&op, variant)?;\n            let field_count = adt_def.variants()[variant].fields.len();\n            let fields = read_fields(down, field_count)\n                .zip(&adt_def.variant(variant).fields)\n                .map(|(value, field)| {\n                    interp_ok(ConstantFieldExpr {\n                        field: field.did.sinto(s),\n                        value: value?,\n                    })\n                })\n                .collect::<InterpResult<Vec<_>>>()?;\n            let variants_info = get_variant_information(adt_def, variant, s);\n            ConstantExprKind::Adt {\n                info: variants_info,\n                fields,\n            }\n        }\n        ty::Closure(def_id, args) => {\n            // A closure is essentially an adt with funky generics and some builtin impls.\n            let def_id: DefId = def_id.sinto(s);\n            let field_count = args.as_closure().upvar_tys().len();\n            let fields = read_fields(op, field_count)\n                .map(|value| {\n                    interp_ok(ConstantFieldExpr {\n                        // HACK: Closure fields don't have their own def_id, but Charon doesn't use\n                        // field DefIds so we put a dummy one.\n                        field: def_id.clone(),\n                        value: value?,\n                    })\n                })\n                .collect::<InterpResult<Vec<_>>>()?;\n            let variants_info = VariantInformations {\n                type_namespace: def_id.parent.clone().unwrap(),\n                typ: def_id.clone(),\n                variant: def_id,\n                kind: VariantKind::Struct { named: false },\n            };\n            ConstantExprKind::Adt {\n                info: variants_info,\n                fields,\n            }\n        }\n        ty::Tuple(args) => {\n            let fields = 
read_fields(op, args.len()).collect::<InterpResult<Vec<_>>>()?;\n            ConstantExprKind::Tuple { fields }\n        }\n        ty::Array(..) | ty::Slice(..) => {\n            let len = op.len(ecx)?;\n            let fields = (0..len)\n                .map(|i| {\n                    let op = ecx.project_index(&op, i)?;\n                    op_to_const(s, span, ecx, op)\n                })\n                .collect::<InterpResult<Vec<_>>>()?;\n            ConstantExprKind::Array { fields }\n        }\n        ty::Str => {\n            let str = ecx.read_str(&op.assert_mem_place())?;\n            ConstantExprKind::Literal(ConstantLiteral::Str(str.to_owned()))\n        }\n        ty::FnDef(def_id, args) => {\n            let item = translate_item_ref(s, *def_id, args);\n            ConstantExprKind::FnPtr(item)\n        }\n        ty::RawPtr(..) | ty::Ref(..) => {\n            if let Some(op) = ecx.deref_pointer(&op).discard_err() {\n                // Valid pointer case\n                let val = op_to_const(s, span, ecx, op.into())?;\n                match ty.kind() {\n                    ty::Ref(..) 
=> ConstantExprKind::Borrow(val),\n                    ty::RawPtr(.., mutability) => ConstantExprKind::RawBorrow {\n                        arg: val,\n                        mutability: mutability.sinto(s),\n                    },\n                    _ => unreachable!(),\n                }\n            } else {\n                // Invalid pointer; try reading it as a raw address\n                let scalar = ecx.read_scalar(&op)?;\n                let scalar_int = scalar.try_to_scalar_int().unwrap();\n                let v = scalar_int.to_uint(scalar_int.size());\n                let lit = ConstantLiteral::PtrNoProvenance(v);\n                ConstantExprKind::Literal(lit)\n            }\n        }\n        ty::FnPtr(..)\n        | ty::Dynamic(..)\n        | ty::Foreign(..)\n        | ty::Pat(..)\n        | ty::UnsafeBinder(..)\n        | ty::CoroutineClosure(..)\n        | ty::Coroutine(..)\n        | ty::CoroutineWitness(..) => ConstantExprKind::Todo(\"Unhandled constant type\".into()),\n        ty::Alias(..) | ty::Param(..) | ty::Bound(..) | ty::Placeholder(..) | ty::Infer(..) => {\n            fatal!(s[span], \"Encountered evaluated constant of non-monomorphic type\"; {op})\n        }\n        ty::Never | ty::Error(..) => {\n            fatal!(s[span], \"Encountered evaluated constant of invalid type\"; {ty})\n        }\n    };\n    let val = kind.decorate(ty.sinto(s), span.sinto(s));\n    interp_ok(val)\n}\n\npub fn const_value_to_constant_expr<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    ty: rustc_middle::ty::Ty<'tcx>,\n    val: mir::ConstValue,\n    span: rustc_span::Span,\n) -> InterpResult<'tcx, ConstantExpr> {\n    let tcx = s.base().tcx;\n    let typing_env = s.typing_env();\n    let (ecx, op) =\n        rustc_const_eval::const_eval::mk_eval_cx_for_const_val(tcx.at(span), typing_env, val, ty)\n            .unwrap();\n    op_to_const(s, span, &ecx, op)\n}\n"
  },
  {
    "path": "frontend/exporter/src/constant_utils.rs",
    "content": "use crate::prelude::*;\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum ConstantInt {\n    Int(\n        #[serde(with = \"serialize_int::signed\")]\n        #[schemars(with = \"String\")]\n        i128,\n        IntTy,\n    ),\n    Uint(\n        #[serde(with = \"serialize_int::unsigned\")]\n        #[schemars(with = \"String\")]\n        u128,\n        UintTy,\n    ),\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum ConstantLiteral {\n    Bool(bool),\n    Char(char),\n    Float(String, FloatTy),\n    Int(ConstantInt),\n    PtrNoProvenance(u128),\n    Str(String),\n    ByteStr(Vec<u8>),\n}\n\n/// The subset of [Expr] that corresponds to constants.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum ConstantExprKind {\n    Literal(ConstantLiteral),\n    // Adts (structs, enums, unions) or closures.\n    Adt {\n        info: VariantInformations,\n        fields: Vec<ConstantFieldExpr>,\n    },\n    Array {\n        fields: Vec<ConstantExpr>,\n    },\n    Tuple {\n        fields: Vec<ConstantExpr>,\n    },\n    /// A top-level constant or a constant appearing in an impl block.\n    ///\n    /// Remark: constants *can* have generic parameters.\n    /// Example:\n    /// ```text\n    /// struct V<const N: usize, T> {\n    ///   x: [T; N],\n    /// }\n    ///\n    /// impl<const N: usize, T> V<N, T> {\n    ///   const LEN: usize = N; // This has generics <N, T>\n    /// }\n    /// ```\n    ///\n    /// If `options.inline_anon_consts` is `false`, this is also used for inline const blocks and\n    /// advanced const generics expressions.\n    GlobalName(ItemRef),\n    /// A trait constant\n    ///\n    /// Ex.:\n    /// ```text\n    /// impl Foo for Bar {\n    ///   const C : usize = 32; // <-\n    /// }\n    /// ```\n    TraitConst {\n        impl_expr: 
ImplExpr,\n        name: String,\n    },\n    /// A shared reference to a static variable.\n    Borrow(ConstantExpr),\n    /// A raw borrow (`*const` or `*mut`).\n    RawBorrow {\n        mutability: Mutability,\n        arg: ConstantExpr,\n    },\n    /// A cast `<source> as <type>`, `<type>` is stored as the type of\n    /// the current constant expression. Currently, this is only used\n    /// to represent `lit as *mut T` or `lit as *const T`, where `lit`\n    /// is a `usize` literal.\n    Cast {\n        source: ConstantExpr,\n    },\n    ConstRef {\n        id: ParamConst,\n    },\n    FnPtr(ItemRef),\n    /// A blob of memory containing the byte representation of the value. This can occur when\n    /// evaluating MIR constants. Interpreting this back to a structured value is left as an\n    /// exercice to the consumer.\n    Memory(Vec<u8>),\n    Todo(String),\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct ConstantFieldExpr {\n    pub field: DefId,\n    pub value: ConstantExpr,\n}\n\n/// Rustc has different representation for constants: one for MIR\n/// ([`rustc_middle::mir::Const`]), one for the type system\n/// ([`rustc_middle::ty::ConstKind`]). 
For simplicity hax maps those\n/// two construct to one same `ConstantExpr` type.\npub type ConstantExpr = Decorated<ConstantExprKind>;\n\n// For ConstantKind we merge all the cases (Ty, Val, Unevaluated) into one\npub type ConstantKind = ConstantExpr;\n\nimpl From<ConstantFieldExpr> for FieldExpr {\n    fn from(c: ConstantFieldExpr) -> FieldExpr {\n        FieldExpr {\n            value: c.value.into(),\n            field: c.field,\n        }\n    }\n}\n\nimpl From<ConstantExpr> for Expr {\n    fn from(c: ConstantExpr) -> Expr {\n        use ConstantExprKind::*;\n        let kind = match *c.contents {\n            Literal(lit) => {\n                use ConstantLiteral::*;\n                let mut neg = false;\n                let node = match lit {\n                    Bool(b) => LitKind::Bool(b),\n                    Char(c) => LitKind::Char(c),\n                    Int(i) => {\n                        use LitIntType::*;\n                        match i {\n                            ConstantInt::Uint(v, t) => LitKind::Int(v, Unsigned(t)),\n                            ConstantInt::Int(v, t) => {\n                                neg = v.is_negative();\n                                LitKind::Int(v.abs_diff(0), Signed(t))\n                            }\n                        }\n                    }\n                    Float(f, ty) => LitKind::Float(f, LitFloatType::Suffixed(ty)),\n                    PtrNoProvenance(p) => LitKind::Int(p, LitIntType::Unsigned(UintTy::Usize)),\n                    ByteStr(raw) => LitKind::ByteStr(raw, StrStyle::Cooked),\n                    Str(raw) => LitKind::Str(raw, StrStyle::Cooked),\n                };\n                let span = c.span.clone();\n                let lit = Spanned { span, node };\n                ExprKind::Literal { lit, neg }\n            }\n            Adt { info, fields } => ExprKind::Adt(AdtExpr {\n                info,\n                fields: fields.into_iter().map(|field| field.into()).collect(),\n   
             base: AdtExprBase::None,\n                user_ty: None,\n            }),\n            GlobalName(item) => ExprKind::GlobalName {\n                item,\n                constructor: None,\n            },\n            Borrow(e) => ExprKind::Borrow {\n                borrow_kind: BorrowKind::Shared,\n                arg: e.into(),\n            },\n            RawBorrow { mutability, arg } => ExprKind::RawBorrow {\n                mutability,\n                arg: arg.into(),\n            },\n            ConstRef { id } => ExprKind::ConstRef { id },\n            Array { fields } => ExprKind::Array {\n                fields: fields.into_iter().map(|field| field.into()).collect(),\n            },\n            Tuple { fields } => ExprKind::Tuple {\n                fields: fields.into_iter().map(|field| field.into()).collect(),\n            },\n            Cast { source } => ExprKind::Cast {\n                source: source.into(),\n            },\n            kind @ (FnPtr { .. } | TraitConst { .. } | Memory { .. }) => {\n                ExprKind::Todo(format!(\"Unsupported constant kind. kind={:#?}\", kind))\n            }\n            Todo(msg) => ExprKind::Todo(msg),\n        };\n        Decorated {\n            contents: Box::new(kind),\n            ty: c.ty,\n            span: c.span,\n            hir_id: c.hir_id,\n            attributes: c.attributes,\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\npub use self::uneval::*;\n#[cfg(feature = \"rustc\")]\nmod uneval;\n"
  },
  {
    "path": "frontend/exporter/src/deterministic_hash.rs",
    "content": "//! Stolen from <https://github.com/Wassasin/deterministic-hash/blob/main/src/lib.rs>\nuse core::hash::Hasher;\n\n/// Wrapper around any hasher to make it deterministic.\n#[derive(Default)]\npub struct DeterministicHasher<T: Hasher>(T);\n\n/// Implementation of hasher that forces all bytes written to be platform agnostic.\nimpl<T: Hasher> core::hash::Hasher for DeterministicHasher<T> {\n    fn finish(&self) -> u64 {\n        self.0.finish()\n    }\n\n    fn write(&mut self, bytes: &[u8]) {\n        self.0.write(bytes);\n    }\n\n    fn write_u8(&mut self, i: u8) {\n        self.write(&i.to_le_bytes())\n    }\n\n    fn write_u16(&mut self, i: u16) {\n        self.write(&i.to_le_bytes())\n    }\n\n    fn write_u32(&mut self, i: u32) {\n        self.write(&i.to_le_bytes())\n    }\n\n    fn write_u64(&mut self, i: u64) {\n        self.write(&i.to_le_bytes())\n    }\n\n    fn write_u128(&mut self, i: u128) {\n        self.write(&i.to_le_bytes())\n    }\n\n    fn write_usize(&mut self, i: usize) {\n        self.write(&(i as u64).to_le_bytes())\n    }\n\n    fn write_i8(&mut self, i: i8) {\n        self.write_u8(i as u8)\n    }\n\n    fn write_i16(&mut self, i: i16) {\n        self.write_u16(i as u16)\n    }\n\n    fn write_i32(&mut self, i: i32) {\n        self.write_u32(i as u32)\n    }\n\n    fn write_i64(&mut self, i: i64) {\n        self.write_u64(i as u64)\n    }\n\n    fn write_i128(&mut self, i: i128) {\n        self.write_u128(i as u128)\n    }\n\n    fn write_isize(&mut self, i: isize) {\n        self.write_usize(i as usize)\n    }\n}\n"
  },
  {
    "path": "frontend/exporter/src/id_table.rs",
    "content": "/// This module provides a notion of table, identifiers and nodes. A\n/// `Node<T>` is a `Arc<T>` bundled with a unique identifier such that\n/// there exists an entry in a table for that identifier.\n///\n/// The type `WithTable<T>` bundles a table with a value of type\n/// `T`. That value of type `T` may hold an arbitrary number of\n/// `Node<_>`s. In the context of a `WithTable<T>`, the type `Node<_>`\n/// serializes and deserializes using a table as a state. In this\n/// case, serializing a `Node<U>` produces only an identifier, without\n/// any data of type `U`. Deserializing a `Node<U>` under a\n/// `WithTable<T>` will recover `U` data from the table held by\n/// `WithTable`.\n///\n/// Serde is not designed for stateful (de)serialization. There is no\n/// way of deriving `serde::de::DeserializeSeed` systematically. This\n/// module thus makes use of global state to achieve serialization and\n/// deserialization. This modules provides an API that hides this\n/// global state.\nuse crate::prelude::*;\nuse std::{\n    hash::{Hash, Hasher},\n    sync::{Arc, LazyLock, Mutex, MutexGuard, atomic::Ordering},\n};\n\n/// Unique IDs in a ID table.\n#[derive_group(Serializers)]\n#[derive(Default, Clone, Copy, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[serde(transparent)]\npub struct Id {\n    id: u32,\n}\n\n/// A session providing fresh IDs for ID table.\n#[derive(Default, Debug)]\npub struct Session {\n    next_id: Id,\n    table: Table,\n}\n\nimpl Session {\n    pub fn table(&self) -> &Table {\n        &self.table\n    }\n}\n\n/// The different types of values one can store in an ID table.\n#[derive(Debug, Clone, Deserialize, Serialize)]\npub enum Value {\n    Ty(Arc<TyKind>),\n    DefId(Arc<DefIdContents>),\n    ItemRef(Arc<ItemRefContents>),\n}\n\nimpl SupportedType<Value> for TyKind {\n    fn to_types(value: Arc<Self>) -> Value {\n        Value::Ty(value)\n    }\n    fn from_types(t: &Value) -> Option<Arc<Self>> {\n        match t 
{\n            Value::Ty(value) => Some(value.clone()),\n            _ => None,\n        }\n    }\n}\n\nimpl SupportedType<Value> for DefIdContents {\n    fn to_types(value: Arc<Self>) -> Value {\n        Value::DefId(value)\n    }\n    fn from_types(t: &Value) -> Option<Arc<Self>> {\n        match t {\n            Value::DefId(value) => Some(value.clone()),\n            _ => None,\n        }\n    }\n}\n\nimpl SupportedType<Value> for ItemRefContents {\n    fn to_types(value: Arc<Self>) -> Value {\n        Value::ItemRef(value)\n    }\n    fn from_types(t: &Value) -> Option<Arc<Self>> {\n        match t {\n            Value::ItemRef(value) => Some(value.clone()),\n            _ => None,\n        }\n    }\n}\n\n/// A node is a bundle of an ID with a value.\n#[derive(Deserialize, Serialize, Debug, JsonSchema, PartialOrd, Ord)]\n#[serde(into = \"serde_repr::NodeRepr<T>\")]\n#[serde(try_from = \"serde_repr::NodeRepr<T>\")]\npub struct Node<T: 'static + SupportedType<Value>> {\n    id: Id,\n    value: Arc<T>,\n}\n\nimpl<T: SupportedType<Value>> std::ops::Deref for Node<T> {\n    type Target = T;\n    fn deref(&self) -> &Self::Target {\n        self.value.as_ref()\n    }\n}\n\n/// Hax relies on hashes being deterministic for predicates\n/// ids. 
Identifiers are not deterministic: we implement hash for\n/// `Node` manually, discarding the field `id`.\nimpl<T: SupportedType<Value> + Hash> Hash for Node<T> {\n    fn hash<H: Hasher>(&self, state: &mut H) {\n        self.value.as_ref().hash(state);\n    }\n}\nimpl<T: SupportedType<Value> + Eq> Eq for Node<T> {}\nimpl<T: SupportedType<Value> + PartialEq> PartialEq for Node<T> {\n    fn eq(&self, other: &Self) -> bool {\n        self.value == other.value\n    }\n}\n\n/// Manual implementation of `Clone` that doesn't require a `Clone`\n/// bound on `T`.\nimpl<T: SupportedType<Value>> Clone for Node<T> {\n    fn clone(&self) -> Self {\n        Self {\n            id: self.id.clone(),\n            value: self.value.clone(),\n        }\n    }\n}\n\n/// A table is a map from IDs to `Value`s. When serialized, we\n/// represent a table as a *sorted* vector. Indeed, the values stored\n/// in the table might reference each other, without cycle, so the\n/// order matters.\n#[derive(Default, Debug, Clone, Deserialize, Serialize)]\n#[serde(into = \"serde_repr::SortedIdValuePairs\")]\n#[serde(from = \"serde_repr::SortedIdValuePairs\")]\npub struct Table(HeterogeneousMap<Id, Value>);\n\nmod heterogeneous_map {\n    //! This module provides an heterogenous map that can store types\n    //! that implement the trait `SupportedType`.\n\n    use std::collections::HashMap;\n    use std::hash::Hash;\n    use std::sync::Arc;\n    #[derive(Clone, Debug)]\n    /// An heterogenous map is a map from `Key` to `Value`. 
It provide\n    /// the methods `insert` and `get` for any type `T` that\n    /// implements `SupportedType<Value>`.\n    pub struct HeterogeneousMap<Key, Value>(HashMap<Key, Value>);\n\n    impl<Id, Value> Default for HeterogeneousMap<Id, Value> {\n        fn default() -> Self {\n            Self(HashMap::default())\n        }\n    }\n\n    impl<Key: Hash + Eq + PartialEq, Value> HeterogeneousMap<Key, Value> {\n        pub(super) fn insert<T>(&mut self, key: Key, value: Arc<T>)\n        where\n            T: SupportedType<Value>,\n        {\n            self.insert_raw_value(key, T::to_types(value));\n        }\n        pub(super) fn insert_raw_value(&mut self, key: Key, value: Value) {\n            self.0.insert(key, value);\n        }\n        pub(super) fn from_iter(it: impl Iterator<Item = (Key, Value)>) -> Self {\n            Self(HashMap::from_iter(it))\n        }\n        pub(super) fn into_iter(self) -> impl Iterator<Item = (Key, Value)> {\n            self.0.into_iter()\n        }\n        pub(super) fn get<T>(&self, key: &Key) -> Option<Option<Arc<T>>>\n        where\n            T: SupportedType<Value>,\n        {\n            self.0.get(key).map(T::from_types)\n        }\n    }\n\n    /// A type that can be mapped to `Value` and optionally\n    /// reconstructed back.\n    pub trait SupportedType<Value>: std::fmt::Debug {\n        fn to_types(value: Arc<Self>) -> Value;\n        fn from_types(t: &Value) -> Option<Arc<Self>>;\n    }\n}\nuse heterogeneous_map::*;\n\nimpl Session {\n    fn fresh_id(&mut self) -> Id {\n        let id = self.next_id.id;\n        self.next_id.id += 1;\n        Id { id }\n    }\n}\n\nimpl<T: Sync + Send + 'static + SupportedType<Value>> Node<T> {\n    pub fn new(value: T, session: &mut Session) -> Self {\n        let id = session.fresh_id();\n        let value = Arc::new(value);\n        session.table.0.insert(id.clone(), value.clone());\n        Self { id, value }\n    }\n\n    pub fn inner(&self) -> &Arc<T> {\n        
&self.value\n    }\n\n    pub fn id(&self) -> Id {\n        self.id\n    }\n}\n\n/// Wrapper for a type `T` that creates a bundle containing both a ID\n/// table and a value `T`. That value may contains `Node` values\n/// inside it. Serializing `WithTable<T>` will serialize IDs only,\n/// skipping values. Deserialization of a `WithTable<T>` will\n/// automatically use the table and IDs to reconstruct skipped values.\n#[derive(Debug)]\npub struct WithTable<T> {\n    table: Table,\n    value: T,\n}\n\n/// The state used for deserialization: a table.\nstatic DESERIALIZATION_STATE: LazyLock<Mutex<Table>> =\n    LazyLock::new(|| Mutex::new(Table::default()));\nstatic DESERIALIZATION_STATE_LOCK: LazyLock<Mutex<()>> = LazyLock::new(|| Mutex::new(()));\n\n/// The mode of serialization: should `Node<T>` ship values of type `T` or not?\nstatic SERIALIZATION_MODE_USE_IDS: std::sync::atomic::AtomicBool =\n    std::sync::atomic::AtomicBool::new(false);\n\nfn serialize_use_id() -> bool {\n    SERIALIZATION_MODE_USE_IDS.load(Ordering::Relaxed)\n}\n\nimpl<T> WithTable<T> {\n    /// Runs `f` with a `WithTable<T>` created out of `map` and\n    /// `value`. 
Any serialization of values of type `Node<_>` will\n    /// skip the field `value`.\n    pub fn run<R>(map: Table, value: T, f: impl FnOnce(&Self) -> R) -> R {\n        if serialize_use_id() {\n            panic!(\n                \"CACHE_MAP_LOCK: only one WithTable serialization can occur at a time (nesting is forbidden)\"\n            )\n        }\n        SERIALIZATION_MODE_USE_IDS.store(true, Ordering::Relaxed);\n        let result = f(&Self { table: map, value });\n        SERIALIZATION_MODE_USE_IDS.store(false, Ordering::Relaxed);\n        result\n    }\n    pub fn destruct(self) -> (T, Table) {\n        let Self { value, table: map } = self;\n        (value, map)\n    }\n}\n\nimpl<T: Serialize> Serialize for WithTable<T> {\n    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n        let mut ts = serializer.serialize_tuple_struct(\"WithTable\", 2)?;\n        use serde::ser::SerializeTupleStruct;\n        ts.serialize_field(&self.table)?;\n        ts.serialize_field(&self.value)?;\n        ts.end()\n    }\n}\n\n/// The deserializer of `WithTable<T>` is special. We first decode the\n/// table in order: each `(Id, Value)` pair of the table populates the\n/// global table state found in `DESERIALIZATION_STATE`. 
Only then we\n/// can decode the value itself, knowing `DESERIALIZATION_STATE` is\n/// complete.\nimpl<'de, T: Deserialize<'de>> serde::Deserialize<'de> for WithTable<T> {\n    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n    where\n        D: serde::Deserializer<'de>,\n    {\n        let _lock: MutexGuard<_> = DESERIALIZATION_STATE_LOCK.try_lock().expect(\"CACHE_MAP_LOCK: only one WithTable deserialization can occur at a time (nesting is forbidden)\");\n        use serde_repr::WithTableRepr;\n        let previous = std::mem::take(&mut *DESERIALIZATION_STATE.lock().unwrap());\n        let with_table_repr = WithTableRepr::deserialize(deserializer);\n        *DESERIALIZATION_STATE.lock().unwrap() = previous;\n        let WithTableRepr(table, value) = with_table_repr?;\n        Ok(Self { table, value })\n    }\n}\n\n/// Defines representations for various types when serializing or/and\n/// deserializing via serde\nmod serde_repr {\n    use super::*;\n\n    #[derive(Serialize, Deserialize, JsonSchema, Debug)]\n    pub(super) struct NodeRepr<T> {\n        id: Id,\n        value: Option<Arc<T>>,\n    }\n\n    #[derive(Serialize)]\n    pub(super) struct Pair(Id, Value);\n    pub(super) type SortedIdValuePairs = Vec<Pair>;\n\n    #[derive(Serialize, Deserialize)]\n    pub(super) struct WithTableRepr<T>(pub(super) Table, pub(super) T);\n\n    impl<T: SupportedType<Value>> Into<NodeRepr<T>> for Node<T> {\n        fn into(self) -> NodeRepr<T> {\n            let value = if serialize_use_id() {\n                None\n            } else {\n                Some(self.value.clone())\n            };\n            let id = self.id;\n            NodeRepr { value, id }\n        }\n    }\n\n    impl<T: 'static + SupportedType<Value>> TryFrom<NodeRepr<T>> for Node<T> {\n        type Error = serde::de::value::Error;\n\n        fn try_from(cached: NodeRepr<T>) -> Result<Self, Self::Error> {\n            use serde::de::Error;\n            let table = 
DESERIALIZATION_STATE.lock().unwrap();\n            let id = cached.id;\n            let kind = if let Some(kind) = cached.value {\n                kind\n            } else {\n                table\n                    .0\n                    .get(&id)\n                    .ok_or_else(|| {\n                        Self::Error::custom(&format!(\n                            \"Stateful deserialization failed for id {:?}: not found in cache\",\n                            id\n                        ))\n                    })?\n                    .ok_or_else(|| {\n                        Self::Error::custom(&format!(\n                            \"Stateful deserialization failed for id {:?}: wrong type\",\n                            id\n                        ))\n                    })?\n            };\n            Ok(Self { value: kind, id })\n        }\n    }\n\n    impl<'de> serde::Deserialize<'de> for Pair {\n        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n        where\n            D: serde::Deserializer<'de>,\n        {\n            let (id, v) = <(Id, Value)>::deserialize(deserializer)?;\n            DESERIALIZATION_STATE\n                .lock()\n                .unwrap()\n                .0\n                .insert_raw_value(id.clone(), v.clone());\n            Ok(Pair(id, v))\n        }\n    }\n\n    impl Into<SortedIdValuePairs> for Table {\n        fn into(self) -> SortedIdValuePairs {\n            let mut vec: Vec<_> = self.0.into_iter().map(|(x, y)| Pair(x, y)).collect();\n            vec.sort_by_key(|o| o.0.clone());\n            vec\n        }\n    }\n\n    impl From<SortedIdValuePairs> for Table {\n        fn from(t: SortedIdValuePairs) -> Self {\n            Self(HeterogeneousMap::from_iter(\n                t.into_iter().map(|Pair(x, y)| (x, y)),\n            ))\n        }\n    }\n}\n"
  },
  {
    "path": "frontend/exporter/src/index_vec.rs",
    "content": "use crate::prelude::*;\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct IndexVec<I: 'static, T: 'static> {\n    pub raw: Vec<T>,\n    _marker: std::marker::PhantomData<fn(_: &I)>,\n}\n\nimpl<I, T: Sized> IndexVec<I, T> {\n    pub fn into_iter(self) -> impl DoubleEndedIterator<Item = T> + ExactSizeIterator {\n        self.raw.into_iter()\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<I: rustc_index::Idx, T: Sized> IndexVec<I, T> {\n    pub fn into_iter_enumerated(\n        self,\n    ) -> impl DoubleEndedIterator<Item = (I, T)> + ExactSizeIterator {\n        rustc_index::IndexVec::from_raw(self.raw).into_iter_enumerated()\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<I: rustc_index::Idx, T: Sized> std::ops::Deref for IndexVec<I, T> {\n    type Target = rustc_index::IndexSlice<I, T>;\n    fn deref(&self) -> &Self::Target {\n        Self::Target::from_raw(&self.raw)\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<I: rustc_index::Idx, T: Sized> std::ops::DerefMut for IndexVec<I, T> {\n    fn deref_mut(&mut self) -> &mut Self::Target {\n        Self::Target::from_raw_mut(&mut self.raw)\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<I: rustc_index::Idx, T> From<rustc_index::IndexVec<I, T>> for IndexVec<I, T> {\n    fn from(val: rustc_index::IndexVec<I, T>) -> Self {\n        IndexVec {\n            raw: val.raw,\n            _marker: std::marker::PhantomData,\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<S, J: rustc_index::Idx, I: rustc_index::Idx + SInto<S, J>, U: Clone, T: SInto<S, U>>\n    SInto<S, IndexVec<J, U>> for rustc_index::IndexSlice<I, T>\n{\n    fn sinto(&self, s: &S) -> IndexVec<J, U> {\n        IndexVec {\n            raw: self.raw.sinto(s),\n            _marker: std::marker::PhantomData,\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<I, T> FromIterator<T> for IndexVec<I, T>\nwhere\n    I: rustc_index::Idx,\n{\n    #[inline]\n    fn from_iter<It: 
IntoIterator<Item = T>>(iter: It) -> Self {\n        Self {\n            raw: Vec::from_iter(iter),\n            _marker: std::marker::PhantomData,\n        }\n    }\n}\n\nmacro_rules! make_idx_wrapper {\n    ($($mod:ident)::+, $type:ident) => {\n        #[derive_group(Serializers)]#[derive(Copy, Clone, Eq, Debug, Hash, PartialEq, PartialOrd, Ord, JsonSchema)]\n        #[serde(untagged)]\n        pub enum $type {\n            $type(usize),\n        }\n        #[cfg(feature = \"rustc\")]\n        const _: () = {\n            use rustc_index::Idx;\n            type OriginalType = $($mod::)+$type;\n            impl Idx for $type {\n                fn new(idx: usize) -> Self {\n                    $type::$type(idx)\n                }\n                fn index(self) -> usize {\n                    let $type::$type(x) = self;\n                    x.index()\n                }\n            }\n            impl<S> SInto<S, $type> for OriginalType {\n                fn sinto(&self, _s: &S) -> $type {\n                    $type::new(self.index())\n                }\n            }\n        };\n    };\n}\npub(crate) use make_idx_wrapper;\n"
  },
  {
    "path": "frontend/exporter/src/lib.rs",
    "content": "#![allow(rustdoc::private_intra_doc_links)]\n#![cfg_attr(feature = \"rustc\", feature(if_let_guard))]\n#![cfg_attr(feature = \"rustc\", feature(macro_metavar_expr))]\n#![cfg_attr(feature = \"rustc\", feature(rustc_private))]\n#![cfg_attr(feature = \"rustc\", feature(sized_hierarchy))]\n#![cfg_attr(feature = \"rustc\", feature(trait_alias))]\n#![cfg_attr(feature = \"rustc\", feature(type_changing_struct_update))]\n\nmacro_rules! cfg_feature_rustc {\n    ($($item:item)*) => {\n        $(\n            #[cfg(feature = \"rustc\")]\n            $item\n        )*\n    }\n}\n\ncfg_feature_rustc! {\n    // When the feature `rustc` is enabled, we enable the bridges\n    // between rustc ASTs, which are defined in the crates\n    // `rustc_*`. We thus need to import them with `extern crate\n    // rustc_*`\n    extern crate rustc_abi;\n    extern crate rustc_ast;\n    extern crate rustc_ast_pretty;\n    extern crate rustc_apfloat;\n    extern crate rustc_const_eval;\n    extern crate rustc_data_structures;\n    extern crate rustc_driver;\n    extern crate rustc_hashes;\n    extern crate rustc_errors;\n    extern crate rustc_hir;\n    extern crate rustc_hir_analysis;\n    extern crate rustc_index;\n    extern crate rustc_infer;\n    extern crate rustc_interface;\n    extern crate rustc_middle;\n    extern crate rustc_mir_build;\n    extern crate rustc_session;\n    extern crate rustc_span;\n    extern crate rustc_target;\n    extern crate rustc_trait_selection;\n    extern crate rustc_type_ir;\n    extern crate rustc_lexer;\n\n    mod rustc_utils;\n    pub mod state;\n    mod utils;\n    mod deterministic_hash;\n    pub mod comments;\n}\n\nmod body;\nmod constant_utils;\npub mod id_table;\nmod types;\n\nmod index_vec;\nmod prelude;\n\npub use hax_frontend_exporter_options as options;\npub use prelude::*;\n\nmod sinto;\nmod traits;\n\npub use hax_adt_into::AdtInto;\npub use sinto::SInto;\n"
  },
  {
    "path": "frontend/exporter/src/prelude.rs",
    "content": "pub use crate::*;\npub use schemars::{JsonSchema, schema_for};\npub use serde::{Deserialize, Serialize};\npub use std::collections::HashMap;\npub use std::path::PathBuf;\npub use std::rc::Rc;\n\npub use crate::body::*;\npub use crate::constant_utils::*;\npub use crate::id_table;\npub use crate::index_vec::*;\npub use crate::traits::*;\npub use crate::types::*;\n\n#[cfg(feature = \"rustc\")]\npub use self::rustc::*;\n#[cfg(feature = \"rustc\")]\npub mod rustc {\n    pub use crate::rustc_utils::*;\n    pub use crate::state::*;\n    pub use crate::utils::*;\n}\n\npub(crate) use hax_adt_into::derive_group;\n"
  },
  {
    "path": "frontend/exporter/src/rustc_utils.rs",
    "content": "use crate::prelude::*;\nuse rustc_hir::def::DefKind as RDefKind;\nuse rustc_middle::{mir, ty};\n\npub fn inst_binder<'tcx, T>(\n    tcx: ty::TyCtxt<'tcx>,\n    typing_env: ty::TypingEnv<'tcx>,\n    args: Option<ty::GenericArgsRef<'tcx>>,\n    x: ty::EarlyBinder<'tcx, T>,\n) -> T\nwhere\n    T: ty::TypeFoldable<ty::TyCtxt<'tcx>> + Clone,\n{\n    match args {\n        None => x.instantiate_identity(),\n        Some(args) => tcx.normalize_erasing_regions(typing_env, x.instantiate(tcx, args)),\n    }\n}\n\npub fn substitute<'tcx, T>(\n    tcx: ty::TyCtxt<'tcx>,\n    typing_env: ty::TypingEnv<'tcx>,\n    args: Option<ty::GenericArgsRef<'tcx>>,\n    x: T,\n) -> T\nwhere\n    T: ty::TypeFoldable<ty::TyCtxt<'tcx>>,\n{\n    inst_binder(tcx, typing_env, args, ty::EarlyBinder::bind(x))\n}\n\n#[extension_traits::extension(pub trait SubstBinder)]\nimpl<'tcx, T: ty::TypeFoldable<ty::TyCtxt<'tcx>>> ty::Binder<'tcx, T> {\n    fn subst(\n        self,\n        tcx: ty::TyCtxt<'tcx>,\n        generics: &[ty::GenericArg<'tcx>],\n    ) -> ty::Binder<'tcx, T> {\n        ty::EarlyBinder::bind(self).instantiate(tcx, generics)\n    }\n}\n\n/// Whether the item can have generic parameters.\npub(crate) fn can_have_generics<'tcx>(tcx: ty::TyCtxt<'tcx>, def_id: RDefId) -> bool {\n    use RDefKind::*;\n    match get_def_kind(tcx, def_id) {\n        Mod | ConstParam | TyParam | LifetimeParam | Macro(..) 
| ExternCrate | Use | ForeignMod\n        | GlobalAsm => false,\n        _ => true,\n    }\n}\n\n#[tracing::instrument(skip(s))]\npub(crate) fn get_variant_information<'s, S: UnderOwnerState<'s>>(\n    adt_def: &ty::AdtDef<'s>,\n    variant_index: rustc_abi::VariantIdx,\n    s: &S,\n) -> VariantInformations {\n    fn is_named<'s, I: std::iter::Iterator<Item = &'s ty::FieldDef> + Clone>(it: I) -> bool {\n        it.clone()\n            .any(|field| field.name.to_ident_string().parse::<u64>().is_err())\n    }\n    let variant_def = adt_def.variant(variant_index);\n    let variant = variant_def.def_id;\n    let constructs_type: DefId = adt_def.did().sinto(s);\n    let kind = if adt_def.is_struct() {\n        let named = is_named(adt_def.all_fields());\n        VariantKind::Struct { named }\n    } else if adt_def.is_union() {\n        VariantKind::Union\n    } else {\n        let named = is_named(variant_def.fields.iter());\n        let index = variant_index.into();\n        VariantKind::Enum { index, named }\n    };\n    VariantInformations {\n        typ: constructs_type.clone(),\n        variant: variant.sinto(s),\n        kind,\n        type_namespace: match &constructs_type.parent {\n            Some(parent) => parent.clone(),\n            None => {\n                let span = s.base().tcx.def_span(variant);\n                fatal!(\n                    s[span],\n                    \"Type {:#?} appears to have no parent\",\n                    constructs_type\n                )\n            }\n        },\n    }\n}\n\n#[tracing::instrument(skip(sess))]\npub fn translate_span(span: rustc_span::Span, sess: &rustc_session::Session) -> Span {\n    let smap: &rustc_span::source_map::SourceMap = sess.psess.source_map();\n    let filename = smap.span_to_filename(span);\n\n    let lo = smap.lookup_char_pos(span.lo());\n    let hi = smap.lookup_char_pos(span.hi());\n\n    Span {\n        lo: lo.into(),\n        hi: hi.into(),\n        filename: filename.sinto(&()),\n       
 rust_span_data: Some(span.data()),\n    }\n}\n\npub trait HasParamEnv<'tcx> {\n    fn param_env(&self) -> ty::ParamEnv<'tcx>;\n    fn typing_env(&self) -> ty::TypingEnv<'tcx>;\n}\n\nimpl<'tcx, S: UnderOwnerState<'tcx>> HasParamEnv<'tcx> for S {\n    fn param_env(&self) -> ty::ParamEnv<'tcx> {\n        let tcx = self.base().tcx;\n        let def_id = self.owner_id();\n        if can_have_generics(tcx, def_id) {\n            tcx.param_env(def_id)\n        } else {\n            ty::ParamEnv::empty()\n        }\n    }\n    fn typing_env(&self) -> ty::TypingEnv<'tcx> {\n        ty::TypingEnv {\n            param_env: self.param_env(),\n            typing_mode: ty::TypingMode::PostAnalysis,\n        }\n    }\n}\n\n#[tracing::instrument(skip(s))]\npub(crate) fn attribute_from_scope<'tcx, S: ExprState<'tcx>>(\n    s: &S,\n    scope: &rustc_middle::middle::region::Scope,\n) -> (Option<rustc_hir::hir_id::HirId>, Vec<Attribute>) {\n    let owner = s.owner_id();\n    let tcx = s.base().tcx;\n    let scope_tree = tcx.region_scope_tree(owner);\n    let hir_id = scope.hir_id(scope_tree);\n    let tcx = s.base().tcx;\n    let attributes = hir_id\n        .map(|hir_id| tcx.hir_attrs(hir_id).sinto(s))\n        .unwrap_or_default();\n    (hir_id, attributes)\n}\n\n/// Gets the closest ancestor of `id` that is the id of a type.\npub fn get_closest_parent_type(\n    tcx: &ty::TyCtxt,\n    id: rustc_span::def_id::DefId,\n) -> rustc_span::def_id::DefId {\n    match tcx.def_kind(id) {\n        rustc_hir::def::DefKind::Union\n        | rustc_hir::def::DefKind::Struct\n        | rustc_hir::def::DefKind::Enum => id,\n        _ => get_closest_parent_type(tcx, tcx.parent(id)),\n    }\n}\n\n/// Gets the visibility (`pub` or not) of the definition. 
Returns `None` for defs that don't have a\n/// meaningful visibility.\npub fn get_def_visibility<'tcx>(\n    tcx: ty::TyCtxt<'tcx>,\n    def_id: RDefId,\n    def_kind: RDefKind,\n) -> Option<bool> {\n    use RDefKind::*;\n    match def_kind {\n        AssocConst\n        | AssocFn\n        | Const\n        | Enum\n        | Field\n        | Fn\n        | ForeignTy\n        | Macro { .. }\n        | Mod\n        | Static { .. }\n        | Struct\n        | Trait\n        | TraitAlias\n        | TyAlias { .. }\n        | Union\n        | Use\n        | Variant => Some(tcx.visibility(def_id).is_public()),\n        // These kinds don't have visibility modifiers (which would cause `visibility` to panic).\n        AnonConst\n        | AssocTy\n        | Closure\n        | ConstParam\n        | Ctor { .. }\n        | ExternCrate\n        | ForeignMod\n        | GlobalAsm\n        | Impl { .. }\n        | InlineConst\n        | LifetimeParam\n        | OpaqueTy\n        | SyntheticCoroutineBody\n        | TyParam => None,\n    }\n}\n\n/// Gets the attributes of the definition.\npub fn get_def_attrs<'tcx>(\n    tcx: ty::TyCtxt<'tcx>,\n    def_id: RDefId,\n    def_kind: RDefKind,\n) -> &'tcx [rustc_hir::Attribute] {\n    if let Some(ldid) = def_id.as_local() {\n        tcx.hir_attrs(tcx.local_def_id_to_hir_id(ldid))\n    } else {\n        match def_kind {\n            // These kinds cause `get_attrs` to panic.\n            RDefKind::ConstParam | RDefKind::LifetimeParam | RDefKind::ForeignMod | RDefKind::TyParam => &[],\n            _ => tcx.attrs_for_def(def_id),\n        }\n    }\n}\n\n/// Gets the children of a module.\npub fn get_mod_children<'tcx>(\n    tcx: ty::TyCtxt<'tcx>,\n    def_id: RDefId,\n) -> Vec<(Option<rustc_span::Ident>, RDefId)> {\n    match def_id.as_local() {\n        Some(ldid) => match tcx.hir_node_by_def_id(ldid) {\n            rustc_hir::Node::Crate(m)\n            | rustc_hir::Node::Item(&rustc_hir::Item {\n                kind: 
rustc_hir::ItemKind::Mod(_, m),\n                ..\n            }) => m\n                .item_ids\n                .iter()\n                .map(|&item_id| {\n                    let opt_ident = tcx.hir_item(item_id).kind.ident();\n                    let def_id = item_id.owner_id.to_def_id();\n                    (opt_ident, def_id)\n                })\n                .collect(),\n            node => panic!(\"DefKind::Module is an unexpected node: {node:?}\"),\n        },\n        None => tcx\n            .module_children(def_id)\n            .iter()\n            .map(|child| (Some(child.ident), child.res.def_id()))\n            .collect(),\n    }\n}\n\n/// Gets the children of an `extern` block. Empty if the block is not defined in the current crate.\npub fn get_foreign_mod_children<'tcx>(tcx: ty::TyCtxt<'tcx>, def_id: RDefId) -> Vec<RDefId> {\n    match def_id.as_local() {\n        Some(ldid) => tcx\n            .hir_node_by_def_id(ldid)\n            .expect_item()\n            .expect_foreign_mod()\n            .1\n            .iter()\n            .map(|foreign_item_ref| foreign_item_ref.owner_id.to_def_id())\n            .collect(),\n        None => vec![],\n    }\n}\n\n/// The signature of a method impl may be a subtype of the one expected from the trait decl, as in\n/// the example below. For correctness, we must be able to map from the method generics declared in\n/// the trait to the actual method generics. Because this would require type inference, we instead\n/// simply return the declared signature. 
This will cause issues if it is possible to use such a\n/// more-specific implementation with its more-specific type, but we have a few other issues with\n/// lifetime-generic function pointers anyway so this is unlikely to cause problems.\n///\n/// ```ignore\n/// trait MyCompare<Other>: Sized {\n///     fn compare(self, other: Other) -> bool;\n/// }\n/// impl<'a> MyCompare<&'a ()> for &'a () {\n///     // This implementation is more general because it works for non-`'a` refs. Note that only\n///     // late-bound vars may differ in this way.\n///     // `<&'a () as MyCompare<&'a ()>>::compare` has type `fn<'b>(&'a (), &'b ()) -> bool`,\n///     // but type `fn(&'a (), &'a ()) -> bool` was expected from the trait declaration.\n///     fn compare<'b>(self, _other: &'b ()) -> bool {\n///         true\n///     }\n/// }\n/// ```\npub fn get_method_sig<'tcx>(\n    tcx: ty::TyCtxt<'tcx>,\n    typing_env: ty::TypingEnv<'tcx>,\n    def_id: RDefId,\n    method_args: Option<ty::GenericArgsRef<'tcx>>,\n) -> ty::PolyFnSig<'tcx> {\n    let real_sig = inst_binder(tcx, typing_env, method_args, tcx.fn_sig(def_id));\n    let item = tcx.associated_item(def_id);\n    let ty::AssocContainer::TraitImpl(Ok(decl_method_id)) = item.container else {\n        return real_sig;\n    };\n    let declared_sig = tcx.fn_sig(decl_method_id);\n\n    // TODO(Nadrieril): Temporary hack: if the signatures have the same number of bound vars, we\n    // keep the real signature. While the declared signature is more correct, it is also less\n    // normalized and we can't normalize without erasing regions but regions are crucial in\n    // function signatures. 
Hence we cheat here, until charon gains proper normalization\n    // capabilities.\n    if declared_sig.skip_binder().bound_vars().len() == real_sig.bound_vars().len() {\n        return real_sig;\n    }\n\n    let impl_def_id = item.container_id(tcx);\n    let method_args =\n        method_args.unwrap_or_else(|| ty::GenericArgs::identity_for_item(tcx, def_id));\n    // The trait predicate that is implemented by the surrounding impl block.\n    let implemented_trait_ref = tcx\n        .impl_trait_ref(impl_def_id)\n        .instantiate(tcx, method_args);\n    // Construct arguments for the declared method generics in the context of the implemented\n    // method generics.\n    let decl_args = method_args.rebase_onto(tcx, impl_def_id, implemented_trait_ref.args);\n    let sig = declared_sig.instantiate(tcx, decl_args);\n    // Avoids accidentally using the same lifetime name twice in the same scope\n    // (once in impl parameters, second in the method declaration late-bound vars).\n    let sig = tcx.anonymize_bound_vars(sig);\n    normalize(tcx, typing_env, sig)\n}\n\n/// Generates a list of `<trait_ref>::Ty` type aliases for each non-gat associated type of the\n/// given trait and its parents, in a specific order.\npub fn assoc_tys_for_trait<'tcx>(\n    tcx: ty::TyCtxt<'tcx>,\n    typing_env: ty::TypingEnv<'tcx>,\n    tref: ty::TraitRef<'tcx>,\n) -> Vec<ty::AliasTy<'tcx>> {\n    fn gather_assoc_tys<'tcx>(\n        tcx: ty::TyCtxt<'tcx>,\n        typing_env: ty::TypingEnv<'tcx>,\n        assoc_tys: &mut Vec<ty::AliasTy<'tcx>>,\n        tref: ty::TraitRef<'tcx>,\n    ) {\n        assoc_tys.extend(\n            tcx.associated_items(tref.def_id)\n                .in_definition_order()\n                .filter(|assoc| matches!(assoc.kind, ty::AssocKind::Type { .. 
}))\n                .filter(|assoc| tcx.generics_of(assoc.def_id).own_params.is_empty())\n                .map(|assoc| ty::AliasTy::new(tcx, assoc.def_id, tref.args)),\n        );\n        for clause in tcx\n            .explicit_super_predicates_of(tref.def_id)\n            .map_bound(|clauses| clauses.iter().map(|(clause, _span)| *clause))\n            .iter_instantiated(tcx, tref.args)\n        {\n            if let Some(pred) = clause.as_trait_clause() {\n                let tref = erase_and_norm(tcx, typing_env, pred.skip_binder().trait_ref);\n                gather_assoc_tys(tcx, typing_env, assoc_tys, tref);\n            }\n        }\n    }\n    let mut ret = vec![];\n    gather_assoc_tys(tcx, typing_env, &mut ret, tref);\n    ret\n}\n\n/// Generates a `dyn Trait<Args.., Ty = <Self as Trait>::Ty..>` type for the given trait ref.\npub fn dyn_self_ty<'tcx>(\n    tcx: ty::TyCtxt<'tcx>,\n    typing_env: ty::TypingEnv<'tcx>,\n    tref: ty::TraitRef<'tcx>,\n) -> Option<ty::Ty<'tcx>> {\n    let re_erased = tcx.lifetimes.re_erased;\n    if !tcx.is_dyn_compatible(tref.def_id) {\n        return None;\n    }\n\n    // The main `Trait<Args>` predicate.\n    let main_pred = ty::Binder::dummy(ty::ExistentialPredicate::Trait(\n        ty::ExistentialTraitRef::erase_self_ty(tcx, tref),\n    ));\n\n    let ty_constraints = assoc_tys_for_trait(tcx, typing_env, tref)\n        .into_iter()\n        .map(|alias_ty| {\n            let proj = ty::ProjectionPredicate {\n                projection_term: alias_ty.into(),\n                term: ty::Ty::new_alias(tcx, ty::Projection, alias_ty).into(),\n            };\n            let proj = ty::ExistentialProjection::erase_self_ty(tcx, proj);\n            ty::Binder::dummy(ty::ExistentialPredicate::Projection(proj))\n        });\n\n    let preds = {\n        // Stable sort predicates to prevent platform-specific ordering issues\n        let mut preds: Vec<_> = [main_pred].into_iter().chain(ty_constraints).collect();\n        
preds.sort_by(|a, b| {\n            use crate::rustc_middle::ty::ExistentialPredicateStableCmpExt;\n            a.skip_binder().stable_cmp(tcx, &b.skip_binder())\n        });\n        tcx.mk_poly_existential_predicates(&preds)\n    };\n    let ty = tcx.mk_ty_from_kind(ty::Dynamic(preds, re_erased));\n    let ty = normalize(tcx, typing_env, ty);\n    Some(ty)\n}\n\npub fn closure_once_shim<'tcx>(\n    tcx: ty::TyCtxt<'tcx>,\n    closure_ty: ty::Ty<'tcx>,\n) -> Option<mir::Body<'tcx>> {\n    let ty::Closure(def_id, args) = closure_ty.kind() else {\n        unreachable!()\n    };\n    let instance = match args.as_closure().kind() {\n        ty::ClosureKind::Fn | ty::ClosureKind::FnMut => {\n            ty::Instance::fn_once_adapter_instance(tcx, *def_id, args)\n        }\n        ty::ClosureKind::FnOnce => return None,\n    };\n    let mir = tcx.instance_mir(instance.def).clone();\n    let mir = ty::EarlyBinder::bind(mir).instantiate(tcx, instance.args);\n    Some(mir)\n}\n\npub fn drop_glue_shim<'tcx>(\n    tcx: ty::TyCtxt<'tcx>,\n    def_id: RDefId,\n    instantiate: Option<ty::GenericArgsRef<'tcx>>,\n) -> Option<mir::Body<'tcx>> {\n    let drop_in_place =\n        tcx.require_lang_item(rustc_hir::LangItem::DropInPlace, rustc_span::DUMMY_SP);\n    let ty = tcx.type_of(def_id);\n    let ty = match instantiate {\n        None => {\n            if !tcx.generics_of(def_id).is_empty() {\n                // Hack: layout code panics if it can't fully normalize types, which can happen e.g. with a\n                // trait associated type. For now we only translate the glue for monomorphic types.\n                return None;\n            }\n            ty.instantiate_identity()\n        }\n        Some(args) => ty.instantiate(tcx, args),\n    };\n    let instance_kind = ty::InstanceKind::DropGlue(drop_in_place, Some(ty));\n    let mir = tcx.instance_mir(instance_kind).clone();\n    Some(mir)\n}\n"
  },
  {
    "path": "frontend/exporter/src/sinto.rs",
    "content": "#[cfg(not(feature = \"rustc\"))]\npub trait SInto<S, To> {\n    fn sinto(&self, s: &S) -> To;\n}\n\n#[cfg(feature = \"rustc\")]\npub trait SInto<S, To>: std::marker::PointeeSized {\n    fn sinto(&self, s: &S) -> To;\n}\n\n#[macro_export]\nmacro_rules! sinto_todo {\n    ($($mod:ident)::+, $type:ident$(<$($lts:lifetime),*$(,)?>)? as $renamed:ident) => {\n        #[derive_group(Serializers)]\n        #[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n        pub enum $renamed {\n            $type {\n                todo: String\n            },\n        }\n        #[cfg(feature = \"rustc\")]\n        impl<$($($lts,)*)? S> SInto<S, $renamed> for $($mod)::+::$type$(<$($lts,)*>)? {\n            fn sinto(&self, _: &S) -> $renamed {\n                $renamed::$type{todo: format!(\"{:?}\", self)}\n            }\n        }\n    };\n    ($($mod:ident)::+, $type:ident$(<$($lts:lifetime),*$(,)?>)?) => {\n        sinto_todo!($($mod)::+, $type$(<$($lts),*>)? as $type);\n    }\n}\n\n#[macro_export]\nmacro_rules! sinto_as_usize {\n    ($($mod:ident)::+, $type:ident$(<$($lts:lifetime),*$(,)?>)?) => {\n        pub type $type = usize;\n        #[cfg(feature = \"rustc\")]\n        impl<$($($lts,)*)? S> SInto<S, $type> for $($mod)::+::$type$(<$($lts,)*>)? 
{\n            fn sinto(&self, _: &S) -> $type {\n                self.as_usize()\n            }\n        }\n    }\n}\n\nimpl<S, LL, RR, L: SInto<S, LL>, R: SInto<S, RR>> SInto<S, (LL, RR)> for (L, R) {\n    fn sinto(&self, s: &S) -> (LL, RR) {\n        (self.0.sinto(s), self.1.sinto(s))\n    }\n}\n\nimpl<S, AA, BB, CC, A: SInto<S, AA>, B: SInto<S, BB>, C: SInto<S, CC>> SInto<S, (AA, BB, CC)>\n    for (A, B, C)\n{\n    fn sinto(&self, s: &S) -> (AA, BB, CC) {\n        (self.0.sinto(s), self.1.sinto(s), self.2.sinto(s))\n    }\n}\n\nimpl<S, D, T: SInto<S, D>> SInto<S, Option<D>> for Option<T> {\n    fn sinto(&self, s: &S) -> Option<D> {\n        self.as_ref().map(|x| x.sinto(s))\n    }\n}\nimpl<S, D, T: SInto<S, D>> SInto<S, D> for Box<T> {\n    fn sinto(&self, s: &S) -> D {\n        (**self).sinto(s)\n    }\n}\nimpl<S, D, T: SInto<S, D>> SInto<S, D> for &T {\n    fn sinto(&self, s: &S) -> D {\n        (**self).sinto(s)\n    }\n}\nimpl<S, D: Clone, T: SInto<S, D>> SInto<S, Vec<D>> for [T] {\n    fn sinto(&self, s: &S) -> Vec<D> {\n        self.iter().map(|x| x.sinto(s)).collect()\n    }\n}\nimpl<S, D: Clone, T: SInto<S, D>> SInto<S, Vec<D>> for Box<[T]> {\n    fn sinto(&self, s: &S) -> Vec<D> {\n        self.into_iter().map(|x| x.sinto(s)).collect()\n    }\n}\n\nimpl<S, D: Clone, T: SInto<S, D>> SInto<S, Vec<D>> for Vec<T> {\n    fn sinto(&self, s: &S) -> Vec<D> {\n        self.iter().map(|x| x.sinto(s)).collect()\n    }\n}\n\nmacro_rules! sinto_clone {\n    ($t:ty) => {\n        impl<S> SInto<S, $t> for $t {\n            fn sinto(&self, _: &S) -> $t {\n                self.clone()\n            }\n        }\n    };\n    ($t:ty, $($rest:tt)*) => {\n        sinto_clone!($t);\n        sinto_clone!($($rest)+);\n    };\n    () => {};\n}\n\nsinto_clone!(bool, String, char);\nsinto_clone!(u8, u16, u32, u64, u128, usize);\nsinto_clone!(i8, i16, i32, i64, i128, isize);\n"
  },
  {
    "path": "frontend/exporter/src/state.rs",
    "content": "use crate::prelude::*;\nuse paste::paste;\n\nmacro_rules! mk_aux {\n    ($state:ident {$($lts:lifetime)*} $field:ident {$($field_type:tt)+} {$($gen:tt)*} {$($gen_full:tt)*} {$($params:tt)*} {$($fields:tt)*}) => {\n        paste ! {\n            pub trait [<Has $field:camel>]<$($lts,)*> {\n                fn $field(self: &Self) -> $($field_type)+<$($lts)*>;\n            }\n            impl<$($lts,)*$($gen)*> [<Has $field:camel>]<$($lts,)*> for $state<$($params)*> {\n                fn $field(self: &Self) -> $($field_type)+<$($lts)*> {\n                    self.$field.clone()\n                }\n            }\n        }\n    };\n}\nmacro_rules! mk {\n    (struct $state:ident<$($glts:lifetime),*> {$($field:ident : {$($lts:lifetime),*} $field_type:ty),*$(,)?}) => {\n        mk!(@$state {} {$($field)*} {$($field: {$($lts),*} {$field_type},)*});\n    };\n    (@$state:ident {$($acc:tt)*} $fields:tt {\n        $field:ident : $lts:tt $field_type:tt\n        $(,$($rest:tt)*)?\n    }) => {mk!(@$state {\n        $($acc)* $fields $field: $lts $field_type,\n    } $fields {$($($rest)*)?} );};\n    (@$state:ident $body:tt $fields:tt {$(,)?}) => { mk! (@@ $state $body ); };\n    (@@$state:ident {$({$($fields:tt)*} $field:ident : {$($lts:lifetime)*} {$($field_type:tt)+},)*}) => {\n        paste! {\n            #[derive(Clone)]\n            pub struct $state<$([<$field:camel>],)*>{\n                $(pub $field: [<$field:camel>],)*\n            }\n        }\n        $(\n            macro_rules! __inner_helper {\n                ($gen:tt {$$($full_gen:tt)*} {$$($params:tt)*} $field $$($rest:tt)*) => {\n                    paste! {__inner_helper!(\n                        $gen {$$($full_gen)*[<$field:camel>],}\n                        {$$($params)*$($field_type)+<$($lts,)*>,} $$($rest)*\n                    );}\n                };\n                ({$$($gen:tt)*} {$$($full_gen:tt)*} {$$($params:tt)*} $i:ident $$($rest:tt)*) => {\n                    paste! 
{__inner_helper!(\n                        {$$($gen)*[<$i:camel>],} {$$($full_gen)*[<$i:camel>],}\n                        {$$($params)*[<$i:camel>],} $$($rest)*\n                    );}\n                };\n                ($gen:tt $full_gen:tt $params:tt $$(,)?) => {\n                    mk_aux!($state {$($lts)*} $field {$($field_type)+} $gen $full_gen $params {$($fields)*});\n                };\n            }\n            __inner_helper!({} {} {} $($fields)*);\n        )*\n    };\n}\n\nmod types {\n    use crate::prelude::*;\n    use rustc_middle::ty;\n    use std::{cell::RefCell, sync::Arc};\n\n    pub struct LocalContextS {\n        pub vars: HashMap<rustc_middle::thir::LocalVarId, String>,\n    }\n\n    impl Default for LocalContextS {\n        fn default() -> Self {\n            Self::new()\n        }\n    }\n\n    impl LocalContextS {\n        pub fn new() -> LocalContextS {\n            LocalContextS {\n                vars: HashMap::new(),\n            }\n        }\n    }\n\n    /// Global caches\n    #[derive(Default)]\n    pub struct GlobalCache<'tcx> {\n        /// Cache the `Span` translations.\n        pub spans: HashMap<rustc_span::Span, Span>,\n        /// Per-item cache.\n        pub per_item: HashMap<RDefId, ItemCache<'tcx>>,\n        /// A ID table session, providing fresh IDs.\n        pub id_table_session: id_table::Session,\n        /// Map that recovers rustc args for a given `ItemRef`.\n        pub reverse_item_refs_map: HashMap<id_table::Id, ty::GenericArgsRef<'tcx>>,\n        /// We create some artificial items; their def_ids are stored here. 
See the\n        /// `synthetic_items` module.\n        pub synthetic_def_ids: HashMap<SyntheticItem, RDefId>,\n        pub reverse_synthetic_map: HashMap<RDefId, SyntheticItem>,\n    }\n\n    /// Defines a mapping from types to types, for use with `TypeMap`.\n    pub struct FullDefMapper;\n    impl TypeMapper for FullDefMapper {\n        type Value<Body: TypeMappable> = Arc<FullDef<Body>>;\n    }\n\n    /// Per-item cache\n    #[derive(Default)]\n    pub struct ItemCache<'tcx> {\n        /// The translated `DefId`.\n        pub def_id: Option<DefId>,\n        /// The translated definitions, generic in the Body kind.\n        /// Each rustc `DefId` gives several hax `DefId`s: one for each promoted constant (if any),\n        /// and the base one represented by `None`. Moreover we can instantiate definitions with\n        /// generic arguments.\n        pub full_defs:\n            HashMap<(Option<PromotedId>, Option<ty::GenericArgsRef<'tcx>>), TypeMap<FullDefMapper>>,\n        /// Cache the `Ty` translations.\n        pub tys: HashMap<ty::Ty<'tcx>, Ty>,\n        /// Cache the `ItemRef` translations. 
This is fast because `GenericArgsRef` is interned.\n        pub item_refs: HashMap<(RDefId, ty::GenericArgsRef<'tcx>), ItemRef>,\n        /// Cache the trait resolution engine for each item.\n        pub predicate_searcher: Option<crate::traits::PredicateSearcher<'tcx>>,\n        /// Cache of trait refs to resolved impl expressions.\n        pub impl_exprs: HashMap<ty::PolyTraitRef<'tcx>, crate::traits::ImplExpr>,\n    }\n\n    #[derive(Clone)]\n    pub struct Base<'tcx> {\n        pub options: Rc<hax_frontend_exporter_options::Options>,\n        pub local_ctx: Rc<RefCell<LocalContextS>>,\n        pub opt_def_id: Option<rustc_hir::def_id::DefId>,\n        pub cache: Rc<RefCell<GlobalCache<'tcx>>>,\n        pub tcx: ty::TyCtxt<'tcx>,\n        /// Silence the warnings in case of trait resolution failure.\n        pub silence_resolution_errors: bool,\n    }\n\n    impl<'tcx> Base<'tcx> {\n        pub fn new(\n            tcx: rustc_middle::ty::TyCtxt<'tcx>,\n            options: hax_frontend_exporter_options::Options,\n        ) -> Self {\n            Self {\n                tcx,\n                cache: Default::default(),\n                options: Rc::new(options),\n                // Always prefer `s.owner_id()` to `s.base().opt_def_id`.\n                // `opt_def_id` is used in `utils` for error reporting\n                opt_def_id: None,\n                local_ctx: Rc::new(RefCell::new(LocalContextS::new())),\n                silence_resolution_errors: false,\n            }\n        }\n    }\n\n    pub type MacroCalls = Rc<HashMap<Span, Span>>;\n    pub type RcThir<'tcx> = Rc<rustc_middle::thir::Thir<'tcx>>;\n    pub type RcMir<'tcx> = Rc<rustc_middle::mir::Body<'tcx>>;\n    pub type UnitBinder<'tcx> = rustc_middle::ty::Binder<'tcx, ()>;\n}\n\nmk!(\n    struct State<'tcx> {\n        base: {'tcx} types::Base,\n        owner_id: {} rustc_hir::def_id::DefId,\n        thir: {'tcx} types::RcThir,\n        mir: {'tcx} types::RcMir,\n        binder: {'tcx} 
types::UnitBinder,\n        ty: {'tcx} rustc_middle::ty::Ty,\n    }\n);\n\npub use self::types::*;\n\npub type StateWithBase<'tcx> = State<Base<'tcx>, (), (), (), (), ()>;\npub type StateWithOwner<'tcx> = State<Base<'tcx>, rustc_hir::def_id::DefId, (), (), (), ()>;\npub type StateWithBinder<'tcx> =\n    State<Base<'tcx>, rustc_hir::def_id::DefId, (), (), types::UnitBinder<'tcx>, ()>;\npub type StateWithThir<'tcx> =\n    State<Base<'tcx>, rustc_hir::def_id::DefId, types::RcThir<'tcx>, (), (), ()>;\npub type StateWithThirAndTy<'tcx> = State<\n    Base<'tcx>,\n    rustc_hir::def_id::DefId,\n    types::RcThir<'tcx>,\n    (),\n    (),\n    rustc_middle::ty::Ty<'tcx>,\n>;\npub type StateWithMir<'tcx> =\n    State<Base<'tcx>, rustc_hir::def_id::DefId, (), types::RcMir<'tcx>, (), ()>;\n\nimpl<'tcx> StateWithBase<'tcx> {\n    pub fn new(\n        tcx: rustc_middle::ty::TyCtxt<'tcx>,\n        options: hax_frontend_exporter_options::Options,\n    ) -> Self {\n        Self {\n            base: Base::new(tcx, options),\n            owner_id: (),\n            thir: (),\n            mir: (),\n            binder: (),\n            ty: (),\n        }\n    }\n}\n\npub trait BaseState<'tcx>: HasBase<'tcx> + Clone {\n    /// Updates the OnwerId in a state, making sure to override `opt_def_id` in base as well.\n    fn with_owner_id(&self, owner_id: rustc_hir::def_id::DefId) -> StateWithOwner<'tcx> {\n        let mut base = self.base();\n        base.opt_def_id = Some(owner_id);\n        State {\n            owner_id,\n            base,\n            thir: (),\n            mir: (),\n            binder: (),\n            ty: (),\n        }\n    }\n}\nimpl<'tcx, T: HasBase<'tcx> + Clone> BaseState<'tcx> for T {}\n\n/// State of anything below an `owner`.\npub trait UnderOwnerState<'tcx>: BaseState<'tcx> + HasOwnerId {\n    fn with_base(&self, base: types::Base<'tcx>) -> StateWithOwner<'tcx> {\n        State {\n            owner_id: self.owner_id(),\n            base,\n            thir: (),\n 
           mir: (),\n            binder: (),\n            ty: (),\n        }\n    }\n    fn with_binder(&self, binder: types::UnitBinder<'tcx>) -> StateWithBinder<'tcx> {\n        State {\n            base: self.base(),\n            owner_id: self.owner_id(),\n            binder,\n            thir: (),\n            mir: (),\n            ty: (),\n        }\n    }\n    fn with_thir(&self, thir: types::RcThir<'tcx>) -> StateWithThir<'tcx> {\n        State {\n            base: self.base(),\n            owner_id: self.owner_id(),\n            thir,\n            mir: (),\n            binder: (),\n            ty: (),\n        }\n    }\n    fn with_mir(&self, mir: types::RcMir<'tcx>) -> StateWithMir<'tcx> {\n        State {\n            base: self.base(),\n            owner_id: self.owner_id(),\n            mir,\n            thir: (),\n            binder: (),\n            ty: (),\n        }\n    }\n}\nimpl<'tcx, T: BaseState<'tcx> + HasOwnerId> UnderOwnerState<'tcx> for T {}\n\n/// State of anything below a binder.\npub trait UnderBinderState<'tcx> = UnderOwnerState<'tcx> + HasBinder<'tcx>;\n\n/// While translating expressions, we expect to always have a THIR\n/// body and an `owner_id` in the state\npub trait ExprState<'tcx>: UnderOwnerState<'tcx> + HasThir<'tcx> {\n    fn with_ty(&self, ty: rustc_middle::ty::Ty<'tcx>) -> StateWithThirAndTy<'tcx> {\n        State {\n            base: self.base(),\n            owner_id: self.owner_id(),\n            thir: self.thir(),\n            mir: (),\n            binder: (),\n            ty,\n        }\n    }\n}\nimpl<'tcx, T> ExprState<'tcx> for T where T: UnderOwnerState<'tcx> + HasThir<'tcx> {}\n\npub trait WithGlobalCacheExt<'tcx>: BaseState<'tcx> {\n    /// Access the global cache. 
You must not call `sinto` within this function as this will likely\n    /// result in `BorrowMut` panics.\n    fn with_global_cache<T>(&self, f: impl FnOnce(&mut GlobalCache<'tcx>) -> T) -> T {\n        let base = self.base();\n        let mut cache = base.cache.borrow_mut();\n        f(&mut *cache)\n    }\n    /// Access the cache for a given item. You must not call `sinto` within this function as this\n    /// will likely result in `BorrowMut` panics.\n    fn with_item_cache<T>(&self, def_id: RDefId, f: impl FnOnce(&mut ItemCache<'tcx>) -> T) -> T {\n        self.with_global_cache(|cache| f(cache.per_item.entry(def_id).or_default()))\n    }\n}\nimpl<'tcx, S: BaseState<'tcx>> WithGlobalCacheExt<'tcx> for S {}\n\npub trait WithItemCacheExt<'tcx>: UnderOwnerState<'tcx> {\n    /// Access the cache for the current item. You must not call `sinto` within this function as\n    /// this will likely result in `BorrowMut` panics.\n    fn with_cache<T>(&self, f: impl FnOnce(&mut ItemCache<'tcx>) -> T) -> T {\n        self.with_item_cache(self.owner_id(), f)\n    }\n    fn with_predicate_searcher<T>(&self, f: impl FnOnce(&mut PredicateSearcher<'tcx>) -> T) -> T {\n        self.with_cache(|cache| {\n            f(cache.predicate_searcher.get_or_insert_with(|| {\n                PredicateSearcher::new_for_owner(\n                    self.base().tcx,\n                    self.owner_id(),\n                    self.base().options.bounds_options,\n                )\n            }))\n        })\n    }\n}\nimpl<'tcx, S: UnderOwnerState<'tcx>> WithItemCacheExt<'tcx> for S {}\n\nimpl ImplInfos {\n    fn from<'tcx, S: BaseState<'tcx>>(s: &S, did: rustc_hir::def_id::DefId) -> Self {\n        let tcx = s.base().tcx;\n        let s = &s.with_owner_id(did);\n\n        Self {\n            generics: tcx.generics_of(did).sinto(s),\n            typ: tcx.type_of(did).instantiate_identity().sinto(s),\n            trait_ref: match tcx.def_kind(did) {\n                rustc_hir::def::DefKind::Impl 
{ of_trait: true } => {\n                    Some(tcx.impl_trait_ref(did).instantiate_identity().sinto(s))\n                }\n                _ => None,\n            },\n            clauses: predicates_defined_on(tcx, did).as_ref().sinto(s),\n        }\n    }\n}\n\n/// Returns a map from every implementation (`Impl`) `DefId`s to the\n/// type they implement, plus the bounds.\npub fn impl_def_ids_to_impled_types_and_bounds<'tcx, S: BaseState<'tcx>>(\n    s: &S,\n) -> HashMap<DefId, ImplInfos> {\n    let tcx = s.base().tcx;\n\n    let def_ids: Vec<_> = s.with_global_cache(|cache| cache.per_item.keys().copied().collect());\n    let with_parents = |mut did: rustc_hir::def_id::DefId| {\n        let mut acc = vec![did];\n        while let Some(parent) = tcx.opt_parent(did) {\n            did = parent;\n            acc.push(did);\n        }\n        acc.into_iter()\n    };\n    use itertools::Itertools;\n    def_ids\n        .into_iter()\n        .flat_map(with_parents)\n        .unique()\n        .filter(|&did| {\n            // keep only DefIds that corresponds to implementations\n            matches!(\n                tcx.def_path(did).data.last(),\n                Some(rustc_hir::definitions::DisambiguatedDefPathData {\n                    data: rustc_hir::definitions::DefPathData::Impl,\n                    ..\n                })\n            )\n        })\n        .map(|did| (did.sinto(s), ImplInfos::from(s, did)))\n        .collect()\n}\n"
  },
  {
    "path": "frontend/exporter/src/traits/resolution.rs",
    "content": "//! Trait resolution: given a trait reference, we track which local clause caused it to be true.\n//! This module is independent from the rest of hax, in particular it doesn't use its\n//! state-tracking machinery.\n\nuse hax_frontend_exporter_options::BoundsOptions;\nuse itertools::{Either, Itertools};\nuse std::collections::{HashMap, hash_map::Entry};\n\nuse rustc_hir::def::DefKind;\nuse rustc_hir::def_id::DefId;\nuse rustc_middle::traits::CodegenObligationError;\nuse rustc_middle::ty::{self, *};\nuse rustc_trait_selection::traits::ImplSource;\n\nuse super::utils::{\n    self, ToPolyTraitRef, erase_and_norm, implied_predicates, normalize_bound_val,\n    required_predicates, self_predicate,\n};\n\n#[derive(Debug, Clone)]\npub enum PathChunk<'tcx> {\n    AssocItem {\n        item: AssocItem,\n        /// The arguments provided to the item (for GATs). Includes trait args.\n        generic_args: GenericArgsRef<'tcx>,\n        /// The implemented predicate.\n        predicate: PolyTraitPredicate<'tcx>,\n        /// The index of this predicate in the list returned by `implied_predicates`.\n        index: usize,\n    },\n    Parent {\n        /// The implemented predicate.\n        predicate: PolyTraitPredicate<'tcx>,\n        /// The index of this predicate in the list returned by `implied_predicates`.\n        index: usize,\n    },\n}\npub type Path<'tcx> = Vec<PathChunk<'tcx>>;\n\n#[derive(Debug, Clone)]\npub enum ImplExprAtom<'tcx> {\n    /// A concrete `impl Trait for Type {}` item.\n    Concrete {\n        def_id: DefId,\n        generics: GenericArgsRef<'tcx>,\n    },\n    /// A context-bound clause like `where T: Trait`.\n    LocalBound {\n        predicate: Predicate<'tcx>,\n        /// The nth (non-self) predicate found for this item. 
We use predicates from\n        /// `required_predicates` starting from the parentmost item.\n        index: usize,\n        r#trait: PolyTraitRef<'tcx>,\n        path: Path<'tcx>,\n    },\n    /// The automatic clause `Self: Trait` present inside a `impl Trait for Type {}` item.\n    SelfImpl {\n        r#trait: PolyTraitRef<'tcx>,\n        path: Path<'tcx>,\n    },\n    /// `dyn Trait` is a wrapped value with a virtual table for trait\n    /// `Trait`.  In other words, a value `dyn Trait` is a dependent\n    /// triple that gathers a type τ, a value of type τ and an\n    /// instance of type `Trait`.\n    /// `dyn Trait` implements `Trait` using a built-in implementation; this refers to that\n    /// built-in implementation.\n    Dyn,\n    /// A built-in trait whose implementation is computed by the compiler, such as `FnMut`. This\n    /// morally points to an invisible `impl` block; as such it contains the information we may\n    /// need from one.\n    Builtin {\n        /// Extra data for the given trait.\n        trait_data: BuiltinTraitData<'tcx>,\n        /// The `ImplExpr`s required to satisfy the implied predicates on the trait declaration.\n        /// E.g. since `FnMut: FnOnce`, a built-in `T: FnMut` impl would have an `ImplExpr` for `T:\n        /// FnOnce`.\n        impl_exprs: Vec<ImplExpr<'tcx>>,\n        /// The values of the associated types for this trait.\n        types: Vec<(DefId, Ty<'tcx>, Vec<ImplExpr<'tcx>>)>,\n    },\n    /// An error happened while resolving traits.\n    Error(String),\n}\n\n#[derive(Debug, Clone)]\npub enum BuiltinTraitData<'tcx> {\n    /// A virtual `Destruct` implementation.\n    /// `Destruct` is implemented automatically for all types. For our purposes, we chose to attach\n    /// the information about `drop_in_place` to that trait. 
This data tells us what kind of\n    /// `drop_in_place` the target type has.\n    Destruct(DestructData<'tcx>),\n    /// Some other builtin trait.\n    Other,\n}\n\n#[derive(Debug, Clone)]\npub enum DestructData<'tcx> {\n    /// A drop that does nothing, e.g. for scalars and pointers.\n    Noop,\n    /// An implicit `Destruct` local clause, if the `resolve_destruct_bounds` option is `false`. If\n    /// that option is `true`, we'll add `Destruct` bounds to every type param, and use that to\n    /// resolve `Destruct` impls of generics. If it's `false`, we use this variant to indicate that\n    /// the clause comes from a generic or associated type.\n    Implicit,\n    /// The `drop_in_place` is known and non-trivial.\n    Glue {\n        /// The type we're generating glue for.\n        ty: Ty<'tcx>,\n    },\n}\n\n#[derive(Clone, Debug)]\npub struct ImplExpr<'tcx> {\n    /// The trait this is an impl for.\n    pub r#trait: PolyTraitRef<'tcx>,\n    /// The kind of implemention of the root of the tree.\n    pub r#impl: ImplExprAtom<'tcx>,\n}\n\n/// Items have various predicates in scope. `path_to` uses them as a starting point for trait\n/// resolution. This tracks where each of them comes from.\n#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]\npub enum BoundPredicateOrigin {\n    /// The `Self: Trait` predicate implicitly present within trait declarations (note: we\n    /// don't add it for trait implementations, should we?).\n    SelfPred,\n    /// The nth (non-self) predicate found for this item. 
We use predicates from\n    /// `required_predicates` starting from the parentmost item.\n    Item(usize),\n}\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]\npub struct AnnotatedTraitPred<'tcx> {\n    pub origin: BoundPredicateOrigin,\n    pub clause: PolyTraitPredicate<'tcx>,\n}\n\n/// Returns the predicate to resolve as `Self`, if that makes sense in the current item.\n/// Currently this predicate is only used inside trait declarations and their asosciated types.\nfn initial_self_pred<'tcx>(\n    tcx: TyCtxt<'tcx>,\n    def_id: rustc_span::def_id::DefId,\n) -> Option<PolyTraitPredicate<'tcx>> {\n    use DefKind::*;\n    let trait_def_id = match tcx.def_kind(def_id) {\n        Trait | TraitAlias => def_id,\n        // Associated types can refer to the implicit `Self` clause. For methods and associated\n        // consts we pass an explicit `Self: Trait` clause to make the corresponding item\n        // reuseable.\n        AssocTy => tcx.parent(def_id),\n        _ => return None,\n    };\n    let self_pred = self_predicate(tcx, trait_def_id).upcast(tcx);\n    Some(self_pred)\n}\n\n/// The predicates to use as a starting point for resolving trait references within this item. This\n/// includes the `required_predicates` of this item and all its parents.\nfn local_bound_predicates<'tcx>(\n    tcx: TyCtxt<'tcx>,\n    def_id: rustc_span::def_id::DefId,\n    options: BoundsOptions,\n) -> Vec<PolyTraitPredicate<'tcx>> {\n    fn acc_predicates<'tcx>(\n        tcx: TyCtxt<'tcx>,\n        def_id: rustc_span::def_id::DefId,\n        options: BoundsOptions,\n        predicates: &mut Vec<PolyTraitPredicate<'tcx>>,\n    ) {\n        use DefKind::*;\n        match tcx.def_kind(def_id) {\n            // These inherit predicates from their parent.\n            AssocTy | AssocFn | AssocConst | Closure | Ctor(..) 
| Variant => {\n                let parent = tcx.parent(def_id);\n                acc_predicates(tcx, parent, options, predicates);\n            }\n            _ => {}\n        }\n        predicates.extend(\n            required_predicates(tcx, def_id, options)\n                .iter()\n                .map(|(clause, _span)| *clause)\n                .filter_map(|clause| clause.as_trait_clause()),\n        );\n    }\n\n    let mut predicates = vec![];\n    acc_predicates(tcx, def_id, options, &mut predicates);\n    predicates\n}\n\n#[tracing::instrument(level = \"trace\", skip(tcx))]\nfn parents_trait_predicates<'tcx>(\n    tcx: TyCtxt<'tcx>,\n    pred: PolyTraitPredicate<'tcx>,\n    options: BoundsOptions,\n) -> Vec<PolyTraitPredicate<'tcx>> {\n    let self_trait_ref = pred.to_poly_trait_ref();\n    implied_predicates(tcx, pred.def_id(), options)\n        .iter()\n        .map(|(clause, _span)| *clause)\n        // Substitute with the `self` args so that the clause makes sense in the\n        // outside context.\n        .map(|clause| clause.instantiate_supertrait(tcx, self_trait_ref))\n        .filter_map(|pred| pred.as_trait_clause())\n        .collect()\n}\n\n/// A candidate projects `self` along a path reaching some predicate. 
A candidate is\n/// selected when its predicate is the one expected, aka `target`.\n#[derive(Debug, Clone)]\nstruct Candidate<'tcx> {\n    path: Path<'tcx>,\n    pred: PolyTraitPredicate<'tcx>,\n    origin: AnnotatedTraitPred<'tcx>,\n}\n\nimpl<'tcx> Candidate<'tcx> {\n    fn into_impl_expr(self, tcx: TyCtxt<'tcx>) -> ImplExprAtom<'tcx> {\n        let path = self.path;\n        let r#trait = self.origin.clause.to_poly_trait_ref();\n        match self.origin.origin {\n            BoundPredicateOrigin::SelfPred => ImplExprAtom::SelfImpl { r#trait, path },\n            BoundPredicateOrigin::Item(index) => ImplExprAtom::LocalBound {\n                predicate: self.origin.clause.upcast(tcx),\n                index,\n                r#trait,\n                path,\n            },\n        }\n    }\n}\n\n/// Stores a set of predicates along with where they came from.\n#[derive(Clone)]\npub struct PredicateSearcher<'tcx> {\n    tcx: TyCtxt<'tcx>,\n    typing_env: rustc_middle::ty::TypingEnv<'tcx>,\n    /// Local clauses available in the current context.\n    candidates: HashMap<PolyTraitPredicate<'tcx>, Candidate<'tcx>>,\n    /// Resolution options.\n    options: BoundsOptions,\n    /// Count the number of bound clauses in scope; used to identify clauses uniquely.\n    bound_clause_count: usize,\n}\n\nimpl<'tcx> PredicateSearcher<'tcx> {\n    /// Initialize the elaborator with the predicates accessible within this item.\n    pub fn new_for_owner(tcx: TyCtxt<'tcx>, owner_id: DefId, options: BoundsOptions) -> Self {\n        let mut out = Self {\n            tcx,\n            typing_env: TypingEnv {\n                param_env: tcx.param_env(owner_id),\n                typing_mode: TypingMode::PostAnalysis,\n            },\n            candidates: Default::default(),\n            options,\n            bound_clause_count: 0,\n        };\n        out.insert_predicates(\n            initial_self_pred(tcx, owner_id).map(|clause| AnnotatedTraitPred {\n                origin: 
BoundPredicateOrigin::SelfPred,\n                clause,\n            }),\n        );\n        out.insert_bound_predicates(local_bound_predicates(tcx, owner_id, options));\n        out\n    }\n\n    /// Insert the bound clauses in the search context. Prefer inserting them all at once as this\n    /// will give priority to shorter resolution paths. Bound clauses are numbered from `0` in\n    /// insertion order.\n    pub fn insert_bound_predicates(\n        &mut self,\n        clauses: impl IntoIterator<Item = PolyTraitPredicate<'tcx>>,\n    ) {\n        let mut count = usize::MAX;\n        // Swap to avoid borrow conflicts.\n        std::mem::swap(&mut count, &mut self.bound_clause_count);\n        self.insert_predicates(clauses.into_iter().map(|clause| {\n            let i = count;\n            count += 1;\n            AnnotatedTraitPred {\n                origin: BoundPredicateOrigin::Item(i),\n                clause,\n            }\n        }));\n        std::mem::swap(&mut count, &mut self.bound_clause_count);\n    }\n\n    /// Override the param env; we use this when resolving `dyn` predicates to add more clauses to\n    /// the scope.\n    pub fn set_param_env(&mut self, param_env: ParamEnv<'tcx>) {\n        self.typing_env.param_env = param_env;\n    }\n\n    /// Insert annotated predicates in the search context. Prefer inserting them all at once as\n    /// this will give priority to shorter resolution paths.\n    fn insert_predicates(&mut self, preds: impl IntoIterator<Item = AnnotatedTraitPred<'tcx>>) {\n        self.insert_candidates(preds.into_iter().map(|clause| Candidate {\n            path: vec![],\n            pred: clause.clause,\n            origin: clause,\n        }))\n    }\n\n    /// Insert new candidates and all their parent predicates. 
This deduplicates predicates\n    /// to avoid divergence.\n    fn insert_candidates(&mut self, candidates: impl IntoIterator<Item = Candidate<'tcx>>) {\n        let tcx = self.tcx;\n        // Filter out duplicated candidates.\n        let mut new_candidates = Vec::new();\n        for mut candidate in candidates {\n            // Normalize and erase all lifetimes.\n            candidate.pred = normalize_bound_val(tcx, self.typing_env, candidate.pred);\n            if let Entry::Vacant(entry) = self.candidates.entry(candidate.pred) {\n                entry.insert(candidate.clone());\n                new_candidates.push(candidate);\n            }\n        }\n        if !new_candidates.is_empty() {\n            // Insert the parents all at once.\n            self.insert_candidate_parents(new_candidates);\n        }\n    }\n\n    /// Add the parents of these candidates. This is a separate function to avoid\n    /// polymorphic recursion due to the closures capturing the type parameters of this\n    /// function.\n    fn insert_candidate_parents(&mut self, new_candidates: Vec<Candidate<'tcx>>) {\n        let tcx = self.tcx;\n        // Then recursively add their parents. 
This way ensures a breadth-first order,\n        // which means we select the shortest path when looking up predicates.\n        let options = self.options;\n        self.insert_candidates(new_candidates.into_iter().flat_map(|candidate| {\n            parents_trait_predicates(tcx, candidate.pred, options)\n                .into_iter()\n                .enumerate()\n                .map(move |(index, parent_pred)| {\n                    let mut parent_candidate = Candidate {\n                        pred: parent_pred,\n                        path: candidate.path.clone(),\n                        origin: candidate.origin,\n                    };\n                    parent_candidate.path.push(PathChunk::Parent {\n                        predicate: parent_pred,\n                        index,\n                    });\n                    parent_candidate\n                })\n        }));\n    }\n\n    /// If the type is a trait associated type, we add any relevant bounds to our context.\n    fn add_associated_type_refs(\n        &mut self,\n        ty: Binder<'tcx, Ty<'tcx>>,\n        // Call back into hax-related code to display a nice warning.\n        warn: &impl Fn(&str),\n    ) -> Result<(), String> {\n        let tcx = self.tcx;\n        // Note: We skip a binder but rebind it just after.\n        let TyKind::Alias(AliasTyKind::Projection, alias_ty) = ty.skip_binder().kind() else {\n            return Ok(());\n        };\n        let trait_ref = ty.rebind(alias_ty.trait_ref(tcx)).upcast(tcx);\n\n        // The predicate we're looking for is is `<T as Trait>::Type: OtherTrait`. We look up `T as\n        // Trait` in the current context and add all the bounds on `Trait::Type` to our context.\n        let Some(trait_candidate) = self.resolve_local(trait_ref, warn)? 
else {\n            return Ok(());\n        };\n\n        // The bounds that hold on the associated type.\n        let item_bounds = implied_predicates(tcx, alias_ty.def_id, self.options);\n        let item_bounds = item_bounds\n            .iter()\n            .map(|(clause, _span)| *clause)\n            .filter_map(|pred| pred.as_trait_clause())\n            // Substitute the item generics\n            .map(|pred| EarlyBinder::bind(pred).instantiate(tcx, alias_ty.args))\n            .enumerate();\n\n        // Add all the bounds on the corresponding associated item.\n        self.insert_candidates(item_bounds.map(|(index, pred)| {\n            let mut candidate = Candidate {\n                path: trait_candidate.path.clone(),\n                pred,\n                origin: trait_candidate.origin,\n            };\n            candidate.path.push(PathChunk::AssocItem {\n                item: tcx.associated_item(alias_ty.def_id),\n                generic_args: alias_ty.args,\n                predicate: pred,\n                index,\n            });\n            candidate\n        }));\n\n        Ok(())\n    }\n\n    /// Resolve a local clause by looking it up in this set. 
If the predicate applies to an\n    /// associated type, we add the relevant implied associated type bounds to the set as well.\n    fn resolve_local(\n        &mut self,\n        target: PolyTraitPredicate<'tcx>,\n        // Call back into hax-related code to display a nice warning.\n        warn: &impl Fn(&str),\n    ) -> Result<Option<Candidate<'tcx>>, String> {\n        tracing::trace!(\"Looking for {target:?}\");\n\n        // Look up the predicate\n        let ret = self.candidates.get(&target).cloned();\n        if ret.is_some() {\n            return Ok(ret);\n        }\n\n        // Add clauses related to associated type in the `Self` type of the predicate.\n        self.add_associated_type_refs(target.self_ty(), warn)?;\n\n        let ret = self.candidates.get(&target).cloned();\n        if ret.is_none() {\n            tracing::trace!(\n                \"Couldn't find {target:?} in: [\\n{}]\",\n                self.candidates\n                    .iter()\n                    .map(|(_, c)| format!(\"  - {:?}\\n\", c.pred))\n                    .join(\"\")\n            );\n        }\n        Ok(ret)\n    }\n\n    /// Resolve the given trait reference in the local context.\n    #[tracing::instrument(level = \"trace\", skip(self, warn))]\n    pub fn resolve(\n        &mut self,\n        tref: &PolyTraitRef<'tcx>,\n        // Call back into hax-related code to display a nice warning.\n        warn: &impl Fn(&str),\n    ) -> Result<ImplExpr<'tcx>, String> {\n        use rustc_trait_selection::traits::{\n            BuiltinImplSource, ImplSource, ImplSourceUserDefinedData,\n        };\n        let tcx = self.tcx;\n        let destruct_trait = tcx.lang_items().destruct_trait().unwrap();\n\n        let erased_tref = normalize_bound_val(self.tcx, self.typing_env, *tref);\n        let trait_def_id = erased_tref.skip_binder().def_id;\n\n        let error = |msg: String| {\n            warn(&msg);\n            Ok(ImplExpr {\n                r#impl: 
ImplExprAtom::Error(msg),\n                r#trait: *tref,\n            })\n        };\n\n        let impl_source = shallow_resolve_trait_ref(tcx, self.typing_env.param_env, erased_tref);\n        let atom = match impl_source {\n            Ok(ImplSource::UserDefined(ImplSourceUserDefinedData {\n                impl_def_id,\n                args: generics,\n                ..\n            })) => ImplExprAtom::Concrete {\n                def_id: impl_def_id,\n                generics,\n            },\n            Ok(ImplSource::Param(_)) => {\n                match self.resolve_local(erased_tref.upcast(self.tcx), warn)? {\n                    Some(candidate) => candidate.into_impl_expr(tcx),\n                    None => {\n                        let msg = format!(\n                            \"Could not find a clause for `{tref:?}` in the item parameters\"\n                        );\n                        return error(msg);\n                    }\n                }\n            }\n            Ok(ImplSource::Builtin(BuiltinImplSource::Object { .. }, _)) => ImplExprAtom::Dyn,\n            Ok(ImplSource::Builtin(_, _)) => {\n                // Resolve the predicates implied by the trait.\n                // If we wanted to not skip this binder, we'd have to instantiate the bound\n                // regions, solve, then wrap the result in a binder. And track higher-kinded\n                // clauses better all over.\n                let impl_exprs = self.resolve_item_implied_predicates(\n                    trait_def_id,\n                    erased_tref.skip_binder().args,\n                    warn,\n                )?;\n                let types = tcx\n                    .associated_items(trait_def_id)\n                    .in_definition_order()\n                    .filter(|assoc| matches!(assoc.kind, AssocKind::Type { .. 
}))\n                    .filter_map(|assoc| {\n                        let ty =\n                            Ty::new_projection(tcx, assoc.def_id, erased_tref.skip_binder().args);\n                        let ty = erase_and_norm(tcx, self.typing_env, ty);\n                        if let TyKind::Alias(_, alias_ty) = ty.kind() {\n                            if alias_ty.def_id == assoc.def_id {\n                                // Couldn't normalize the type to anything different than itself;\n                                // this must be a built-in associated type such as\n                                // `DiscriminantKind::Discriminant`.\n                                // We can't return the unnormalized associated type as that would\n                                // make the trait ref contain itself, which would make hax's\n                                // `sinto` infrastructure loop. That's ok because we can't provide\n                                // a value for this type other than the associate type alias\n                                // itself.\n                                return None;\n                            }\n                        }\n                        let impl_exprs = self\n                            .resolve_item_implied_predicates(\n                                assoc.def_id,\n                                erased_tref.skip_binder().args,\n                                warn,\n                            )\n                            .ok()?;\n                        Some((assoc.def_id, ty, impl_exprs))\n                    })\n                    .collect();\n\n                let trait_data = if erased_tref.skip_binder().def_id == destruct_trait {\n                    let ty = erased_tref.skip_binder().args[0].as_type().unwrap();\n                    // Source of truth are `ty::needs_drop_components` and `tcx.needs_drop_raw`.\n                    let destruct_data = match ty.kind() {\n                        // TODO: 
Does `UnsafeBinder` drop its contents?\n                        ty::Bool\n                        | ty::Char\n                        | ty::Int(..)\n                        | ty::Uint(..)\n                        | ty::Float(..)\n                        | ty::Foreign(..)\n                        | ty::Str\n                        | ty::RawPtr(..)\n                        | ty::Ref(..)\n                        | ty::FnDef(..)\n                        | ty::FnPtr(..)\n                        | ty::UnsafeBinder(..)\n                        | ty::Never => Either::Left(DestructData::Noop),\n                        ty::Tuple(tys) if tys.is_empty() => Either::Left(DestructData::Noop),\n                        ty::Array(..)\n                        | ty::Pat(..)\n                        | ty::Slice(..)\n                        | ty::Tuple(..)\n                        | ty::Adt(..)\n                        | ty::Closure(..)\n                        | ty::Coroutine(..)\n                        | ty::CoroutineClosure(..)\n                        | ty::CoroutineWitness(..) => Either::Left(DestructData::Glue { ty }),\n                        // Every `dyn` has a `drop_in_place` in its vtable, ergo we pretend that every\n                        // `dyn` has `Destruct` in its list of traits.\n                        ty::Dynamic(..) => Either::Right(ImplExprAtom::Dyn),\n                        ty::Param(..) | ty::Alias(..) | ty::Bound(..) => {\n                            if self.options.resolve_destruct {\n                                // We've added `Destruct` impls on everything, we should be able to resolve\n                                // it.\n                                match self.resolve_local(erased_tref.upcast(self.tcx), warn)? 
{\n                                    Some(candidate) => Either::Right(candidate.into_impl_expr(tcx)),\n                                    None => {\n                                        let msg = format!(\n                                            \"Cannot find virtual `Destruct` clause: `{tref:?}`\"\n                                        );\n                                        return error(msg);\n                                    }\n                                }\n                            } else {\n                                Either::Left(DestructData::Implicit)\n                            }\n                        }\n\n                        ty::Placeholder(..) | ty::Infer(..) | ty::Error(..) => {\n                            let msg = format!(\n                                \"Cannot resolve clause `{tref:?}` \\\n                                because of a type error\"\n                            );\n                            return error(msg);\n                        }\n                    };\n                    destruct_data.map_left(BuiltinTraitData::Destruct)\n                } else {\n                    Either::Left(BuiltinTraitData::Other)\n                };\n                match trait_data {\n                    Either::Left(trait_data) => ImplExprAtom::Builtin {\n                        trait_data,\n                        impl_exprs,\n                        types,\n                    },\n                    Either::Right(atom) => atom,\n                }\n            }\n            Err(e) => {\n                let msg = format!(\n                    \"Could not find a clause for `{tref:?}` \\\n                    in the current context: `{e:?}`\"\n                );\n                return error(msg);\n            }\n        };\n\n        Ok(ImplExpr {\n            r#impl: atom,\n            r#trait: *tref,\n        })\n    }\n\n    /// Resolve the predicates required by the given item.\n    pub fn 
resolve_item_required_predicates(\n        &mut self,\n        def_id: DefId,\n        generics: GenericArgsRef<'tcx>,\n        // Call back into hax-related code to display a nice warning.\n        warn: &impl Fn(&str),\n    ) -> Result<Vec<ImplExpr<'tcx>>, String> {\n        let tcx = self.tcx;\n        self.resolve_predicates(\n            generics,\n            required_predicates(tcx, def_id, self.options),\n            warn,\n        )\n    }\n\n    /// Resolve the predicates implied by the given item.\n    pub fn resolve_item_implied_predicates(\n        &mut self,\n        def_id: DefId,\n        generics: GenericArgsRef<'tcx>,\n        // Call back into hax-related code to display a nice warning.\n        warn: &impl Fn(&str),\n    ) -> Result<Vec<ImplExpr<'tcx>>, String> {\n        let tcx = self.tcx;\n        self.resolve_predicates(\n            generics,\n            implied_predicates(tcx, def_id, self.options),\n            warn,\n        )\n    }\n\n    /// Apply the given generics to the provided clauses and resolve the trait references in the\n    /// current context.\n    pub fn resolve_predicates(\n        &mut self,\n        generics: GenericArgsRef<'tcx>,\n        predicates: utils::Predicates<'tcx>,\n        // Call back into hax-related code to display a nice warning.\n        warn: &impl Fn(&str),\n    ) -> Result<Vec<ImplExpr<'tcx>>, String> {\n        let tcx = self.tcx;\n        predicates\n            .iter()\n            .map(|(clause, _span)| *clause)\n            .filter_map(|clause| clause.as_trait_clause())\n            .map(|trait_pred| trait_pred.map_bound(|p| p.trait_ref))\n            // Substitute the item generics\n            .map(|trait_ref| EarlyBinder::bind(trait_ref).instantiate(tcx, generics))\n            // Resolve\n            .map(|trait_ref| self.resolve(&trait_ref, warn))\n            .collect()\n    }\n}\n\n/// Attempts to resolve an obligation to an `ImplSource`. 
The result is a shallow `ImplSource`\n/// resolution, meaning that we do not resolve all nested obligations on the impl. Note that type\n/// check should guarantee to us that all nested obligations *could be* resolved if we wanted to.\n///\n/// This expects that `trait_ref` is fully normalized.\n///\n/// This is based on `rustc_traits::codegen::codegen_select_candidate` in rustc.\npub fn shallow_resolve_trait_ref<'tcx>(\n    tcx: TyCtxt<'tcx>,\n    param_env: ParamEnv<'tcx>,\n    trait_ref: PolyTraitRef<'tcx>,\n) -> Result<ImplSource<'tcx, ()>, CodegenObligationError> {\n    use rustc_infer::infer::TyCtxtInferExt;\n    use rustc_middle::traits::CodegenObligationError;\n    use rustc_middle::ty::TypeVisitableExt;\n    use rustc_trait_selection::traits::{\n        Obligation, ObligationCause, ObligationCtxt, SelectionContext, SelectionError,\n    };\n    // Do the initial selection for the obligation. This yields the\n    // shallow result we are looking for -- that is, what specific impl.\n    let infcx = tcx\n        .infer_ctxt()\n        .ignoring_regions()\n        .build(TypingMode::PostAnalysis);\n    let mut selcx = SelectionContext::new(&infcx);\n\n    let obligation_cause = ObligationCause::dummy();\n    let obligation = Obligation::new(tcx, obligation_cause, param_env, trait_ref);\n\n    let selection = match selcx.poly_select(&obligation) {\n        Ok(Some(selection)) => selection,\n        Ok(None) => return Err(CodegenObligationError::Ambiguity),\n        Err(SelectionError::Unimplemented) => return Err(CodegenObligationError::Unimplemented),\n        Err(_) => return Err(CodegenObligationError::Ambiguity),\n    };\n\n    // Currently, we use a fulfillment context to completely resolve\n    // all nested obligations. 
This is because they can inform the\n    // inference of the impl's type parameters.\n    // FIXME(-Znext-solver): Doesn't need diagnostics if new solver.\n    let ocx = ObligationCtxt::new(&infcx);\n    let impl_source = selection.map(|obligation| {\n        ocx.register_obligation(obligation.clone());\n        ()\n    });\n\n    let errors = ocx.evaluate_obligations_error_on_ambiguity();\n    if !errors.is_empty() {\n        return Err(CodegenObligationError::Ambiguity);\n    }\n\n    let impl_source = infcx.resolve_vars_if_possible(impl_source);\n    let impl_source = tcx.erase_and_anonymize_regions(impl_source);\n\n    if impl_source.has_infer() {\n        // Unused lifetimes on an impl get replaced with inference vars, but never resolved.\n        return Err(CodegenObligationError::Ambiguity);\n    }\n\n    Ok(impl_source)\n}\n"
  },
  {
    "path": "frontend/exporter/src/traits/utils.rs",
    "content": "//! Each item can involve three kinds of predicates:\n//! - input aka required predicates: the predicates required to mention the item. These are usually `where`\n//!   clauses (or equivalent) on the item:\n//! ```ignore\n//! struct Foo<T: Clone> { ... }\n//! trait Foo<T> where T: Clone { ... }\n//! fn function<I>() where I: Iterator, I::Item: Clone { ... }\n//! ```\n//! - output aka implied predicates: the predicates that are implied by the presence of this item in a\n//!   signature. This is mostly trait parent predicates:\n//! ```ignore\n//! trait Foo: Clone { ... }\n//! fn bar<T: Foo>() {\n//!   // from `T: Foo` we can deduce `T: Clone`\n//! }\n//! ```\n//!   This could also include implied predicates such as `&'a T` implying `T: 'a` but we don't\n//!   consider these.\n//! - \"self\" predicate: that's the special `Self: Trait` predicate in scope within a trait\n//!   declaration or implementation for trait `Trait`.\n//!\n//! Note that within a given item the polarity is reversed: input predicates are the ones that can\n//! be assumed to hold and output predicates must be proven to hold. The \"self\" predicate is both\n//! assumed and proven within an impl block, and just assumed within a trait declaration block.\n//!\n//! The current implementation considers all predicates on traits to be outputs, which has the\n//! benefit of reducing the size of signatures. Moreover, the rules on which bounds are required vs\n//! implied are subtle. We may change this if this proves to be a problem.\nuse hax_frontend_exporter_options::BoundsOptions;\nuse rustc_hir::LangItem;\nuse rustc_hir::def::DefKind;\nuse rustc_middle::ty::*;\nuse rustc_span::def_id::DefId;\nuse rustc_span::{DUMMY_SP, Span};\nuse std::borrow::Cow;\n\npub type Predicates<'tcx> = Cow<'tcx, [(Clause<'tcx>, Span)]>;\n\n/// Returns a list of type predicates for the definition with ID `def_id`, including inferred\n/// lifetime constraints. 
This is the basic list of predicates we use for essentially all items.\npub fn predicates_defined_on(tcx: TyCtxt<'_>, def_id: DefId) -> Predicates<'_> {\n    let mut result = Cow::Borrowed(tcx.explicit_predicates_of(def_id).predicates);\n    let inferred_outlives = tcx.inferred_outlives_of(def_id);\n    if !inferred_outlives.is_empty() {\n        result.to_mut().extend(\n            inferred_outlives\n                .iter()\n                .map(|(clause, span)| ((*clause).upcast(tcx), *span)),\n        );\n    }\n    result\n}\n\n/// Add `T: Destruct` bounds for every generic parameter of the given item.\nfn add_destruct_bounds<'tcx>(\n    tcx: TyCtxt<'tcx>,\n    def_id: DefId,\n    predicates: &mut Vec<(Clause<'tcx>, Span)>,\n) {\n    let def_kind = tcx.def_kind(def_id);\n    if matches!(def_kind, DefKind::Closure) {\n        // Closures have fictitious weird type parameters in their `own_args` that we don't want to\n        // add `Destruct` bounds for.\n        return;\n    }\n    // Add a `T: Destruct` bound for every generic.\n    let destruct_trait = tcx.lang_items().destruct_trait().unwrap();\n    let extra_bounds = tcx\n        .generics_of(def_id)\n        .own_params\n        .iter()\n        .filter(|param| matches!(param.kind, GenericParamDefKind::Type { .. }))\n        .map(|param| tcx.mk_param_from_def(param))\n        .map(|ty| Binder::dummy(TraitRef::new(tcx, destruct_trait, [ty])))\n        .map(|tref| tref.upcast(tcx))\n        .map(|clause| (clause, DUMMY_SP));\n    predicates.extend(extra_bounds);\n}\n\n/// The predicates that must hold to mention this item. 
E.g.\n///\n/// ```ignore\n/// // `U: OtherTrait` is required, `Self: Sized` is implied.\n/// trait Trait<U: OtherTrait>: Sized {\n///     // `T: Clone` is required, `Self::Type<T>: Debug` is implied.\n///     type Type<T: Clone>: Debug;\n/// }\n/// ```\n///\n/// If `add_drop` is true, we add a `T: Drop` bound for every type generic.\npub fn required_predicates<'tcx>(\n    tcx: TyCtxt<'tcx>,\n    def_id: DefId,\n    options: BoundsOptions,\n) -> Predicates<'tcx> {\n    use DefKind::*;\n    let def_kind = tcx.def_kind(def_id);\n    let mut predicates = match def_kind {\n        AssocConst\n        | AssocFn\n        | AssocTy\n        | Const\n        | Enum\n        | Fn\n        | ForeignTy\n        | Impl { .. }\n        | OpaqueTy\n        | Static { .. }\n        | Struct\n        | TyAlias\n        | Union => predicates_defined_on(tcx, def_id),\n        // We consider all predicates on traits to be outputs\n        Trait | TraitAlias => Default::default(),\n        // `predicates_defined_on` ICEs on other def kinds.\n        _ => Default::default(),\n    };\n    // For methods and assoc consts in trait definitions, we add an explicit `Self: Trait` clause.\n    // Associated types get to use the implicit `Self: Trait` clause instead.\n    if !matches!(def_kind, AssocTy)\n        && let Some(trait_def_id) = tcx.trait_of_assoc(def_id)\n    {\n        let self_clause = self_predicate(tcx, trait_def_id).upcast(tcx);\n        predicates.to_mut().insert(0, (self_clause, DUMMY_SP));\n    }\n    if options.resolve_destruct && !matches!(def_kind, Trait | TraitAlias) {\n        // Add a `T: Destruct` bound for every generic. 
For traits we consider these predicates\n        // implied instead of required.\n        add_destruct_bounds(tcx, def_id, predicates.to_mut());\n    }\n    if options.prune_sized {\n        prune_sized_predicates(tcx, &mut predicates);\n    }\n    predicates\n}\n\n/// The special \"self\" predicate on a trait.\npub fn self_predicate<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> PolyTraitRef<'tcx> {\n    // Copied from the code of `tcx.predicates_of()`.\n    Binder::dummy(TraitRef::identity(tcx, def_id))\n}\n\n/// The predicates that can be deduced from the presence of this item in a signature. We only\n/// consider predicates implied by traits here, not implied bounds such as `&'a T` implying `T:\n/// 'a`. E.g.\n///\n/// ```ignore\n/// // `U: OtherTrait` is required, `Self: Sized` is implied.\n/// trait Trait<U: OtherTrait>: Sized {\n///     // `T: Clone` is required, `Self::Type<T>: Debug` is implied.\n///     type Type<T: Clone>: Debug;\n/// }\n/// ```\n///\n/// If `add_drop` is true, we add a `T: Drop` bound for every type generic and associated type.\npub fn implied_predicates<'tcx>(\n    tcx: TyCtxt<'tcx>,\n    def_id: DefId,\n    options: BoundsOptions,\n) -> Predicates<'tcx> {\n    use DefKind::*;\n    let parent = tcx.opt_parent(def_id);\n    let mut predicates = match tcx.def_kind(def_id) {\n        // We consider all predicates on traits to be outputs\n        Trait | TraitAlias => {\n            let mut predicates = predicates_defined_on(tcx, def_id);\n            if options.resolve_destruct {\n                // Add a `T: Drop` bound for every generic, unless the current trait is `Drop` itself, or a\n                // built-in marker trait that we know doesn't need the bound.\n                if !matches!(\n                    tcx.as_lang_item(def_id),\n                    Some(\n                        LangItem::Destruct\n                            | LangItem::Sized\n                            | LangItem::MetaSized\n                            | 
LangItem::PointeeSized\n                            | LangItem::DiscriminantKind\n                            | LangItem::PointeeTrait\n                            | LangItem::Tuple\n                    )\n                ) {\n                    add_destruct_bounds(tcx, def_id, predicates.to_mut());\n                }\n            }\n            predicates\n        }\n        AssocTy if matches!(tcx.def_kind(parent.unwrap()), Trait) => {\n            // `skip_binder` is for the GAT `EarlyBinder`\n            let mut predicates = Cow::Borrowed(tcx.explicit_item_bounds(def_id).skip_binder());\n            if options.resolve_destruct {\n                // Add a `Drop` bound to the assoc item.\n                let destruct_trait = tcx.lang_items().destruct_trait().unwrap();\n                let ty =\n                    Ty::new_projection(tcx, def_id, GenericArgs::identity_for_item(tcx, def_id));\n                let tref = Binder::dummy(TraitRef::new(tcx, destruct_trait, [ty]));\n                predicates.to_mut().push((tref.upcast(tcx), DUMMY_SP));\n            }\n            predicates\n        }\n        _ => Predicates::default(),\n    };\n    if options.prune_sized {\n        prune_sized_predicates(tcx, &mut predicates);\n    }\n    predicates\n}\n\n/// Normalize a value.\npub fn normalize<'tcx, T>(tcx: TyCtxt<'tcx>, typing_env: TypingEnv<'tcx>, value: T) -> T\nwhere\n    T: TypeFoldable<TyCtxt<'tcx>> + Clone,\n{\n    use rustc_infer::infer::TyCtxtInferExt;\n    use rustc_middle::traits::ObligationCause;\n    use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt;\n    let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(typing_env);\n    infcx\n        .at(&ObligationCause::dummy(), param_env)\n        .query_normalize(value.clone())\n        // We ignore the generated outlives relations. Unsure what we should do with them.\n        .map(|x| x.value)\n        .unwrap_or(value)\n}\n\n/// Erase free regions from the given value. 
Largely copied from `tcx.erase_and_anonymize_regions`, but also\n/// erases bound regions that are bound outside `value`, so we can call this function inside a\n/// `Binder`.\npub fn erase_free_regions<'tcx, T>(tcx: TyCtxt<'tcx>, value: T) -> T\nwhere\n    T: TypeFoldable<TyCtxt<'tcx>>,\n{\n    use rustc_middle::ty;\n    struct RegionEraserVisitor<'tcx> {\n        tcx: TyCtxt<'tcx>,\n        depth: u32,\n    }\n\n    impl<'tcx> TypeFolder<TyCtxt<'tcx>> for RegionEraserVisitor<'tcx> {\n        fn cx(&self) -> TyCtxt<'tcx> {\n            self.tcx\n        }\n\n        fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {\n            ty.super_fold_with(self)\n        }\n\n        fn fold_binder<T>(&mut self, t: ty::Binder<'tcx, T>) -> ty::Binder<'tcx, T>\n        where\n            T: TypeFoldable<TyCtxt<'tcx>>,\n        {\n            let t = self.tcx.anonymize_bound_vars(t);\n            self.depth += 1;\n            let t = t.super_fold_with(self);\n            self.depth -= 1;\n            t\n        }\n\n        fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {\n            // We don't erase bound regions that are bound inside the expression we started with,\n            // but we do erase those that point \"outside of it\".\n            match r.kind() {\n                ty::ReBound(BoundVarIndexKind::Bound(dbid), _) if dbid.as_u32() < self.depth => r,\n                _ => self.tcx.lifetimes.re_erased,\n            }\n        }\n    }\n    value.fold_with(&mut RegionEraserVisitor { tcx, depth: 0 })\n}\n\n// Normalize and erase lifetimes, erasing more lifetimes than normal because we might be already\n// inside a binder and rustc doesn't like that.\npub fn erase_and_norm<'tcx, T>(tcx: TyCtxt<'tcx>, typing_env: TypingEnv<'tcx>, x: T) -> T\nwhere\n    T: TypeFoldable<TyCtxt<'tcx>> + Copy,\n{\n    erase_free_regions(\n        tcx,\n        tcx.try_normalize_erasing_regions(typing_env, x)\n            .unwrap_or(x),\n    )\n}\n\n/// Given our 
currently hacky handling of binders, in order for trait resolution to work we must\n/// empty out the binders of trait refs. Specifically it's so that we can reconnect associated type\n/// constraints with the trait ref they come from, given that the projection in question doesn't\n/// track the right binder currently.\npub fn normalize_bound_val<'tcx, T>(\n    tcx: TyCtxt<'tcx>,\n    typing_env: TypingEnv<'tcx>,\n    x: Binder<'tcx, T>,\n) -> Binder<'tcx, T>\nwhere\n    T: TypeFoldable<TyCtxt<'tcx>> + Copy,\n{\n    Binder::dummy(erase_and_norm(tcx, typing_env, x.skip_binder()))\n}\n\n/// Returns true whenever `def_id` is `MetaSized`, `Sized` or `PointeeSized`.\npub fn is_sized_related_trait<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {\n    use rustc_hir::lang_items::LangItem;\n    let lang_item = tcx.as_lang_item(def_id);\n    matches!(\n        lang_item,\n        Some(LangItem::PointeeSized | LangItem::MetaSized | LangItem::Sized)\n    )\n}\n\n/// Given a `GenericPredicates`, prune every occurence of a sized-related clause.\n/// Prunes bounds of the shape `T: MetaSized`, `T: Sized` or `T: PointeeSized`.\nfn prune_sized_predicates<'tcx>(tcx: TyCtxt<'tcx>, generic_predicates: &mut Predicates<'tcx>) {\n    let predicates: Vec<(Clause<'tcx>, rustc_span::Span)> = generic_predicates\n        .iter()\n        .filter(|(clause, _)| {\n            clause.as_trait_clause().is_none_or(|trait_predicate| {\n                !is_sized_related_trait(tcx, trait_predicate.skip_binder().def_id())\n            })\n        })\n        .copied()\n        .collect();\n    if predicates.len() != generic_predicates.len() {\n        *generic_predicates.to_mut() = predicates;\n    }\n}\n\npub trait ToPolyTraitRef<'tcx> {\n    fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx>;\n}\n\nimpl<'tcx> ToPolyTraitRef<'tcx> for PolyTraitPredicate<'tcx> {\n    fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx> {\n        self.map_bound_ref(|trait_pred| trait_pred.trait_ref)\n    }\n}\n"
  },
  {
    "path": "frontend/exporter/src/traits.rs",
    "content": "use crate::prelude::*;\n\n#[cfg(feature = \"rustc\")]\npub mod resolution;\n#[cfg(feature = \"rustc\")]\nmod utils;\n#[cfg(feature = \"rustc\")]\npub use utils::{\n    Predicates, ToPolyTraitRef, erase_and_norm, erase_free_regions, implied_predicates, normalize,\n    predicates_defined_on, required_predicates, self_predicate,\n};\n\n#[cfg(feature = \"rustc\")]\npub use resolution::PredicateSearcher;\n#[cfg(feature = \"rustc\")]\nuse rustc_middle::ty;\n#[cfg(feature = \"rustc\")]\nuse rustc_span::def_id::DefId as RDefId;\n\n#[cfg(feature = \"rustc\")]\npub use utils::is_sized_related_trait;\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)]\npub enum ImplExprPathChunk {\n    AssocItem {\n        /// Reference to the item, with generics (for GATs), e.g. the `T` and `T: Clone` `ImplExpr`\n        /// in the following example:\n        /// ```ignore\n        /// trait Foo {\n        ///     type Type<T: Clone>: Debug;\n        /// }\n        /// ```\n        item: ItemRef,\n        assoc_item: AssocItem,\n        /// The implemented predicate.\n        predicate: Binder<TraitPredicate>,\n        predicate_id: PredicateId,\n        /// The index of this predicate in the list returned by `implied_predicates`.\n        index: usize,\n    },\n    Parent {\n        /// The implemented predicate.\n        predicate: Binder<TraitPredicate>,\n        predicate_id: PredicateId,\n        /// The index of this predicate in the list returned by `implied_predicates`.\n        index: usize,\n    },\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, ImplExprPathChunk> for resolution::PathChunk<'tcx> {\n    fn sinto(&self, s: &S) -> ImplExprPathChunk {\n        match self {\n            resolution::PathChunk::AssocItem {\n                item,\n                generic_args,\n                predicate,\n                index,\n                ..\n            } => 
ImplExprPathChunk::AssocItem {\n                item: translate_item_ref(s, item.def_id, generic_args),\n                assoc_item: AssocItem::sfrom(s, item),\n                predicate: predicate.sinto(s),\n                predicate_id: <_ as SInto<_, Clause>>::sinto(predicate, s).id,\n                index: index.sinto(s),\n            },\n            resolution::PathChunk::Parent {\n                predicate, index, ..\n            } => ImplExprPathChunk::Parent {\n                predicate: predicate.sinto(s),\n                predicate_id: <_ as SInto<_, Clause>>::sinto(predicate, s).id,\n                index: index.sinto(s),\n            },\n        }\n    }\n}\n\n/// The source of a particular trait implementation. Most often this is either `Concrete` for a\n/// concrete `impl Trait for Type {}` item, or `LocalBound` for a context-bound `where T: Trait`.\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: resolution::ImplExprAtom<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)]\npub enum ImplExprAtom {\n    /// A concrete `impl Trait for Type {}` item.\n    #[custom_arm(FROM_TYPE::Concrete { def_id, generics } => TO_TYPE::Concrete(\n        translate_item_ref(s, *def_id, generics),\n    ),)]\n    Concrete(ItemRef),\n    /// A context-bound clause like `where T: Trait`.\n    LocalBound {\n        #[not_in_source]\n        #[value({\n            let Self::LocalBound { predicate, .. } = self else { unreachable!() };\n            predicate.sinto(s).id\n        })]\n        predicate_id: PredicateId,\n        /// The nth (non-self) predicate found for this item. 
We use predicates from\n        /// `required_predicates` starting from the parentmost item.\n        index: usize,\n        r#trait: Binder<TraitRef>,\n        path: Vec<ImplExprPathChunk>,\n    },\n    /// The implicit `Self: Trait` clause present inside a `trait Trait {}` item.\n    // TODO: should we also get that clause for trait impls?\n    SelfImpl {\n        r#trait: Binder<TraitRef>,\n        path: Vec<ImplExprPathChunk>,\n    },\n    /// `dyn Trait` is a wrapped value with a virtual table for trait\n    /// `Trait`.  In other words, a value `dyn Trait` is a dependent\n    /// triple that gathers a type τ, a value of type τ and an\n    /// instance of type `Trait`.\n    /// `dyn Trait` implements `Trait` using a built-in implementation; this refers to that\n    /// built-in implementation.\n    Dyn,\n    /// A built-in trait whose implementation is computed by the compiler, such as `FnMut`. This\n    /// morally points to an invisible `impl` block; as such it contains the information we may\n    /// need from one.\n    Builtin {\n        /// Extra data for the given trait.\n        trait_data: BuiltinTraitData,\n        /// The `ImplExpr`s required to satisfy the implied predicates on the trait declaration.\n        /// E.g. since `FnMut: FnOnce`, a built-in `T: FnMut` impl would have an `ImplExpr` for `T:\n        /// FnOnce`.\n        impl_exprs: Vec<ImplExpr>,\n        /// The values of the associated types for this trait.\n        types: Vec<(DefId, Ty, Vec<ImplExpr>)>,\n    },\n    /// An error happened while resolving traits.\n    Error(String),\n}\n\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: resolution::BuiltinTraitData<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)]\npub enum BuiltinTraitData {\n    /// A virtual `Destruct` implementation.\n    /// `Destruct` is implemented automatically for all types. 
For our purposes, we chose to attach\n    /// the information about `drop_in_place` to that trait. This data tells us what kind of\n    /// `drop_in_place` the target type has.\n    Destruct(DestructData),\n    /// Some other builtin trait.\n    Other,\n}\n\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: resolution::DestructData<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)]\npub enum DestructData {\n    /// A drop that does nothing, e.g. for scalars and pointers.\n    Noop,\n    /// An implicit `Destruct` local clause, if the `resolve_destruct_bounds` option is `false`. If\n    /// that option is `true`, we'll add `Destruct` bounds to every type param, and use that to\n    /// resolve `Destruct` impls of generics. If it's `false`, we use this variant to indicate that\n    /// the clause comes from a generic or associated type.\n    Implicit,\n    /// The `drop_in_place` is known and non-trivial.\n    Glue {\n        /// The type we're generating glue for.\n        ty: Ty,\n    },\n}\n\n/// An `ImplExpr` describes the full data of a trait implementation. Because of generics, this may\n/// need to combine several concrete trait implementation items. 
For example, `((1u8, 2u8),\n/// \"hello\").clone()` combines the generic implementation of `Clone` for `(A, B)` with the\n/// concrete implementations for `u8` and `&str`, represented as a tree.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema, AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: resolution::ImplExpr<'tcx>, state: S as s)]\npub struct ImplExpr {\n    /// The trait this is an impl for.\n    pub r#trait: Binder<TraitRef>,\n    /// The kind of implemention of the root of the tree.\n    pub r#impl: ImplExprAtom,\n}\n\n/// Given a clause `clause` in the context of some impl block `impl_did`, susbts correctly `Self`\n/// from `clause` and (1) derive a `Clause` and (2) resolve an `ImplExpr`.\n#[cfg(feature = \"rustc\")]\npub fn super_clause_to_clause_and_impl_expr<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    impl_did: rustc_span::def_id::DefId,\n    clause: rustc_middle::ty::Clause<'tcx>,\n    span: rustc_span::Span,\n) -> Option<(Clause, ImplExpr, Span)> {\n    let tcx = s.base().tcx;\n    if !matches!(\n        tcx.def_kind(impl_did),\n        rustc_hir::def::DefKind::Impl { of_trait: true }\n    ) {\n        return None;\n    }\n    let impl_trait_ref =\n        rustc_middle::ty::Binder::dummy(tcx.impl_trait_ref(impl_did).instantiate_identity());\n    let original_predicate_id = {\n        // We don't want the id of the substituted clause id, but the\n        // original clause id (with, i.e., `Self`)\n        let s = &s.with_owner_id(impl_trait_ref.def_id());\n        clause.sinto(s).id\n    };\n    let new_clause = clause.instantiate_supertrait(tcx, impl_trait_ref);\n    let impl_expr = solve_trait(\n        s,\n        new_clause\n            .as_predicate()\n            .as_trait_clause()?\n            .to_poly_trait_ref(),\n    );\n    let mut new_clause_no_binder = new_clause.sinto(s);\n    new_clause_no_binder.id = original_predicate_id;\n    Some((new_clause_no_binder, impl_expr, 
span.sinto(s)))\n}\n\n/// This is the entrypoint of the solving.\n#[cfg(feature = \"rustc\")]\n#[tracing::instrument(level = \"trace\", skip(s))]\npub fn solve_trait<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    trait_ref: rustc_middle::ty::PolyTraitRef<'tcx>,\n) -> ImplExpr {\n    let warn = |msg: &str| {\n        if !s.base().silence_resolution_errors {\n            crate::warning!(s, \"{}\", msg)\n        }\n    };\n    if let Some(impl_expr) = s.with_cache(|cache| cache.impl_exprs.get(&trait_ref).cloned()) {\n        return impl_expr;\n    }\n    let resolved =\n        s.with_predicate_searcher(|pred_searcher| pred_searcher.resolve(&trait_ref, &warn));\n    let impl_expr = match resolved {\n        Ok(x) => x.sinto(s),\n        Err(e) => crate::fatal!(s, \"{}\", e),\n    };\n    s.with_cache(|cache| cache.impl_exprs.insert(trait_ref, impl_expr.clone()));\n    impl_expr\n}\n\n/// Translate a reference to an item, resolving the appropriate trait clauses as needed.\n#[cfg(feature = \"rustc\")]\n#[tracing::instrument(level = \"trace\", skip(s), ret)]\npub fn translate_item_ref<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    def_id: RDefId,\n    generics: ty::GenericArgsRef<'tcx>,\n) -> ItemRef {\n    ItemRef::translate(s, def_id, generics)\n}\n\n/// Solve the trait obligations for a specific item use (for example, a method call, an ADT, etc.)\n/// in the current context. 
Just like generic args include generics of parent items, this includes\n/// impl exprs for parent items.\n#[cfg(feature = \"rustc\")]\n#[tracing::instrument(level = \"trace\", skip(s), ret)]\npub fn solve_item_required_traits<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    def_id: RDefId,\n    generics: ty::GenericArgsRef<'tcx>,\n) -> Vec<ImplExpr> {\n    fn accumulate<'tcx, S: UnderOwnerState<'tcx>>(\n        s: &S,\n        def_id: RDefId,\n        generics: ty::GenericArgsRef<'tcx>,\n        impl_exprs: &mut Vec<ImplExpr>,\n    ) {\n        let tcx = s.base().tcx;\n        use rustc_hir::def::DefKind::*;\n        match tcx.def_kind(def_id) {\n            AssocTy | AssocFn | AssocConst | Closure | Ctor(..) | Variant => {\n                let parent = tcx.parent(def_id);\n                accumulate(s, parent, generics, impl_exprs);\n            }\n            _ => {}\n        }\n        let predicates = required_predicates(tcx, def_id, s.base().options.bounds_options);\n        impl_exprs.extend(solve_item_traits_inner(s, generics, predicates));\n    }\n    let mut impl_exprs = vec![];\n    accumulate(s, def_id, generics, &mut impl_exprs);\n    impl_exprs\n}\n\n/// Solve the trait obligations for implementing a trait (or for trait associated type bounds) in\n/// the current context.\n#[cfg(feature = \"rustc\")]\n#[tracing::instrument(level = \"trace\", skip(s), ret)]\npub fn solve_item_implied_traits<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    def_id: RDefId,\n    generics: ty::GenericArgsRef<'tcx>,\n) -> Vec<ImplExpr> {\n    let predicates = implied_predicates(s.base().tcx, def_id, s.base().options.bounds_options);\n    solve_item_traits_inner(s, generics, predicates)\n}\n\n/// Apply the given generics to the provided clauses and resolve the trait references in the\n/// current context.\n#[cfg(feature = \"rustc\")]\nfn solve_item_traits_inner<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    generics: ty::GenericArgsRef<'tcx>,\n    predicates: 
utils::Predicates<'tcx>,\n) -> Vec<ImplExpr> {\n    let tcx = s.base().tcx;\n    let typing_env = s.typing_env();\n    predicates\n        .iter()\n        .map(|(clause, _span)| *clause)\n        .filter_map(|clause| clause.as_trait_clause())\n        .map(|clause| clause.to_poly_trait_ref())\n        // Substitute the item generics\n        .map(|trait_ref| ty::EarlyBinder::bind(trait_ref).instantiate(tcx, generics))\n        // We unfortunately don't have a way to normalize without erasing regions.\n        .map(|trait_ref| {\n            tcx.try_normalize_erasing_regions(typing_env, trait_ref)\n                .unwrap_or(trait_ref)\n        })\n        // Resolve\n        .map(|trait_ref| solve_trait(s, trait_ref))\n        .collect()\n}\n\n/// Retrieve the `Self: Trait` clause for a trait associated item.\n#[cfg(feature = \"rustc\")]\npub fn self_clause_for_item<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    def_id: RDefId,\n    generics: rustc_middle::ty::GenericArgsRef<'tcx>,\n) -> Option<ImplExpr> {\n    let tcx = s.base().tcx;\n\n    let tr_def_id = tcx.trait_of_assoc(def_id)?;\n    // The \"self\" predicate in the context of the trait.\n    let self_pred = self_predicate(tcx, tr_def_id);\n    // Substitute to be in the context of the current item.\n    let generics = generics.truncate_to(tcx, tcx.generics_of(tr_def_id));\n    let self_pred = ty::EarlyBinder::bind(self_pred).instantiate(tcx, generics);\n\n    // Resolve\n    Some(solve_trait(s, self_pred))\n}\n\n/// Solve the `T: Sized` predicate.\n#[cfg(feature = \"rustc\")]\npub fn solve_sized<'tcx, S: UnderOwnerState<'tcx>>(s: &S, ty: ty::Ty<'tcx>) -> ImplExpr {\n    let tcx = s.base().tcx;\n    let sized_trait = tcx.lang_items().sized_trait().unwrap();\n    let ty = erase_free_regions(tcx, ty);\n    let tref = ty::Binder::dummy(ty::TraitRef::new(tcx, sized_trait, [ty]));\n    solve_trait(s, tref)\n}\n"
  },
  {
    "path": "frontend/exporter/src/types/attributes.rs",
    "content": "//! Copies of the types related to attributes.\n//! Such types are mostly contained in the crate `rustc_hir::attrs`.\n\nuse crate::prelude::*;\n\n/// Reflects [`rustc_hir::attrs::AttributeKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_hir::attrs::AttributeKind, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum AttributeKind {\n    Align {\n        align: Align,\n        span: Span,\n    },\n    AutomaticallyDerived(Span),\n    Deprecation {\n        deprecation: Deprecation,\n        span: Span,\n    },\n    DocComment {\n        style: AttrStyle,\n        kind: CommentKind,\n        span: Span,\n        comment: Symbol,\n    },\n    Ignore {\n        span: Span,\n        reason: Option<Symbol>,\n    },\n    Marker(Span),\n    MayDangle(Span),\n    MustUse {\n        span: Span,\n        reason: Option<Symbol>,\n    },\n    Path(Symbol, Span),\n    #[todo]\n    Todo(String),\n}\n\n/// Reflects [`rustc_hir::attrs::Deprecation`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S>, from: rustc_hir::attrs::Deprecation, state: S as _s)]\npub struct Deprecation {\n    pub since: DeprecatedSince,\n    pub note: Option<Symbol>,\n    pub suggestion: Option<Symbol>,\n}\n\n/// Reflects [`rustc_hir::attrs::DeprecatedSince`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S>, from: rustc_hir::attrs::DeprecatedSince, state: S as _s)]\npub enum DeprecatedSince {\n    RustcVersion(RustcVersion),\n    Future,\n    NonStandard(Symbol),\n    Unspecified,\n    Err,\n}\n\n/// Reflects [`rustc_hir::RustcVersion`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S>, from: rustc_hir::RustcVersion, state: S as _s)]\npub struct 
RustcVersion {\n    pub major: u16,\n    pub minor: u16,\n    pub patch: u16,\n}\n\n/// Reflects [`rustc_hir::attrs::InlineAttr`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_hir::attrs::InlineAttr, state: S as _s)]\npub enum InlineAttr {\n    None,\n    Hint,\n    Always,\n    Never,\n    Force {\n        attr_span: Span,\n        reason: Option<Symbol>,\n    },\n}\n"
  },
  {
    "path": "frontend/exporter/src/types/def_id.rs",
    "content": "//! This module contains the type definition for `DefId` and the types\n//! `DefId` depends on.\n//!\n//! This is purposely a very small isolated module:\n//! `hax-engine-names-extract` uses those types, but we don't want\n//! `hax-engine-names-extract` to have a build dependency on the whole\n//! frontend, that double the build times for the Rust part of hax.\n//!\n//! The feature `extract_names_mode` exists only in the crate\n//! `hax-engine-names-extract`, and is used to turn off the derive\n//! attributes `AdtInto` and `JsonSchema`.\n\nuse hax_adt_into::derive_group;\n\n#[cfg(feature = \"rustc\")]\nuse crate::prelude::*;\n#[cfg(not(feature = \"extract_names_mode\"))]\nuse crate::{AdtInto, JsonSchema};\n\n#[cfg(feature = \"rustc\")]\nuse {rustc_hir as hir, rustc_hir::def_id::DefId as RDefId, rustc_middle::ty};\n\npub type Symbol = String;\n#[cfg(not(feature = \"extract_names_mode\"))]\npub type ByteSymbol = Vec<u8>;\n\n#[cfg(all(not(feature = \"extract_names_mode\"), feature = \"rustc\"))]\nimpl<'t, S> SInto<S, Symbol> for rustc_span::symbol::Symbol {\n    fn sinto(&self, _s: &S) -> Symbol {\n        self.to_ident_string()\n    }\n}\n\n#[cfg(all(not(feature = \"extract_names_mode\"), feature = \"rustc\"))]\nimpl<'t, S> SInto<S, ByteSymbol> for rustc_span::symbol::ByteSymbol {\n    fn sinto(&self, _s: &S) -> ByteSymbol {\n        self.as_byte_str().to_owned()\n    }\n}\n\n/// Reflects [`hir::Safety`]\n#[cfg_attr(not(feature = \"extract_names_mode\"), derive(AdtInto, JsonSchema))]\n#[cfg_attr(not(feature = \"extract_names_mode\"), args(<S>, from: hir::Safety, state: S as _s))]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum Safety {\n    Unsafe,\n    Safe,\n}\n\npub type Mutability = bool;\n#[cfg(not(feature = \"extract_names_mode\"))]\npub type Pinnedness = bool;\n\n/// Reflects [`hir::def::CtorKind`]\n#[derive_group(Serializers)]\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, 
Ord)]\n#[cfg_attr(not(feature = \"extract_names_mode\"), derive(JsonSchema, AdtInto))]\n#[cfg_attr(not(feature = \"extract_names_mode\"), args(<S>, from: hir::def::CtorKind, state: S as _s))]\npub enum CtorKind {\n    Fn,\n    Const,\n}\n\n/// Reflects [`hir::def::CtorOf`]\n#[derive_group(Serializers)]\n#[derive(Debug, Copy, Hash, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[cfg_attr(not(feature = \"extract_names_mode\"), derive(JsonSchema, AdtInto))]\n#[cfg_attr(not(feature = \"extract_names_mode\"), args(<S>, from: hir::def::CtorOf, state: S as _s))]\npub enum CtorOf {\n    Struct,\n    Variant,\n}\n\n/// The id of a promoted MIR constant.\n///\n/// Reflects [`rustc_middle::mir::Promoted`].\n#[derive_group(Serializers)]\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]\n#[cfg_attr(not(feature = \"extract_names_mode\"), derive(JsonSchema, AdtInto))]\n#[cfg_attr(not(feature = \"extract_names_mode\"), args(<S>, from: rustc_middle::mir::Promoted, state: S as _s))]\npub struct PromotedId {\n    #[cfg_attr(not(feature = \"extract_names_mode\"), value(self.as_u32()))]\n    pub id: u32,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl PromotedId {\n    pub fn as_rust_promoted_id(&self) -> rustc_middle::mir::Promoted {\n        rustc_middle::mir::Promoted::from_u32(self.id)\n    }\n}\n\n/// Reflects [`rustc_hir::def::DefKind`]\n#[derive_group(Serializers)]\n#[cfg_attr(not(feature = \"extract_names_mode\"), derive(JsonSchema, AdtInto))]\n#[cfg_attr(not(feature = \"extract_names_mode\"), args(<S>, from: rustc_hir::def::DefKind, state: S as tcx))]\n#[derive(Debug, Clone, PartialEq, Hash, Eq, PartialOrd, Ord)]\npub enum DefKind {\n    Mod,\n    Struct,\n    Union,\n    Enum,\n    Variant,\n    Trait,\n    TyAlias,\n    ForeignTy,\n    TraitAlias,\n    AssocTy,\n    TyParam,\n    Fn,\n    Const,\n    ConstParam,\n    Static {\n        safety: Safety,\n        mutability: Mutability,\n        nested: bool,\n    },\n    Ctor(CtorOf, CtorKind),\n    AssocFn,\n    
AssocConst,\n    Macro(MacroKinds),\n    ExternCrate,\n    Use,\n    ForeignMod,\n    AnonConst,\n    InlineConst,\n    #[cfg_attr(not(feature = \"extract_names_mode\"), disable_mapping)]\n    /// Added by hax: promoted constants don't have def_ids in rustc but they do in hax.\n    PromotedConst,\n    OpaqueTy,\n    Field,\n    LifetimeParam,\n    GlobalAsm,\n    Impl {\n        of_trait: bool,\n    },\n    Closure,\n    SyntheticCoroutineBody,\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Default)]\n#[cfg_attr(not(feature = \"extract_names_mode\"), derive(JsonSchema))]\npub struct MacroKinds {\n    bang: bool,\n    attr: bool,\n    derive: bool,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<S> SInto<S, MacroKinds> for rustc_hir::def::MacroKinds {\n    fn sinto(&self, _s: &S) -> MacroKinds {\n        MacroKinds {\n            bang: self.contains(Self::BANG),\n            attr: self.contains(Self::ATTR),\n            derive: self.contains(Self::DERIVE),\n        }\n    }\n}\n\n/// Reflects [`rustc_hir::def_id::DefId`], augmented to also give ids to promoted constants (which\n/// have their own ad-hoc numbering scheme in rustc for now).\n#[derive_group(Serializers)]\n#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[cfg_attr(not(feature = \"extract_names_mode\"), derive(JsonSchema))]\npub struct DefId {\n    pub(crate) contents: crate::id_table::Node<DefIdContents>,\n}\n\n#[derive_group(Serializers)]\n#[derive(Debug, Hash, Clone, PartialEq, Eq, PartialOrd, Ord)]\n#[cfg_attr(not(feature = \"extract_names_mode\"), derive(JsonSchema))]\npub struct DefIdContents {\n    pub krate: String,\n    pub path: Vec<DisambiguatedDefPathItem>,\n    pub parent: Option<DefId>,\n    /// Stores rustc's `CrateNum`, `DefIndex` and `Promoted` raw indices. This can be useful if one\n    /// needs to convert a [`DefId`] into a [`rustc_hir::def_id::DefId`]. 
If the promoted id is\n    /// `Some`, then this `DefId` indicates the nth promoted constant associated with the item,\n    /// which doesn't have a real `rustc::DefId`.\n    ///\n    /// **Warning: this `index` field might not be safe to use**. They are valid only for one Rustc\n    /// sesssion. Please do not rely on those indices unless you cannot do otherwise.\n    pub index: (u32, u32, Option<PromotedId>),\n    pub is_local: bool,\n\n    /// The kind of definition this `DefId` points to.\n    pub kind: crate::DefKind,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl DefIdContents {\n    pub fn make_def_id<'tcx, S: BaseState<'tcx>>(self, s: &S) -> DefId {\n        let contents =\n            s.with_global_cache(|cache| id_table::Node::new(self, &mut cache.id_table_session));\n        DefId { contents }\n    }\n}\n\n/// Returns the [`SyntheticItem`] encoded by a [rustc `DefId`](RDefId), if any.\n#[cfg(feature = \"rustc\")]\npub fn def_id_as_synthetic<'tcx>(\n    def_id: RDefId,\n    s: &impl BaseState<'tcx>,\n) -> Option<SyntheticItem> {\n    s.with_global_cache(|c| c.reverse_synthetic_map.get(&def_id).copied())\n}\n\n#[cfg(feature = \"rustc\")]\nimpl DefId {\n    /// The rustc def_id corresponding to this item, if there is one. 
Promoted constants don't have\n    /// a rustc def_id.\n    pub fn as_rust_def_id(&self) -> Option<RDefId> {\n        let (_, _, promoted) = self.index;\n        match promoted {\n            None => Some(self.underlying_rust_def_id()),\n            Some(_) => None,\n        }\n    }\n    /// The def_id of this item or its parent if this is a promoted constant.\n    pub fn underlying_rust_def_id(&self) -> RDefId {\n        let (krate, index, _) = self.index;\n        RDefId {\n            krate: rustc_hir::def_id::CrateNum::from_u32(krate),\n            index: rustc_hir::def_id::DefIndex::from_u32(index),\n        }\n    }\n\n    /// Returns the [`SyntheticItem`] encoded by a [rustc `DefId`](RDefId), if\n    /// any.\n    ///\n    /// Note that this method relies on rustc indexes, which are session\n    /// specific. See [`Self`] documentation.\n    pub fn as_synthetic<'tcx>(&self, s: &impl BaseState<'tcx>) -> Option<SyntheticItem> {\n        def_id_as_synthetic(self.underlying_rust_def_id(), s)\n    }\n\n    /// Iterate over this element and its parents.\n    pub fn ancestry(&self) -> impl Iterator<Item = &Self> {\n        std::iter::successors(Some(self), |def| def.parent.as_ref())\n    }\n\n    /// The `PathItem` corresponding to this item.\n    pub fn path_item(&self) -> DisambiguatedDefPathItem {\n        self.path\n            .last()\n            .cloned()\n            .unwrap_or_else(|| DisambiguatedDefPathItem {\n                disambiguator: 0,\n                data: DefPathItem::CrateRoot {\n                    name: self.krate.clone(),\n                },\n            })\n    }\n\n    /// Construct a hax `DefId` for the nth promoted constant of the current item. 
That `DefId` has\n    /// no corresponding rustc `DefId`.\n    pub fn make_promoted_child<'tcx, S: BaseState<'tcx>>(\n        &self,\n        s: &S,\n        promoted_id: PromotedId,\n    ) -> Self {\n        let mut path = self.path.clone();\n        path.push(DisambiguatedDefPathItem {\n            data: DefPathItem::PromotedConst,\n            // Reuse the promoted id as disambiguator, like for inline consts.\n            disambiguator: promoted_id.id,\n        });\n        let (krate, index, _) = self.index;\n        let contents = DefIdContents {\n            krate: self.krate.clone(),\n            path,\n            parent: Some(self.clone()),\n            is_local: self.is_local,\n            index: (krate, index, Some(promoted_id)),\n            kind: DefKind::PromotedConst,\n        };\n        contents.make_def_id(s)\n    }\n}\n\nimpl DefId {\n    pub fn promoted_id(&self) -> Option<PromotedId> {\n        let (_, _, promoted) = self.index;\n        promoted\n    }\n}\n\nimpl std::ops::Deref for DefId {\n    type Target = DefIdContents;\n    fn deref(&self) -> &Self::Target {\n        &self.contents\n    }\n}\n\n#[cfg(not(feature = \"rustc\"))]\nimpl std::fmt::Debug for DefId {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        f.debug_struct(\"DefId\")\n            .field(\"krate\", &self.krate)\n            .field(\"path\", &self.path)\n            .finish()\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl std::fmt::Debug for DefId {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        // Use the more legible rustc debug implementation.\n        write!(f, \"{:?}\", self.underlying_rust_def_id())?;\n        if let Some(promoted) = self.promoted_id() {\n            write!(f, \"::promoted#{}\", promoted.id)?;\n        }\n        Ok(())\n    }\n}\n\nimpl std::hash::Hash for DefId {\n    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {\n        // A `DefId` is basically an interned path; we only 
hash the path, discarding the rest of\n        // the information.\n        self.krate.hash(state);\n        self.path.hash(state);\n        self.promoted_id().hash(state);\n    }\n}\n\n/// Gets the kind of the definition. Can't use `def_kind` directly because this crashes on the\n/// crate root.\n#[cfg(feature = \"rustc\")]\npub(crate) fn get_def_kind<'tcx>(tcx: ty::TyCtxt<'tcx>, def_id: RDefId) -> hir::def::DefKind {\n    if def_id == rustc_span::def_id::CRATE_DEF_ID.to_def_id() {\n        // Horrible hack: without this, `def_kind` crashes on the crate root. Presumably some table\n        // isn't properly initialized otherwise.\n        let _ = tcx.def_span(def_id);\n    };\n    tcx.def_kind(def_id)\n}\n\n/// The crate name under which synthetic items are exported under.\n#[cfg(any(feature = \"extract_names_mode\", feature = \"rustc\"))]\npub(super) const SYNTHETIC_CRATE_NAME: &str = \"<synthetic>\";\n\n#[cfg(feature = \"rustc\")]\nfn translate_def_id<'tcx, S: BaseState<'tcx>>(s: &S, def_id: RDefId) -> DefId {\n    let tcx = s.base().tcx;\n    let path = {\n        // Set the def_id so the `CrateRoot` path item can fetch the crate name.\n        let state_with_id = s.with_owner_id(def_id);\n        tcx.def_path(def_id)\n            .data\n            .iter()\n            .map(|x| x.sinto(&state_with_id))\n            .collect()\n    };\n    let contents = DefIdContents {\n        path,\n        krate: if def_id_as_synthetic(def_id, s).is_some() {\n            SYNTHETIC_CRATE_NAME.to_string()\n        } else {\n            tcx.crate_name(def_id.krate).to_string()\n        },\n        parent: tcx.opt_parent(def_id).sinto(s),\n        index: (\n            rustc_hir::def_id::CrateNum::as_u32(def_id.krate),\n            rustc_hir::def_id::DefIndex::as_u32(def_id.index),\n            None,\n        ),\n        is_local: def_id.is_local(),\n        kind: get_def_kind(tcx, def_id).sinto(s),\n    };\n    contents.make_def_id(s)\n}\n\n#[cfg(all(not(feature = 
\"extract_names_mode\"), feature = \"rustc\"))]\nimpl<'s, S: BaseState<'s>> SInto<S, DefId> for RDefId {\n    fn sinto(&self, s: &S) -> DefId {\n        if let Some(def_id) = s.with_item_cache(*self, |cache| cache.def_id.clone()) {\n            return def_id;\n        }\n        let def_id = translate_def_id(s, *self);\n        s.with_item_cache(*self, |cache| cache.def_id = Some(def_id.clone()));\n        def_id\n    }\n}\n\n#[cfg(not(feature = \"extract_names_mode\"))]\npub type Path = Vec<String>;\n\n#[cfg(all(not(feature = \"extract_names_mode\"), feature = \"rustc\"))]\nimpl std::convert::From<DefId> for Path {\n    fn from(v: DefId) -> Vec<String> {\n        std::iter::once(&v.krate)\n            .chain(v.path.iter().filter_map(|item| match &item.data {\n                DefPathItem::TypeNs(s)\n                | DefPathItem::ValueNs(s)\n                | DefPathItem::MacroNs(s)\n                | DefPathItem::LifetimeNs(s) => Some(s),\n                _ => None,\n            }))\n            .cloned()\n            .collect()\n    }\n}\n\n#[cfg(not(feature = \"extract_names_mode\"))]\npub type GlobalIdent = DefId;\n\n#[cfg(all(not(feature = \"extract_names_mode\"), feature = \"rustc\"))]\nimpl<'tcx, S: BaseState<'tcx>> SInto<S, GlobalIdent> for rustc_hir::def_id::LocalDefId {\n    fn sinto(&self, st: &S) -> DefId {\n        self.to_def_id().sinto(st)\n    }\n}\n\n/// Reflects [`rustc_hir::definitions::DefPathData`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[cfg_attr(not(feature = \"extract_names_mode\"), derive(AdtInto, JsonSchema))]\n#[cfg_attr(not(feature = \"extract_names_mode\"), args(<'ctx, S: UnderOwnerState<'ctx>>, from: rustc_hir::definitions::DefPathData, state: S as s))]\npub enum DefPathItem {\n    CrateRoot {\n        #[cfg_attr(not(feature = \"extract_names_mode\"), value(s.base().tcx.crate_name(s.owner_id().krate).sinto(s)))]\n        name: Symbol,\n    },\n    Impl,\n    ForeignMod,\n    Use,\n 
   GlobalAsm,\n    TypeNs(Symbol),\n    ValueNs(Symbol),\n    MacroNs(Symbol),\n    LifetimeNs(Symbol),\n    Closure,\n    Ctor,\n    LateAnonConst,\n    AnonConst,\n    #[cfg_attr(not(feature = \"extract_names_mode\"), disable_mapping)]\n    PromotedConst,\n    DesugaredAnonymousLifetime,\n    OpaqueTy,\n    OpaqueLifetime(Symbol),\n    AnonAssocTy(Symbol),\n    SyntheticCoroutineBody,\n    NestedStatic,\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[cfg_attr(not(feature = \"extract_names_mode\"), derive(AdtInto, JsonSchema))]\n#[cfg_attr(not(feature = \"extract_names_mode\"), args(<'a, S: UnderOwnerState<'a>>, from: rustc_hir::definitions::DisambiguatedDefPathData, state: S as s))]\n/// Reflects [`rustc_hir::definitions::DisambiguatedDefPathData`]\npub struct DisambiguatedDefPathItem {\n    pub data: DefPathItem,\n    pub disambiguator: u32,\n}\n"
  },
  {
    "path": "frontend/exporter/src/types/hir.rs",
    "content": "//! Copies of the relevant `HIR` types. HIR represents the code of a rust crate post-macro\n//! expansion. It is close to the parsed AST, modulo some desugarings (and macro expansion).\n//!\n//! This module also includes some `rustc_ast` definitions when they show up in HIR.\nuse crate::prelude::*;\nuse crate::sinto_todo;\n\n#[cfg(feature = \"rustc\")]\nuse rustc_ast::ast;\n#[cfg(feature = \"rustc\")]\nuse rustc_hir as hir;\n#[cfg(feature = \"rustc\")]\nuse rustc_middle::ty;\n\n/// Reflects [`hir::hir_id::HirId`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: hir::hir_id::HirId, state: S as gstate)]\npub struct HirId {\n    owner: DefId,\n    local_id: usize,\n    // attrs: String\n}\n// TODO: If not working: See original\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: BaseState<'tcx>> SInto<S, DefId> for hir::hir_id::OwnerId {\n    fn sinto(&self, s: &S) -> DefId {\n        self.to_def_id().sinto(s)\n    }\n}\n\n/// Reflects [`ast::LitFloatType`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: ast::LitFloatType, state: S as gstate)]\npub enum LitFloatType {\n    Suffixed(FloatTy),\n    Unsuffixed,\n}\n\n/// Reflects [`hir::Movability`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S>, from: hir::Movability, state: S as _s)]\npub enum Movability {\n    Static,\n    Movable,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<S> SInto<S, Mutability> for hir::Mutability {\n    fn sinto(&self, _s: &S) -> Mutability {\n        match self {\n            Self::Mut => true,\n            Self::Not => false,\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<S> SInto<S, Pinnedness> for hir::Pinnedness {\n    fn sinto(&self, _s: &S) -> Pinnedness {\n        match self {\n 
           Self::Pinned => true,\n            Self::Not => false,\n        }\n    }\n}\n\n/// Reflects [`hir::RangeEnd`]\n#[derive(AdtInto)]\n#[args(<S>, from: hir::RangeEnd, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum RangeEnd {\n    Included,\n    Excluded,\n}\n\n/// Reflects [`hir::ImplicitSelfKind`]\n#[derive(AdtInto)]\n#[args(<S>, from: hir::ImplicitSelfKind, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum ImplicitSelfKind {\n    Imm,\n    Mut,\n    RefImm,\n    RefMut,\n    None,\n}\n\n/// Reflects [`hir::FnDecl`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::FnDecl<'tcx>, state: S as tcx)]\npub struct FnDecl {\n    pub inputs: Vec<Ty>,\n    pub output: FnRetTy,\n    pub c_variadic: bool,\n    pub implicit_self: ImplicitSelfKind,\n    pub lifetime_elision_allowed: bool,\n}\n\n/// Reflects [`hir::FnSig`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::FnSig<'tcx>, state: S as tcx)]\npub struct FnSig {\n    pub header: FnHeader,\n    pub decl: FnDecl,\n    pub span: Span,\n}\n\n#[derive(AdtInto, JsonSchema)]\n#[args(<S>, from: hir::HeaderSafety, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum HeaderSafety {\n    SafeTargetFeatures,\n    Normal(Safety),\n}\n\n/// Reflects [`hir::FnHeader`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::FnHeader, state: S as tcx)]\npub struct FnHeader {\n    pub safety: HeaderSafety,\n    pub constness: Constness,\n    pub asyncness: IsAsync,\n    pub abi: ExternAbi,\n}\n\n/// Reflects [`rustc_abi::ExternAbi`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, 
Ord)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_abi::ExternAbi, state: S as s)]\npub enum ExternAbi {\n    Rust,\n    C {\n        unwind: bool,\n    },\n    #[todo]\n    Other(String),\n}\n\n/// Function definition\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct FnDef<Body: IsBody> {\n    pub header: FnHeader,\n    pub params: Vec<Param>,\n    pub ret: Ty,\n    pub body: Body,\n    pub sig_span: Span,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'x: 'tcx, 'tcx, S: UnderOwnerState<'tcx>> SInto<S, Ty> for hir::Ty<'x> {\n    fn sinto(self: &hir::Ty<'x>, s: &S) -> Ty {\n        // **Important:**\n        // We need a local id here, and we get it from the owner id, which must\n        // be local. It is safe to do so, because if we have access to a HIR ty,\n        // it necessarily means we are exploring a local item (we don't have\n        // access to the HIR of external objects, only their MIR).\n        rustc_hir_analysis::lower_ty(s.base().tcx, self).sinto(s)\n    }\n}\n\n/// Reflects [`hir::UseKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: hir::UseKind, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum UseKind {\n    Single(Ident),\n    Glob,\n    ListStem,\n}\n\n/// Reflects [`hir::IsAuto`]\n#[derive(AdtInto)]\n#[args(<S>, from: hir::IsAuto, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum IsAuto {\n    Yes,\n    No,\n}\n\n/// Reflects [`hir::Defaultness`]\n#[derive(AdtInto)]\n#[args(<S>, from: hir::Defaultness, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum Defaultness {\n    Default { has_value: bool },\n    Final,\n}\n\n/// Reflects [`hir::ImplPolarity`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::ImplPolarity, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum ImplPolarity {\n    Positive,\n    
Negative(Span),\n}\n\n/// Reflects [`hir::Constness`]\n#[derive(AdtInto)]\n#[args(<S>, from: hir::Constness, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum Constness {\n    Const,\n    NotConst,\n}\n\n/// Reflects [`hir::Generics`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::Generics<'tcx>, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct Generics<Body: IsBody> {\n    pub params: Vec<GenericParam<Body>>,\n    #[value(region_bounds_at_current_owner(tcx))]\n    pub bounds: GenericBounds,\n    pub has_where_clause_predicates: bool,\n    pub where_clause_span: Span,\n    pub span: Span,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: BaseState<'tcx>, Body: IsBody> SInto<S, ImplItem<Body>> for hir::ImplItemId {\n    fn sinto(&self, s: &S) -> ImplItem<Body> {\n        let tcx: rustc_middle::ty::TyCtxt = s.base().tcx;\n        let impl_item = tcx.hir_impl_item(*self);\n        let s = s.with_owner_id(impl_item.owner_id.to_def_id());\n        impl_item.sinto(&s)\n    }\n}\n\n/// Reflects [`hir::ParamName`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum ParamName {\n    Plain(LocalIdent),\n    Fresh,\n    Error,\n}\n\n/// Reflects [`hir::LifetimeParamKind`]\n#[derive(AdtInto)]\n#[args(<S>, from: hir::LifetimeParamKind, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum LifetimeParamKind {\n    Explicit,\n    Elided(MissingLifetimeKind),\n    Error,\n}\n\n/// Reflects [`hir::AnonConst`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: hir::AnonConst, state: S as s)]\npub struct AnonConst<Body: IsBody> {\n    pub hir_id: HirId,\n    pub def_id: GlobalIdent,\n    #[map({\n        body_from_id::<Body, _>(*x, &s.with_owner_id(hir_id.owner.to_def_id()))\n    })]\n    pub body: Body,\n}\n\n/// Reflects 
[`hir::ConstArg`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::ConstArg<'tcx>, state: S as s)]\npub struct ConstArg<Body: IsBody> {\n    pub hir_id: HirId,\n    pub kind: ConstArgKind<Body>,\n}\n\n/// Reflects [`hir::ConstArgKind`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::ConstArgKind<'tcx>, state: S as s)]\npub enum ConstArgKind<Body: IsBody> {\n    Path(QPath),\n    Anon(AnonConst<Body>),\n    #[todo]\n    Infer(String),\n}\n\n/// Reflects [`hir::GenericParamKind`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::GenericParamKind<'tcx>, state: S as tcx)]\npub enum GenericParamKind<Body: IsBody> {\n    Lifetime {\n        kind: LifetimeParamKind,\n    },\n    Type {\n        /// On use site, Rust always give us all the generic\n        /// parameters, no matter the defaultness. This information is\n        /// thus not so useful. At the same time, as discussed in\n        /// https://github.com/hacspec/hax/issues/310, extracting this\n        /// default type causes failures when querying Rust for trait\n        /// resolution. We thus decided to disable this feature. 
If\n        /// this default type information is useful to you, please\n        /// open an issue on https://github.com/hacspec/hax.\n        #[map(x.map(|_ty| ()))]\n        default: Option<()>,\n        synthetic: bool,\n    },\n    Const {\n        ty: Ty,\n        default: Option<ConstArg<Body>>,\n    },\n}\n\n/// Reflects [`hir::GenericParam`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::GenericParam<'tcx>, state: S as s)]\npub struct GenericParam<Body: IsBody> {\n    pub hir_id: HirId,\n    pub def_id: GlobalIdent,\n    #[map(match x {\n        hir::ParamName::Plain(loc_ident) =>\n            ParamName::Plain(LocalIdent {\n                name: loc_ident.as_str().to_string(),\n                id: self.hir_id.sinto(s)\n            }),\n        hir::ParamName::Fresh =>\n            ParamName::Fresh,\n        hir::ParamName::Error { .. } =>\n            ParamName::Error,\n    })]\n    pub name: ParamName,\n    pub span: Span,\n    pub pure_wrt_drop: bool,\n    pub kind: GenericParamKind<Body>,\n    pub colon_span: Option<Span>,\n    #[value(s.base().tcx.hir_attrs(*hir_id).sinto(s))]\n    attributes: Vec<Attribute>,\n}\n\n/// Reflects [`hir::ImplItem`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ImplItem<'tcx>, state: S as s)]\npub struct ImplItem<Body: IsBody> {\n    pub ident: Ident,\n    pub owner_id: DefId,\n    pub generics: Generics<Body>,\n    pub kind: ImplItemKind<Body>,\n    pub span: Span,\n    // Removed fields. 
If these are used, will need to provide `#[value(..)]` implementations.\n    // pub defaultness: Defaultness,\n    // pub vis_span: Span,\n    #[value(ItemAttributes::from_owner_id(s, *owner_id))]\n    /// the attributes on this impl item\n    pub attributes: ItemAttributes,\n}\n\n/// Reflects [`hir::ImplItemKind`], inlining the body of the items.\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ImplItemKind<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum ImplItemKind<Body: IsBody> {\n    Const(Ty, Body),\n    #[custom_arm(hir::ImplItemKind::Fn(sig, body) => {\n                ImplItemKind::Fn(make_fn_def::<Body, _>(sig, body, s))\n        },)]\n    Fn(FnDef<Body>),\n    #[custom_arm(hir::ImplItemKind::Type(t) => {\n        let parent_bounds = {\n            let (tcx, owner_id) = (s.base().tcx, s.owner_id());\n            let assoc_item = tcx.opt_associated_item(owner_id).unwrap();\n            let impl_did = assoc_item.impl_container(tcx).unwrap();\n            tcx.explicit_item_bounds(assoc_item.trait_item_def_id().unwrap())\n                .skip_binder() // Skips an `EarlyBinder`, likely for GATs\n                .iter()\n                .copied()\n                .filter(|(clause, _)| clause.as_trait_clause().is_some_and(|trait_predicate| {\n                    !is_sized_related_trait(tcx, trait_predicate.skip_binder().def_id())\n                }))\n                .filter_map(|(clause, span)| super_clause_to_clause_and_impl_expr(s, impl_did, clause, span))\n                .collect::<Vec<_>>()\n        };\n        ImplItemKind::Type {\n            ty: t.sinto(s),\n            parent_bounds\n        }\n        },)]\n    /// An associated type with its parent bounds inlined.\n    Type {\n        ty: Ty,\n        parent_bounds: Vec<(Clause, ImplExpr, Span)>,\n    },\n}\n\n/// Reflects [`hir::Impl`].\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::Impl<'tcx>, 
state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct Impl<Body: IsBody> {\n    #[value(of_trait.map(|trait_impl_header| trait_impl_header.safety).unwrap_or(rustc_hir::Safety::Safe).sinto(s))]\n    pub safety: Safety,\n    // Removed fields. If these are used, will need to provide `#[value(..)]` implementations.\n    // pub polarity: ImplPolarity,\n    // pub defaultness: Defaultness,\n    pub generics: Generics<Body>,\n    #[map({\n        x.map(|_|\n            s.base().tcx\n                .impl_trait_ref(s.owner_id())\n                .instantiate_identity()\n                .sinto(s)\n        )\n    })]\n    pub of_trait: Option<TraitRef>,\n    pub self_ty: Ty,\n    pub items: Vec<ImplItem<Body>>,\n    #[value({\n        let (tcx, owner_id) = (s.base().tcx, s.owner_id());\n        if self.of_trait.is_some() {\n            let trait_did = tcx.impl_trait_id(owner_id);\n            tcx.explicit_super_predicates_of(trait_did)\n                .iter_identity_copied()\n                .filter(|(clause, _)| clause.as_trait_clause().is_some_and(|trait_predicate| {\n                    !is_sized_related_trait(tcx, trait_predicate.skip_binder().def_id())\n                }))\n                .filter_map(|(clause, span)| super_clause_to_clause_and_impl_expr(s, owner_id, clause, span))\n                .collect::<Vec<_>>()\n        } else {\n            vec![]\n        }\n    })]\n    /// The clauses and impl expressions corresponding to the impl's\n    /// trait (if not inherent) super bounds (if any).\n    pub parent_bounds: Vec<(Clause, ImplExpr, Span)>,\n}\n\n/// Reflects [`hir::IsAsync`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::IsAsync, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum IsAsync {\n    Async(Span),\n    NotAsync,\n}\n\n/// Reflects [`hir::FnRetTy`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::FnRetTy<'tcx>, 
state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum FnRetTy {\n    DefaultReturn(Span),\n    Return(Ty),\n}\n\n/// Reflects [`hir::VariantData`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::VariantData<'tcx>, state: S as tcx)]\npub enum VariantData {\n    Struct {\n        fields: Vec<HirFieldDef>,\n        recovered: bool,\n    },\n    Tuple(Vec<HirFieldDef>, HirId, GlobalIdent),\n    Unit(HirId, GlobalIdent),\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<S> SInto<S, bool> for ast::Recovered {\n    fn sinto(&self, _s: &S) -> bool {\n        match self {\n            Self::Yes(_) => true,\n            Self::No => false,\n        }\n    }\n}\n\n/// Reflects [`hir::FieldDef`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::FieldDef<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct HirFieldDef {\n    pub span: Span,\n    pub vis_span: Span,\n    pub ident: Ident,\n    pub hir_id: HirId,\n    pub def_id: GlobalIdent,\n    pub ty: Ty,\n    #[value(s.base().tcx.hir_attrs(*hir_id).sinto(s))]\n    attributes: Vec<Attribute>,\n}\n\n/// Reflects [`hir::Variant`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::Variant<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct Variant<Body: IsBody> {\n    pub ident: Ident,\n    pub hir_id: HirId,\n    pub def_id: GlobalIdent,\n    #[map(x.sinto(&s.with_owner_id(self.def_id.to_def_id())))]\n    pub data: VariantData,\n    pub disr_expr: Option<AnonConst<Body>>,\n    #[value({\n        let tcx = s.base().tcx;\n        let variant = tcx\n            .adt_def(s.owner_id())\n            .variants()\n            .into_iter()\n            .find(|v| v.def_id == self.def_id.into()).unwrap();\n        variant.discr.sinto(s)\n    })]\n    pub discr: 
DiscriminantDefinition,\n    pub span: Span,\n    #[value(s.base().tcx.hir_attrs(*hir_id).sinto(s))]\n    pub attributes: Vec<Attribute>,\n}\n\n/// Reflects [`hir::UsePath`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::UsePath<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct UsePath {\n    pub span: Span,\n    #[map(x.iter().map(|res| res.sinto(s)).collect())]\n    pub res: Vec<Option<Res>>,\n    pub segments: Vec<PathSegment>,\n    #[value(self.segments.iter().last().and_then(|segment| {\n            match s.base().tcx.hir_node_by_def_id(segment.hir_id.owner.def_id) {\n                hir::Node::Item(hir::Item {\n                    kind: hir::ItemKind::Use(_, hir::UseKind::Single(ident)),\n                    ..\n                }) => Some(ident.name.to_ident_string()),\n                _ => None,\n            }\n        }))]\n    pub rename: Option<String>,\n}\n\n/// Reflects [`hir::def::Res`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::def::Res, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum Res {\n    Def(DefKind, DefId),\n    PrimTy(PrimTy),\n    SelfTyParam {\n        trait_: DefId,\n    },\n    SelfTyAlias {\n        alias_to: DefId,\n        forbid_generic: bool,\n        is_trait_impl: bool,\n    },\n    SelfCtor(DefId),\n    Local(HirId),\n    ToolMod,\n    NonMacroAttr(NonMacroAttrKind),\n    Err,\n}\n\n/// Reflects [`hir::PrimTy`]\n#[derive(AdtInto)]\n#[args(<S>, from: hir::PrimTy, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum PrimTy {\n    Int(IntTy),\n    Uint(UintTy),\n    Float(FloatTy),\n    Str,\n    Bool,\n    Char,\n}\n\n/// Reflects [`hir::def::NonMacroAttrKind`]\n#[derive(AdtInto)]\n#[args(<S>, from: hir::def::NonMacroAttrKind, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum NonMacroAttrKind {\n    
Builtin(Symbol),\n    Tool,\n    DeriveHelper,\n    DeriveHelperCompat,\n}\n\n/// Reflects [`hir::PathSegment`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: hir::PathSegment<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct PathSegment {\n    pub ident: Ident,\n    pub hir_id: HirId,\n    pub res: Res,\n    #[map(args.map(|args| args.sinto(s)))]\n    pub args: Option<HirGenericArgs>,\n    pub infer_args: bool,\n}\n\n/// Reflects [`hir::ItemKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ItemKind<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum ItemKind<Body: IsBody> {\n    ExternCrate(Option<Symbol>, Ident),\n    Use(UsePath, UseKind),\n    Static(Mutability, Ident, Ty, Body),\n    Const(Ident, Generics<Body>, Ty, Body),\n    #[custom_arm(\n        hir::ItemKind::Fn{ ident, sig, generics, body, .. } => {\n            ItemKind::Fn {\n                ident: ident.sinto(s),\n                generics: generics.sinto(s),\n                def: make_fn_def::<Body, _>(sig, body, s),\n            }\n        }\n    )]\n    Fn {\n        ident: Ident,\n        generics: Generics<Body>,\n        def: FnDef<Body>,\n    },\n\n    Macro(Ident, MacroDef, MacroKinds),\n    Mod(Ident, Vec<Item<Body>>),\n    ForeignMod {\n        abi: ExternAbi,\n        items: Vec<ForeignItem<Body>>,\n    },\n    GlobalAsm {\n        asm: InlineAsm,\n    },\n    TyAlias(\n        Ident,\n        Generics<Body>,\n        #[map({\n            // Rust doesn't enforce bounds on generic parameters in type aliases. Thus, when\n            // translating type aliases, we need to disable trait resolution errors. 
For more\n            // details, please see https://github.com/hacspec/hax/issues/707.\n            let s = &s.with_base(Base { silence_resolution_errors: true, ..s.base() });\n            x.sinto(s)\n        })]\n        Ty,\n    ),\n    Enum(\n        Ident,\n        Generics<Body>,\n        EnumDef<Body>,\n        #[value({\n            let tcx = s.base().tcx;\n            tcx.repr_options_of_def(s.owner_id().expect_local()).sinto(s)\n        })]\n        ReprOptions,\n    ),\n    Struct(Ident, Generics<Body>, VariantData),\n    Union(Ident, Generics<Body>, VariantData),\n    Trait(\n        Constness,\n        IsAuto,\n        Safety,\n        Ident,\n        Generics<Body>,\n        GenericBounds,\n        Vec<TraitItem<Body>>,\n    ),\n    TraitAlias(Constness, Ident, Generics<Body>, GenericBounds),\n    Impl(Impl<Body>),\n}\n\npub type EnumDef<Body> = Vec<Variant<Body>>;\n\n/// Reflects [`hir::TraitItemKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::TraitItemKind<'tcx>, state: S as tcx)]\n#[derive(Clone, Debug, JsonSchema)]\n#[derive_group(Serializers)]\npub enum TraitItemKind<Body: IsBody> {\n    Const(Ty, Option<Body>),\n    #[custom_arm(\n        hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(id)) => {\n            TraitItemKind::RequiredFn(sig.sinto(tcx), id.sinto(tcx))\n        }\n    )]\n    /// Reflects a required [`hir::TraitItemKind::Fn`]\n    RequiredFn(FnSig, Vec<Option<Ident>>),\n    #[custom_arm(\n        hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body)) => {\n            TraitItemKind::ProvidedFn(sig.sinto(tcx), make_fn_def::<Body, _>(sig, body, tcx))\n        }\n    )]\n    /// Reflects a provided [`hir::TraitItemKind::Fn`]\n    ProvidedFn(FnSig, FnDef<Body>),\n    #[custom_arm(\n        hir::TraitItemKind::Type(b, ty) => {\n            TraitItemKind::Type(b.sinto(tcx), ty.map(|t| t.sinto(tcx)))\n        }\n    )]\n    Type(GenericBounds, Option<Ty>),\n}\n\n/// Reflects 
[`hir::TraitItem`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::TraitItem<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct TraitItem<Body: IsBody> {\n    pub ident: Ident,\n    pub owner_id: DefId,\n    pub generics: Generics<Body>,\n    pub kind: TraitItemKind<Body>,\n    pub span: Span,\n    pub defaultness: Defaultness,\n    #[value(ItemAttributes::from_owner_id(s, *owner_id))]\n    /// The attributes on this trait item\n    pub attributes: ItemAttributes,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>, Body: IsBody> SInto<S, EnumDef<Body>> for hir::EnumDef<'tcx> {\n    fn sinto(&self, s: &S) -> EnumDef<Body> {\n        self.variants.iter().map(|v| v.sinto(s)).collect()\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'a, S: BaseState<'a>, Body: IsBody> SInto<S, TraitItem<Body>> for hir::TraitItemId {\n    fn sinto(&self, s: &S) -> TraitItem<Body> {\n        let s = s.with_owner_id(self.owner_id.to_def_id());\n        let tcx: rustc_middle::ty::TyCtxt = s.base().tcx;\n        tcx.hir_trait_item(*self).sinto(&s)\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'a, 'tcx, S: UnderOwnerState<'tcx>, Body: IsBody> SInto<S, Vec<Item<Body>>> for hir::Mod<'a> {\n    fn sinto(&self, s: &S) -> Vec<Item<Body>> {\n        let tcx = s.base().tcx;\n        self.item_ids\n            .iter()\n            .map(|id| tcx.hir_item(*id).sinto(s))\n            .collect()\n    }\n}\n\n/// Reflects [`hir::ForeignItemKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ForeignItemKind<'tcx>, state: S as tcx)]\n#[derive(Clone, Debug, JsonSchema)]\n#[derive_group(Serializers)]\npub enum ForeignItemKind<Body: IsBody> {\n    Fn(FnSig, Vec<Option<Ident>>, Generics<Body>),\n    Static(Ty, Mutability, Safety),\n    Type,\n}\n\n/// Reflects [`hir::ForeignItem`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> >, from: hir::ForeignItem<'tcx>, state: S as 
tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct ForeignItem<Body: IsBody> {\n    pub ident: Ident,\n    pub kind: ForeignItemKind<Body>,\n    pub owner_id: DefId,\n    pub span: Span,\n    pub vis_span: Span,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'a, S: UnderOwnerState<'a>, Body: IsBody> SInto<S, ForeignItem<Body>> for hir::ForeignItemId {\n    fn sinto(&self, s: &S) -> ForeignItem<Body> {\n        let tcx: rustc_middle::ty::TyCtxt = s.base().tcx;\n        tcx.hir_foreign_item(*self).sinto(s)\n    }\n}\n\n/// Reflects [`hir::GenericBounds`]\ntype GenericBounds = Vec<Clause>;\n\n/// Compute the bounds for the owner registed in the state `s`\n#[cfg(feature = \"rustc\")]\nfn region_bounds_at_current_owner<'tcx, S: UnderOwnerState<'tcx>>(s: &S) -> GenericBounds {\n    let tcx = s.base().tcx;\n\n    // According to what kind of node we are looking at, we should\n    // either call `predicates_defined_on` or `item_bounds`\n    let use_item_bounds = {\n        if let Some(oid) = s.owner_id().as_local() {\n            let hir_id = tcx.local_def_id_to_hir_id(oid);\n            let node = tcx.hir_node(hir_id);\n            matches!(\n                node,\n                hir::Node::TraitItem(hir::TraitItem {\n                    kind: hir::TraitItemKind::Type(..),\n                    ..\n                }) | hir::Node::OpaqueTy(..),\n            )\n        } else {\n            false\n        }\n    };\n\n    let clauses: Vec<ty::Clause<'tcx>> = if use_item_bounds {\n        tcx.explicit_item_bounds(s.owner_id())\n            .map_bound(|clauses| {\n                clauses\n                    .iter()\n                    .map(|(x, _span)| x)\n                    .copied()\n                    .collect::<Vec<_>>()\n            })\n            .instantiate_identity()\n    } else {\n        predicates_defined_on(tcx, s.owner_id())\n            .iter()\n            .map(|(x, _span)| x)\n            .copied()\n            .collect()\n    
};\n    clauses\n        .into_iter()\n        .filter(|clause| {\n            clause.as_trait_clause().is_none_or(|trait_predicate| {\n                !is_sized_related_trait(tcx, trait_predicate.skip_binder().def_id())\n            })\n        })\n        .collect::<Vec<_>>()\n        .sinto(s)\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, GenericBounds> for hir::GenericBounds<'tcx> {\n    fn sinto(&self, s: &S) -> GenericBounds {\n        region_bounds_at_current_owner(s)\n    }\n}\n\n/// Reflects [`rustc_ast::tokenstream::TokenStream`] as a plain\n/// string. If you need to reshape that into Rust tokens or construct,\n/// please use, e.g., `syn`.\npub type TokenStream = String;\n\n#[cfg(feature = \"rustc\")]\nimpl<'t, S> SInto<S, TokenStream> for rustc_ast::tokenstream::TokenStream {\n    fn sinto(&self, _: &S) -> String {\n        rustc_ast_pretty::pprust::tts_to_string(self)\n    }\n}\n\n/// Reflects [`rustc_ast::token::Delimiter`]\n#[derive(AdtInto)]\n#[args(<S>, from: rustc_ast::token::Delimiter, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum Delimiter {\n    Parenthesis,\n    Brace,\n    Bracket,\n    Invisible(InvisibleOrigin),\n}\n\nsinto_todo!(rustc_ast::token, InvisibleOrigin);\n\n/// Reflects [`rustc_ast::ast::DelimArgs`]\n#[derive(AdtInto)]\n#[args(<S>, from: rustc_ast::ast::DelimArgs, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct DelimArgs {\n    pub dspan: DelimSpan,\n    pub delim: Delimiter,\n    pub tokens: TokenStream,\n}\n\nsinto_todo!(rustc_ast::tokenstream, DelimSpan);\n\n/// Reflects [`ast::MacroDef`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: ast::MacroDef, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct MacroDef {\n    pub body: DelimArgs,\n    pub macro_rules: 
bool,\n}\n\n/// Reflects [`hir::Item`] (and [`hir::ItemId`])\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct Item<Body: IsBody> {\n    pub def_id: Option<GlobalIdent>,\n    pub owner_id: DefId,\n    pub span: Span,\n    pub vis_span: Span,\n    pub kind: ItemKind<Body>,\n    pub attributes: ItemAttributes,\n    pub visibility: Visibility<DefId>,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: BaseState<'tcx>, Body: IsBody> SInto<S, Item<Body>> for hir::Item<'tcx> {\n    fn sinto(&self, s: &S) -> Item<Body> {\n        use hir::ItemKind::*;\n        // TODO: Not all items have an identifier; return `Option` here, or even better: use the\n        // ident in the `ItemKind`.\n        let name = match self.kind {\n            ExternCrate(_, i)\n            | Use(_, hir::UseKind::Single(i))\n            | Static(_, i, ..)\n            | Const(i, ..)\n            | Fn { ident: i, .. }\n            | Macro(i, ..)\n            | Mod(i, ..)\n            | TyAlias(i, ..)\n            | Enum(i, ..)\n            | Struct(i, ..)\n            | Union(i, ..)\n            | Trait(_, _, _, i, ..)\n            | TraitAlias(_, i, ..) => i.name.to_ident_string(),\n            Use(..) | ForeignMod { .. } | GlobalAsm { .. } | Impl { .. 
} => String::new(),\n        };\n        let s = &s.with_owner_id(self.owner_id.to_def_id());\n        let tcx = s.base().tcx;\n        let owner_id: DefId = self.owner_id.sinto(s);\n        let def_id = Path::from(owner_id.clone())\n            .ends_with(&[name])\n            .then(|| owner_id.clone());\n        Item {\n            def_id,\n            owner_id,\n            span: self.span.sinto(s),\n            vis_span: self.span.sinto(s),\n            kind: self.kind.sinto(s),\n            attributes: ItemAttributes::from_owner_id(s, self.owner_id),\n            visibility: tcx.visibility(self.owner_id).sinto(s),\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: BaseState<'tcx>, Body: IsBody> SInto<S, Item<Body>> for hir::ItemId {\n    fn sinto(&self, s: &S) -> Item<Body> {\n        let tcx: rustc_middle::ty::TyCtxt = s.base().tcx;\n        tcx.hir_item(*self).sinto(s)\n    }\n}\n\n/// Reflects [`rustc_span::symbol::Ident`]\npub type Ident = (Symbol, Span);\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: BaseState<'tcx>> SInto<S, Ident> for rustc_span::symbol::Ident {\n    fn sinto(&self, s: &S) -> Ident {\n        (self.name.sinto(s), self.span.sinto(s))\n    }\n}\n\n/// Reflects [`rustc_ast::AttrStyle`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<S>, from: rustc_ast::AttrStyle, state: S as _s)]\npub enum AttrStyle {\n    Outer,\n    Inner,\n}\n\n/// Reflects [`rustc_ast::Attribute`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_hir::Attribute, state: S as gstate)]\npub enum Attribute {\n    Parsed(AttributeKind),\n    Unparsed(AttrItem),\n}\n/// Reflects [`rustc_ast::ast::BindingMode`]\n#[derive(AdtInto)]\n#[args(<S>, from: rustc_ast::ast::BindingMode, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct 
BindingMode {\n    #[value(self.0.sinto(s))]\n    pub by_ref: ByRef,\n    #[value(self.1.sinto(s))]\n    pub mutability: Mutability,\n}\n\n/// Reflects [`rustc_ast::ast::ByRef`]\n#[derive(AdtInto)]\n#[args(<S>, from: rustc_ast::ast::ByRef, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum ByRef {\n    Yes(Pinnedness, Mutability),\n    No,\n}\n\n/// Reflects [`rustc_ast::ast::StrStyle`]\n#[derive(AdtInto)]\n#[args(<S>, from: rustc_ast::ast::StrStyle, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum StrStyle {\n    Cooked,\n    Raw(u8),\n}\n\n/// Reflects [`rustc_ast::ast::LitKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::LitKind, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum LitKind {\n    Str(Symbol, StrStyle),\n    ByteStr(ByteSymbol, StrStyle),\n    CStr(ByteSymbol, StrStyle),\n    Byte(u8),\n    Char(char),\n    Int(\n        #[serde(with = \"serialize_int::unsigned\")]\n        #[schemars(with = \"String\")]\n        u128,\n        LitIntType,\n    ),\n    Float(Symbol, LitFloatType),\n    Bool(bool),\n    Err(ErrorGuaranteed),\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<S> SInto<S, u128> for rustc_data_structures::packed::Pu128 {\n    fn sinto(&self, _s: &S) -> u128 {\n        self.0\n    }\n}\n\n/// Reflects [`rustc_ast::token::CommentKind`]\n#[derive(AdtInto)]\n#[args(<S>, from: rustc_ast::token::CommentKind, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum CommentKind {\n    Line,\n    Block,\n}\n\n/// Reflects [`rustc_hir::AttrArgs`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_hir::AttrArgs, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, 
PartialOrd, Ord)]\npub enum AttrArgs {\n    Empty,\n    Delimited(DelimArgs),\n    Eq { eq_span: Span, expr: MetaItemLit },\n}\n\n/// Reflects [`rustc_ast::MetaItemLit`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::MetaItemLit, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct MetaItemLit {\n    pub symbol: Symbol,\n    pub suffix: Option<Symbol>,\n    pub kind: LitKind,\n    pub span: Span,\n}\n\n/// Reflects [`rustc_hir::AttrItem`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_hir::AttrItem, state: S as gstate)]\npub struct AttrItem {\n    #[map(x.to_string())]\n    pub path: String,\n    pub args: AttrArgs,\n    pub span: Span,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<S> SInto<S, String> for rustc_ast::tokenstream::LazyAttrTokenStream {\n    fn sinto(&self, st: &S) -> String {\n        rustc_ast::tokenstream::TokenStream::new(self.to_attr_token_stream().to_token_trees())\n            .sinto(st)\n    }\n}\n\nsinto_todo!(rustc_hir, GenericArgs<'a> as HirGenericArgs);\nsinto_todo!(rustc_hir, InlineAsm<'a>);\nsinto_todo!(rustc_hir, MissingLifetimeKind);\nsinto_todo!(rustc_hir, QPath<'tcx>);\nsinto_todo!(rustc_hir, WhereRegionPredicate<'tcx>);\nsinto_todo!(rustc_hir, WhereEqPredicate<'tcx>);\nsinto_todo!(rustc_hir, OwnerId);\n"
  },
  {
    "path": "frontend/exporter/src/types/mir.rs",
    "content": "//! Copies of the relevant `MIR` types. MIR represents a rust (function) body as a CFG. It's a\n//! semantically rich representation that contains no high-level control-flow operations like loops\n//! or patterns; instead the control flow is entirely described by gotos and switches on integer\n//! values.\nuse crate::prelude::*;\n#[cfg(feature = \"rustc\")]\nuse rustc_middle::{mir, ty};\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::mir::SourceInfo, state: S as s)]\npub struct SourceInfo {\n    pub span: Span,\n    pub scope: SourceScope,\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::mir::LocalDecl<'tcx>, state: S as s)]\npub struct LocalDecl {\n    pub mutability: Mutability,\n    pub ty: Ty,\n    pub source_info: SourceInfo,\n    #[value(None)]\n    pub name: Option<String>, // This information is contextual, thus the SInto instance initializes it to None, and then we fill it while `SInto`ing MirBody\n}\n\npub type BasicBlocks = IndexVec<BasicBlock, BasicBlockData>;\n\n#[cfg(feature = \"rustc\")]\nfn name_of_local(\n    local: rustc_middle::mir::Local,\n    var_debug_info: &Vec<mir::VarDebugInfo>,\n) -> Option<String> {\n    var_debug_info\n        .iter()\n        .find(|info| {\n            if let mir::VarDebugInfoContents::Place(place) = info.value {\n                place.projection.is_empty() && place.local == local\n            } else {\n                false\n            }\n        })\n        .map(|dbg| dbg.name.to_ident_string())\n}\n\n/// Enumerates the kinds of Mir bodies. 
TODO: use const generics\n/// instead of an open list of types.\npub mod mir_kinds {\n    use crate::prelude::{JsonSchema, derive_group};\n\n    #[derive_group(Serializers)]\n    #[derive(Clone, Copy, Debug, JsonSchema)]\n    pub struct Built;\n\n    #[derive_group(Serializers)]\n    #[derive(Clone, Copy, Debug, JsonSchema)]\n    pub struct Promoted;\n\n    #[derive_group(Serializers)]\n    #[derive(Clone, Copy, Debug, JsonSchema)]\n    pub struct Elaborated;\n\n    #[derive_group(Serializers)]\n    #[derive(Clone, Copy, Debug, JsonSchema)]\n    pub struct Optimized;\n\n    #[derive_group(Serializers)]\n    #[derive(Clone, Copy, Debug, JsonSchema)]\n    pub struct CTFE;\n\n    /// MIR of unknown origin. `body()` returns `None`; this is used to get the bodies provided via\n    /// `from_mir` but not attempt to get MIR for functions etc.\n    #[derive_group(Serializers)]\n    #[derive(Clone, Copy, Debug, JsonSchema)]\n    pub struct Unknown;\n\n    #[cfg(feature = \"rustc\")]\n    pub use rustc::*;\n    #[cfg(feature = \"rustc\")]\n    mod rustc {\n        use super::*;\n        use rustc_middle::mir::Body;\n        use rustc_middle::ty::TyCtxt;\n        use rustc_span::def_id::DefId;\n\n        pub trait IsMirKind: Clone + std::fmt::Debug + std::any::Any + Send + Sync {\n            // CPS to deal with stealable bodies cleanly.\n            fn get_mir<'tcx, T>(\n                tcx: TyCtxt<'tcx>,\n                id: DefId,\n                f: impl FnOnce(&Body<'tcx>) -> T,\n            ) -> Option<T>;\n        }\n\n        impl IsMirKind for Built {\n            fn get_mir<'tcx, T>(\n                tcx: TyCtxt<'tcx>,\n                id: DefId,\n                f: impl FnOnce(&Body<'tcx>) -> T,\n            ) -> Option<T> {\n                let id = id.as_local()?;\n                let steal = tcx.mir_built(id);\n                if steal.is_stolen() {\n                    None\n                } else {\n                    Some(f(&steal.borrow()))\n                
}\n            }\n        }\n\n        impl IsMirKind for Promoted {\n            fn get_mir<'tcx, T>(\n                tcx: TyCtxt<'tcx>,\n                id: DefId,\n                f: impl FnOnce(&Body<'tcx>) -> T,\n            ) -> Option<T> {\n                let id = id.as_local()?;\n                let (steal, _) = tcx.mir_promoted(id);\n                if steal.is_stolen() {\n                    None\n                } else {\n                    Some(f(&steal.borrow()))\n                }\n            }\n        }\n\n        impl IsMirKind for Elaborated {\n            fn get_mir<'tcx, T>(\n                tcx: TyCtxt<'tcx>,\n                id: DefId,\n                f: impl FnOnce(&Body<'tcx>) -> T,\n            ) -> Option<T> {\n                let id = id.as_local()?;\n                let steal = tcx.mir_drops_elaborated_and_const_checked(id);\n                if steal.is_stolen() {\n                    None\n                } else {\n                    Some(f(&steal.borrow()))\n                }\n            }\n        }\n\n        impl IsMirKind for Optimized {\n            fn get_mir<'tcx, T>(\n                tcx: TyCtxt<'tcx>,\n                id: DefId,\n                f: impl FnOnce(&Body<'tcx>) -> T,\n            ) -> Option<T> {\n                Some(f(tcx.optimized_mir(id)))\n            }\n        }\n\n        impl IsMirKind for CTFE {\n            fn get_mir<'tcx, T>(\n                tcx: TyCtxt<'tcx>,\n                id: DefId,\n                f: impl FnOnce(&Body<'tcx>) -> T,\n            ) -> Option<T> {\n                Some(f(tcx.mir_for_ctfe(id)))\n            }\n        }\n\n        impl IsMirKind for Unknown {\n            fn get_mir<'tcx, T>(\n                _tcx: TyCtxt<'tcx>,\n                _id: DefId,\n                _f: impl FnOnce(&Body<'tcx>) -> T,\n            ) -> Option<T> {\n                None\n            }\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\npub use mir_kinds::IsMirKind;\n\n/// The contents 
of `Operand::Const`.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct ConstOperand {\n    pub span: Span,\n    pub ty: Ty,\n    pub kind: ConstOperandKind,\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum ConstOperandKind {\n    /// An evaluated constant represented as an expression.\n    Value(ConstantExpr),\n    /// Part of a MIR body that was promoted to be a constant. May not be evaluatable because of\n    /// generics.\n    /// It's a reference to the `DefId` of the constant. Note that rustc does not give a `DefId` to\n    /// promoted constants, but we do in hax.\n    Promoted(ItemRef),\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, ConstOperand> for mir::ConstOperand<'tcx> {\n    fn sinto(&self, s: &S) -> ConstOperand {\n        let kind = translate_mir_const(s, self.span, self.const_);\n        ConstOperand {\n            span: self.span.sinto(s),\n            ty: self.const_.ty().sinto(s),\n            kind,\n        }\n    }\n}\n\n/// Retrieve the MIR for a promoted body.\n#[cfg(feature = \"rustc\")]\npub fn get_promoted_mir<'tcx>(\n    tcx: ty::TyCtxt<'tcx>,\n    def_id: RDefId,\n    promoted_id: mir::Promoted,\n) -> mir::Body<'tcx> {\n    if let Some(local_def_id) = def_id.as_local() {\n        let (_, promoteds) = tcx.mir_promoted(local_def_id);\n        if !promoteds.is_stolen() {\n            promoteds.borrow()[promoted_id].clone()\n        } else {\n            tcx.promoted_mir(def_id)[promoted_id].clone()\n        }\n    } else {\n        tcx.promoted_mir(def_id)[promoted_id].clone()\n    }\n}\n\n#[cfg(feature = \"rustc\")]\n/// Translate a MIR constant.\nfn translate_mir_const<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    span: rustc_span::Span,\n    konst: mir::Const<'tcx>,\n) -> ConstOperandKind {\n    use ConstOperandKind::{Promoted, Value};\n    use rustc_middle::mir::Const;\n    let tcx = s.base().tcx;\n    match konst {\n        
Const::Val(const_value, ty) => {\n            let evaluated = const_value_to_constant_expr(s, ty, const_value, span);\n            match evaluated.report_err() {\n                Ok(val) => Value(val),\n                Err(err) => {\n                    warning!(\n                        s[span], \"Couldn't convert constant back to an expression\";\n                        {const_value, ty, err}\n                    );\n                    Value(\n                        ConstantExprKind::Todo(\"ConstEvalVal\".into())\n                            .decorate(ty.sinto(s), span.sinto(s)),\n                    )\n                }\n            }\n        }\n        Const::Ty(_ty, c) => Value(c.sinto(s)),\n        Const::Unevaluated(ucv, ty) => {\n            use crate::rustc_middle::query::Key;\n            let span = span.substitute_dummy(\n                tcx.def_ident_span(ucv.def)\n                    .unwrap_or_else(|| ucv.def.default_span(tcx)),\n            );\n            match ucv.promoted {\n                Some(promoted) => {\n                    let item = translate_item_ref(s, ucv.def, ucv.args);\n                    let item = item.mutate_def_id(s, |def_id| {\n                        // Construct a def_id for the promoted constant.\n                        *def_id = def_id.make_promoted_child(s, promoted.sinto(s));\n                    });\n                    Promoted(item)\n                }\n                None => match translate_constant_reference(s, span, ucv.shrink()) {\n                    Some(val) => Value(val),\n                    None => match eval_mir_constant(s, konst) {\n                        Some(val) => translate_mir_const(s, span, val),\n                        // TODO: This is triggered when compiling using `generic_const_exprs`. 
We\n                        // might be able to get a MIR body from the def_id.\n                        None => Value(\n                            ConstantExprKind::Todo(\"TranslateUneval\".into())\n                                .decorate(ty.sinto(s), span.sinto(s)),\n                        ),\n                    },\n                },\n            }\n        }\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::Body<'tcx>, state: S as s)]\npub struct MirBody<KIND> {\n    pub span: Span,\n    #[map({\n        x.iter_enumerated().map(|(local, local_decl)| {\n            let mut local_decl = local_decl.sinto(s);\n            local_decl.name = name_of_local(local, &self.var_debug_info);\n            local_decl\n        }).collect()\n    })]\n    pub local_decls: IndexVec<Local, LocalDecl>,\n    pub arg_count: usize,\n    pub basic_blocks: BasicBlocks,\n    pub source_scopes: IndexVec<SourceScope, SourceScopeData>,\n    pub tainted_by_errors: Option<ErrorGuaranteed>,\n    #[value(std::marker::PhantomData)]\n    pub _kind: std::marker::PhantomData<KIND>,\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_middle::mir::SourceScopeData<'tcx>, state: S as s)]\npub struct SourceScopeData {\n    pub span: Span,\n    pub parent_scope: Option<SourceScope>,\n    pub inlined_parent_scope: Option<SourceScope>,\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::Operand<'tcx>, state: S as s)]\npub enum Operand {\n    Copy(Place),\n    Move(Place),\n    Constant(ConstOperand),\n}\n\n#[cfg(feature = \"rustc\")]\nimpl Operand {\n    pub(crate) fn ty(&self) -> &Ty {\n        match self {\n            Operand::Copy(p) | Operand::Move(p) => &p.ty,\n            
Operand::Constant(c) => &c.ty,\n        }\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::Terminator<'tcx>, state: S as s)]\npub struct Terminator {\n    pub source_info: SourceInfo,\n    pub kind: TerminatorKind,\n}\n\n#[cfg(feature = \"rustc\")]\nfn translate_terminator_kind_call<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>(\n    s: &S,\n    terminator: &rustc_middle::mir::TerminatorKind<'tcx>,\n) -> TerminatorKind {\n    let tcx = s.base().tcx;\n    let mir::TerminatorKind::Call {\n        func,\n        args,\n        destination,\n        target,\n        unwind,\n        fn_span,\n        ..\n    } = terminator\n    else {\n        unreachable!()\n    };\n\n    let ty = func.ty(&s.mir().local_decls, tcx);\n    let hax_ty: crate::Ty = ty.sinto(s);\n    let sig = match hax_ty.kind() {\n        TyKind::Arrow(sig) => sig,\n        TyKind::FnDef { fn_sig, .. } => fn_sig,\n        TyKind::Closure(args) => &args.fn_sig,\n        _ => supposely_unreachable_fatal!(\n            s,\n            \"TerminatorKind_Call_expected_fn_type\";\n            { ty }\n        ),\n    };\n    let fun_op = if let ty::TyKind::FnDef(def_id, generics) = ty.kind() {\n        // The type of the value is one of the singleton types that corresponds to each function,\n        // which is enough information.\n        let item = translate_item_ref(s, *def_id, *generics);\n        FunOperand::Static(item)\n    } else {\n        use mir::Operand;\n        match func {\n            Operand::Constant(_) => {\n                unimplemented!(\"{:?}\", func);\n            }\n            Operand::Move(place) => {\n                // Function pointer or closure.\n                let place = place.sinto(s);\n                FunOperand::DynamicMove(place)\n            }\n            Operand::Copy(_place) => {\n                unimplemented!(\"{:?}\", func);\n            }\n        
}\n    };\n\n    let late_bound_generics = sig\n        .bound_vars\n        .iter()\n        .map(|var| match var {\n            BoundVariableKind::Region(r) => r,\n            BoundVariableKind::Ty(..) | BoundVariableKind::Const => {\n                supposely_unreachable_fatal!(\n                    s,\n                    \"non_lifetime_late_bound\";\n                    { var }\n                )\n            }\n        })\n        .map(|_| {\n            GenericArg::Lifetime(Region {\n                kind: RegionKind::ReErased,\n            })\n        })\n        .collect();\n    TerminatorKind::Call {\n        fun: fun_op,\n        late_bound_generics,\n        args: args.sinto(s),\n        destination: destination.sinto(s),\n        target: target.sinto(s),\n        unwind: unwind.sinto(s),\n        fn_span: fn_span.sinto(s),\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nfn translate_terminator_kind_drop<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>(\n    s: &S,\n    terminator: &rustc_middle::mir::TerminatorKind<'tcx>,\n) -> TerminatorKind {\n    let tcx = s.base().tcx;\n    let mir::TerminatorKind::Drop {\n        place,\n        target,\n        unwind,\n        ..\n    } = terminator\n    else {\n        unreachable!()\n    };\n\n    let local_decls = &s.mir().local_decls;\n    let place_ty = place.ty(local_decls, tcx).ty;\n    let destruct_trait = tcx.lang_items().destruct_trait().unwrap();\n    let impl_expr = solve_trait(\n        s,\n        ty::Binder::dummy(ty::TraitRef::new(tcx, destruct_trait, [place_ty])),\n    );\n\n    TerminatorKind::Drop {\n        place: place.sinto(s),\n        impl_expr,\n        target: target.sinto(s),\n        unwind: unwind.sinto(s),\n    }\n}\n\n// We don't use the LitIntType on purpose (we don't want the \"unsuffixed\" case)\n#[derive_group(Serializers)]\n#[derive(Clone, Copy, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum ScalarTy {\n    Bool,\n    Int(IntTy),\n    Uint(UintTy),\n    
Char,\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct ScalarInt {\n    /// Little-endian representation of the integer\n    pub data_le_bytes: [u8; 16],\n    pub int_ty: ScalarTy,\n}\n\n/// Translate a `SwitchInt` terminator.\n#[cfg(feature = \"rustc\")]\nfn translate_switchint<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>(\n    s: &S,\n    discr: &mir::Operand<'tcx>,\n    targets: &mir::SwitchTargets,\n) -> TerminatorKind {\n    let discr = discr.sinto(s);\n    let ty = match discr.ty().kind() {\n        TyKind::Bool => ScalarTy::Bool,\n        TyKind::Int(ty) => ScalarTy::Int(*ty),\n        TyKind::Uint(ty) => ScalarTy::Uint(*ty),\n        TyKind::Char => ScalarTy::Char,\n        ty => fatal!(s, \"Unexpected switch_ty: {:?}\", ty),\n    };\n\n    // Convert all the test values to the proper values.\n    let otherwise = targets.otherwise().sinto(s);\n    let targets_vec: Vec<(ScalarInt, BasicBlock)> = targets\n        .iter()\n        .map(|(v, b)| {\n            let v = ScalarInt {\n                data_le_bytes: v.to_le_bytes(),\n                int_ty: ty,\n            };\n            (v, b.sinto(s))\n        })\n        .collect();\n\n    TerminatorKind::SwitchInt {\n        discr,\n        ty,\n        targets: targets_vec,\n        otherwise,\n    }\n}\n\n/// A value of type `fn<...> A -> B` that can be called.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum FunOperand {\n    /// Call to a statically-known function.\n    Static(ItemRef),\n    /// Use of a closure or a function pointer value. 
Counts as a move from the given place.\n    DynamicMove(Place),\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_middle::mir::UnwindAction, state: S as _s)]\npub enum UnwindAction {\n    Continue,\n    Unreachable,\n    Terminate(UnwindTerminateReason),\n    Cleanup(BasicBlock),\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::TerminatorKind<'tcx>, state: S as s)]\npub enum TerminatorKind {\n    Goto {\n        target: BasicBlock,\n    },\n    #[custom_arm(\n        rustc_middle::mir::TerminatorKind::SwitchInt { discr, targets } => {\n            translate_switchint(s, discr, targets)\n        }\n    )]\n    SwitchInt {\n        /// The value being switched one.\n        discr: Operand,\n        /// The type that is being switched on.\n        ty: ScalarTy,\n        /// Possible success cases.\n        targets: Vec<(ScalarInt, BasicBlock)>,\n        /// If none of the `targets` match, branch to that block.\n        otherwise: BasicBlock,\n    },\n    Return,\n    Unreachable,\n    #[custom_arm(\n        x @ rustc_middle::mir::TerminatorKind::Drop { .. } => {\n          translate_terminator_kind_drop(s, x)\n        }\n    )]\n    Drop {\n        place: Place,\n        /// Implementation of `place.ty(): Drop`.\n        impl_expr: ImplExpr,\n        target: BasicBlock,\n        unwind: UnwindAction,\n    },\n    #[custom_arm(\n        x @ rustc_middle::mir::TerminatorKind::Call { .. } => {\n          translate_terminator_kind_call(s, x)\n        }\n    )]\n    Call {\n        fun: FunOperand,\n        /// A `FunOperand` is a value of type `fn<...> A -> B`. The generics in `<...>` are called\n        /// \"late-bound\" and are instantiated anew at each call site. This list provides the\n        /// generics used at this call-site. 
They are all lifetimes and at the time of writing are\n        /// all erased lifetimes.\n        late_bound_generics: Vec<GenericArg>,\n        args: Vec<Spanned<Operand>>,\n        destination: Place,\n        target: Option<BasicBlock>,\n        unwind: UnwindAction,\n        fn_span: Span,\n    },\n    TailCall {\n        func: Operand,\n        args: Vec<Spanned<Operand>>,\n        fn_span: Span,\n    },\n    Assert {\n        cond: Operand,\n        expected: bool,\n        msg: AssertMessage,\n        target: BasicBlock,\n        unwind: UnwindAction,\n    },\n    Yield {\n        value: Operand,\n        resume: BasicBlock,\n        resume_arg: Place,\n        drop: Option<BasicBlock>,\n    },\n    CoroutineDrop,\n    FalseEdge {\n        real_target: BasicBlock,\n        imaginary_target: BasicBlock,\n    },\n    FalseUnwind {\n        real_target: BasicBlock,\n        unwind: UnwindAction,\n    },\n    UnwindResume,\n    UnwindTerminate(UnwindTerminateReason),\n    InlineAsm {\n        template: Vec<InlineAsmTemplatePiece>,\n        operands: Vec<InlineAsmOperand>,\n        options: InlineAsmOptions,\n        line_spans: Vec<Span>,\n        targets: Vec<BasicBlock>,\n        unwind: UnwindAction,\n    },\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::Statement<'tcx>, state: S as s)]\npub struct Statement {\n    pub source_info: SourceInfo,\n    #[map(Box::new(x.sinto(s)))]\n    pub kind: Box<StatementKind>,\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::StatementKind<'tcx>, state: S as s)]\npub enum StatementKind {\n    Assign((Place, Rvalue)),\n    FakeRead((FakeReadCause, Place)),\n    SetDiscriminant {\n        place: Place,\n        variant_index: VariantIdx,\n    },\n    StorageLive(Local),\n    StorageDead(Local),\n    
Retag(RetagKind, Place),\n    PlaceMention(Place),\n    AscribeUserType((Place, UserTypeProjection), Variance),\n    Coverage(CoverageKind),\n    Intrinsic(NonDivergingIntrinsic),\n    ConstEvalCounter,\n    BackwardIncompatibleDropHint {\n        place: Place,\n    },\n    Nop,\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::NonDivergingIntrinsic<'tcx>, state: S as s)]\npub enum NonDivergingIntrinsic {\n    Assume(Operand),\n    CopyNonOverlapping(CopyNonOverlapping),\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::CopyNonOverlapping<'tcx>, state: S as s)]\npub struct CopyNonOverlapping {\n    pub src: Operand,\n    pub dst: Operand,\n    pub count: Operand,\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct Place {\n    /// The type of the element on which we apply the projection given by `kind`\n    pub ty: Ty,\n    pub kind: PlaceKind,\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum PlaceKind {\n    Local(Local),\n    Projection {\n        place: Box<Place>,\n        kind: ProjectionElem,\n    },\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum ProjectionElemFieldKind {\n    Tuple(FieldIdx),\n    Adt {\n        typ: DefId,\n        variant: Option<VariantIdx>,\n        index: FieldIdx,\n    },\n    /// Get access to one of the fields of the state of a closure\n    ClosureState(FieldIdx),\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum ProjectionElem {\n    Deref,\n    Field(ProjectionElemFieldKind),\n    Index(Local),\n    ConstantIndex {\n        offset: u64,\n        min_length: u64,\n        from_end: bool,\n    },\n    Subslice {\n        from: u64,\n        to: u64,\n        from_end: bool,\n    
},\n    Downcast(Option<Symbol>, VariantIdx),\n    OpaqueCast,\n}\n\n// refactor\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>> SInto<S, Place>\n    for rustc_middle::mir::Place<'tcx>\n{\n    #[tracing::instrument(level = \"info\", skip(s))]\n    fn sinto(&self, s: &S) -> Place {\n        let tcx = s.base().tcx;\n        let local_decls = &s.mir().local_decls;\n\n        let mut place_ty: mir::PlaceTy = mir::Place::from(self.local).ty(local_decls, tcx);\n        let mut place = Place {\n            ty: place_ty.ty.sinto(s),\n            kind: PlaceKind::Local(self.local.sinto(s)),\n        };\n        for elem in self.projection.as_slice() {\n            use rustc_middle::mir::ProjectionElem::*;\n            let projected_place_ty = place_ty.projection_ty(tcx, *elem);\n            if matches!(elem, Downcast { .. }) {\n                // We keep the same `Place`, the variant is tracked in the `PlaceTy` and we can\n                // access it next loop iteration.\n            } else {\n                let elem_kind = match elem {\n                    Deref => ProjectionElem::Deref,\n                    Field(index, _) => {\n                        let field_pj = match place_ty.ty.kind() {\n                            ty::Adt(adt_def, _) => {\n                                let variant = place_ty.variant_index;\n                                assert!(\n                                    ((adt_def.is_struct() || adt_def.is_union())\n                                        && variant.is_none())\n                                        || (adt_def.is_enum() && variant.is_some())\n                                );\n                                ProjectionElemFieldKind::Adt {\n                                    typ: adt_def.did().sinto(s),\n                                    variant: variant.map(|id| id.sinto(s)),\n                                    index: index.sinto(s),\n                                }\n                      
      }\n                            ty::Tuple(_types) => ProjectionElemFieldKind::Tuple(index.sinto(s)),\n                            // We get there when we access one of the fields of the the state\n                            // captured by a closure.\n                            ty::Closure(..) => {\n                                ProjectionElemFieldKind::ClosureState(index.sinto(s))\n                            }\n                            ty_kind => supposely_unreachable_fatal!(\n                                s, \"ProjectionElemFieldBadType\";\n                                {index, ty_kind, &place_ty, &place}\n                            ),\n                        };\n                        ProjectionElem::Field(field_pj)\n                    }\n                    Index(local) => ProjectionElem::Index(local.sinto(s)),\n                    ConstantIndex {\n                        offset,\n                        min_length,\n                        from_end,\n                    } => ProjectionElem::ConstantIndex {\n                        offset: *offset,\n                        min_length: *min_length,\n                        from_end: *from_end,\n                    },\n                    Subslice { from, to, from_end } => ProjectionElem::Subslice {\n                        from: *from,\n                        to: *to,\n                        from_end: *from_end,\n                    },\n                    OpaqueCast(..) => ProjectionElem::OpaqueCast,\n                    Downcast { .. } => unreachable!(),\n                    UnwrapUnsafeBinder { .. 
} => panic!(\"unsupported feature: unsafe binders\"),\n                };\n\n                place = Place {\n                    ty: projected_place_ty.ty.sinto(s),\n                    kind: PlaceKind::Projection {\n                        place: Box::new(place),\n                        kind: elem_kind,\n                    },\n                };\n            }\n            place_ty = projected_place_ty;\n        }\n        place\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::AggregateKind<'tcx>, state: S as s)]\npub enum AggregateKind {\n    Array(Ty),\n    Tuple,\n    #[custom_arm(rustc_middle::mir::AggregateKind::Adt(def_id, vid, generics, annot, fid) => {\n        let adt_kind = s.base().tcx.adt_def(def_id).adt_kind().sinto(s);\n        let item = translate_item_ref(s, *def_id, generics);\n        AggregateKind::Adt(\n            item,\n            vid.sinto(s),\n            adt_kind,\n            annot.sinto(s),\n            fid.sinto(s),\n        )\n    })]\n    Adt(\n        ItemRef,\n        VariantIdx,\n        AdtKind,\n        Option<UserTypeAnnotationIndex>,\n        Option<FieldIdx>,\n    ),\n    #[custom_arm(rustc_middle::mir::AggregateKind::Closure(def_id, generics) => {\n        let closure = generics.as_closure();\n        let args = ClosureArgs::sfrom(s, *def_id, closure);\n        AggregateKind::Closure(args)\n    })]\n    Closure(ClosureArgs),\n    #[custom_arm(FROM_TYPE::Coroutine(def_id, generics) => TO_TYPE::Coroutine(translate_item_ref(s, *def_id, generics)),)]\n    Coroutine(ItemRef),\n    #[custom_arm(FROM_TYPE::CoroutineClosure(def_id, generics) => TO_TYPE::CoroutineClosure(translate_item_ref(s, *def_id, generics)),)]\n    CoroutineClosure(ItemRef),\n    RawPtr(Ty, Mutability),\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum CastKind {\n    PointerExposeProvenance,\n    
PointerWithExposedProvenance,\n    PointerCoercion(PointerCoercion, CoercionSource),\n    IntToInt,\n    FloatToInt,\n    FloatToFloat,\n    IntToFloat,\n    PtrToPtr,\n    FnPtrToPtr,\n    Transmute,\n    Subtype,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl CastKind {\n    fn sfrom<'tcx, S: UnderOwnerState<'tcx>>(\n        s: &S,\n        kind: mir::CastKind,\n        src_ty: ty::Ty<'tcx>,\n        tgt_ty: ty::Ty<'tcx>,\n    ) -> CastKind {\n        match kind {\n            mir::CastKind::PointerExposeProvenance => CastKind::PointerExposeProvenance,\n            mir::CastKind::PointerWithExposedProvenance => CastKind::PointerWithExposedProvenance,\n            mir::CastKind::PointerCoercion(coercion, y) => {\n                let coercion = PointerCoercion::sfrom(s, coercion, src_ty, tgt_ty);\n                CastKind::PointerCoercion(coercion, y.sinto(s))\n            }\n            mir::CastKind::IntToInt => CastKind::IntToInt,\n            mir::CastKind::FloatToInt => CastKind::FloatToInt,\n            mir::CastKind::FloatToFloat => CastKind::FloatToFloat,\n            mir::CastKind::IntToFloat => CastKind::IntToFloat,\n            mir::CastKind::PtrToPtr => CastKind::PtrToPtr,\n            mir::CastKind::FnPtrToPtr => CastKind::FnPtrToPtr,\n            mir::CastKind::Transmute => CastKind::Transmute,\n            mir::CastKind::Subtype => CastKind::Subtype,\n        }\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S>, from: rustc_middle::mir::CoercionSource, state: S as _s)]\npub enum CoercionSource {\n    AsCast,\n    Implicit,\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::NullOp<'tcx>, state: S as s)]\npub enum NullOp {\n    OffsetOf(Vec<(VariantIdx, FieldIdx)>),\n    UbChecks,\n    ContractChecks,\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, 
S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::Rvalue<'tcx>, state: S as s)]\npub enum Rvalue {\n    Use(Operand),\n    Repeat(Operand, ConstantExpr),\n    Ref(Region, BorrowKind, Place),\n    ThreadLocalRef(DefId),\n    RawPtr(RawPtrKind, Place),\n    #[custom_arm(\n        FROM_TYPE::Cast(kind, op, tgt_ty) => {\n            let src_ty = op.ty(&*s.mir(), s.base().tcx);\n            let kind = CastKind::sfrom(s, *kind, src_ty, *tgt_ty);\n            TO_TYPE::Cast(kind, op.sinto(s), tgt_ty.sinto(s))\n        },\n    )]\n    Cast(CastKind, Operand, Ty),\n    BinaryOp(BinOp, (Operand, Operand)),\n    NullaryOp(NullOp, Ty),\n    UnaryOp(UnOp, Operand),\n    Discriminant(Place),\n    Aggregate(AggregateKind, IndexVec<FieldIdx, Operand>),\n    ShallowInitBox(Operand, Ty),\n    CopyForDeref(Place),\n    WrapUnsafeBinder(Operand, Ty),\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_middle::mir::RawPtrKind, state: S as _s)]\npub enum RawPtrKind {\n    Mut,\n    Const,\n    FakeForPtrMetadata,\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasMir<'tcx>>, from: rustc_middle::mir::BasicBlockData<'tcx>, state: S as s)]\npub struct BasicBlockData {\n    pub statements: Vec<Statement>,\n    pub terminator: Option<Terminator>,\n    pub is_cleanup: bool,\n}\n\nmake_idx_wrapper!(rustc_middle::mir, BasicBlock);\nmake_idx_wrapper!(rustc_middle::mir, SourceScope);\nmake_idx_wrapper!(rustc_middle::mir, Local);\nmake_idx_wrapper!(rustc_middle::ty, UserTypeAnnotationIndex);\nmake_idx_wrapper!(rustc_abi, FieldIdx);\n\n/// Reflects [`rustc_middle::mir::UnOp`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Copy, Clone, Debug, JsonSchema)]\n#[args(<'slt, S: UnderOwnerState<'slt>>, from: mir::UnOp, state: S as _s)]\npub enum UnOp {\n    Not,\n    Neg,\n    PtrMetadata,\n}\n\n/// Reflects 
[`rustc_middle::mir::BinOp`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Copy, Clone, Debug, JsonSchema)]\n#[args(<'slt, S: UnderOwnerState<'slt>>, from: mir::BinOp, state: S as _s)]\npub enum BinOp {\n    Add,\n    AddUnchecked,\n    AddWithOverflow,\n    Sub,\n    SubUnchecked,\n    SubWithOverflow,\n    Mul,\n    MulUnchecked,\n    MulWithOverflow,\n    Div,\n    Rem,\n    BitXor,\n    BitAnd,\n    BitOr,\n    Shl,\n    ShlUnchecked,\n    Shr,\n    ShrUnchecked,\n    Eq,\n    Lt,\n    Le,\n    Ne,\n    Ge,\n    Gt,\n    Cmp,\n    Offset,\n}\n\n/// Reflects [`rustc_middle::mir::AssignOp`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Copy, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: mir::AssignOp, state: S as _s)]\npub enum AssignOp {\n    AddAssign,\n    SubAssign,\n    MulAssign,\n    DivAssign,\n    RemAssign,\n    BitXorAssign,\n    BitAndAssign,\n    BitOrAssign,\n    ShlAssign,\n    ShrAssign,\n}\n\n/// Reflects [`rustc_middle::mir::BorrowKind`]\n#[derive(AdtInto)]\n#[args(<S>, from: mir::BorrowKind, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum BorrowKind {\n    Shared,\n    Fake(FakeBorrowKind),\n    Mut { kind: MutBorrowKind },\n}\n\n/// Reflects [`rustc_middle::mir::MutBorrowKind`]\n#[derive(AdtInto)]\n#[args(<S>, from: rustc_middle::mir::MutBorrowKind, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum MutBorrowKind {\n    Default,\n    TwoPhaseBorrow,\n    ClosureCapture,\n}\n\n/// Reflects [`rustc_middle::mir::FakeBorrowKind`]\n#[derive(AdtInto)]\n#[args(<S>, from: rustc_middle::mir::FakeBorrowKind, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum FakeBorrowKind {\n    /// A shared (deep) borrow. 
Data must be immutable and is aliasable.\n    Deep,\n    /// The immediately borrowed place must be immutable, but projections from\n    /// it don't need to be. This is used to prevent match guards from replacing\n    /// the scrutinee. For example, a fake borrow of `a.b` doesn't\n    /// conflict with a mutable borrow of `a.b.c`.\n    Shallow,\n}\n\nsinto_todo!(rustc_ast::ast, InlineAsmTemplatePiece);\nsinto_todo!(rustc_ast::ast, InlineAsmOptions);\nsinto_todo!(rustc_middle::mir, InlineAsmOperand<'tcx>);\nsinto_todo!(rustc_middle::mir, AssertMessage<'tcx>);\nsinto_todo!(rustc_middle::mir, FakeReadCause);\nsinto_todo!(rustc_middle::mir, RetagKind);\nsinto_todo!(rustc_middle::mir, UserTypeProjection);\nsinto_todo!(rustc_middle::mir, UnwindTerminateReason);\nsinto_todo!(rustc_middle::mir::coverage, CoverageKind);\n"
  },
  {
    "path": "frontend/exporter/src/types/mod.rs",
    "content": "mod attributes;\nmod def_id;\nmod hir;\nmod mir;\nmod new;\npub(crate) mod serialize_int;\nmod span;\nmod thir;\nmod ty;\n\npub use attributes::*;\npub use def_id::*;\npub use hir::*;\npub use mir::*;\npub use new::*;\npub use span::*;\npub use thir::*;\npub use ty::*;\n"
  },
  {
    "path": "frontend/exporter/src/types/new/full_def.rs",
    "content": "use crate::prelude::*;\n\n#[cfg(feature = \"rustc\")]\nuse rustc_hir::def::DefKind as RDefKind;\n#[cfg(feature = \"rustc\")]\nuse rustc_middle::ty;\n#[cfg(feature = \"rustc\")]\nuse rustc_span::def_id::DefId as RDefId;\n#[cfg(feature = \"rustc\")]\nuse std::sync::Arc;\n\n/// Hack: charon used to rely on the old `()` default everywhere. To avoid big merge conflicts with\n/// in-flight PRs we're changing the default here. Eventually this should be removed.\ntype DefaultFullDefBody = MirBody<mir_kinds::Unknown>;\n\n/// Gathers a lot of definition information about a [`rustc_hir::def_id::DefId`].\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct FullDef<Body = DefaultFullDefBody> {\n    /// A reference to the current item. If the item was provided with generic args, they are\n    /// stored here; otherwise the args are the identity_args for this item.\n    pub this: ItemRef,\n    /// The span of the definition of this item (e.g. for a function this is is signature).\n    pub span: Span,\n    /// The span of the whole definition (including e.g. the function body).\n    pub source_span: Option<Span>,\n    /// The text of the whole definition.\n    pub source_text: Option<String>,\n    /// Attributes on this definition, if applicable.\n    pub attributes: Vec<Attribute>,\n    /// Visibility of the definition, for definitions where this makes sense.\n    pub visibility: Option<bool>,\n    /// If this definition is a lang item, we store the identifier, e.g. `sized`.\n    pub lang_item: Option<String>,\n    /// If this definition is a diagnostic item, we store the identifier, e.g. `box_new`.\n    pub diagnostic_item: Option<String>,\n    pub kind: FullDefKind<Body>,\n}\n\n#[cfg(feature = \"rustc\")]\n/// Construct the `FullDefKind` for this item. 
If `args` is `Some`, the returned `FullDef` will be\n/// instantiated with the provided generics.\nfn translate_full_def<'tcx, S, Body>(\n    s: &S,\n    def_id: &DefId,\n    args: Option<ty::GenericArgsRef<'tcx>>,\n) -> FullDef<Body>\nwhere\n    S: UnderOwnerState<'tcx>,\n    Body: IsBody + TypeMappable,\n{\n    let tcx = s.base().tcx;\n    let rust_def_id = def_id.underlying_rust_def_id();\n    let source_span;\n    let attributes;\n    let visibility;\n    let lang_item;\n    let diagnostic_item;\n    let kind;\n    if let Some(item) = def_id.as_synthetic(s) {\n        let adt_kind = match item {\n            SyntheticItem::Array => AdtKind::Array,\n            SyntheticItem::Slice => AdtKind::Slice,\n            SyntheticItem::Tuple(..) => AdtKind::Tuple,\n        };\n        let param_env = get_param_env(s, args);\n        let destruct_impl = {\n            let destruct_trait = tcx.lang_items().destruct_trait().unwrap();\n            let type_of_self = inst_binder(tcx, s.typing_env(), args, tcx.type_of(rust_def_id));\n            virtual_impl_for(s, ty::TraitRef::new(tcx, destruct_trait, [type_of_self]))\n        };\n        kind = FullDefKind::Adt {\n            param_env,\n            adt_kind,\n            variants: [].into_iter().collect(),\n            flags: AdtFlags::AdtFlags {\n                todo: String::new(),\n            },\n            repr: ReprOptions {\n                int_specified: false,\n                typ: Ty::new(s, TyKind::Int(IntTy::Isize)),\n                align: None,\n                pack: None,\n                flags: Default::default(),\n            },\n            drop_glue: get_drop_glue_shim(s, args),\n            destruct_impl,\n        };\n\n        source_span = None;\n        attributes = Default::default();\n        visibility = Default::default();\n        lang_item = Default::default();\n        diagnostic_item = Default::default();\n    } else if let Some(promoted_id) = def_id.promoted_id() {\n        let parent_def 
= def_id\n            .parent\n            .as_ref()\n            .unwrap()\n            .full_def_maybe_instantiated::<_, Body>(s, args);\n        let parent_param_env = parent_def.param_env().unwrap();\n        let param_env = ParamEnv {\n            generics: TyGenerics {\n                parent: def_id.parent.clone(),\n                parent_count: parent_param_env.generics.count_total_params(),\n                params: vec![],\n                has_self: false,\n                has_late_bound_regions: None,\n            },\n            predicates: GenericPredicates { predicates: vec![] },\n            parent: Some(parent_def.this().clone()),\n        };\n        let body = get_promoted_mir(tcx, rust_def_id, promoted_id.as_rust_promoted_id());\n        let body = substitute(tcx, s.typing_env(), args, body);\n        source_span = Some(body.span);\n\n        let ty: Ty = body.local_decls[rustc_middle::mir::Local::ZERO].ty.sinto(s);\n        kind = FullDefKind::Const {\n            param_env,\n            ty,\n            kind: ConstKind::PromotedConst,\n            body: Body::from_mir(s, body),\n            value: None,\n        };\n\n        // None of these make sense for a promoted constant.\n        attributes = Default::default();\n        visibility = Default::default();\n        lang_item = Default::default();\n        diagnostic_item = Default::default();\n    } else {\n        kind = translate_full_def_kind(s, rust_def_id, args);\n\n        let def_kind = get_def_kind(tcx, rust_def_id);\n        source_span = rust_def_id.as_local().map(|ldid| tcx.source_span(ldid));\n        attributes = get_def_attrs(tcx, rust_def_id, def_kind).sinto(s);\n        visibility = get_def_visibility(tcx, rust_def_id, def_kind);\n        lang_item = s\n            .base()\n            .tcx\n            .as_lang_item(rust_def_id)\n            .map(|litem| litem.name())\n            .sinto(s);\n        diagnostic_item = tcx.get_diagnostic_name(rust_def_id).sinto(s);\n    }\n\n 
   let source_text = source_span\n        .filter(|source_span| source_span.ctxt().is_root())\n        .and_then(|source_span| tcx.sess.source_map().span_to_snippet(source_span).ok());\n    let this = if can_have_generics(tcx, rust_def_id) {\n        let args_or_default = args.unwrap_or_else(|| {\n            if matches!(def_id.kind, DefKind::Closure) {\n                // For closures we use the args of their parent. Otherwise closure items get some\n                // special generics used for inference that we don't care about.\n                ty::GenericArgs::identity_for_item(tcx, tcx.typeck_root_def_id(rust_def_id))\n            } else {\n                ty::GenericArgs::identity_for_item(tcx, rust_def_id)\n            }\n        });\n        let item = translate_item_ref(s, rust_def_id, args_or_default);\n        // Tricky: hax's DefId has more info (could be a promoted const), we must be careful to use\n        // the input DefId instead of the one derived from `rust_def_id`.\n        item.with_def_id(s, def_id)\n    } else {\n        ItemRef::dummy_without_generics(s, def_id.clone())\n    };\n    FullDef {\n        this,\n        span: def_id.def_span(s),\n        source_span: source_span.sinto(s),\n        source_text,\n        attributes,\n        visibility,\n        lang_item,\n        diagnostic_item,\n        kind,\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl DefId {\n    /// Get the span of the definition of this item. 
This is the span used in diagnostics when\n    /// referring to the item.\n    pub fn def_span<'tcx>(&self, s: &impl BaseState<'tcx>) -> Span {\n        use DefKind::*;\n        let tcx = s.base().tcx;\n        let def_id = self.underlying_rust_def_id();\n        if let ForeignMod = &self.kind {\n            // These kind causes `def_span` to panic.\n            rustc_span::DUMMY_SP\n        } else if let Some(ldid) = def_id.as_local() {\n            let hir_id = tcx.local_def_id_to_hir_id(ldid);\n            if matches!(tcx.hir_node(hir_id), rustc_hir::Node::Synthetic) {\n                // Synthetic items (those we create ourselves) make `def_span` panic.\n                rustc_span::DUMMY_SP\n            } else {\n                // Unlike `tcx.def_span`, `tcx.hir_span_with_body` returns the full span of the item, not only of its header\n                tcx.hir_span_with_body(hir_id)\n            }\n        } else {\n            tcx.def_span(def_id)\n        }\n        .sinto(s)\n    }\n\n    /// Get the full definition of this item.\n    pub fn full_def<'tcx, S, Body>(&self, s: &S) -> Arc<FullDef<Body>>\n    where\n        Body: IsBody + TypeMappable,\n        S: BaseState<'tcx>,\n    {\n        self.full_def_maybe_instantiated(s, None)\n    }\n\n    /// Get the full definition of this item, instantiated if `args` is `Some`.\n    pub fn full_def_maybe_instantiated<'tcx, S, Body>(\n        &self,\n        s: &S,\n        args: Option<ty::GenericArgsRef<'tcx>>,\n    ) -> Arc<FullDef<Body>>\n    where\n        Body: IsBody + TypeMappable,\n        S: BaseState<'tcx>,\n    {\n        let rust_def_id = self.underlying_rust_def_id();\n        let s = &s.with_owner_id(rust_def_id);\n        let cache_key = (self.promoted_id(), args);\n        if let Some(def) =\n            s.with_cache(|cache| cache.full_defs.entry(cache_key).or_default().get().cloned())\n        {\n            return def;\n        }\n        let def = Arc::new(translate_full_def(s, self, args));\n   
     s.with_cache(|cache| {\n            cache\n                .full_defs\n                .entry(cache_key)\n                .or_default()\n                .insert(def.clone());\n        });\n        def\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl ItemRef {\n    /// Get the full definition of the item, instantiated with the provided generics.\n    pub fn instantiated_full_def<'tcx, S, Body>(&self, s: &S) -> Arc<FullDef<Body>>\n    where\n        Body: IsBody + TypeMappable,\n        S: BaseState<'tcx>,\n    {\n        let args = self.rustc_args(s);\n        self.def_id.full_def_maybe_instantiated(s, Some(args))\n    }\n}\n\n/// The combination of type generics and related predicates.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct ParamEnv {\n    /// Generic parameters of the item.\n    pub generics: TyGenerics,\n    /// Required predicates for the item (see `traits::utils::required_predicates`).\n    pub predicates: GenericPredicates,\n    /// A reference to the parent of this item, with appropriate args.\n    pub parent: Option<ItemRef>,\n}\n\n/// The kind of a constant item.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum ConstKind {\n    /// Top-level constant: `const CONST: usize = 42;`\n    TopLevel,\n    /// Anonymous constant, e.g. the `1 + 2` in `[u8; 1 + 2]`\n    AnonConst,\n    /// An inline constant, e.g. `const { 1 + 2 }`\n    InlineConst,\n    /// A promoted constant, e.g. 
the `1 + 2` in `&(1 + 2)`\n    PromotedConst,\n}\n\n/// Imbues [`rustc_hir::def::DefKind`] with a lot of extra information.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum FullDefKind<Body> {\n    // Types\n    /// ADts (`Struct`, `Enum` and `Union` map to this variant).\n    Adt {\n        param_env: ParamEnv,\n        adt_kind: AdtKind,\n        variants: IndexVec<VariantIdx, VariantDef>,\n        flags: AdtFlags,\n        repr: ReprOptions,\n        /// MIR body of the builtin `drop` impl.\n        drop_glue: Option<Body>,\n        /// Info required to construct a virtual `Drop` impl for this adt.\n        destruct_impl: Box<VirtualTraitImpl>,\n    },\n    /// Type alias: `type Foo = Bar;`\n    TyAlias {\n        param_env: ParamEnv,\n        ty: Ty,\n    },\n    /// Type from an `extern` block.\n    ForeignTy,\n    /// Associated type: `trait MyTrait { type Assoc; }`\n    AssocTy {\n        param_env: ParamEnv,\n        implied_predicates: GenericPredicates,\n        associated_item: AssocItem,\n        value: Option<Ty>,\n    },\n    /// Opaque type, aka `impl Trait`.\n    OpaqueTy,\n\n    // Traits\n    Trait {\n        param_env: ParamEnv,\n        implied_predicates: GenericPredicates,\n        /// The special `Self: Trait` clause.\n        self_predicate: TraitPredicate,\n        /// Associated items, in definition order.\n        items: Vec<AssocItem>,\n        /// `dyn Trait<Args.., Ty = <Self as Trait>::Ty..>` for this trait. 
This is `Some` iff this\n        /// trait is dyn-compatible.\n        dyn_self: Option<Ty>,\n        /// Whether it's a `unsafe trait`, or just a `trait`.\n        safety: Safety,\n    },\n    /// Trait alias: `trait IntIterator = Iterator<Item = i32>;`\n    TraitAlias {\n        param_env: ParamEnv,\n        implied_predicates: GenericPredicates,\n        /// The special `Self: Trait` clause.\n        self_predicate: TraitPredicate,\n        /// `dyn Trait<Args.., Ty = <Self as Trait>::Ty..>` for this trait. This is `Some` iff this\n        /// trait is dyn-compatible.\n        dyn_self: Option<Ty>,\n    },\n    TraitImpl {\n        param_env: ParamEnv,\n        /// The trait that is implemented by this impl block.\n        trait_pred: TraitPredicate,\n        /// `dyn Trait<Args.., Ty = <Self as Trait>::Ty..>` for the implemented trait. This is\n        /// `Some` iff the trait is dyn-compatible.\n        dyn_self: Option<Ty>,\n        /// The `ImplExpr`s required to satisfy the predicates on the trait declaration. E.g.:\n        /// ```ignore\n        /// trait Foo: Bar {}\n        /// impl Foo for () {} // would supply an `ImplExpr` for `Self: Bar`.\n        /// ```\n        implied_impl_exprs: Vec<ImplExpr>,\n        /// Associated items, in the order of the trait declaration. 
Includes defaulted items.\n        items: Vec<ImplAssocItem>,\n    },\n    InherentImpl {\n        param_env: ParamEnv,\n        /// The type to which this block applies.\n        ty: Ty,\n        /// Associated items, in definition order.\n        items: Vec<AssocItem>,\n    },\n\n    // Functions\n    Fn {\n        param_env: ParamEnv,\n        inline: InlineAttr,\n        is_const: bool,\n        sig: PolyFnSig,\n        body: Option<Body>,\n    },\n    /// Associated function: `impl MyStruct { fn associated() {} }` or `trait Foo { fn associated()\n    /// {} }`\n    AssocFn {\n        param_env: ParamEnv,\n        associated_item: AssocItem,\n        inline: InlineAttr,\n        is_const: bool,\n        /// The function signature when this method is used in a vtable. `None` if this method is not\n        /// vtable safe. `Some(sig)` if it is vtable safe, where `sig` is the trait method declaration's\n        /// signature with `Self` replaced by `dyn Trait` and associated types normalized.\n        vtable_sig: Option<PolyFnSig>,\n        sig: PolyFnSig,\n        body: Option<Body>,\n    },\n    /// A closure, coroutine, or coroutine-closure.\n    ///\n    /// Note: the (early-bound) generics of a closure are the same as those of the item in which it\n    /// is defined.\n    Closure {\n        args: ClosureArgs,\n        is_const: bool,\n        /// Info required to construct a virtual `FnOnce` impl for this closure.\n        fn_once_impl: Box<VirtualTraitImpl>,\n        /// Info required to construct a virtual `FnMut` impl for this closure.\n        fn_mut_impl: Option<Box<VirtualTraitImpl>>,\n        /// Info required to construct a virtual `Fn` impl for this closure.\n        fn_impl: Option<Box<VirtualTraitImpl>>,\n        /// For `FnMut`&`Fn` closures: the MIR for the `call_once` method; it simply calls\n        /// `call_mut`.\n        once_shim: Option<Body>,\n        /// MIR body of the builtin `drop` impl.\n        drop_glue: Option<Body>,\n        /// 
Info required to construct a virtual `Drop` impl for this closure.\n        destruct_impl: Box<VirtualTraitImpl>,\n    },\n\n    // Constants\n    Const {\n        param_env: ParamEnv,\n        ty: Ty,\n        kind: ConstKind,\n        body: Option<Body>,\n        value: Option<ConstantExpr>,\n    },\n    /// Associated constant: `trait MyTrait { const ASSOC: usize; }`\n    AssocConst {\n        param_env: ParamEnv,\n        associated_item: AssocItem,\n        ty: Ty,\n        body: Option<Body>,\n        value: Option<ConstantExpr>,\n    },\n    Static {\n        param_env: ParamEnv,\n        /// Whether it's a `unsafe static`, `safe static` (inside extern only) or just a `static`.\n        safety: Safety,\n        /// Whether it's a `static mut` or just a `static`.\n        mutability: Mutability,\n        /// Whether it's an anonymous static generated for nested allocations.\n        nested: bool,\n        ty: Ty,\n        body: Option<Body>,\n    },\n\n    // Crates and modules\n    ExternCrate,\n    Use(Option<(UsePath, UseKind)>),\n    Mod {\n        items: Vec<(Option<Ident>, DefId)>,\n    },\n    /// An `extern` block.\n    ForeignMod {\n        items: Vec<DefId>,\n    },\n\n    // Type-level parameters\n    /// Type parameter: the `T` in `struct Vec<T> { ... }`\n    TyParam,\n    /// Constant generic parameter: `struct Foo<const N: usize> { ... }`\n    ConstParam,\n    /// Lifetime parameter: the `'a` in `struct Foo<'a> { ... }`\n    LifetimeParam,\n\n    // ADT parts\n    /// Refers to the variant definition, [`DefKind::Ctor`] refers to its constructor if it exists.\n    Variant,\n    /// The constructor function of a tuple/unit struct or tuple/unit enum variant.\n    Ctor {\n        adt_def_id: DefId,\n        ctor_of: CtorOf,\n        variant_id: VariantIdx,\n        fields: IndexVec<FieldIdx, FieldDef>,\n        output_ty: Ty,\n    },\n    /// A field in a struct, enum or union. 
e.g.\n    /// - `bar` in `struct Foo { bar: u8 }`\n    /// - `Foo::Bar::0` in `enum Foo { Bar(u8) }`\n    Field,\n\n    // Others\n    /// Macros\n    Macro(MacroKinds),\n    /// A use of `global_asm!`.\n    GlobalAsm,\n    /// A synthetic coroutine body created by the lowering of a coroutine-closure, such as an async\n    /// closure.\n    SyntheticCoroutineBody,\n}\n\n#[cfg(feature = \"rustc\")]\nfn gen_vtable_sig<'tcx>(\n    // The state that owns the method DefId\n    s: &impl UnderOwnerState<'tcx>,\n    args: Option<ty::GenericArgsRef<'tcx>>,\n) -> Option<PolyFnSig> {\n    let method_def_id = s.owner_id();\n    let tcx = s.base().tcx;\n    let assoc_item = tcx.associated_item(method_def_id);\n    let container_id = assoc_item.container_id(tcx);\n\n    // Get the original trait method id.\n    let method_decl_id = match assoc_item.container {\n        ty::AssocContainer::TraitImpl(Ok(id)) => id,\n        ty::AssocContainer::Trait => method_def_id,\n        _ => return None,\n    };\n    let trait_id = tcx.trait_of_assoc(method_decl_id)?;\n\n    let decl_assoc_item = tcx.associated_item(method_decl_id);\n    if !rustc_trait_selection::traits::is_vtable_safe_method(tcx, trait_id, decl_assoc_item) {\n        return None;\n    }\n\n    // Move into the context of the container (trait decl or impl) instead of the method.\n    let s = &s.with_owner_id(container_id);\n    let args = {\n        let container_generics = tcx.generics_of(container_id);\n        args.map(|args| args.truncate_to(tcx, container_generics))\n    };\n\n    let dyn_self = match assoc_item.container {\n        ty::AssocContainer::Trait => get_trait_decl_dyn_self_ty(s, args),\n        ty::AssocContainer::TraitImpl(..) 
=> {\n            // For impl methods, compute concrete dyn_self from the impl's trait reference\n            let impl_def_id = assoc_item.container_id(tcx);\n            let impl_trait_ref = tcx.impl_trait_ref(impl_def_id);\n            // Get the concrete trait reference by rebasing the impl's trait ref args onto `container_args`\n            let concrete_trait_ref = inst_binder(tcx, s.typing_env(), args, impl_trait_ref);\n            dyn_self_ty(tcx, s.typing_env(), concrete_trait_ref)\n        }\n        ty::AssocContainer::InherentImpl => {\n            unreachable!()\n        }\n    }?;\n\n    // dyn_self is of form `dyn Trait<Args...>`, we extract the trait args\n    let ty::Dynamic(preds, _) = dyn_self.kind() else {\n        panic!(\"Unexpected dyn_self: {:?}\", dyn_self);\n    };\n    // Safe to use `skip_binder` because we know the predicate we built in dyn_self_ty has no bound\n    // vars.\n    let ty::ExistentialPredicate::Trait(trait_ref) = preds[0].skip_binder() else {\n        panic!(\"No principal trait found in dyn_self: {:?}\", dyn_self);\n    };\n\n    // Build a full list of args for the trait: dyn_self + trait args\n    // Note: trait_ref.args doesn't include Self (it's existential), so we prepend dyn_self\n    let mut full_args = vec![ty::GenericArg::from(dyn_self)];\n    full_args.extend(trait_ref.args.iter());\n    let trait_args = tcx.mk_args(&full_args);\n\n    // Instantiate and normalize the signature.\n    let method_decl_sig = tcx.fn_sig(method_decl_id).instantiate(tcx, trait_args);\n    let normalized_sig = normalize(tcx, s.typing_env(), method_decl_sig);\n\n    Some(normalized_sig.sinto(s))\n}\n\n#[cfg(feature = \"rustc\")]\n/// Construct the `FullDefKind` for this item.\n///\n/// If `args` is `Some`, instantiate the whole definition with these generics; otherwise keep the\n/// polymorphic definition.\n// Note: this is tricky to get right, we have to make sure to isntantiate every single field that\n// may contain a type/const/trait 
reference.\nfn translate_full_def_kind<'tcx, S, Body>(\n    s: &S,\n    def_id: RDefId,\n    args: Option<ty::GenericArgsRef<'tcx>>,\n) -> FullDefKind<Body>\nwhere\n    S: BaseState<'tcx>,\n    Body: IsBody + TypeMappable,\n{\n    let s = &s.with_owner_id(def_id);\n    let tcx = s.base().tcx;\n    let type_of_self = || inst_binder(tcx, s.typing_env(), args, tcx.type_of(def_id));\n    let args_or_default =\n        || args.unwrap_or_else(|| ty::GenericArgs::identity_for_item(tcx, def_id));\n    match get_def_kind(tcx, def_id) {\n        RDefKind::Struct { .. } | RDefKind::Union { .. } | RDefKind::Enum { .. } => {\n            let def = tcx.adt_def(def_id);\n            let variants = def\n                .variants()\n                .iter_enumerated()\n                .map(|(variant_idx, variant)| {\n                    let discr = if def.is_enum() {\n                        def.discriminant_for_variant(tcx, variant_idx)\n                    } else {\n                        // Structs and unions have a single variant.\n                        assert_eq!(variant_idx.index(), 0);\n                        ty::util::Discr {\n                            val: 0,\n                            ty: tcx.types.isize,\n                        }\n                    };\n                    VariantDef::sfrom(s, variant, discr, args)\n                })\n                .collect();\n\n            let destruct_trait = tcx.lang_items().destruct_trait().unwrap();\n            FullDefKind::Adt {\n                param_env: get_param_env(s, args),\n                adt_kind: def.adt_kind().sinto(s),\n                variants,\n                flags: def.flags().sinto(s),\n                repr: def.repr().sinto(s),\n                drop_glue: get_drop_glue_shim(s, args),\n                destruct_impl: virtual_impl_for(\n                    s,\n                    ty::TraitRef::new(tcx, destruct_trait, [type_of_self()]),\n                ),\n            }\n        }\n        
RDefKind::TyAlias { .. } => {\n            let s = &s.with_base(Base {\n                // Rust doesn't enforce bounds on generic parameters in type aliases. Thus, when\n                // translating type aliases, we need to disable trait resolution errors. For more\n                // details, please see https://github.com/hacspec/hax/issues/707.\n                silence_resolution_errors: true,\n                ..s.base()\n            });\n            FullDefKind::TyAlias {\n                param_env: get_param_env(s, args),\n                ty: type_of_self().sinto(s),\n            }\n        }\n        RDefKind::ForeignTy => FullDefKind::ForeignTy,\n        RDefKind::AssocTy { .. } => FullDefKind::AssocTy {\n            param_env: get_param_env(s, args),\n            implied_predicates: get_implied_predicates(s, args),\n            associated_item: AssocItem::sfrom_instantiated(s, &tcx.associated_item(def_id), args),\n            value: if tcx.defaultness(def_id).has_value() {\n                Some(type_of_self().sinto(s))\n            } else {\n                None\n            },\n        },\n        RDefKind::OpaqueTy => FullDefKind::OpaqueTy,\n        RDefKind::Trait { .. 
} => FullDefKind::Trait {\n            param_env: get_param_env(s, args),\n            implied_predicates: get_implied_predicates(s, args),\n            self_predicate: get_self_predicate(s, args),\n            dyn_self: get_trait_decl_dyn_self_ty(s, args).sinto(s),\n            items: tcx\n                .associated_items(def_id)\n                .in_definition_order()\n                .map(|assoc| {\n                    let item_args = args.map(|args| {\n                        let item_identity_args =\n                            ty::GenericArgs::identity_for_item(tcx, assoc.def_id);\n                        let item_args = item_identity_args.rebase_onto(tcx, def_id, args);\n                        tcx.mk_args(item_args)\n                    });\n                    AssocItem::sfrom_instantiated(s, assoc, item_args)\n                })\n                .collect::<Vec<_>>(),\n            safety: tcx.trait_def(def_id).safety.sinto(s),\n        },\n        RDefKind::TraitAlias { .. } => FullDefKind::TraitAlias {\n            param_env: get_param_env(s, args),\n            implied_predicates: get_implied_predicates(s, args),\n            self_predicate: get_self_predicate(s, args),\n            dyn_self: get_trait_decl_dyn_self_ty(s, args).sinto(s),\n        },\n        RDefKind::Impl { of_trait, .. 
} => {\n            use std::collections::HashMap;\n            let param_env = get_param_env(s, args);\n            if !of_trait {\n                let ty = tcx.type_of(def_id);\n                let ty = inst_binder(tcx, s.typing_env(), args, ty);\n                let items = tcx\n                    .associated_items(def_id)\n                    .in_definition_order()\n                    .map(|assoc| {\n                        let item_args = args.map(|args| {\n                            let item_identity_args =\n                                ty::GenericArgs::identity_for_item(tcx, assoc.def_id);\n                            let item_args = item_identity_args.rebase_onto(tcx, def_id, args);\n                            tcx.mk_args(item_args)\n                        });\n                        AssocItem::sfrom_instantiated(s, assoc, item_args)\n                    })\n                    .collect::<Vec<_>>();\n                FullDefKind::InherentImpl {\n                    param_env,\n                    ty: ty.sinto(s),\n                    items,\n                }\n            } else {\n                let trait_ref = tcx.impl_trait_ref(def_id);\n                let trait_ref = inst_binder(tcx, s.typing_env(), args, trait_ref);\n                let polarity = tcx.impl_polarity(def_id);\n                let trait_pred = TraitPredicate {\n                    trait_ref: trait_ref.sinto(s),\n                    is_positive: matches!(polarity, ty::ImplPolarity::Positive),\n                };\n                let dyn_self = dyn_self_ty(tcx, s.typing_env(), trait_ref).sinto(s);\n                // Impl exprs required by the trait.\n                let required_impl_exprs =\n                    solve_item_implied_traits(s, trait_ref.def_id, trait_ref.args);\n\n                let mut item_map: HashMap<RDefId, _> = tcx\n                    .associated_items(def_id)\n                    .in_definition_order()\n                    .map(|assoc| 
(assoc.trait_item_def_id().unwrap(), assoc))\n                    .collect();\n                let items = tcx\n                    .associated_items(trait_ref.def_id)\n                    .in_definition_order()\n                    .map(|decl_assoc| {\n                        let decl_def_id = decl_assoc.def_id;\n                        // Impl exprs required by the item.\n                        let required_impl_exprs;\n                        let value = match item_map.remove(&decl_def_id) {\n                            Some(impl_assoc) => {\n                                required_impl_exprs = {\n                                    let item_args =\n                                        ty::GenericArgs::identity_for_item(tcx, impl_assoc.def_id);\n                                    // Subtlety: we have to add the GAT arguments (if any) to the trait ref arguments.\n                                    let args = item_args.rebase_onto(tcx, def_id, trait_ref.args);\n                                    let state_with_id = s.with_owner_id(impl_assoc.def_id);\n                                    solve_item_implied_traits(&state_with_id, decl_def_id, args)\n                                };\n\n                                ImplAssocItemValue::Provided {\n                                    def_id: impl_assoc.def_id.sinto(s),\n                                    is_override: decl_assoc.defaultness(tcx).has_value(),\n                                }\n                            }\n                            None => {\n                                required_impl_exprs = if tcx.generics_of(decl_def_id).is_own_empty()\n                                {\n                                    // Non-GAT case.\n                                    let item_args =\n                                        ty::GenericArgs::identity_for_item(tcx, decl_def_id);\n                                    let args = item_args.rebase_onto(tcx, def_id, trait_ref.args);\n               
                     // TODO: is it the right `def_id`?\n                                    let state_with_id = s.with_owner_id(def_id);\n                                    solve_item_implied_traits(&state_with_id, decl_def_id, args)\n                                } else {\n                                    // FIXME: For GATs, we need a param_env that has the arguments of\n                                    // the impl plus those of the associated type, but there's no\n                                    // def_id with that param_env.\n                                    vec![]\n                                };\n                                match decl_assoc.kind {\n                                    ty::AssocKind::Type { .. } => {\n                                        let ty = tcx\n                                            .type_of(decl_def_id)\n                                            .instantiate(tcx, trait_ref.args)\n                                            .sinto(s);\n                                        ImplAssocItemValue::DefaultedTy { ty }\n                                    }\n                                    ty::AssocKind::Fn { .. 
} => {\n                                        let sig = if tcx.generics_of(decl_def_id).is_own_empty() {\n                                            // The method doesn't have generics of its own, so\n                                            // we can instantiate it with just the trait\n                                            // generics.\n                                            let sig = tcx\n                                                .fn_sig(decl_def_id)\n                                                .instantiate(tcx, trait_ref.args)\n                                                .sinto(s);\n                                            Some(sig)\n                                        } else {\n                                            None\n                                        };\n                                        ImplAssocItemValue::DefaultedFn { sig }\n                                    }\n                                    ty::AssocKind::Const { .. } => {\n                                        ImplAssocItemValue::DefaultedConst {}\n                                    }\n                                }\n                            }\n                        };\n\n                        ImplAssocItem {\n                            name: decl_assoc.opt_name().sinto(s),\n                            value,\n                            required_impl_exprs,\n                            decl_def_id: decl_def_id.sinto(s),\n                        }\n                    })\n                    .collect();\n                assert!(item_map.is_empty());\n                FullDefKind::TraitImpl {\n                    param_env,\n                    trait_pred,\n                    dyn_self,\n                    implied_impl_exprs: required_impl_exprs,\n                    items,\n                }\n            }\n        }\n        RDefKind::Fn { .. 
} => FullDefKind::Fn {\n            param_env: get_param_env(s, args),\n            inline: tcx.codegen_fn_attrs(def_id).inline.sinto(s),\n            is_const: tcx.constness(def_id) == rustc_hir::Constness::Const,\n            sig: inst_binder(tcx, s.typing_env(), args, tcx.fn_sig(def_id)).sinto(s),\n            body: get_body(s, args),\n        },\n        RDefKind::AssocFn { .. } => {\n            let item = tcx.associated_item(def_id);\n            FullDefKind::AssocFn {\n                param_env: get_param_env(s, args),\n                associated_item: AssocItem::sfrom_instantiated(s, &item, args),\n                inline: tcx.codegen_fn_attrs(def_id).inline.sinto(s),\n                is_const: tcx.constness(def_id) == rustc_hir::Constness::Const,\n                vtable_sig: gen_vtable_sig(s, args),\n                sig: get_method_sig(tcx, s.typing_env(), def_id, args).sinto(s),\n                body: get_body(s, args),\n            }\n        }\n        RDefKind::Closure { .. } => {\n            use ty::ClosureKind::{Fn, FnMut};\n            let closure_ty = type_of_self();\n            let ty::TyKind::Closure(_, closure_args) = closure_ty.kind() else {\n                unreachable!()\n            };\n            let closure = closure_args.as_closure();\n            // We lose lifetime information here. 
Eventually would be nice not to.\n            let input_ty = erase_free_regions(tcx, closure.sig().input(0).skip_binder());\n            let trait_args = [closure_ty, input_ty];\n            let fn_once_trait = tcx.lang_items().fn_once_trait().unwrap();\n            let fn_mut_trait = tcx.lang_items().fn_mut_trait().unwrap();\n            let fn_trait = tcx.lang_items().fn_trait().unwrap();\n            let destruct_trait = tcx.lang_items().destruct_trait().unwrap();\n            FullDefKind::Closure {\n                is_const: tcx.constness(def_id) == rustc_hir::Constness::Const,\n                args: ClosureArgs::sfrom(s, def_id, closure),\n                once_shim: get_closure_once_shim(s, closure_ty),\n                drop_glue: get_drop_glue_shim(s, args),\n                destruct_impl: virtual_impl_for(\n                    s,\n                    ty::TraitRef::new(tcx, destruct_trait, [type_of_self()]),\n                ),\n                fn_once_impl: virtual_impl_for(\n                    s,\n                    ty::TraitRef::new(tcx, fn_once_trait, trait_args),\n                ),\n                fn_mut_impl: matches!(closure.kind(), FnMut | Fn)\n                    .then(|| virtual_impl_for(s, ty::TraitRef::new(tcx, fn_mut_trait, trait_args))),\n                fn_impl: matches!(closure.kind(), Fn)\n                    .then(|| virtual_impl_for(s, ty::TraitRef::new(tcx, fn_trait, trait_args))),\n            }\n        }\n        kind @ (RDefKind::Const { .. }\n        | RDefKind::AnonConst { .. }\n        | RDefKind::InlineConst { .. }) => {\n            let kind = match kind {\n                RDefKind::Const { .. } => ConstKind::TopLevel,\n                RDefKind::AnonConst { .. } => ConstKind::AnonConst,\n                RDefKind::InlineConst { .. 
} => ConstKind::InlineConst,\n                _ => unreachable!(),\n            };\n            FullDefKind::Const {\n                param_env: get_param_env(s, args),\n                ty: type_of_self().sinto(s),\n                kind,\n                body: get_body(s, args),\n                value: const_value(s, def_id, args_or_default()),\n            }\n        }\n        RDefKind::AssocConst { .. } => FullDefKind::AssocConst {\n            param_env: get_param_env(s, args),\n            associated_item: AssocItem::sfrom_instantiated(s, &tcx.associated_item(def_id), args),\n            ty: type_of_self().sinto(s),\n            body: get_body(s, args),\n            value: const_value(s, def_id, args_or_default()),\n        },\n        RDefKind::Static {\n            safety,\n            mutability,\n            nested,\n            ..\n        } => FullDefKind::Static {\n            param_env: get_param_env(s, args),\n            safety: safety.sinto(s),\n            mutability: mutability.sinto(s),\n            nested: nested.sinto(s),\n            ty: type_of_self().sinto(s),\n            body: get_body(s, args),\n        },\n        RDefKind::ExternCrate => FullDefKind::ExternCrate,\n        RDefKind::Use => FullDefKind::Use(\n            if let Some(ldid) = def_id.as_local()\n                && let rustc_hir::Node::Item(item) = tcx.hir_node_by_def_id(ldid)\n                && let rustc_hir::ItemKind::Use(use_path, use_kind) = item.kind\n            {\n                Some((use_path.sinto(s), use_kind.sinto(s)))\n            } else {\n                None\n            },\n        ),\n        RDefKind::Mod { .. } => FullDefKind::Mod {\n            items: get_mod_children(tcx, def_id).sinto(s),\n        },\n        RDefKind::ForeignMod { .. 
} => FullDefKind::ForeignMod {\n            items: get_foreign_mod_children(tcx, def_id).sinto(s),\n        },\n        RDefKind::TyParam => FullDefKind::TyParam,\n        RDefKind::ConstParam => FullDefKind::ConstParam,\n        RDefKind::LifetimeParam => FullDefKind::LifetimeParam,\n        RDefKind::Variant => FullDefKind::Variant,\n        RDefKind::Ctor(ctor_of, _) => {\n            let args = args_or_default();\n            let ctor_of = ctor_of.sinto(s);\n\n            // The def_id of the adt this ctor belongs to.\n            let adt_def_id = match ctor_of {\n                CtorOf::Struct => tcx.parent(def_id),\n                CtorOf::Variant => tcx.parent(tcx.parent(def_id)),\n            };\n            let adt_def = tcx.adt_def(adt_def_id);\n            let variant_id = adt_def.variant_index_with_ctor_id(def_id);\n            let fields = adt_def\n                .variant(variant_id)\n                .fields\n                .iter()\n                .map(|f| FieldDef::sfrom(s, f, args))\n                .collect();\n            let output_ty = ty::Ty::new_adt(tcx, adt_def, args).sinto(s);\n            FullDefKind::Ctor {\n                adt_def_id: adt_def_id.sinto(s),\n                ctor_of,\n                variant_id: variant_id.sinto(s),\n                fields,\n                output_ty,\n            }\n        }\n        RDefKind::Field => FullDefKind::Field,\n        RDefKind::Macro(kinds) => FullDefKind::Macro(kinds.sinto(s)),\n        RDefKind::GlobalAsm => FullDefKind::GlobalAsm,\n        RDefKind::SyntheticCoroutineBody => FullDefKind::SyntheticCoroutineBody,\n    }\n}\n\n/// An associated item in a trait impl. This can be an item provided by the trait impl, or an item\n/// that reuses the trait decl default value.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct ImplAssocItem {\n    /// This is `None` for RPTITs.\n    pub name: Option<Symbol>,\n    /// The definition of the item from the trait declaration. 
This is an `AssocTy`, `AssocFn` or\n    /// `AssocConst`.\n    pub decl_def_id: DefId,\n    /// The `ImplExpr`s required to satisfy the predicates on the associated type. E.g.:\n    /// ```ignore\n    /// trait Foo {\n    ///     type Type<T>: Clone,\n    /// }\n    /// impl Foo for () {\n    ///     type Type<T>: Arc<T>; // would supply an `ImplExpr` for `Arc<T>: Clone`.\n    /// }\n    /// ```\n    /// Empty if this item is an associated const or fn.\n    pub required_impl_exprs: Vec<ImplExpr>,\n    /// The value of the implemented item.\n    pub value: ImplAssocItemValue,\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum ImplAssocItemValue {\n    /// The item is provided by the trait impl.\n    Provided {\n        /// The definition of the item in the trait impl. This is an `AssocTy`, `AssocFn` or\n        /// `AssocConst`.\n        def_id: DefId,\n        /// Whether the trait had a default value for this item (which is therefore overriden).\n        is_override: bool,\n    },\n    /// This is an associated type that reuses the trait declaration default.\n    DefaultedTy {\n        /// The default type, with generics properly instantiated. Note that this can be a GAT;\n        /// relevant generics and predicates can be found in `decl_def`.\n        ty: Ty,\n    },\n    /// This is a non-overriden default method.\n    /// FIXME: provide properly instantiated generics.\n    DefaultedFn {\n        /// The signature of the method, if we could translate it. `None` if the method as generics\n        /// of its own, because then we'd need to resolve traits but the method doesn't have it's\n        /// own `DefId`.\n        sig: Option<PolyFnSig>,\n    },\n    /// This is an associated const that reuses the trait declaration default. 
The default const\n    /// value can be found in `decl_def`.\n    DefaultedConst,\n}\n\n/// Partial data for a trait impl, used for fake trait impls that we generate ourselves such as\n/// `FnOnce` and `Drop` impls.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct VirtualTraitImpl {\n    /// The trait that is implemented by this impl block.\n    pub trait_pred: TraitPredicate,\n    /// The `ImplExpr`s required to satisfy the predicates on the trait declaration.\n    pub implied_impl_exprs: Vec<ImplExpr>,\n    /// The associated types and their predicates, in definition order.\n    pub types: Vec<(Ty, Vec<ImplExpr>)>,\n}\n\nimpl<Body> FullDef<Body> {\n    pub fn def_id(&self) -> &DefId {\n        &self.this.def_id\n    }\n\n    /// Reference to the item itself.\n    pub fn this(&self) -> &ItemRef {\n        &self.this\n    }\n\n    pub fn kind(&self) -> &FullDefKind<Body> {\n        &self.kind\n    }\n\n    /// Returns the generics and predicates for definitions that have those.\n    pub fn param_env(&self) -> Option<&ParamEnv> {\n        use FullDefKind::*;\n        match self.kind() {\n            Adt { param_env, .. }\n            | Trait { param_env, .. }\n            | TraitAlias { param_env, .. }\n            | TyAlias { param_env, .. }\n            | AssocTy { param_env, .. }\n            | Fn { param_env, .. }\n            | AssocFn { param_env, .. }\n            | Const { param_env, .. }\n            | AssocConst { param_env, .. }\n            | Static { param_env, .. }\n            | TraitImpl { param_env, .. }\n            | InherentImpl { param_env, .. } => Some(param_env),\n            _ => None,\n        }\n    }\n\n    /// Return the parent of this item if the item inherits the typing context from its parent.\n    #[cfg(feature = \"rustc\")]\n    pub fn typing_parent<'tcx>(&self, s: &impl BaseState<'tcx>) -> Option<ItemRef> {\n        use FullDefKind::*;\n        match self.kind() {\n            AssocTy { .. 
}\n            | AssocFn { .. }\n            | AssocConst { .. }\n            | Const {\n                kind: ConstKind::AnonConst | ConstKind::InlineConst | ConstKind::PromotedConst,\n                ..\n            } => self.param_env().unwrap().parent.clone(),\n            Closure { .. } | Ctor { .. } | Variant { .. } => {\n                let parent = self.def_id().parent.as_ref().unwrap();\n                // The parent has the same generics as this item.\n                Some(self.this().with_def_id(s, parent))\n            }\n            _ => None,\n        }\n    }\n\n    /// Whether the item has any generics at all (including parent generics).\n    pub fn has_any_generics(&self) -> bool {\n        match self.param_env() {\n            Some(p) => p.generics.parent_count != 0 || !p.generics.params.is_empty(),\n            None => false,\n        }\n    }\n\n    /// Whether the item has any generics of its own (ignoring parent generics).\n    pub fn has_own_generics(&self) -> bool {\n        match self.param_env() {\n            Some(p) => !p.generics.params.is_empty(),\n            None => false,\n        }\n    }\n\n    /// Whether the item has any generics or predicates of its own (ignoring parent\n    /// generics/predicates).\n    pub fn has_own_generics_or_predicates(&self) -> bool {\n        match self.param_env() {\n            Some(p) => {\n                let has_predicates = if let FullDefKind::AssocFn { .. }\n                | FullDefKind::AssocConst { .. 
} = self.kind()\n                {\n                    // Assoc fns and consts have a special `Self: Trait` predicate inserted, which\n                    // we don't want to consider as an \"own predicate\".\n                    p.predicates.predicates.len() > 1\n                } else {\n                    !p.predicates.predicates.is_empty()\n                };\n                !p.generics.params.is_empty() || has_predicates\n            }\n            None => false,\n        }\n    }\n\n    /// Lists the children of this item that can be named, in the way of normal rust paths. For\n    /// types, this includes inherent items.\n    #[cfg(feature = \"rustc\")]\n    pub fn nameable_children<'tcx>(&self, s: &impl BaseState<'tcx>) -> Vec<(Symbol, DefId)> {\n        let mut children = match self.kind() {\n            FullDefKind::Mod { items } => items\n                .iter()\n                .filter_map(|(opt_ident, def_id)| {\n                    Some((opt_ident.as_ref()?.0.clone(), def_id.clone()))\n                })\n                .collect(),\n            FullDefKind::Adt {\n                adt_kind: AdtKind::Enum,\n                variants,\n                ..\n            } => variants\n                .iter()\n                .map(|variant| (variant.name.clone(), variant.def_id.clone()))\n                .collect(),\n            FullDefKind::InherentImpl { items, .. } | FullDefKind::Trait { items, .. } => items\n                .iter()\n                .filter_map(|item| Some((item.name.clone()?, item.def_id.clone())))\n                .collect(),\n            FullDefKind::TraitImpl { items, .. 
} => items\n                .iter()\n                .filter_map(|item| Some((item.name.clone()?, item.def_id().clone())))\n                .collect(),\n            _ => vec![],\n        };\n        // Add inherent impl items if any.\n        if let Some(rust_def_id) = self.def_id().as_rust_def_id() {\n            let tcx = s.base().tcx;\n            for impl_def_id in tcx.inherent_impls(rust_def_id) {\n                children.extend(\n                    tcx.associated_items(impl_def_id)\n                        .in_definition_order()\n                        .filter_map(|assoc| Some((assoc.opt_name()?, assoc.def_id).sinto(s))),\n                );\n            }\n        }\n        children\n    }\n\n    /// Gives the list of DefIds for associated items when self is a container\n    pub fn associated_def_ids(&self) -> Vec<DefId> {\n        match self.kind() {\n            FullDefKind::InherentImpl { items, .. } | FullDefKind::Trait { items, .. } => {\n                items.iter().map(|item| item.def_id.clone()).collect()\n            }\n            FullDefKind::TraitImpl { items, .. } => {\n                items.iter().map(|item| item.def_id().clone()).collect()\n            }\n            _ => vec![],\n        }\n    }\n}\n\nimpl ImplAssocItem {\n    /// The relevant definition: the provided implementation if any, otherwise the default\n    /// declaration from the trait declaration.\n    pub fn def_id(&self) -> &DefId {\n        match &self.value {\n            ImplAssocItemValue::Provided { def_id, .. 
} => def_id,\n            _ => &self.decl_def_id,\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nfn get_self_predicate<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    args: Option<ty::GenericArgsRef<'tcx>>,\n) -> TraitPredicate {\n    use ty::Upcast;\n    let tcx = s.base().tcx;\n    let typing_env = s.typing_env();\n    let pred: ty::TraitPredicate = crate::traits::self_predicate(tcx, s.owner_id())\n        .no_bound_vars()\n        .unwrap()\n        .upcast(tcx);\n    let pred = substitute(tcx, typing_env, args, pred);\n    pred.sinto(s)\n}\n\n/// Generates a `dyn Trait<Args.., Ty = <Self as Trait>::Ty..>` type for this trait.\n#[cfg(feature = \"rustc\")]\nfn get_trait_decl_dyn_self_ty<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    args: Option<ty::GenericArgsRef<'tcx>>,\n) -> Option<ty::Ty<'tcx>> {\n    let tcx = s.base().tcx;\n    let typing_env = s.typing_env();\n    let def_id = s.owner_id();\n\n    let self_tref = ty::TraitRef::new_from_args(\n        tcx,\n        def_id,\n        args.unwrap_or_else(|| ty::GenericArgs::identity_for_item(tcx, def_id)),\n    );\n    rustc_utils::dyn_self_ty(tcx, typing_env, self_tref).map(|ty| {\n        let ty = if args.is_some() {\n            erase_free_regions(tcx, ty)\n        } else {\n            ty\n        };\n        ty\n    })\n}\n\n/// Do the trait resolution necessary to create a new impl for the given trait_ref. Used when we\n/// generate fake trait impls e.g. 
for `FnOnce` and `Drop`.\n#[cfg(feature = \"rustc\")]\nfn virtual_impl_for<'tcx, S>(s: &S, trait_ref: ty::TraitRef<'tcx>) -> Box<VirtualTraitImpl>\nwhere\n    S: UnderOwnerState<'tcx>,\n{\n    let tcx = s.base().tcx;\n    let trait_pred = TraitPredicate {\n        trait_ref: trait_ref.sinto(s),\n        is_positive: true,\n    };\n    // Impl exprs required by the trait.\n    let required_impl_exprs = solve_item_implied_traits(s, trait_ref.def_id, trait_ref.args);\n    let types = tcx\n        .associated_items(trait_ref.def_id)\n        .in_definition_order()\n        .filter(|assoc| matches!(assoc.kind, ty::AssocKind::Type { .. }))\n        .map(|assoc| {\n            // This assumes non-GAT because this is for builtin-trait (that don't\n            // have GATs).\n            let ty = ty::Ty::new_projection(tcx, assoc.def_id, trait_ref.args).sinto(s);\n            // Impl exprs required by the type.\n            let required_impl_exprs = solve_item_implied_traits(s, assoc.def_id, trait_ref.args);\n            (ty, required_impl_exprs)\n        })\n        .collect();\n    Box::new(VirtualTraitImpl {\n        trait_pred,\n        implied_impl_exprs: required_impl_exprs,\n        types,\n    })\n}\n\n#[cfg(feature = \"rustc\")]\nfn get_body<'tcx, S, Body>(s: &S, args: Option<ty::GenericArgsRef<'tcx>>) -> Option<Body>\nwhere\n    S: UnderOwnerState<'tcx>,\n    Body: IsBody + TypeMappable,\n{\n    let def_id = s.owner_id();\n    Body::body(s, def_id, args)\n}\n\n#[cfg(feature = \"rustc\")]\nfn get_closure_once_shim<'tcx, S, Body>(s: &S, closure_ty: ty::Ty<'tcx>) -> Option<Body>\nwhere\n    S: UnderOwnerState<'tcx>,\n    Body: IsBody + TypeMappable,\n{\n    let tcx = s.base().tcx;\n    let mir = crate::closure_once_shim(tcx, closure_ty)?;\n    let body = Body::from_mir(s, mir)?;\n    Some(body)\n}\n\n#[cfg(feature = \"rustc\")]\nfn get_drop_glue_shim<'tcx, S, Body>(s: &S, args: Option<ty::GenericArgsRef<'tcx>>) -> Option<Body>\nwhere\n    S: UnderOwnerState<'tcx>,\n  
  Body: IsBody + TypeMappable,\n{\n    let tcx = s.base().tcx;\n    let mir = crate::drop_glue_shim(tcx, s.owner_id(), args)?;\n    let body = Body::from_mir(s, mir)?;\n    Some(body)\n}\n\n#[cfg(feature = \"rustc\")]\nfn get_param_env<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    args: Option<ty::GenericArgsRef<'tcx>>,\n) -> ParamEnv {\n    let tcx = s.base().tcx;\n    let def_id = s.owner_id();\n    let generics = tcx.generics_of(def_id).sinto(s);\n\n    let parent = generics.parent.as_ref().map(|parent| {\n        let parent = parent.underlying_rust_def_id();\n        let args = args.unwrap_or_else(|| ty::GenericArgs::identity_for_item(tcx, def_id));\n        let parent_args = args.truncate_to(tcx, tcx.generics_of(parent));\n        translate_item_ref(s, parent, parent_args)\n    });\n    match args {\n        None => ParamEnv {\n            generics,\n            predicates: required_predicates(tcx, def_id, s.base().options.bounds_options).sinto(s),\n            parent,\n        },\n        // An instantiated item is monomorphic.\n        Some(_) => ParamEnv {\n            generics: TyGenerics {\n                parent_count: 0,\n                params: Default::default(),\n                ..generics\n            },\n            predicates: GenericPredicates::default(),\n            parent,\n        },\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nfn get_implied_predicates<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    args: Option<ty::GenericArgsRef<'tcx>>,\n) -> GenericPredicates {\n    use std::borrow::Cow;\n    let tcx = s.base().tcx;\n    let def_id = s.owner_id();\n    let typing_env = s.typing_env();\n    let mut implied_predicates = implied_predicates(tcx, def_id, s.base().options.bounds_options);\n    if args.is_some() {\n        implied_predicates = Cow::Owned(\n            implied_predicates\n                .iter()\n                .copied()\n                .map(|(clause, span)| {\n                    let clause = substitute(tcx, typing_env, 
args, clause);\n                    (clause, span)\n                })\n                .collect(),\n        );\n    }\n    implied_predicates.sinto(s)\n}\n\n#[cfg(feature = \"rustc\")]\nfn const_value<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    def_id: RDefId,\n    args: ty::GenericArgsRef<'tcx>,\n) -> Option<ConstantExpr> {\n    let uneval = ty::UnevaluatedConst::new(def_id, args);\n    let c = eval_ty_constant(s, uneval)?;\n    match c.kind() {\n        ty::ConstKind::Error(..) => None,\n        _ => Some(c.sinto(s)),\n    }\n}\n"
  },
  {
    "path": "frontend/exporter/src/types/new/impl_infos.rs",
    "content": "use crate::prelude::*;\n\n/// Meta-informations about an `impl<GENERICS[: PREDICATES]> TRAIT for\n/// TYPE where PREDICATES {}`\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct ImplInfos {\n    pub generics: TyGenerics,\n    pub clauses: Vec<(Clause, Span)>,\n    pub typ: Ty,\n    pub trait_ref: Option<TraitRef>,\n}\n"
  },
  {
    "path": "frontend/exporter/src/types/new/item_attributes.rs",
    "content": "use crate::prelude::*;\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct ItemAttributes {\n    pub attributes: Vec<Attribute>,\n    pub parent_attributes: Vec<Attribute>,\n}\n\nimpl Default for ItemAttributes {\n    fn default() -> Self {\n        Self::new()\n    }\n}\n\nimpl ItemAttributes {\n    pub fn new() -> Self {\n        ItemAttributes {\n            attributes: vec![],\n            parent_attributes: vec![],\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nlazy_static::lazy_static! {\n    pub static ref CORE_EXTRACTION_MODE: bool =\n        std::env::var_os(\"HAX_CORE_EXTRACTION_MODE\") == Some(\"on\".into());\n}\n\n#[cfg(feature = \"rustc\")]\nimpl ItemAttributes {\n    pub fn from_owner_id<'tcx, S: BaseState<'tcx>>(\n        s: &S,\n        oid: rustc_hir::hir_id::OwnerId,\n    ) -> ItemAttributes {\n        if *CORE_EXTRACTION_MODE {\n            return ItemAttributes::new();\n        }\n        use rustc_hir::hir_id::HirId;\n        let tcx = s.base().tcx;\n        let attrs_of = |id| tcx.hir_attrs(HirId::from(id)).sinto(s);\n        ItemAttributes {\n            attributes: attrs_of(oid),\n            parent_attributes: tcx\n                .hir_parent_owner_iter(HirId::from(oid))\n                .map(|(oid, _)| oid)\n                .flat_map(attrs_of)\n                .collect(),\n        }\n    }\n    pub fn from_def_id<'tcx, S: BaseState<'tcx>>(\n        s: &S,\n        did: rustc_span::def_id::DefId,\n    ) -> ItemAttributes {\n        if let Some(def_id) = did.as_local() {\n            Self::from_owner_id(s, rustc_hir::hir_id::OwnerId { def_id })\n        } else {\n            ItemAttributes::new()\n        }\n    }\n}\n"
  },
  {
    "path": "frontend/exporter/src/types/new/mod.rs",
    "content": "//! This module contains type definitions that have no equivalent in\n//! Rustc.\n\nmod full_def;\nmod impl_infos;\nmod item_attributes;\nmod predicate_id;\nmod synthetic_items;\nmod variant_infos;\n\npub use full_def::*;\npub use impl_infos::*;\npub use item_attributes::*;\npub use predicate_id::*;\npub use synthetic_items::*;\npub use variant_infos::*;\n"
  },
  {
    "path": "frontend/exporter/src/types/new/predicate_id.rs",
    "content": "use crate::prelude::*;\n\n#[derive_group(Serializers)]\n#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[serde(transparent)]\n/// A `PredicateId` is a unique identifier for a clause or a\n/// predicate. It is computed by hashing predicates and clause in a\n/// uniform and deterministic way.\npub struct PredicateId(pub u64);\n\n#[cfg(feature = \"rustc\")]\nmod rustc {\n    use super::*;\n    impl<'tcx> Binder<PredicateKind> {\n        #[tracing::instrument(level = \"trace\")]\n        pub fn predicate_id(&self) -> PredicateId {\n            // Here, we need to be careful about not hashing a `crate::Predicate`,\n            // but `crate::Binder<crate::PredicateKind>` instead,\n            // otherwise we would get into a infinite recursion.\n            PredicateId(deterministic_hash(self))\n        }\n    }\n\n    /// A `PredicateId` can be mapped to itself via SInto. This is useful\n    /// for mirroring the type [`traits::search_clause::PathChunk`] as\n    /// [`traits::ImplExprPathChunk`].\n    impl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, PredicateId> for PredicateId {\n        fn sinto(&self, _s: &S) -> PredicateId {\n            *self\n        }\n    }\n\n    /// We need identifiers that are stable across different\n    /// architectures, different paths (which are observable from\n    /// `Span`s), etc.\n    /// Rustc's stable hash is not doing what we want here: it is sensible\n    /// to the environment. Instead, we first `sinto` and then hash with\n    /// `deterministic_hash` below.\n    fn deterministic_hash<T: std::hash::Hash>(x: &T) -> u64 {\n        use crate::deterministic_hash::DeterministicHasher;\n        use std::collections::hash_map::DefaultHasher;\n        use std::hash::BuildHasher;\n        use std::hash::BuildHasherDefault;\n        <BuildHasherDefault<DeterministicHasher<DefaultHasher>>>::default().hash_one(x)\n    }\n}\n"
  },
  {
    "path": "frontend/exporter/src/types/new/synthetic_items.rs",
    "content": "#[cfg(feature = \"rustc\")]\nuse crate::prelude::*;\n\n#[cfg(feature = \"rustc\")]\nuse {\n    rustc_hir::definitions::DisambiguatorState,\n    rustc_middle::ty,\n    rustc_span::{DUMMY_SP, Symbol, def_id::DefId as RDefId},\n    rustc_type_ir::Upcast,\n};\n\n/// We create some extra `DefId`s to represent things that rustc doesn't have a `DefId` for. This\n/// makes the pipeline much easier to have \"real\" def_ids for them.\n/// We generate fake struct-like items for each of: arrays, slices, and tuples. This makes it\n/// easier to emit trait impls for these types, especially with monomorphization. This enum tracks\n/// identifies these builtin types.\n#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]\npub enum SyntheticItem {\n    /// Fake ADT representing the `[T; N]` type.\n    Array,\n    /// Fake ADT representing the `[T]` type.\n    Slice,\n    /// Fake ADT representing the length-n tuple `(A, B, ...)`.\n    Tuple(usize),\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx> GlobalCache<'tcx> {\n    pub fn get_synthetic_def_id(\n        &mut self,\n        s: &impl BaseState<'tcx>,\n        item: SyntheticItem,\n    ) -> RDefId {\n        if let Some(def_id) = self.synthetic_def_ids.get(&item) {\n            return *def_id;\n        }\n        let tcx = s.base().tcx;\n        let mut disambiguator_state = DisambiguatorState::new();\n\n        let name = match item {\n            SyntheticItem::Array => \"<array>\",\n            SyntheticItem::Slice => \"<slice>\",\n            SyntheticItem::Tuple(n) => &format!(\"<tuple_{n}>\"),\n        };\n        // Create a fake item, to which we'll assign generics and a param_env, which we can\n        // then use to generate the `FullDefKind` we want.\n        let feed = tcx.create_def(\n            rustc_span::def_id::CRATE_DEF_ID,\n            Some(Symbol::intern(name)),\n            rustc_hir::def::DefKind::Struct,\n            None,\n            &mut disambiguator_state,\n        );\n        let def_id = 
feed.def_id().to_def_id();\n        // Insert the def_ids early so we record them even if we panic later in this function.\n        self.reverse_synthetic_map.insert(def_id, item);\n        self.synthetic_def_ids.insert(item, def_id);\n\n        let mut generics = ty::Generics {\n            parent: None,\n            parent_count: 0,\n            own_params: Default::default(),\n            param_def_id_to_index: Default::default(),\n            has_self: false,\n            has_late_bound_regions: None,\n        };\n        let mut mk_param = |name: &str, def_kind, kind| {\n            let name = Symbol::intern(name);\n            let param_feed = tcx.create_def(\n                feed.def_id(),\n                Some(name),\n                def_kind,\n                None,\n                &mut disambiguator_state,\n            );\n            param_feed.feed_hir(); // Avoid panics on `local_def_id_to_hir_id`.\n            let param_def_id = param_feed.def_id().into();\n            let index = generics.own_params.len() as u32;\n            let param_def = ty::GenericParamDef {\n                name,\n                def_id: param_def_id,\n                index,\n                kind,\n                pure_wrt_drop: true,\n            };\n            let arg = tcx.mk_param_from_def(&param_def);\n            generics.own_params.push(param_def);\n            generics.param_def_id_to_index.insert(param_def_id, index);\n            (arg, param_feed)\n        };\n\n        let mut clauses = vec![];\n        let sized_trait = tcx.lang_items().sized_trait().unwrap();\n        match item {\n            SyntheticItem::Array => {\n                let (t_arg, _) = mk_param(\n                    \"T\",\n                    rustc_hir::def::DefKind::TyParam,\n                    ty::GenericParamDefKind::Type {\n                        has_default: false,\n                        synthetic: false,\n                    },\n                );\n                let (n_arg, n_feed) = 
mk_param(\n                    \"N\",\n                    rustc_hir::def::DefKind::ConstParam,\n                    ty::GenericParamDefKind::Const { has_default: false },\n                );\n                n_feed.type_of(ty::EarlyBinder::bind(tcx.types.usize));\n\n                let item_ty = t_arg.as_type().unwrap();\n                let len = n_arg.as_const().unwrap();\n                let type_of = ty::Ty::new_array_with_const_len(tcx, item_ty, len);\n                feed.type_of(ty::EarlyBinder::bind(type_of));\n\n                let ty_is_sized = ty::TraitRef::new(tcx, sized_trait, [item_ty]);\n                clauses.push(ty_is_sized.upcast(tcx));\n                let len_is_usize = ty::ClauseKind::ConstArgHasType(len, tcx.types.usize);\n                clauses.push(len_is_usize.upcast(tcx));\n            }\n            SyntheticItem::Slice => {\n                let (t_arg, _) = mk_param(\n                    \"T\",\n                    rustc_hir::def::DefKind::TyParam,\n                    ty::GenericParamDefKind::Type {\n                        has_default: false,\n                        synthetic: false,\n                    },\n                );\n\n                let item_ty = t_arg.as_type().unwrap();\n                let type_of = ty::Ty::new_slice(tcx, item_ty);\n                feed.type_of(ty::EarlyBinder::bind(type_of));\n\n                let ty_is_sized = ty::TraitRef::new(tcx, sized_trait, [item_ty]);\n                clauses.push(ty_is_sized.upcast(tcx));\n            }\n            SyntheticItem::Tuple(len) => {\n                let tys = (0..len).into_iter().map(|i| {\n                    let name: String = if i < 26 {\n                        format!(\"{}\", (b'A' + i as u8) as char)\n                    } else {\n                        format!(\"T{i}\")\n                    };\n                    let (arg, _) = mk_param(\n                        &name,\n                        rustc_hir::def::DefKind::TyParam,\n                      
  ty::GenericParamDefKind::Type {\n                            has_default: false,\n                            synthetic: false,\n                        },\n                    );\n                    arg.as_type().unwrap()\n                });\n                let tys = tcx.arena.alloc_from_iter(tys);\n\n                let type_of = ty::Ty::new_tup(tcx, tys);\n                feed.type_of(ty::EarlyBinder::bind(type_of));\n\n                // All types except the last one are sized.\n                for ty in tys.iter().rev().skip(1).rev() {\n                    let arg: ty::GenericArg = (*ty).into();\n                    let ty_is_sized = ty::TraitRef::new(tcx, sized_trait, [arg]);\n                    clauses.push(ty_is_sized.upcast(tcx));\n                }\n            }\n        }\n\n        feed.generics_of(generics);\n        feed.explicit_predicates_of(ty::GenericPredicates {\n            parent: None,\n            predicates: tcx\n                .arena\n                .alloc_from_iter(clauses.iter().map(|cl| (*cl, DUMMY_SP))),\n        });\n        feed.param_env(ty::ParamEnv::new(\n            tcx.mk_clauses_from_iter(clauses.into_iter()),\n        ));\n        feed.feed_hir();\n\n        def_id\n    }\n}\n"
  },
  {
    "path": "frontend/exporter/src/types/new/variant_infos.rs",
    "content": "use crate::prelude::*;\nuse crate::sinto_as_usize;\n\n/// Describe the kind of a variant\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum VariantKind {\n    /// The variant is the only variant of a `struct` type\n    Struct {\n        /// Are the fields on this struct all named?\n        named: bool,\n    },\n    /// The variant is the only variant of a `union` type\n    Union,\n    /// The variant is one of the many variants of a `enum` type\n    Enum {\n        /// The index of this variant in the `enum`\n        index: VariantIdx,\n        /// Are the fields on this struct all named?\n        named: bool,\n    },\n}\n\nsinto_as_usize!(rustc_abi, VariantIdx);\n\n/// Describe a variant\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct VariantInformations {\n    pub type_namespace: DefId,\n\n    pub typ: DefId,\n    pub variant: DefId,\n    pub kind: VariantKind,\n}\n"
  },
  {
    "path": "frontend/exporter/src/types/serialize_int.rs",
    "content": "//! This module provides serde manual serializes/deserializers as\n//! strings for u128 and i128: those types are not well supported in\n//! serde (see https://github.com/serde-rs/json/issues/625).\n\nuse serde::{Deserializer, Serializer, de::Visitor, ser::Serialize};\n\npub mod unsigned {\n    use super::*;\n    pub fn serialize<S>(value: &u128, serializer: S) -> Result<S::Ok, S::Error>\n    where\n        S: Serializer,\n    {\n        value.to_string().serialize(serializer)\n    }\n\n    pub fn deserialize<'de, D>(deserializer: D) -> Result<u128, D::Error>\n    where\n        D: Deserializer<'de>,\n    {\n        deserializer.deserialize_any(IntScalarVisitor)\n    }\n\n    #[derive(Debug)]\n    struct IntScalarVisitor;\n    impl<'de> Visitor<'de> for IntScalarVisitor {\n        type Value = u128;\n\n        fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n            dbg!(self);\n            formatter.write_str(\"expect to receive integer\")\n        }\n\n        fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n        where\n            E: serde::de::Error,\n        {\n            v.parse().map_err(serde::de::Error::custom)\n        }\n\n        fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>\n        where\n            E: serde::de::Error,\n        {\n            Ok(v as u128)\n        }\n    }\n}\npub mod signed {\n    use super::*;\n    pub fn serialize<S>(value: &i128, serializer: S) -> Result<S::Ok, S::Error>\n    where\n        S: Serializer,\n    {\n        value.to_string().serialize(serializer)\n    }\n\n    pub fn deserialize<'de, D>(deserializer: D) -> Result<i128, D::Error>\n    where\n        D: Deserializer<'de>,\n    {\n        deserializer.deserialize_any(IntScalarVisitor)\n    }\n\n    #[derive(Debug)]\n    struct IntScalarVisitor;\n    impl<'de> Visitor<'de> for IntScalarVisitor {\n        type Value = i128;\n\n        fn expecting(&self, formatter: &mut std::fmt::Formatter) -> 
std::fmt::Result {\n            dbg!(self);\n            formatter.write_str(\"expect to receive integer\")\n        }\n\n        fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n        where\n            E: serde::de::Error,\n        {\n            v.parse().map_err(serde::de::Error::custom)\n        }\n\n        fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>\n        where\n            E: serde::de::Error,\n        {\n            Ok(v as i128)\n        }\n\n        fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>\n        where\n            E: serde::de::Error,\n        {\n            Ok(v as i128)\n        }\n    }\n}\n"
  },
  {
    "path": "frontend/exporter/src/types/span.rs",
    "content": "use crate::prelude::*;\nuse crate::sinto_todo;\n\n/// Reflects [`rustc_span::Loc`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, PartialEq, Eq, Hash, PartialOrd, Ord)]\npub struct Loc {\n    pub line: usize,\n    pub col: usize,\n}\n\n/// Reflects [`rustc_span::Span`]\n#[derive(::serde::Serialize, ::serde::Deserialize, Clone, Debug, JsonSchema, Eq, Ord)]\npub struct Span {\n    pub lo: Loc,\n    pub hi: Loc,\n    pub filename: FileName,\n    /// Original rustc span; can be useful for reporting rustc\n    /// diagnostics (this is used in Charon)\n    #[cfg(feature = \"rustc\")]\n    #[serde(skip)]\n    pub rust_span_data: Option<rustc_span::SpanData>,\n    #[cfg(not(feature = \"rustc\"))]\n    #[serde(skip)]\n    pub rust_span_data: Option<()>,\n}\n\nconst _: () = {\n    // `rust_span_data` is a metadata that should *not* be taken into\n    // account while hashing or comparing\n\n    impl std::hash::Hash for Span {\n        fn hash<H: std::hash::Hasher>(&self, state: &mut H) {\n            self.lo.hash(state);\n            self.hi.hash(state);\n            self.filename.hash(state);\n        }\n    }\n    impl PartialEq for Span {\n        fn eq(&self, other: &Self) -> bool {\n            self.lo == other.lo && self.hi == other.hi && self.filename == other.filename\n        }\n    }\n\n    impl PartialOrd for Span {\n        fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n            Some(\n                self.lo.partial_cmp(&other.lo)?.then(\n                    self.hi\n                        .partial_cmp(&other.hi)?\n                        .then(self.filename.partial_cmp(&other.filename)?),\n                ),\n            )\n        }\n    }\n};\n\n#[cfg(feature = \"rustc\")]\nimpl From<rustc_span::Loc> for Loc {\n    fn from(val: rustc_span::Loc) -> Self {\n        Loc {\n            line: val.line,\n            col: val.col_display,\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: 
BaseState<'tcx>> SInto<S, Span> for rustc_span::Span {\n    fn sinto(&self, s: &S) -> Span {\n        if let Some(span) = s.with_global_cache(|cache| cache.spans.get(self).cloned()) {\n            return span;\n        }\n        let span = translate_span(*self, s.base().tcx.sess);\n        s.with_global_cache(|cache| cache.spans.insert(*self, span.clone()));\n        span\n    }\n}\n\n/// Reflects [`rustc_span::source_map::Spanned`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct Spanned<T> {\n    pub node: T,\n    pub span: Span,\n}\n#[cfg(feature = \"rustc\")]\nimpl<'s, S: UnderOwnerState<'s>, T: SInto<S, U>, U> SInto<S, Spanned<U>>\n    for rustc_span::source_map::Spanned<T>\n{\n    fn sinto<'a>(&self, s: &S) -> Spanned<U> {\n        Spanned {\n            node: self.node.sinto(s),\n            span: self.span.sinto(s),\n        }\n    }\n}\n\nimpl<'tcx, S> SInto<S, PathBuf> for PathBuf {\n    fn sinto(&self, _: &S) -> PathBuf {\n        self.clone()\n    }\n}\n\n/// Reflects [`rustc_span::RealFileName`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, PartialEq, Eq, Hash, PartialOrd, Ord)]\n#[args(<S>, from: rustc_span::RealFileName, state: S as _s)]\npub enum RealFileName {\n    LocalPath(PathBuf),\n    Remapped {\n        local_path: Option<PathBuf>,\n        virtual_name: PathBuf,\n    },\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<S> SInto<S, u64> for rustc_hashes::Hash64 {\n    fn sinto(&self, _: &S) -> u64 {\n        self.as_u64()\n    }\n}\n\n/// Reflects [`rustc_span::FileName`]\n#[derive(AdtInto)]\n#[args(<S>, from: rustc_span::FileName, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, PartialEq, Eq, Hash, PartialOrd, Ord)]\npub enum FileName {\n    Real(RealFileName),\n    CfgSpec(u64),\n    Anon(u64),\n    MacroExpansion(u64),\n    ProcMacroSourceCode(u64),\n    CliCrateAttr(u64),\n    Custom(String),\n    // 
#[map(FileName::DocTest(x.0.to_str().unwrap().into()))]\n    #[custom_arm(FROM_TYPE::DocTest(x, _) => TO_TYPE::DocTest(x.to_str().unwrap().into()),)]\n    DocTest(String),\n    InlineAsm(u64),\n}\n\nimpl FileName {\n    pub fn to_string(&self) -> String {\n        match self {\n            Self::Real(RealFileName::LocalPath(path))\n            | Self::Real(RealFileName::Remapped {\n                local_path: Some(path),\n                ..\n            })\n            | Self::Real(RealFileName::Remapped {\n                virtual_name: path, ..\n            }) => format!(\"{}\", path.display()),\n            _ => format!(\"{:?}\", self),\n        }\n    }\n    pub fn to_path(&self) -> Option<&std::path::Path> {\n        match self {\n            Self::Real(RealFileName::LocalPath(path))\n            | Self::Real(RealFileName::Remapped {\n                local_path: Some(path),\n                ..\n            })\n            | Self::Real(RealFileName::Remapped {\n                virtual_name: path, ..\n            }) => Some(path),\n            _ => None,\n        }\n    }\n}\n\nsinto_todo!(rustc_span, ErrorGuaranteed);\n"
  },
  {
    "path": "frontend/exporter/src/types/thir.rs",
    "content": "//! Copies of the relevant `THIR` types. THIR represents a HIR (function) body augmented with type\n//! information and lightly desugared.\nuse crate::prelude::*;\n\n#[cfg(feature = \"rustc\")]\nuse rustc_middle::thir;\n\n/// Reflects [`thir::LogicalOp`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'a, S>, from: thir::LogicalOp, state: S as _s)]\npub enum LogicalOp {\n    And,\n    Or,\n}\n\n/// Reflects [`thir::LintLevel`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'slt, S: UnderOwnerState<'slt> + HasThir<'slt>>, from: thir::LintLevel, state: S as gstate)]\npub enum LintLevel {\n    Inherited,\n    Explicit(HirId),\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: ExprState<'tcx>>, from: thir::FruInfo<'tcx>, state: S as gstate)]\n/// Field Record Update (FRU) informations, this reflects [`thir::FruInfo`]\npub struct FruInfo {\n    /// The base, e.g. `Foo {x: 1, .. 
base}`\n    pub base: Expr,\n    pub field_types: Vec<Ty>,\n}\n\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: ExprState<'tcx>>, from: thir::AdtExprBase<'tcx>, state: S as gstate)]\npub enum AdtExprBase {\n    None,\n    Base(FruInfo),\n    DefaultFields(Vec<Ty>),\n}\n\n/// A field expression: a field name along with a value\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct FieldExpr {\n    pub field: DefId,\n    pub value: Expr,\n}\n\n/// Reflects [`thir::AdtExpr`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct AdtExpr {\n    pub info: VariantInformations,\n    pub user_ty: Option<CanonicalUserType>,\n    pub fields: Vec<FieldExpr>,\n    pub base: AdtExprBase,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: ExprState<'tcx>> SInto<S, AdtExpr> for thir::AdtExpr<'tcx> {\n    fn sinto(&self, s: &S) -> AdtExpr {\n        let variants = self.adt_def.variants();\n        let variant: &rustc_middle::ty::VariantDef = &variants[self.variant_index];\n        AdtExpr {\n            info: get_variant_information(&self.adt_def, self.variant_index, s),\n            fields: self\n                .fields\n                .iter()\n                .map(|f| FieldExpr {\n                    field: variant.fields[f.name].did.sinto(s),\n                    value: f.expr.sinto(s),\n                })\n                .collect(),\n            base: self.base.sinto(s),\n            user_ty: self.user_ty.sinto(s),\n        }\n    }\n}\n\n/// Reflects [`thir::LocalVarId`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct LocalIdent {\n    pub name: String,\n    pub id: HirId,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, LocalIdent> for thir::LocalVarId {\n    fn sinto(&self, s: &S) -> LocalIdent {\n        LocalIdent {\n            name: s\n                .base()\n                .local_ctx\n                .borrow()\n  
              .vars\n                .get(self)\n                .s_unwrap(s)\n                .to_string(),\n            id: self.0.sinto(s),\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<S> SInto<S, u64> for rustc_middle::mir::interpret::AllocId {\n    fn sinto(&self, _: &S) -> u64 {\n        self.0.get()\n    }\n}\n\n/// Reflects [`thir::BlockSafety`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S>, from: thir::BlockSafety, state: S as _s)]\npub enum BlockSafety {\n    Safe,\n    BuiltinUnsafe,\n    #[custom_arm(FROM_TYPE::ExplicitUnsafe{..} => BlockSafety::ExplicitUnsafe,)]\n    ExplicitUnsafe,\n}\n\n/// Reflects [`rustc_middle::middle::region::ScopeData`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: rustc_middle::middle::region::ScopeData, state: S as gstate)]\npub enum ScopeData {\n    Node,\n    CallSite,\n    Arguments,\n    Destruction,\n    IfThen,\n    IfThenRescope,\n    MatchGuard,\n    Remainder(FirstStatementIndex),\n}\n\nsinto_as_usize!(rustc_middle::middle::region, FirstStatementIndex);\n\n/// Reflects [`rustc_middle::middle::region::Scope`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: rustc_middle::middle::region::Scope, state: S as gstate)]\npub struct Scope {\n    pub local_id: ItemLocalId,\n    pub data: ScopeData,\n}\n\nsinto_as_usize!(rustc_hir::hir_id, ItemLocalId);\n\n/// Reflects [`thir::Block`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: ExprState<'tcx>>, from: thir::Block, state: S as gstate)]\npub struct Block {\n    pub targeted_by_break: bool,\n    pub region_scope: Scope,\n    pub span: Span,\n    pub stmts: Vec<Stmt>,\n    pub expr: Option<Expr>,\n    pub safety_mode: BlockSafety,\n}\n\n/// Reflects 
[`thir::Stmt`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: ExprState<'tcx>>, from: thir::Stmt<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct Stmt {\n    pub kind: StmtKind,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: ExprState<'tcx>> SInto<S, Block> for thir::BlockId {\n    fn sinto(&self, s: &S) -> Block {\n        s.thir().blocks[*self].sinto(s)\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: ExprState<'tcx>> SInto<S, Stmt> for thir::StmtId {\n    fn sinto(&self, s: &S) -> Stmt {\n        s.thir().stmts[*self].sinto(s)\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: ExprState<'tcx>> SInto<S, Expr> for thir::Expr<'tcx> {\n    fn sinto(&self, s: &S) -> Expr {\n        let s = &s.with_ty(self.ty);\n        let (hir_id, attributes) = self.hir_id_and_attributes(s);\n        let hir_id = hir_id.map(|hir_id| hir_id.index());\n        let unrolled = self.unroll_scope(s);\n        let thir::Expr { span, kind, ty, .. } = unrolled;\n        let contents = match kind {\n            // Introduce intermediate `Cast` from `T` to `U` when casting from a `#[repr(T)]` enum to `U`\n            thir::ExprKind::Cast { source } => {\n                if let rustc_middle::ty::TyKind::Adt(adt, _) = s.thir().exprs[source].ty.kind() {\n                    let tcx = s.base().tcx;\n                    let contents = kind.sinto(s);\n                    let repr_type = if adt.is_enum() {\n                        use crate::rustc_middle::ty::util::IntTypeExt;\n                        adt.repr().discr_type().to_ty(tcx)\n                    } else {\n                        ty\n                    };\n                    if repr_type == ty {\n                        contents\n                    } else {\n                        ExprKind::Cast {\n                            source: Decorated {\n                                ty: repr_type.sinto(s),\n                                span: span.sinto(s),\n                         
       contents: Box::new(contents),\n                                hir_id,\n                                attributes: vec![],\n                            },\n                        }\n                    }\n                } else {\n                    kind.sinto(s)\n                }\n            }\n            thir::ExprKind::NonHirLiteral { lit, .. } => {\n                let cexpr: ConstantExpr =\n                    (ConstantExprKind::Literal(scalar_int_to_constant_literal(s, lit, ty)))\n                        .decorate(ty.sinto(s), span.sinto(s));\n                return cexpr.into();\n            }\n            thir::ExprKind::ZstLiteral { .. } => {\n                if ty.is_phantom_data() {\n                    let rustc_middle::ty::Adt(def, _) = ty.kind() else {\n                        supposely_unreachable_fatal!(s[span], \"PhantomDataNotAdt\"; {kind, ty})\n                    };\n                    let adt_def = AdtExpr {\n                        info: get_variant_information(def, rustc_abi::FIRST_VARIANT, s),\n                        user_ty: None,\n                        base: AdtExprBase::None,\n                        fields: vec![],\n                    };\n                    return Expr {\n                        contents: Box::new(ExprKind::Adt(adt_def)),\n                        span: self.span.sinto(s),\n                        ty: ty.sinto(s),\n                        hir_id,\n                        attributes,\n                    };\n                }\n                let (def_id, generics) = match ty.kind() {\n                    rustc_middle::ty::Adt(adt_def, generics) => {\n                        // Here, we should only get `struct Name;` structs.\n                        s_assert!(s, adt_def.variants().len() == 1);\n                        s_assert!(s, generics.is_empty());\n                        (adt_def.did(), generics)\n                    }\n                    rustc_middle::ty::TyKind::FnDef(def_id, generics) => 
(*def_id, generics),\n                    ty_kind => {\n                        let ty_kind = ty_kind.sinto(s);\n                        supposely_unreachable_fatal!(\n                            s[span],\n                            \"ZstLiteral ty≠FnDef(...) or PhantomData or naked Struct\";\n                            {kind, span, ty, ty_kind}\n                        );\n                    }\n                };\n                let item = translate_item_ref(s, def_id, generics);\n                let tcx = s.base().tcx;\n                let constructor = if tcx.is_constructor(def_id) {\n                    let adt_def = tcx.adt_def(rustc_utils::get_closest_parent_type(&tcx, def_id));\n                    let variant_index = adt_def.variant_index_with_id(tcx.parent(def_id));\n                    Some(rustc_utils::get_variant_information(\n                        &adt_def,\n                        variant_index,\n                        s,\n                    ))\n                } else {\n                    None\n                };\n                return Expr {\n                    contents: Box::new(ExprKind::GlobalName { item, constructor }),\n                    span: self.span.sinto(s),\n                    ty: ty.sinto(s),\n                    hir_id,\n                    attributes,\n                };\n            }\n            thir::ExprKind::Field {\n                lhs,\n                variant_index,\n                name,\n            } => {\n                let lhs_ty = s.thir().exprs[lhs].ty.kind();\n                let idx = variant_index.index();\n                if idx != 0 {\n                    let _ = supposely_unreachable!(\n                        s[span],\n                        \"ExprKindFieldIdxNonZero\"; {\n                            kind,\n                            span,\n                            ty,\n                            ty.kind()\n                        }\n                    );\n                };\n                
match lhs_ty {\n                    rustc_middle::ty::TyKind::Adt(adt_def, _generics) => {\n                        let variant = adt_def.variant(variant_index);\n                        ExprKind::Field {\n                            field: variant.fields[name].did.sinto(s),\n                            lhs: lhs.sinto(s),\n                        }\n                    }\n                    rustc_middle::ty::TyKind::Tuple(..) => ExprKind::TupleField {\n                        field: name.index(),\n                        lhs: lhs.sinto(s),\n                    },\n                    _ => supposely_unreachable_fatal!(\n                        s[span],\n                        \"ExprKindFieldBadTy\"; {\n                            kind,\n                            span,\n                            ty.kind(),\n                            lhs_ty\n                        }\n                    ),\n                }\n            }\n            _ => kind.sinto(s),\n        };\n        Decorated {\n            ty: ty.sinto(s),\n            span: span.sinto(s),\n            contents: Box::new(contents),\n            hir_id,\n            attributes,\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: ExprState<'tcx>> SInto<S, Expr> for thir::ExprId {\n    fn sinto(&self, s: &S) -> Expr {\n        s.thir().exprs[*self].sinto(s)\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: ExprState<'tcx>> SInto<S, Pat> for thir::Pat<'tcx> {\n    fn sinto(&self, s: &S) -> Pat {\n        let thir::Pat { span, kind, ty } = self;\n        let contents = match kind {\n            thir::PatKind::Leaf { subpatterns } => match ty.kind() {\n                rustc_middle::ty::TyKind::Adt(adt_def, args) => (thir::PatKind::Variant {\n                    adt_def: *adt_def,\n                    args,\n                    variant_index: rustc_abi::VariantIdx::from_usize(0),\n                    subpatterns: subpatterns.clone(),\n                })\n                .sinto(s),\n   
             rustc_middle::ty::TyKind::Tuple(tys) => {\n                    // Build a full-arity vector, filling unmatched positions with wildcards\n                    // so that tuple patterns with `..` (ellipsis) are correctly expanded.\n                    let mut full_subpatterns: Vec<Pat> = tys\n                        .iter()\n                        .map(|elem_ty| Decorated {\n                            ty: elem_ty.sinto(s),\n                            span: span.sinto(s),\n                            contents: Box::new(PatKind::Wild),\n                            hir_id: None,\n                            attributes: vec![],\n                        })\n                        .collect();\n                    for field_pat in subpatterns.iter() {\n                        full_subpatterns[field_pat.field.index()] = field_pat.pattern.sinto(s);\n                    }\n                    PatKind::Tuple {\n                        subpatterns: full_subpatterns,\n                    }\n                }\n                _ => supposely_unreachable_fatal!(\n                    s[span],\n                    \"PatLeafNonAdtTy\";\n                    {ty.kind(), kind}\n                ),\n            },\n            _ => kind.sinto(s),\n        };\n        Decorated {\n            ty: ty.sinto(s),\n            span: span.sinto(s),\n            contents: Box::new(contents),\n            hir_id: None,\n            attributes: vec![],\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: ExprState<'tcx>> SInto<S, Arm> for thir::ArmId {\n    fn sinto(&self, s: &S) -> Arm {\n        s.thir().arms[*self].sinto(s)\n    }\n}\n\n/// Reflects [`thir::StmtKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: ExprState<'tcx>>, from: thir::StmtKind<'tcx>, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum StmtKind {\n    Expr {\n        scope: Scope,\n        expr: Expr,\n    },\n    Let {\n        remainder_scope: Scope,\n    
    init_scope: Scope,\n        pattern: Pat,\n        initializer: Option<Expr>,\n        else_block: Option<Block>,\n        lint_level: LintLevel,\n        #[value(attribute_from_scope(gstate, init_scope).1)]\n        /// The attribute on this `let` binding\n        attributes: Vec<Attribute>,\n    },\n}\n\n/// Reflects [`thir::Ascription`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx> + HasThir<'tcx>>, from: thir::Ascription<'tcx>, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct Ascription {\n    pub annotation: CanonicalUserTypeAnnotation,\n    pub variance: Variance,\n}\n\n/// Reflects [`thir::PatRange`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct PatRange {\n    pub lo: PatRangeBoundary,\n    pub hi: PatRangeBoundary,\n    pub end: RangeEnd,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, PatRange> for thir::PatRange<'tcx> {\n    fn sinto(&self, s: &S) -> PatRange {\n        let sinto_bdy = |bdy| match bdy {\n            thir::PatRangeBoundary::Finite(valtree) => PatRangeBoundary::Finite(\n                valtree_to_constant_expr(s, valtree, self.ty, rustc_span::DUMMY_SP),\n            ),\n            thir::PatRangeBoundary::NegInfinity => PatRangeBoundary::NegInfinity,\n            thir::PatRangeBoundary::PosInfinity => PatRangeBoundary::PosInfinity,\n        };\n        PatRange {\n            lo: sinto_bdy(self.lo),\n            hi: sinto_bdy(self.hi),\n            end: self.end.sinto(s),\n        }\n    }\n}\n\n/// Reflects [`thir::PatRangeBoundary`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum PatRangeBoundary {\n    Finite(ConstantExpr),\n    NegInfinity,\n    PosInfinity,\n}\n\n/// A field pattern: a field name along with a pattern\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct FieldPat {\n    pub field: DefId,\n    pub pattern: Pat,\n}\n\npub type Pat = 
Decorated<PatKind>;\n\n/// Reflects [`thir::PatKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: ExprState<'tcx>>, from: thir::PatKind<'tcx>, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\n#[append(thir::PatKind::Leaf {..} => fatal!(gstate, \"PatKind::Leaf: should never come up\"),)]\npub enum PatKind {\n    Wild,\n    Missing,\n    AscribeUserType {\n        ascription: Ascription,\n        subpattern: Pat,\n    },\n    #[custom_arm(\n        thir::PatKind::Binding {name, mode, var, ty, subpattern, is_primary, ..} => {\n            let local_ctx = gstate.base().local_ctx;\n            local_ctx.borrow_mut().vars.insert(*var, name.to_string());\n            PatKind::Binding {\n                mode: mode.sinto(gstate),\n                var: var.sinto(gstate),\n                ty: ty.sinto(gstate),\n                subpattern: subpattern.sinto(gstate),\n                is_primary: is_primary.sinto(gstate),\n            }\n        }\n    )]\n    Binding {\n        mode: BindingMode,\n        var: LocalIdent, // name VS var? 
TODO\n        ty: Ty,\n        subpattern: Option<Pat>,\n        is_primary: bool,\n    },\n    #[custom_arm(\n        FROM_TYPE::Variant { adt_def, variant_index, args, subpatterns } => {\n            let variant_def_id = adt_def.variant(*variant_index).def_id;\n            let item = translate_item_ref(gstate, variant_def_id, args);\n            let variants = adt_def.variants();\n            let variant: &rustc_middle::ty::VariantDef = &variants[*variant_index];\n            let tcx = gstate.base().tcx;\n            // Build a map from field index to explicit pattern, so we can\n            // fill in wildcards for fields omitted by `..` (ellipsis).\n            let explicit: std::collections::HashMap<_, _> = subpatterns\n                .iter()\n                .map(|f| (f.field, &f.pattern))\n                .collect();\n            TO_TYPE::Variant {\n                item,\n                info: get_variant_information(adt_def, *variant_index, gstate),\n                subpatterns: variant.fields.iter_enumerated()\n                    .map(|(field_idx, field_def)| {\n                        let pattern = if let Some(pat) = explicit.get(&field_idx) {\n                            pat.sinto(gstate)\n                        } else {\n                            Decorated {\n                                ty: field_def.ty(tcx, args).sinto(gstate),\n                                span: rustc_span::DUMMY_SP.sinto(gstate),\n                                contents: Box::new(PatKind::Wild),\n                                hir_id: None,\n                                attributes: vec![],\n                            }\n                        };\n                        FieldPat {\n                            field: field_def.did.sinto(gstate),\n                            pattern,\n                        }\n                    })\n                    .collect(),\n            }\n        }\n    )]\n    Variant {\n        /// Reference to variant item definition, 
with appropriate generics.\n        item: ItemRef,\n        /// Extra info about the variant.\n        info: VariantInformations,\n        subpatterns: Vec<FieldPat>,\n    },\n    #[disable_mapping]\n    Tuple {\n        subpatterns: Vec<Pat>,\n    },\n    Deref {\n        subpattern: Pat,\n    },\n    DerefPattern {\n        subpattern: Pat,\n    },\n    Constant {\n        value: ConstantExpr,\n    },\n    ExpandedConstant {\n        def_id: DefId,\n        subpattern: Pat,\n    },\n    Range(PatRange),\n    Slice {\n        prefix: Vec<Pat>,\n        slice: Option<Pat>,\n        suffix: Vec<Pat>,\n    },\n    Array {\n        prefix: Vec<Pat>,\n        slice: Option<Pat>,\n        suffix: Vec<Pat>,\n    },\n    Or {\n        pats: Vec<Pat>,\n    },\n    Never,\n    Error(ErrorGuaranteed),\n}\n\n/// Reflects [`thir::Arm`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: ExprState<'tcx>>, from: thir::Arm<'tcx>, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct Arm {\n    pub pattern: Pat,\n    pub guard: Option<Expr>,\n    pub body: Expr,\n    pub lint_level: LintLevel,\n    pub scope: Scope,\n    pub span: Span,\n    #[value(attribute_from_scope(gstate, scope).1)]\n    attributes: Vec<Attribute>,\n}\n\n/// Reflects [`thir::Param`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: ExprState<'tcx>>, from: thir::Param<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct Param {\n    pub pat: Option<Pat>,\n    pub ty: Ty,\n    pub ty_span: Option<Span>,\n    pub self_kind: Option<ImplicitSelfKind>,\n    pub hir_id: Option<HirId>,\n    #[value(hir_id.map(|id| {\n        s.base().tcx.hir_attrs(id).sinto(s)\n    }).unwrap_or(vec![]))]\n    /// attributes on this parameter\n    pub attributes: Vec<Attribute>,\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct ThirBody {\n    pub expr: Expr,\n    pub params: Vec<Param>,\n}\n\npub type Expr = 
Decorated<ExprKind>;\n\n/// Reflects [`thir::ExprKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: ExprState<'tcx> + HasTy<'tcx>>, from: thir::ExprKind<'tcx>, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\n#[append(\n    thir::ExprKind::Scope {..} => {\n        fatal!(gstate, \"Scope should have been eliminated at this point\");\n    },\n    thir::ExprKind::Field {..} => {\n        fatal!(gstate, \"Field should have been eliminated at this point\");\n    },\n    thir::ExprKind::NonHirLiteral {..} => {\n        fatal!(gstate, \"NonHirLiteral should have been eliminated at this point\");\n    },\n)]\npub enum ExprKind {\n    Box {\n        value: Expr,\n    },\n    /// Resugared macros calls. This is deprecated: see\n    /// <https://github.com/hacspec/hax/issues/145>.\n    If {\n        if_then_scope: Scope,\n        cond: Expr,\n        then: Expr,\n        else_opt: Option<Expr>,\n    },\n    #[map({\n        let e = gstate.thir().exprs[*fun].unroll_scope(gstate);\n        let fun = match e.ty.kind() {\n            rustc_middle::ty::TyKind::FnDef(def_id, generics) => {\n                let (hir_id, attributes) = e.hir_id_and_attributes(gstate);\n                let hir_id = hir_id.map(|hir_id| hir_id.index());\n                let item = translate_item_ref(gstate, *def_id, generics);\n                let contents = Box::new(ExprKind::GlobalName {\n                    item,\n                    constructor: None\n                });\n                Expr {\n                    contents,\n                    span: e.span.sinto(gstate),\n                    ty: e.ty.sinto(gstate),\n                    hir_id,\n                    attributes,\n                }\n            },\n            rustc_middle::ty::TyKind::FnPtr(..) 
=> {\n                e.sinto(gstate)\n            },\n            ty_kind => {\n                let ty_norm: Ty = gstate.base().tcx.normalize_erasing_regions(gstate.typing_env(), *ty).sinto(gstate);\n                let ty_kind_sinto = ty_kind.sinto(gstate);\n                supposely_unreachable_fatal!(\n                    gstate[e.span],\n                    \"CallNotTyFnDef\";\n                    {e, ty_kind, ty_kind_sinto, ty_norm}\n                );\n            }\n        };\n        TO_TYPE::Call {\n            ty: ty.sinto(gstate),\n            args: args.sinto(gstate),\n            from_hir_call: from_hir_call.sinto(gstate),\n            fn_span: fn_span.sinto(gstate),\n            fun,\n        }\n    })]\n    /// A call to a function or a method.\n    ///\n    /// Example: `f(0i8)`, where `f` has signature `fn f<T: Clone>(t: T) -> ()`.\n    Call {\n        /// The type of the function, substitution applied.\n        ///\n        /// Example: for the call `f(0i8)`, this is `i8 -> ()`.\n        ty: Ty,\n        /// The function itself. This can be something else than a name, e.g. 
a closure.\n        ///\n        /// Example: for the call `f(0i8)`, this is `f`.\n        ///\n        /// In the case of a call to a function that's not a closure/fn pointer, the expression\n        /// will be a `GlobalName` that contains all the information about generics and whether\n        /// this is a direct call or a method call.\n        fun: Expr, // TODO: can [ty] and [fun.ty] be different?\n        /// The arguments given to the function.\n        ///\n        /// Example: for the call `f(0i8)`, this is `[0i8]`.\n        args: Vec<Expr>,\n        from_hir_call: bool,\n        fn_span: Span,\n    },\n    Deref {\n        arg: Expr,\n    },\n    Binary {\n        op: BinOp,\n        lhs: Expr,\n        rhs: Expr,\n    },\n    LogicalOp {\n        op: LogicalOp,\n        lhs: Expr,\n        rhs: Expr,\n    },\n    Unary {\n        op: UnOp,\n        arg: Expr,\n    },\n    Cast {\n        source: Expr,\n    },\n    Use {\n        source: Expr,\n    }, // Use a lexpr to get a vexpr.\n    NeverToAny {\n        source: Expr,\n    },\n    #[custom_arm(\n        &FROM_TYPE::PointerCoercion { cast, source, .. 
} => {\n            let source = &gstate.thir().exprs[source];\n            let src_ty = source.ty;\n            let tgt_ty = gstate.ty();\n            TO_TYPE::PointerCoercion {\n                cast: PointerCoercion::sfrom(gstate, cast, src_ty, tgt_ty),\n                source: source.sinto(gstate),\n            }\n        },\n    )]\n    PointerCoercion {\n        cast: PointerCoercion,\n        source: Expr,\n    },\n    Loop {\n        body: Expr,\n    },\n    Match {\n        scrutinee: Expr,\n        arms: Vec<Arm>,\n    },\n    Let {\n        expr: Expr,\n        pat: Pat,\n    },\n    Block {\n        #[serde(flatten)]\n        block: Block,\n    },\n    Assign {\n        lhs: Expr,\n        rhs: Expr,\n    },\n    AssignOp {\n        op: AssignOp,\n        lhs: Expr,\n        rhs: Expr,\n    },\n    #[disable_mapping]\n    Field {\n        field: DefId,\n        lhs: Expr,\n    },\n\n    #[disable_mapping]\n    TupleField {\n        field: usize,\n        lhs: Expr,\n    },\n    Index {\n        lhs: Expr,\n        index: Expr,\n    },\n    VarRef {\n        id: LocalIdent,\n    },\n    #[disable_mapping]\n    ConstRef {\n        id: ParamConst,\n    },\n    #[disable_mapping]\n    GlobalName {\n        item: ItemRef,\n        constructor: Option<VariantInformations>,\n    },\n    UpvarRef {\n        closure_def_id: DefId,\n        var_hir_id: LocalIdent,\n    },\n    Borrow {\n        borrow_kind: BorrowKind,\n        arg: Expr,\n    },\n    RawBorrow {\n        mutability: Mutability,\n        arg: Expr,\n    },\n    Break {\n        label: Scope,\n        value: Option<Expr>,\n    },\n    Continue {\n        label: Scope,\n    },\n    Return {\n        value: Option<Expr>,\n    },\n    #[custom_arm(FROM_TYPE::ConstBlock { did, args } => TO_TYPE::ConstBlock(translate_item_ref(gstate, *did, args)),)]\n    ConstBlock(ItemRef),\n    Repeat {\n        value: Expr,\n        count: ConstantExpr,\n    },\n    Array {\n        fields: Vec<Expr>,\n    },\n    
Tuple {\n        fields: Vec<Expr>,\n    },\n    Adt(AdtExpr),\n    PlaceTypeAscription {\n        source: Expr,\n        user_ty: Option<CanonicalUserType>,\n    },\n    ValueTypeAscription {\n        source: Expr,\n        user_ty: Option<CanonicalUserType>,\n    },\n    #[custom_arm(FROM_TYPE::Closure(e) => {\n        let (thir, expr_entrypoint) = get_thir(e.closure_id, gstate);\n        let s = &gstate.with_thir(thir.clone());\n        TO_TYPE::Closure {\n            params: thir.params.raw.sinto(s),\n            body: expr_entrypoint.sinto(s),\n            upvars: e.upvars.sinto(gstate),\n            movability: e.movability.sinto(gstate)\n        }\n    },\n    )]\n    Closure {\n        params: Vec<Param>,\n        body: Expr,\n        upvars: Vec<Expr>,\n        movability: Option<Movability>,\n    },\n    Literal {\n        lit: Spanned<LitKind>,\n        neg: bool, // TODO\n    },\n    //zero space type\n    // This is basically used for functions! e.g. `<T>::from`\n    ZstLiteral {\n        user_ty: Option<CanonicalUserType>,\n    },\n    #[custom_arm(FROM_TYPE::NamedConst { def_id, args, user_ty } => TO_TYPE::NamedConst {\n        item: translate_item_ref(gstate, *def_id, args),\n        user_ty: user_ty.sinto(gstate),\n    },)]\n    NamedConst {\n        item: ItemRef,\n        user_ty: Option<CanonicalUserType>,\n    },\n    ConstParam {\n        param: ParamConst,\n        def_id: GlobalIdent,\n    },\n    StaticRef {\n        alloc_id: u64,\n        ty: Ty,\n        def_id: GlobalIdent,\n    },\n    Yield {\n        value: Expr,\n    },\n    #[todo]\n    Todo(String),\n}\n\n#[cfg(feature = \"rustc\")]\npub trait ExprKindExt<'tcx> {\n    fn hir_id_and_attributes<S: ExprState<'tcx>>(\n        &self,\n        s: &S,\n    ) -> (Option<rustc_hir::HirId>, Vec<Attribute>);\n    fn unroll_scope<S: BaseState<'tcx> + HasThir<'tcx>>(&self, s: &S) -> thir::Expr<'tcx>;\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx> ExprKindExt<'tcx> for thir::Expr<'tcx> {\n    fn 
hir_id_and_attributes<S: ExprState<'tcx>>(\n        &self,\n        s: &S,\n    ) -> (Option<rustc_hir::HirId>, Vec<Attribute>) {\n        match &self.kind {\n            thir::ExprKind::Scope {\n                region_scope: scope,\n                ..\n            } => attribute_from_scope(s, scope),\n            _ => (None, vec![]),\n        }\n    }\n    fn unroll_scope<S: BaseState<'tcx> + HasThir<'tcx>>(&self, s: &S) -> thir::Expr<'tcx> {\n        // TODO: when we see a loop, we should lookup its label! label is actually a scope id\n        // we remove scopes here, whence the TODO\n        match self.kind {\n            thir::ExprKind::Scope { value, .. } => s.thir().exprs[value].unroll_scope(s),\n            _ => self.clone(),\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\npub trait HirIdExt {\n    fn index(&self) -> (usize, usize);\n}\n\n#[cfg(feature = \"rustc\")]\nimpl HirIdExt for rustc_hir::HirId {\n    fn index(&self) -> (usize, usize) {\n        use crate::rustc_index::Idx;\n        (self.owner.def_id.index(), self.local_id.index())\n    }\n}\n"
  },
  {
    "path": "frontend/exporter/src/types/ty.rs",
    "content": "//! Copies of the relevant type-level types. These are semantically-rich representations of\n//! type-level concepts such as types and trait references.\nuse crate::prelude::*;\nuse crate::sinto_as_usize;\nuse crate::sinto_todo;\nuse std::sync::Arc;\n\n#[cfg(feature = \"rustc\")]\nuse rustc_middle::ty;\n\n/// Generic container for decorating items with a type, a span,\n/// attributes and other meta-data.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct Decorated<T> {\n    pub ty: Ty,\n    pub span: Span,\n    pub contents: Box<T>,\n    pub hir_id: Option<(usize, usize)>,\n    pub attributes: Vec<Attribute>,\n}\n\n/// Reflects [`ty::ParamTy`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::ParamTy, state: S as gstate)]\npub struct ParamTy {\n    pub index: u32,\n    pub name: Symbol,\n}\n\n/// Reflects [`ty::ParamConst`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<S>, from: ty::ParamConst, state: S as gstate)]\npub struct ParamConst {\n    pub index: u32,\n    pub name: Symbol,\n}\n\n/// A predicate without `Self`, for use in `dyn Trait`.\n///\n/// Reflects [`ty::ExistentialPredicate`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::ExistentialPredicate<'tcx>, state: S as state)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum ExistentialPredicate {\n    /// E.g. `From<u64>`. Note that this isn't `T: From<u64>` with a given `T`, this is just\n    /// `From<u64>`. Could be written `?: From<u64>`.\n    Trait(ExistentialTraitRef),\n    /// E.g. `Iterator::Item = u64`. Could be written `<? as Iterator>::Item = u64`.\n    Projection(ExistentialProjection),\n    /// E.g. 
`Send`.\n    AutoTrait(DefId),\n}\n\n/// Reflects [`rustc_type_ir::ExistentialTraitRef`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_type_ir::ExistentialTraitRef<ty::TyCtxt<'tcx>>, state: S as state)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct ExistentialTraitRef {\n    pub def_id: DefId,\n    pub args: Vec<GenericArg>,\n}\n\n/// Reflects [`rustc_type_ir::ExistentialProjection`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_type_ir::ExistentialProjection<ty::TyCtxt<'tcx>>, state: S as state)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct ExistentialProjection {\n    pub def_id: DefId,\n    pub args: Vec<GenericArg>,\n    pub term: Term,\n}\n\n/// Reflects [`ty::BoundTyKind`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundTyKind, state: S as s)]\npub enum BoundTyKind {\n    Anon,\n    #[custom_arm(&FROM_TYPE::Param(def_id) => TO_TYPE::Param(def_id.sinto(s), s.base().tcx.item_name(def_id).sinto(s)),)]\n    Param(DefId, Symbol),\n}\n\n/// Reflects [`ty::BoundTy`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundTy, state: S as s)]\npub struct BoundTy {\n    pub var: BoundVar,\n    pub kind: BoundTyKind,\n}\n\nsinto_as_usize!(rustc_middle::ty, BoundVar);\n\n/// Reflects [`ty::BoundRegionKind`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundRegionKind, state: S as s)]\npub enum BoundRegionKind {\n    Anon,\n    NamedAnon(Symbol),\n    #[custom_arm(&FROM_TYPE::Named(def_id) => {\n        let tcx = 
s.base().tcx;\n        TO_TYPE::Named {\n            def_id: def_id.sinto(s),\n            name: tcx.item_name(def_id).sinto(s),\n            span: tcx.def_span(def_id).sinto(s),\n            attributes: get_def_attrs(tcx, def_id, get_def_kind(tcx, def_id)).sinto(s),\n        }\n    })]\n    Named {\n        def_id: DefId,\n        name: Symbol,\n        span: Span,\n        attributes: Vec<Attribute>,\n    },\n    ClosureEnv,\n}\n\n/// Reflects [`ty::BoundRegion`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundRegion, state: S as s)]\npub struct BoundRegion {\n    pub var: BoundVar,\n    pub kind: BoundRegionKind,\n}\n\n/// Reflects [`ty::PlaceholderRegion`]\npub type PlaceholderRegion = Placeholder<BoundRegion>;\n/// Reflects [`ty::PlaceholderConst`]\npub type PlaceholderConst = Placeholder<BoundVar>;\n/// Reflects [`ty::PlaceholderType`]\npub type PlaceholderType = Placeholder<BoundTy>;\n\n/// Reflects [`ty::Placeholder`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct Placeholder<T> {\n    pub bound: T,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>, T: SInto<S, U>, U> SInto<S, Placeholder<U>>\n    for ty::Placeholder<T>\n{\n    fn sinto(&self, s: &S) -> Placeholder<U> {\n        Placeholder {\n            bound: self.bound.sinto(s),\n        }\n    }\n}\n\n/// Reflects [`rustc_middle::infer::canonical::Canonical`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct Canonical<T> {\n    pub value: T,\n}\n/// Reflects [`ty::CanonicalUserType`]\npub type CanonicalUserType = Canonical<UserType>;\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>, T: SInto<S, U>, U> SInto<S, Canonical<U>>\n    for rustc_middle::infer::canonical::Canonical<'tcx, T>\n{\n    fn sinto(&self, s: &S) -> Canonical<U> {\n        
Canonical {\n            value: self.value.sinto(s),\n        }\n    }\n}\n\n/// Reflects [`ty::UserSelfTy`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::UserSelfTy<'tcx>, state: S as gstate)]\npub struct UserSelfTy {\n    pub impl_def_id: DefId,\n    pub self_ty: Ty,\n}\n\n/// Reflects [`ty::UserArgs`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::UserArgs<'tcx>, state: S as gstate)]\npub struct UserArgs {\n    pub args: Vec<GenericArg>,\n    pub user_self_ty: Option<UserSelfTy>,\n}\n\n/// Reflects [`ty::UserType`]: this is currently\n/// disabled, and everything is printed as debug in the\n/// [`UserType::Todo`] variant.\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::UserType<'tcx>, state: S as _s)]\npub enum UserType {\n    // TODO: for now, we don't use user types at all.\n    // We disable it for now, since it cause the following to fail:\n    //\n    //    pub const MY_VAL: u16 = 5;\n    //    pub type Alias = MyStruct<MY_VAL>; // Using the literal 5, it goes through\n    //\n    //    pub struct MyStruct<const VAL: u16> {}\n    //\n    //    impl<const VAL: u16> MyStruct<VAL> {\n    //        pub const MY_CONST: u16 = VAL;\n    //    }\n    //\n    //    pub fn do_something() -> u32 {\n    //        u32::from(Alias::MY_CONST)\n    //    }\n    //\n    // In this case, we get a [ty::ConstKind::Bound] in\n    // [do_something], which we are not able to translate.\n    // See: https://github.com/hacspec/hax/pull/209\n\n    // Ty(Ty),\n    // TypeOf(DefId, UserArgs),\n    #[todo]\n    Todo(String),\n}\n\n/// Reflects [`ty::VariantDiscr`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::VariantDiscr, state: S as s)]\npub enum 
DiscriminantDefinition {\n    #[custom_arm(FROM_TYPE::Explicit(did) => TO_TYPE::Explicit { def_id: did.sinto(s), span: s.base().tcx.def_span(did).sinto(s) },)]\n    Explicit {\n        def_id: DefId,\n        span: Span,\n    },\n    Relative(u32),\n}\n\n/// Reflects [`ty::util::Discr`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::util::Discr<'tcx>, state: S as gstate)]\npub struct DiscriminantValue {\n    pub val: u128,\n    pub ty: Ty,\n}\n\n/// Reflects [`ty::Visibility`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum Visibility<Id> {\n    Public,\n    Restricted(Id),\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<S, T: SInto<S, U>, U> SInto<S, Visibility<U>> for ty::Visibility<T> {\n    fn sinto(&self, s: &S) -> Visibility<U> {\n        use ty::Visibility as T;\n        match self {\n            T::Public => Visibility::Public,\n            T::Restricted(id) => Visibility::Restricted(id.sinto(s)),\n        }\n    }\n}\n\n/// Reflects [`ty::FieldDef`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct FieldDef {\n    pub did: DefId,\n    /// Field definition of [tuple\n    /// structs](https://doc.rust-lang.org/book/ch05-01-defining-structs.html#using-tuple-structs-without-named-fields-to-create-different-types)\n    /// are anonymous, in that case `name` is [`None`].\n    pub name: Option<Symbol>,\n    pub vis: Visibility<DefId>,\n    pub ty: Ty,\n    pub span: Span,\n    pub attributes: Vec<Attribute>,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl FieldDef {\n    pub fn sfrom<'tcx, S: UnderOwnerState<'tcx>>(\n        s: &S,\n        fdef: &ty::FieldDef,\n        instantiate: ty::GenericArgsRef<'tcx>,\n    ) -> FieldDef {\n        let tcx = s.base().tcx;\n        let ty = fdef.ty(tcx, instantiate).sinto(s);\n        let name = {\n            let name = fdef.name.sinto(s);\n            let is_user_provided = {\n                // SH: 
Note that the only way I found of checking if the user wrote the name or if it\n                // is just an integer generated by rustc is by checking if it is just made of\n                // numerals...\n                name.parse::<usize>().is_err()\n            };\n            is_user_provided.then_some(name)\n        };\n\n        FieldDef {\n            did: fdef.did.sinto(s),\n            name,\n            vis: fdef.vis.sinto(s),\n            ty,\n            span: tcx.def_span(fdef.did).sinto(s),\n            attributes: get_def_attrs(tcx, fdef.did, get_def_kind(tcx, fdef.did)).sinto(s),\n        }\n    }\n}\n\n/// Reflects [`ty::VariantDef`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct VariantDef {\n    pub def_id: DefId,\n    pub ctor: Option<(CtorKind, DefId)>,\n    pub name: Symbol,\n    pub discr_def: DiscriminantDefinition,\n    pub discr_val: DiscriminantValue,\n    /// The definitions of the fields on this variant. In case of [tuple\n    /// structs/variants](https://doc.rust-lang.org/book/ch05-01-defining-structs.html#using-tuple-structs-without-named-fields-to-create-different-types),\n    /// the fields are anonymous, otherwise fields are named.\n    pub fields: IndexVec<FieldIdx, FieldDef>,\n    /// Span of the definition of the variant\n    pub span: Span,\n    pub attributes: Vec<Attribute>,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl VariantDef {\n    pub(crate) fn sfrom<'tcx, S: UnderOwnerState<'tcx>>(\n        s: &S,\n        def: &ty::VariantDef,\n        discr_val: ty::util::Discr<'tcx>,\n        instantiate: Option<ty::GenericArgsRef<'tcx>>,\n    ) -> Self {\n        let tcx = s.base().tcx;\n        let instantiate =\n            instantiate.unwrap_or_else(|| ty::GenericArgs::identity_for_item(tcx, def.def_id));\n        VariantDef {\n            def_id: def.def_id.sinto(s),\n            ctor: def.ctor.sinto(s),\n            name: def.name.sinto(s),\n            discr_def: def.discr.sinto(s),\n            
discr_val: discr_val.sinto(s),\n            fields: def\n                .fields\n                .iter()\n                .map(|f| FieldDef::sfrom(s, f, instantiate))\n                .collect(),\n            span: s.base().tcx.def_span(def.def_id).sinto(s),\n            attributes: get_def_attrs(tcx, def.def_id, get_def_kind(tcx, def.def_id)).sinto(s),\n        }\n    }\n}\n\n/// Reflects [`ty::EarlyParamRegion`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::EarlyParamRegion, state: S as s)]\npub struct EarlyParamRegion {\n    pub index: u32,\n    pub name: Symbol,\n}\n\n/// Reflects [`ty::LateParamRegion`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::LateParamRegion, state: S as s)]\npub struct LateParamRegion {\n    pub scope: DefId,\n    pub kind: LateParamRegionKind,\n}\n\n/// Reflects [`ty::LateParamRegionKind`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::LateParamRegionKind, state: S as s)]\npub enum LateParamRegionKind {\n    Anon(u32),\n    NamedAnon(u32, Symbol),\n    #[custom_arm(&FROM_TYPE::Named(def_id) => TO_TYPE::Named(def_id.sinto(s), s.base().tcx.item_name(def_id).sinto(s)),)]\n    Named(DefId, Symbol),\n    ClosureEnv,\n}\n\n/// Reflects [`ty::RegionKind`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::RegionKind<'tcx>, state: S as gstate)]\npub enum RegionKind {\n    ReEarlyParam(EarlyParamRegion),\n    ReBound(BoundVarIndexKind, BoundRegion),\n    ReLateParam(LateParamRegion),\n    ReStatic,\n    ReVar(RegionVid),\n    RePlaceholder(PlaceholderRegion),\n    ReErased,\n    
ReError(ErrorGuaranteed),\n}\n\n/// Reflects [`ty::BoundVarIndexKind`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Copy, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundVarIndexKind, state: S as gstate)]\npub enum BoundVarIndexKind {\n    Bound(DebruijnIndex),\n    Canonical,\n}\n\nsinto_as_usize!(rustc_middle::ty, DebruijnIndex);\nsinto_as_usize!(rustc_middle::ty, RegionVid);\n\n/// Reflects [`ty::Region`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::Region<'tcx>, state: S as s)]\npub struct Region {\n    #[value(self.kind().sinto(s))]\n    pub kind: RegionKind,\n}\n\n/// Reflects both [`ty::GenericArg`] and [`ty::GenericArgKind`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::GenericArgKind<'tcx>, state: S as s)]\npub enum GenericArg {\n    Lifetime(Region),\n    Type(Ty),\n    Const(ConstantExpr),\n}\n\n/// Contents of `ItemRef`.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct ItemRefContents {\n    /// The item being refered to.\n    pub def_id: DefId,\n    /// The generics passed to the item. If `in_trait` is `Some`, these are only the generics of\n    /// the method/type/const itself; generics for the traits are available in\n    /// `in_trait.unwrap().trait`.\n    pub generic_args: Vec<GenericArg>,\n    /// Witnesses of the trait clauses required by the item, e.g. `T: Sized` for `Option<T>` or `B:\n    /// ToOwned` for `Cow<'a, B>`. 
Same as above, for associated items this only includes clauses\n    /// for the item itself.\n    pub impl_exprs: Vec<ImplExpr>,\n    /// If we're referring to a trait associated item, this gives the trait clause/impl we're\n    /// referring to.\n    pub in_trait: Option<ImplExpr>,\n    /// Whether this contains any reference to a type/lifetime/const parameter.\n    pub has_param: bool,\n    /// Whether this contains any reference to a type/const parameter.\n    pub has_non_lt_param: bool,\n}\n\n/// Reference to an item, with generics. Basically any mention of an item (function, type, etc)\n/// uses this.\n///\n/// This can refer to a top-level item or to a trait associated item. Example:\n/// ```ignore\n/// trait MyTrait<TraitType, const TraitConst: usize> {\n///   fn meth<MethType>(...) {...}\n/// }\n/// fn example_call<TraitType, SelfType: MyTrait<TraitType, 12>>(x: SelfType) {\n///   x.meth::<String>(...)\n/// }\n/// ```\n/// Here, in the call `x.meth::<String>(...)` we will build an `ItemRef` that looks like:\n/// ```ignore\n/// ItemRef {\n///     def_id = MyTrait::meth,\n///     generic_args = [String],\n///     impl_exprs = [<proof of `String: Sized`>],\n///     in_trait = Some(<proof of `SelfType: MyTrait<TraitType, 12>`>,\n/// }\n/// ```\n/// The `in_trait` `ImplExpr` will have in its `trait` field a representation of the `SelfType:\n/// MyTrait<TraitType, 12>` predicate, which looks like:\n/// ```ignore\n/// ItemRef {\n///     def_id = MyTrait,\n///     generic_args = [SelfType, TraitType, 12],\n///     impl_exprs = [],\n///     in_trait = None,\n/// }\n/// ```\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[serde(transparent)]\npub struct ItemRef {\n    pub(crate) contents: id_table::Node<ItemRefContents>,\n}\n\nimpl ItemRefContents {\n    #[cfg(feature = \"rustc\")]\n    fn intern<'tcx, S: BaseState<'tcx>>(self, s: &S) -> ItemRef {\n        s.with_global_cache(|cache| {\n            let 
table_session = &mut cache.id_table_session;\n            let contents = id_table::Node::new(self, table_session);\n            ItemRef { contents }\n        })\n    }\n}\n\nimpl ItemRef {\n    /// The main way to obtain an `ItemRef`: from a `def_id` and generics.\n    #[cfg(feature = \"rustc\")]\n    pub fn translate<'tcx, S: UnderOwnerState<'tcx>>(\n        s: &S,\n        def_id: RDefId,\n        generics: ty::GenericArgsRef<'tcx>,\n    ) -> ItemRef {\n        Self::translate_maybe_resolve_impl(\n            s,\n            s.base().options.item_ref_use_concrete_impl,\n            def_id,\n            generics,\n        )\n    }\n\n    /// Makes a `ItemRef` from a `def_id` and generics.\n    ///\n    /// If `resolve_impl == true` and `(def_id, generics)` points to a trait item that\n    /// can be resolved to a specific `impl`, `translate` rewrites `def_id` to the\n    /// concrete associated item from that `impl` and re-bases the generics.\n    ///\n    /// For instance, [`<u32 as From<u8>>::from`] produces a [`ItemRef`] with a\n    /// [`DefId`] looking like `core::convert::num::Impl#42::from` when\n    /// `resolve_impl` is `true`, `core::convert::From::from` otherwise.\n    #[cfg(feature = \"rustc\")]\n    fn translate_maybe_resolve_impl<'tcx, S: UnderOwnerState<'tcx>>(\n        s: &S,\n        resolve_impl: bool,\n        mut def_id: RDefId,\n        mut generics: ty::GenericArgsRef<'tcx>,\n    ) -> ItemRef {\n        use rustc_infer::infer::canonical::ir::TypeVisitableExt;\n        let key = (def_id, generics);\n        if let Some(item) = s.with_cache(|cache| cache.item_refs.get(&key).cloned()) {\n            return item;\n        }\n\n        let tcx = s.base().tcx;\n        // If this is an associated item, resolve the trait reference.\n        let mut trait_info = self_clause_for_item(s, def_id, generics);\n\n        // If the reference is a known trait impl and the impl implements the target item, we can\n        // point directly to the implemented 
item.\n        if resolve_impl\n            && let Some(tinfo) = &trait_info\n            && let ImplExprAtom::Concrete(impl_ref) = &tinfo.r#impl\n            && let impl_def_id = impl_ref.def_id.as_rust_def_id().unwrap()\n            && let Some(implemented_item) = tcx\n                .associated_items(impl_def_id)\n                .in_definition_order()\n                .find(|item| item.trait_item_def_id() == Some(def_id))\n        {\n            let trait_def_id = tcx.parent(def_id);\n            def_id = implemented_item.def_id;\n            generics = generics.rebase_onto(tcx, trait_def_id, impl_ref.rustc_args(s));\n            trait_info = None;\n        }\n\n        let hax_def_id = def_id.sinto(s);\n        let mut hax_generics = generics.sinto(s);\n        let mut impl_exprs = solve_item_required_traits(s, def_id, generics);\n\n        // Fixup the generics.\n        if let Some(tinfo) = &trait_info {\n            // The generics are split in two: the arguments of the trait and the arguments of the\n            // method/associated item.\n            //\n            // For instance, if we have:\n            // ```\n            // trait Foo<T> {\n            //     fn baz<U>(...) { ... }\n            // }\n            //\n            // fn test<T : Foo<u32>(x: T) {\n            //     x.baz(...);\n            //     ...\n            // }\n            // ```\n            // The generics for the call to `baz` will be the concatenation: `<T, u32, U>`, which we\n            // split into `<T, u32>` and `<U>`.\n            let trait_ref = tinfo.r#trait.hax_skip_binder_ref();\n            let num_trait_generics = trait_ref.generic_args.len();\n            hax_generics.drain(0..num_trait_generics);\n            let mut num_trait_trait_clauses = trait_ref.impl_exprs.len();\n            // Items other than associated types get an extra `Self: Trait` clause as the first\n            // clause, we skip that one too. 
Note: that clause is the same as `tinfo`.\n            if !matches!(hax_def_id.kind, DefKind::AssocTy) {\n                num_trait_trait_clauses += 1;\n            };\n            impl_exprs.drain(0..num_trait_trait_clauses);\n        }\n\n        let content = ItemRefContents {\n            def_id: hax_def_id,\n            generic_args: hax_generics,\n            impl_exprs,\n            in_trait: trait_info,\n            has_param: generics.has_param()\n                || generics.has_escaping_bound_vars()\n                || generics.has_free_regions(),\n            has_non_lt_param: generics.has_param(),\n        };\n        let item = content.intern(s);\n        s.with_cache(|cache| {\n            cache.item_refs.insert(key, item.clone());\n        });\n        s.with_global_cache(|cache| {\n            cache.reverse_item_refs_map.insert(item.id(), generics);\n        });\n        item\n    }\n\n    /// Construct an `ItemRef` for items that can't have generics (e.g. modules).\n    #[cfg(feature = \"rustc\")]\n    pub fn dummy_without_generics<'tcx, S: BaseState<'tcx>>(s: &S, def_id: DefId) -> ItemRef {\n        let content = ItemRefContents {\n            def_id,\n            generic_args: Default::default(),\n            impl_exprs: Default::default(),\n            in_trait: Default::default(),\n            has_param: false,\n            has_non_lt_param: false,\n        };\n        let item = content.intern(s);\n        s.with_global_cache(|cache| {\n            cache\n                .reverse_item_refs_map\n                .insert(item.id(), ty::GenericArgsRef::default());\n        });\n        item\n    }\n\n    /// For an `ItemRef` that refers to a trait, this returns values for each of the non-gat\n    /// associated types of this trait and its parents, in a fixed order.\n    #[cfg(feature = \"rustc\")]\n    pub fn trait_associated_types<'tcx, S: UnderOwnerState<'tcx>>(&self, s: &S) -> Vec<Ty> {\n        if !matches!(self.def_id.kind, DefKind::Trait | 
DefKind::TraitAlias) {\n            panic!(\"`ItemRef::trait_associated_types` expected a trait\")\n        }\n        let tcx = s.base().tcx;\n        let typing_env = s.typing_env();\n        let def_id = self.def_id.as_rust_def_id().unwrap();\n        let generics = self.rustc_args(s);\n        let tref = ty::TraitRef::new(tcx, def_id, generics);\n        rustc_utils::assoc_tys_for_trait(tcx, typing_env, tref)\n            .into_iter()\n            .map(|alias_ty| ty::Ty::new_alias(tcx, ty::Projection, alias_ty))\n            .map(|ty| normalize(tcx, typing_env, ty))\n            .map(|ty| ty.sinto(s))\n            .collect()\n    }\n\n    /// Erase lifetimes from the generic arguments of this item.\n    #[cfg(feature = \"rustc\")]\n    pub fn erase<'tcx, S: UnderOwnerState<'tcx>>(&self, s: &S) -> Self {\n        let def_id = self.def_id.underlying_rust_def_id();\n        let args = self.rustc_args(s);\n        let args = erase_and_norm(s.base().tcx, s.typing_env(), args);\n        Self::translate(s, def_id, args).with_def_id(s, &self.def_id)\n    }\n\n    pub fn contents(&self) -> &ItemRefContents {\n        &self.contents\n    }\n\n    /// Get a unique id identitying this `ItemRef`.\n    pub fn id(&self) -> id_table::Id {\n        self.contents.id()\n    }\n\n    /// Recover the original rustc args that generated this `ItemRef`. 
Will panic if the `ItemRef`\n    /// was built by hand instead of using `translate_item_ref`.\n    #[cfg(feature = \"rustc\")]\n    pub fn rustc_args<'tcx, S: BaseState<'tcx>>(&self, s: &S) -> ty::GenericArgsRef<'tcx> {\n        s.with_global_cache(|cache| *cache.reverse_item_refs_map.get(&self.id()).unwrap())\n    }\n\n    /// Mutate the `DefId`, keeping the same generic args.\n    #[cfg(feature = \"rustc\")]\n    pub fn mutate_def_id<'tcx, S: BaseState<'tcx>>(\n        &self,\n        s: &S,\n        f: impl FnOnce(&mut DefId),\n    ) -> Self {\n        let args = self.rustc_args(s);\n        let mut contents = self.contents().clone();\n        f(&mut contents.def_id);\n        let new = contents.intern(s);\n        s.with_global_cache(|cache| {\n            cache.reverse_item_refs_map.insert(new.id(), args);\n        });\n        new\n    }\n\n    /// Set the `DefId`, keeping the same generic args.\n    #[cfg(feature = \"rustc\")]\n    pub fn with_def_id<'tcx, S: BaseState<'tcx>>(&self, s: &S, def_id: &DefId) -> Self {\n        self.mutate_def_id(s, |d| *d = def_id.clone())\n    }\n}\n\nimpl std::ops::Deref for ItemRef {\n    type Target = ItemRefContents;\n    fn deref(&self) -> &Self::Target {\n        self.contents()\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, GenericArg> for ty::GenericArg<'tcx> {\n    fn sinto(&self, s: &S) -> GenericArg {\n        self.kind().sinto(s)\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, Vec<GenericArg>> for ty::GenericArgsRef<'tcx> {\n    fn sinto(&self, s: &S) -> Vec<GenericArg> {\n        self.iter().map(|v| v.kind().sinto(s)).collect()\n    }\n}\n\n/// Reflects both [`ty::GenericArg`] and [`ty::GenericArgKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_ast::ast::LitIntType, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum 
LitIntType {\n    Signed(IntTy),\n    Unsigned(UintTy),\n    Unsuffixed,\n}\n\n/// Reflects partially [`ty::InferTy`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S>, from: ty::InferTy, state: S as gstate)]\npub enum InferTy {\n    #[custom_arm(FROM_TYPE::TyVar(..) => TO_TYPE::TyVar,)]\n    TyVar, /*TODO?*/\n    #[custom_arm(FROM_TYPE::IntVar(..) => TO_TYPE::IntVar,)]\n    IntVar, /*TODO?*/\n    #[custom_arm(FROM_TYPE::FloatVar(..) => TO_TYPE::FloatVar,)]\n    FloatVar, /*TODO?*/\n    FreshTy(u32),\n    FreshIntTy(u32),\n    FreshFloatTy(u32),\n}\n\n/// Reflects [`rustc_type_ir::IntTy`]\n#[derive(AdtInto)]\n#[args(<S>, from: rustc_type_ir::IntTy, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum IntTy {\n    Isize,\n    I8,\n    I16,\n    I32,\n    I64,\n    I128,\n}\n\n/// Reflects [`rustc_type_ir::FloatTy`]\n#[derive(AdtInto)]\n#[args(<S>, from: rustc_type_ir::FloatTy, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum FloatTy {\n    F16,\n    F32,\n    F64,\n    F128,\n}\n\n/// Reflects [`rustc_type_ir::UintTy`]\n#[derive(AdtInto)]\n#[args(<S>, from: rustc_type_ir::UintTy, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Copy, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum UintTy {\n    Usize,\n    U8,\n    U16,\n    U32,\n    U64,\n    U128,\n}\n\nimpl ToString for IntTy {\n    fn to_string(&self) -> String {\n        use IntTy::*;\n        match self {\n            Isize => \"isize\".to_string(),\n            I8 => \"i8\".to_string(),\n            I16 => \"i16\".to_string(),\n            I32 => \"i32\".to_string(),\n            I64 => \"i64\".to_string(),\n            I128 => \"i128\".to_string(),\n        }\n    }\n}\n\nimpl ToString for UintTy {\n    fn 
to_string(&self) -> String {\n        use UintTy::*;\n        match self {\n            Usize => \"usize\".to_string(),\n            U8 => \"u8\".to_string(),\n            U16 => \"u16\".to_string(),\n            U32 => \"u32\".to_string(),\n            U64 => \"u64\".to_string(),\n            U128 => \"u128\".to_string(),\n        }\n    }\n}\n\n/// Reflects [`ty::TypeAndMut`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::TypeAndMut<'tcx>, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct TypeAndMut {\n    pub ty: Box<Ty>,\n    pub mutbl: Mutability,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<S, U, T: SInto<S, U>> SInto<S, Vec<U>> for ty::List<T> {\n    fn sinto(&self, s: &S) -> Vec<U> {\n        self.iter().map(|x| x.sinto(s)).collect()\n    }\n}\n\n/// Reflects [`ty::Variance`]\n#[derive(AdtInto)]\n#[args(<S>, from: ty::Variance, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum Variance {\n    Covariant,\n    Invariant,\n    Contravariant,\n    Bivariant,\n}\n\n/// Reflects [`ty::GenericParamDef`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::GenericParamDef, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct GenericParamDef {\n    pub name: Symbol,\n    pub def_id: DefId,\n    pub index: u32,\n    pub pure_wrt_drop: bool,\n    #[value(\n        match self.kind {\n            ty::GenericParamDefKind::Lifetime => GenericParamDefKind::Lifetime,\n            ty::GenericParamDefKind::Type { has_default, synthetic } => GenericParamDefKind::Type { has_default, synthetic },\n            ty::GenericParamDefKind::Const { has_default, .. 
} => {\n                let ty = s.base().tcx.type_of(self.def_id).instantiate_identity().sinto(s);\n                GenericParamDefKind::Const { has_default, ty }\n            },\n        }\n    )]\n    pub kind: GenericParamDefKind,\n    /// Variance of this type parameter, if sensible.\n    #[value({\n        use rustc_hir::def::DefKind::*;\n        let tcx = s.base().tcx;\n        let parent = tcx.parent(self.def_id);\n        match tcx.def_kind(parent) {\n            Fn | AssocFn | Enum | Struct | Union | Ctor(..) | OpaqueTy => {\n                tcx.variances_of(parent).get(self.index as usize).sinto(s)\n            }\n            _ => None\n        }\n    })]\n    pub variance: Option<Variance>,\n    #[value(s.base().tcx.def_span(self.def_id).sinto(s))]\n    pub span: Span,\n    #[value({\n        let tcx = s.base().tcx;\n        get_def_attrs(tcx, self.def_id, get_def_kind(tcx, self.def_id)).sinto(s)\n    })]\n    pub attributes: Vec<Attribute>,\n}\n\n/// Reflects [`ty::GenericParamDefKind`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum GenericParamDefKind {\n    Lifetime,\n    Type { has_default: bool, synthetic: bool },\n    Const { has_default: bool, ty: Ty },\n}\n\n/// Reflects [`ty::Generics`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::Generics, state: S as state)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct TyGenerics {\n    pub parent: Option<DefId>,\n    pub parent_count: usize,\n    #[from(own_params)]\n    pub params: Vec<GenericParamDef>,\n    // pub param_def_id_to_index: FxHashMap<DefId, u32>,\n    pub has_self: bool,\n    pub has_late_bound_regions: Option<Span>,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl TyGenerics {\n    pub(crate) fn count_total_params(&self) -> usize {\n        self.parent_count + self.params.len()\n    }\n}\n\n/// This type merges the information from\n/// `rustc_type_ir::AliasKind` and 
`ty::AliasTy`\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct Alias {\n    pub kind: AliasKind,\n    pub args: Vec<GenericArg>,\n    pub def_id: DefId,\n}\n\n/// Reflects [`ty::AliasKind`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum AliasKind {\n    /// The projection of a trait type: `<Ty as Trait<...>>::Type<...>`\n    Projection {\n        /// The `impl Trait for Ty` in `Ty: Trait<..., Type = U>`.\n        impl_expr: ImplExpr,\n        /// The `Type` in `Ty: Trait<..., Type = U>`.\n        assoc_item: AssocItem,\n    },\n    /// An associated type in an inherent impl.\n    Inherent,\n    /// An `impl Trait` opaque type.\n    Opaque {\n        /// The real type hidden inside this opaque type.\n        hidden_ty: Ty,\n    },\n    /// A type alias that references opaque types. Likely to always be normalized away.\n    Free,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl Alias {\n    #[tracing::instrument(level = \"trace\", skip(s))]\n    fn from<'tcx, S: UnderOwnerState<'tcx>>(\n        s: &S,\n        alias_kind: &rustc_type_ir::AliasTyKind,\n        alias_ty: &ty::AliasTy<'tcx>,\n    ) -> TyKind {\n        let tcx = s.base().tcx;\n        let typing_env = s.typing_env();\n        use rustc_type_ir::AliasTyKind as RustAliasKind;\n\n        // Try to normalize the alias first.\n        let ty = ty::Ty::new_alias(tcx, *alias_kind, *alias_ty);\n        let ty = crate::traits::normalize(tcx, typing_env, ty);\n        let ty::Alias(alias_kind, alias_ty) = ty.kind() else {\n            let ty: Ty = ty.sinto(s);\n            return ty.kind().clone();\n        };\n\n        let kind = match alias_kind {\n            RustAliasKind::Projection => {\n                let trait_ref = alias_ty.trait_ref(tcx);\n                // In a case like:\n                // ```\n                // impl<T, U> Trait for Result<T, U>\n                // 
where\n                //     for<'a> &'a Result<T, U>: IntoIterator,\n                //     for<'a> <&'a Result<T, U> as IntoIterator>::Item: Copy,\n                // {}\n                // ```\n                // the `&'a Result<T, U> as IntoIterator` trait ref has escaping bound variables\n                // yet we dont have a binder around (could even be several). Binding this correctly\n                // is therefore difficult. Since our trait resolution ignores lifetimes anyway, we\n                // just erase them. See also https://github.com/hacspec/hax/issues/747.\n                let trait_ref = crate::traits::erase_free_regions(tcx, trait_ref);\n                let item = tcx.associated_item(alias_ty.def_id);\n                AliasKind::Projection {\n                    assoc_item: AssocItem::sfrom(s, &item),\n                    impl_expr: solve_trait(s, ty::Binder::dummy(trait_ref)),\n                }\n            }\n            RustAliasKind::Inherent => AliasKind::Inherent,\n            RustAliasKind::Opaque => {\n                // Reveal the underlying `impl Trait` type.\n                let ty = tcx.type_of(alias_ty.def_id).instantiate(tcx, alias_ty.args);\n                AliasKind::Opaque {\n                    hidden_ty: ty.sinto(s),\n                }\n            }\n            RustAliasKind::Free => AliasKind::Free,\n        };\n        TyKind::Alias(Alias {\n            kind,\n            args: alias_ty.args.sinto(s),\n            def_id: alias_ty.def_id.sinto(s),\n        })\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, Box<Ty>> for ty::Ty<'tcx> {\n    fn sinto(&self, s: &S) -> Box<Ty> {\n        Box::new(self.sinto(s))\n    }\n}\n\n/// Reflects [`rustc_middle::ty::Ty`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[serde(transparent)]\npub struct Ty {\n    pub(crate) kind: id_table::Node<TyKind>,\n}\n\nimpl Ty {\n    #[cfg(feature 
= \"rustc\")]\n    pub fn new<'tcx, S: BaseState<'tcx>>(s: &S, kind: TyKind) -> Self {\n        s.with_global_cache(|cache| {\n            let table_session = &mut cache.id_table_session;\n            let kind = id_table::Node::new(kind, table_session);\n            Ty { kind }\n        })\n    }\n\n    pub fn inner(&self) -> &Arc<TyKind> {\n        self.kind.inner()\n    }\n\n    pub fn kind(&self) -> &TyKind {\n        self.inner().as_ref()\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, Ty> for rustc_middle::ty::Ty<'tcx> {\n    fn sinto(&self, s: &S) -> Ty {\n        if let Some(ty) = s.with_cache(|cache| cache.tys.get(self).cloned()) {\n            return ty;\n        }\n        let kind: TyKind = self.kind().sinto(s);\n        let ty = Ty::new(s, kind);\n        s.with_cache(|cache| {\n            cache.tys.insert(*self, ty.clone());\n        });\n        ty\n    }\n}\n\n/// Reflects [`ty::TyKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::TyKind<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum TyKind {\n    Bool,\n    Char,\n    Int(IntTy),\n    Uint(UintTy),\n    Float(FloatTy),\n\n    #[custom_arm(\n        ty::TyKind::FnDef(fun_id, generics) => {\n            let item = translate_item_ref(s, *fun_id, generics);\n            let tcx = s.base().tcx;\n            let fn_sig = tcx.fn_sig(*fun_id).instantiate(tcx, generics);\n            let fn_sig = Box::new(fn_sig.sinto(s));\n            TyKind::FnDef { item, fn_sig }\n        },\n    )]\n    /// Reflects [`ty::TyKind::FnDef`]\n    FnDef {\n        item: ItemRef,\n        fn_sig: Box<PolyFnSig>,\n    },\n\n    #[custom_arm(\n        ty::TyKind::FnPtr(tys, header) => {\n            let sig = tys.map_bound(|tys| ty::FnSig {\n                inputs_and_output: tys.inputs_and_output,\n                c_variadic: header.c_variadic,\n                safety: 
header.safety,\n                abi: header.abi,\n            });\n            TyKind::Arrow(Box::new(sig.sinto(s)))\n        },\n    )]\n    /// Reflects [`ty::TyKind::FnPtr`]\n    Arrow(Box<PolyFnSig>),\n\n    #[custom_arm(\n        ty::TyKind::Closure (def_id, generics) => {\n            let closure = generics.as_closure();\n            TyKind::Closure(ClosureArgs::sfrom(s, *def_id, closure))\n        },\n    )]\n    Closure(ClosureArgs),\n\n    #[custom_arm(FROM_TYPE::Adt(adt_def, generics) => TO_TYPE::Adt(translate_item_ref(s, adt_def.did(), generics)),)]\n    Adt(ItemRef),\n    #[custom_arm(FROM_TYPE::Foreign(def_id) => TO_TYPE::Foreign(translate_item_ref(s, *def_id, Default::default())),)]\n    Foreign(ItemRef),\n    /// The `ItemRef` uses the fake `Array` def_id.\n    #[custom_arm(FROM_TYPE::Array(ty, len) => TO_TYPE::Array({\n        let def_id = s.with_global_cache(|c| c.get_synthetic_def_id(s, SyntheticItem::Array));\n        let args = s.base().tcx.mk_args(&[(*ty).into(), (*len).into()]);\n        ItemRef::translate(s, def_id, args)\n    }),)]\n    Array(ItemRef),\n    /// The `ItemRef` uses the fake `Slice` def_id.\n    #[custom_arm(FROM_TYPE::Slice(ty) => TO_TYPE::Slice({\n        let def_id = s.with_global_cache(|c| c.get_synthetic_def_id(s, SyntheticItem::Slice));\n        let args = s.base().tcx.mk_args(&[(*ty).into()]);\n        ItemRef::translate(s, def_id, args)\n    }),)]\n    Slice(ItemRef),\n    /// The `ItemRef` uses the fake `Tuple` def_id.\n    #[custom_arm(FROM_TYPE::Tuple(tys) => TO_TYPE::Tuple({\n        let def_id = s.with_global_cache(|c| c.get_synthetic_def_id(s, SyntheticItem::Tuple(tys.len())));\n        let args = s.base().tcx.mk_args_from_iter(tys.into_iter().map(ty::GenericArg::from));\n        ItemRef::translate(s, def_id, args)\n    }),)]\n    Tuple(ItemRef),\n    Str,\n    RawPtr(Box<Ty>, Mutability),\n    Ref(Region, Box<Ty>, Mutability),\n    #[custom_arm(FROM_TYPE::Dynamic(preds, region) => make_dyn(s, preds, region),)]\n  
  Dynamic(\n        /// Fresh type parameter that we use as the `Self` type in the prediates below.\n        ParamTy,\n        /// Clauses that define the trait object. These clauses use the fresh type parameter above\n        /// as `Self` type.\n        GenericPredicates,\n        Region,\n    ),\n    #[custom_arm(FROM_TYPE::Coroutine(def_id, generics) => TO_TYPE::Coroutine(translate_item_ref(s, *def_id, generics)),)]\n    Coroutine(ItemRef),\n    Never,\n    #[custom_arm(FROM_TYPE::Alias(alias_kind, alias_ty) => Alias::from(s, alias_kind, alias_ty),)]\n    Alias(Alias),\n    Param(ParamTy),\n    Bound(BoundVarIndexKind, BoundTy),\n    Placeholder(PlaceholderType),\n    Infer(InferTy),\n    #[custom_arm(FROM_TYPE::Error(..) => TO_TYPE::Error,)]\n    Error,\n    #[todo]\n    Todo(String),\n}\n\n/// Transform existential predicates into properly resolved predicates.\n#[cfg(feature = \"rustc\")]\nfn make_dyn<'tcx, S: UnderOwnerState<'tcx>>(\n    s: &S,\n    epreds: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,\n    region: &ty::Region<'tcx>,\n) -> TyKind {\n    let tcx = s.base().tcx;\n    let def_id = s.owner_id();\n    let span = rustc_span::DUMMY_SP.sinto(s);\n\n    // Pretend there's an extra type in the environment.\n    let new_param_ty = {\n        let generics = tcx.generics_of(def_id);\n        let param_count = generics.parent_count + generics.own_params.len();\n        ty::ParamTy::new(param_count as u32 + 1, rustc_span::Symbol::intern(\"_dyn\"))\n    };\n    let new_ty = new_param_ty.to_ty(tcx);\n\n    // Set the new type as the `Self` parameter of our predicates.\n    let clauses: Vec<ty::Clause<'_>> = epreds\n        .iter()\n        .map(|epred| epred.with_self_ty(tcx, new_ty))\n        .collect();\n\n    // Populate a predicate searcher that knows about the `dyn` clauses.\n    let mut predicate_searcher = s.with_predicate_searcher(|ps| ps.clone());\n    predicate_searcher\n        
.insert_bound_predicates(clauses.iter().filter_map(|clause| clause.as_trait_clause()));\n    predicate_searcher.set_param_env(rustc_trait_selection::traits::normalize_param_env_or_error(\n        tcx,\n        ty::ParamEnv::new(\n            tcx.mk_clauses_from_iter(\n                s.param_env()\n                    .caller_bounds()\n                    .iter()\n                    .chain(clauses.iter().copied()),\n            ),\n        ),\n        rustc_trait_selection::traits::ObligationCause::dummy(),\n    ));\n\n    // Using the predicate searcher, translate the predicates. Only the projection predicates need\n    // to be handled specially.\n    let predicates = clauses\n        .into_iter()\n        .map(|clause| {\n            let clause = match clause.as_projection_clause() {\n                // Translate normally\n                None => clause.sinto(s),\n                // Translate by hand using our predicate searcher. This does the same as\n                // `clause.sinto(s)` except that it uses our predicate searcher to resolve the\n                // projection `ImplExpr`.\n                Some(proj) => {\n                    let bound_vars = proj.bound_vars().sinto(s);\n                    let proj = {\n                        let alias_ty = &proj.skip_binder().projection_term.expect_ty(tcx);\n                        let impl_expr = {\n                            let poly_trait_ref = proj.rebind(alias_ty.trait_ref(tcx));\n                            predicate_searcher\n                                .resolve(&poly_trait_ref, &|_| {})\n                                .s_unwrap(s)\n                                .sinto(s)\n                        };\n                        let Term::Ty(ty) = proj.skip_binder().term.sinto(s) else {\n                            unreachable!()\n                        };\n                        let item = tcx.associated_item(alias_ty.def_id);\n                        ProjectionPredicate {\n                        
    impl_expr,\n                            assoc_item: AssocItem::sfrom(s, &item),\n                            ty,\n                        }\n                    };\n                    let kind = Binder {\n                        value: ClauseKind::Projection(proj),\n                        bound_vars,\n                    };\n                    let id = kind.clone().map(PredicateKind::Clause).predicate_id();\n                    Clause { kind, id }\n                }\n            };\n            (clause, span.clone())\n        })\n        .collect();\n\n    let predicates = GenericPredicates { predicates };\n    let param_ty = new_param_ty.sinto(s);\n    let region = region.sinto(s);\n    TyKind::Dynamic(param_ty, predicates, region)\n}\n\n/// Reflects [`ty::CanonicalUserTypeAnnotation`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::CanonicalUserTypeAnnotation<'tcx>, state: S as gstate)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub struct CanonicalUserTypeAnnotation {\n    pub user_ty: CanonicalUserType,\n    pub span: Span,\n    pub inferred_ty: Ty,\n}\n\n/// Reflects [`ty::AdtKind`]\n#[derive_group(Serializers)]\n#[derive(Copy, Clone, Debug, JsonSchema)]\npub enum AdtKind {\n    Struct,\n    Union,\n    Enum,\n    /// We sometimes pretend arrays are an ADT and generate a `FullDef` for them.\n    Array,\n    /// We sometimes pretend slices are an ADT and generate a `FullDef` for them.\n    Slice,\n    /// We sometimes pretend tuples are an ADT and generate a `FullDef` for them.\n    Tuple,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, AdtKind> for ty::AdtKind {\n    fn sinto(&self, _s: &S) -> AdtKind {\n        match self {\n            ty::AdtKind::Struct => AdtKind::Struct,\n            ty::AdtKind::Union => AdtKind::Union,\n            ty::AdtKind::Enum => AdtKind::Enum,\n        }\n    }\n}\n\nsinto_todo!(rustc_middle::ty, AdtFlags);\n\n/// Reflects 
[`ty::ReprOptions`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: rustc_abi::ReprOptions, state: S as s)]\npub struct ReprOptions {\n    /// Whether an explicit integer representation was specified.\n    #[value(self.int.is_some())]\n    pub int_specified: bool,\n    /// The actual discriminant type resulting from the representation options.\n    #[value({\n        use crate::rustc_middle::ty::util::IntTypeExt;\n        self.discr_type().to_ty(s.base().tcx).sinto(s)\n    })]\n    pub typ: Ty,\n    pub align: Option<Align>,\n    pub pack: Option<Align>,\n    #[value(ReprFlags { is_c: self.c(), is_transparent: self.transparent(), is_simd: self.simd() })]\n    pub flags: ReprFlags,\n}\n\n/// The representation flags without the ones irrelevant outside of rustc.\n#[derive_group(Serializers)]\n#[derive(Default, Clone, Debug, JsonSchema)]\npub struct ReprFlags {\n    pub is_c: bool,\n    pub is_transparent: bool,\n    pub is_simd: bool,\n}\n\n/// Reflects [`ty::Align`], but directly stores the number of bytes as a u64.\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: rustc_abi::Align, state: S as _s)]\npub struct Align {\n    #[value({\n        self.bytes()\n    })]\n    pub bytes: u64,\n}\n\n/// Reflects [`ty::adjustment::PointerCoercion`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum PointerCoercion {\n    ReifyFnPointer,\n    UnsafeFnPointer,\n    ClosureFnPointer(Safety),\n    MutToConstPointer,\n    ArrayToPointer,\n    Unsize(UnsizingMetadata),\n}\n\n/// The metadata to attach to the newly-unsized ptr.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema)]\npub enum UnsizingMetadata {\n    Length(ConstantExpr),\n    VTablePtr(ImplExpr),\n    Unknown,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl PointerCoercion {\n    pub fn sfrom<'tcx, S: 
UnderOwnerState<'tcx>>(\n        s: &S,\n        coercion: ty::adjustment::PointerCoercion,\n        src_ty: ty::Ty<'tcx>,\n        tgt_ty: ty::Ty<'tcx>,\n    ) -> PointerCoercion {\n        match coercion {\n            ty::adjustment::PointerCoercion::ReifyFnPointer => PointerCoercion::ReifyFnPointer,\n            ty::adjustment::PointerCoercion::UnsafeFnPointer => PointerCoercion::UnsafeFnPointer,\n            ty::adjustment::PointerCoercion::ClosureFnPointer(x) => {\n                PointerCoercion::ClosureFnPointer(x.sinto(s))\n            }\n            ty::adjustment::PointerCoercion::MutToConstPointer => {\n                PointerCoercion::MutToConstPointer\n            }\n            ty::adjustment::PointerCoercion::ArrayToPointer => PointerCoercion::ArrayToPointer,\n            ty::adjustment::PointerCoercion::Unsize => {\n                // We only support unsizing behind references, pointers and boxes for now.\n                let meta = match (src_ty.builtin_deref(true), tgt_ty.builtin_deref(true)) {\n                    (Some(src_ty), Some(tgt_ty)) => {\n                        let tcx = s.base().tcx;\n                        let typing_env = s.typing_env();\n                        let (src_ty, tgt_ty) =\n                            tcx.struct_lockstep_tails_raw(src_ty, tgt_ty, |ty| {\n                                normalize(tcx, typing_env, ty)\n                            });\n                        match tgt_ty.kind() {\n                            ty::Slice(_) | ty::Str => match src_ty.kind() {\n                                ty::Array(_, len) => {\n                                    let len = len.sinto(s);\n                                    UnsizingMetadata::Length(len)\n                                }\n                                _ => UnsizingMetadata::Unknown,\n                            },\n                            ty::Dynamic(preds, ..) 
=> {\n                                let pred = preds[0].with_self_ty(tcx, src_ty);\n                                let clause = pred.as_trait_clause().expect(\n                                    \"the first `ExistentialPredicate` of `TyKind::Dynamic` \\\n                                        should be a trait clause\",\n                                );\n                                let tref = clause.rebind(clause.skip_binder().trait_ref);\n                                let impl_expr = solve_trait(s, tref);\n                                UnsizingMetadata::VTablePtr(impl_expr)\n                            }\n                            _ => UnsizingMetadata::Unknown,\n                        }\n                    }\n                    _ => UnsizingMetadata::Unknown,\n                };\n                PointerCoercion::Unsize(meta)\n            }\n        }\n    }\n}\n\n/// Reflects [`ty::FnSig`]\n#[derive_group(Serializers)]\n#[derive(AdtInto, Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::FnSig<'tcx>, state: S as s)]\npub struct TyFnSig {\n    #[value(self.inputs().sinto(s))]\n    pub inputs: Vec<Ty>,\n    #[value(self.output().sinto(s))]\n    pub output: Ty,\n    pub c_variadic: bool,\n    pub safety: Safety,\n    pub abi: ExternAbi,\n}\n\n/// Reflects [`ty::PolyFnSig`]\npub type PolyFnSig = Binder<TyFnSig>;\n\n/// Reflects [`ty::TraitRef`]\n/// Contains the def_id and arguments passed to the trait. The first type argument is the `Self`\n/// type. 
The `ImplExprs` are the _required_ predicate for this trait; currently they are always\n/// empty because we consider all trait predicates as implied.\n/// `self.in_trait` is always `None` because a trait can't be associated to another one.\npub type TraitRef = ItemRef;\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, TraitRef> for ty::TraitRef<'tcx> {\n    fn sinto(&self, s: &S) -> TraitRef {\n        translate_item_ref(s, self.def_id, self.args)\n    }\n}\n\n/// Reflects [`ty::TraitPredicate`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::TraitPredicate<'tcx>, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct TraitPredicate {\n    pub trait_ref: TraitRef,\n    #[map(*x == ty::PredicatePolarity::Positive)]\n    #[from(polarity)]\n    pub is_positive: bool,\n}\n\n/// Reflects [`ty::OutlivesPredicate`] as a named struct\n/// instead of a tuple struct. 
This is because the script converting\n/// JSONSchema types to OCaml doesn't support tuple structs, and this\n/// is the only tuple struct in the whole AST.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct OutlivesPredicate<T> {\n    pub lhs: T,\n    pub rhs: Region,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>, T, U> SInto<S, OutlivesPredicate<U>>\n    for ty::OutlivesPredicate<'tcx, T>\nwhere\n    T: SInto<S, U>,\n{\n    fn sinto(&self, s: &S) -> OutlivesPredicate<U> where {\n        OutlivesPredicate {\n            lhs: self.0.sinto(s),\n            rhs: self.1.sinto(s),\n        }\n    }\n}\n\n/// Reflects [`ty::RegionOutlivesPredicate`]\npub type RegionOutlivesPredicate = OutlivesPredicate<Region>;\n/// Reflects [`ty::TypeOutlivesPredicate`]\npub type TypeOutlivesPredicate = OutlivesPredicate<Ty>;\n\n/// Reflects [`ty::Term`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum Term {\n    Ty(Ty),\n    Const(ConstantExpr),\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, Term> for ty::Term<'tcx> {\n    fn sinto(&self, s: &S) -> Term {\n        use ty::TermKind;\n        match self.kind() {\n            TermKind::Ty(ty) => Term::Ty(ty.sinto(s)),\n            TermKind::Const(c) => Term::Const(c.sinto(s)),\n        }\n    }\n}\n\n/// Expresses a constraints over an associated type.\n///\n/// For instance:\n/// ```text\n/// fn f<T : Foo<S = String>>(...)\n///              ^^^^^^^^^^\n/// ```\n/// (provided the trait `Foo` has an associated type `S`).\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct ProjectionPredicate {\n    /// The `impl Trait for Ty` in `Ty: Trait<..., Type = U>`.\n    pub impl_expr: ImplExpr,\n    /// The `Type` in `Ty: Trait<..., Type = U>`.\n    pub assoc_item: AssocItem,\n    /// The type 
`U` in `Ty: Trait<..., Type = U>`.\n    pub ty: Ty,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderBinderState<'tcx>> SInto<S, ProjectionPredicate>\n    for ty::ProjectionPredicate<'tcx>\n{\n    fn sinto(&self, s: &S) -> ProjectionPredicate {\n        let tcx = s.base().tcx;\n        let alias_ty = &self.projection_term.expect_ty(tcx);\n        let poly_trait_ref = s.binder().rebind(alias_ty.trait_ref(tcx));\n        let Term::Ty(ty) = self.term.sinto(s) else {\n            unreachable!()\n        };\n        let item = tcx.associated_item(alias_ty.def_id);\n        ProjectionPredicate {\n            impl_expr: solve_trait(s, poly_trait_ref),\n            assoc_item: AssocItem::sfrom(s, &item),\n            ty,\n        }\n    }\n}\n\n/// Reflects [`ty::ClauseKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderBinderState<'tcx>>, from: ty::ClauseKind<'tcx>, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum ClauseKind {\n    Trait(TraitPredicate),\n    RegionOutlives(RegionOutlivesPredicate),\n    TypeOutlives(TypeOutlivesPredicate),\n    Projection(ProjectionPredicate),\n    ConstArgHasType(ConstantExpr, Ty),\n    WellFormed(Term),\n    ConstEvaluatable(ConstantExpr),\n    HostEffect(HostEffectPredicate),\n    UnstableFeature(Symbol),\n}\n\nsinto_todo!(rustc_middle::ty, HostEffectPredicate<'tcx>);\n\n/// Reflects [`ty::Clause`] and adds a hash-consed predicate identifier.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct Clause {\n    pub kind: Binder<ClauseKind>,\n    pub id: PredicateId,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, Clause> for ty::Clause<'tcx> {\n    fn sinto(&self, s: &S) -> Clause {\n        let kind = self.kind().sinto(s);\n        let id = kind.clone().map(PredicateKind::Clause).predicate_id();\n        Clause { kind, id }\n    }\n}\n\n#[cfg(feature = 
\"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, Clause> for ty::PolyTraitPredicate<'tcx> {\n    fn sinto(&self, s: &S) -> Clause {\n        let kind: Binder<_> = self.sinto(s);\n        let kind: Binder<ClauseKind> = kind.map(ClauseKind::Trait);\n        let id = kind.clone().map(PredicateKind::Clause).predicate_id();\n        Clause { kind, id }\n    }\n}\n\n/// Reflects [`ty::Predicate`] and adds a hash-consed predicate identifier.\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct Predicate {\n    pub kind: Binder<PredicateKind>,\n    pub id: PredicateId,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, Predicate> for ty::Predicate<'tcx> {\n    fn sinto(&self, s: &S) -> Predicate {\n        let kind = self.kind().sinto(s);\n        let id = kind.predicate_id();\n        Predicate { kind, id }\n    }\n}\n\n/// Reflects [`ty::BoundVariableKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::BoundVariableKind, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum BoundVariableKind {\n    Ty(BoundTyKind),\n    Region(BoundRegionKind),\n    Const,\n}\n\n/// Reflects [`ty::Binder`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct Binder<T> {\n    pub value: T,\n    pub bound_vars: Vec<BoundVariableKind>,\n}\n\nimpl<T> Binder<T> {\n    pub fn as_ref(&self) -> Binder<&T> {\n        Binder {\n            value: &self.value,\n            bound_vars: self.bound_vars.clone(),\n        }\n    }\n\n    pub fn hax_skip_binder(self) -> T {\n        self.value\n    }\n\n    pub fn hax_skip_binder_ref(&self) -> &T {\n        &self.value\n    }\n\n    pub fn map<U>(self, f: impl FnOnce(T) -> U) -> Binder<U> {\n        Binder {\n            value: f(self.value),\n            bound_vars: self.bound_vars,\n 
       }\n    }\n\n    pub fn inner_mut(&mut self) -> &mut T {\n        &mut self.value\n    }\n\n    pub fn rebind<U>(&self, value: U) -> Binder<U> {\n        self.as_ref().map(|_| value)\n    }\n}\n\n/// Reflects [`ty::GenericPredicates`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::GenericPredicates<'tcx>, state: S as s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, Default, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct GenericPredicates {\n    #[value(self.predicates.iter().map(|x| x.sinto(s)).collect())]\n    pub predicates: Vec<(Clause, Span)>,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>> SInto<S, GenericPredicates>\n    for crate::traits::Predicates<'tcx>\n{\n    fn sinto(&self, s: &S) -> GenericPredicates {\n        GenericPredicates {\n            predicates: self.as_ref().sinto(s),\n        }\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl<'tcx, S: UnderOwnerState<'tcx>, T1, T2> SInto<S, Binder<T2>> for ty::Binder<'tcx, T1>\nwhere\n    T1: SInto<StateWithBinder<'tcx>, T2>,\n{\n    fn sinto(&self, s: &S) -> Binder<T2> {\n        let bound_vars = self.bound_vars().sinto(s);\n        let value = {\n            let under_binder_s = &s.with_binder(self.as_ref().map_bound(|_| ()));\n            self.as_ref().skip_binder().sinto(under_binder_s)\n        };\n        Binder { value, bound_vars }\n    }\n}\n\n/// Reflects [`ty::SubtypePredicate`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::SubtypePredicate<'tcx>, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct SubtypePredicate {\n    pub a_is_expected: bool,\n    pub a: Ty,\n    pub b: Ty,\n}\n\n/// Reflects [`ty::CoercePredicate`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::CoercePredicate<'tcx>, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, 
Eq, PartialOrd, Ord)]\npub struct CoercePredicate {\n    pub a: Ty,\n    pub b: Ty,\n}\n\n/// Reflects [`ty::AliasRelationDirection`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::AliasRelationDirection, state: S as _tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum AliasRelationDirection {\n    Equate,\n    Subtype,\n}\n\n/// Reflects [`ty::ClosureArgs`]\n#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, JsonSchema)]\n#[derive_group(Serializers)]\npub struct ClosureArgs {\n    pub item: ItemRef,\n    /// The base kind of this closure. The kinds are ordered by inclusion: any `Fn` works as an\n    /// `FnMut`, and any `FnMut` works as an `FnOnce`.\n    pub kind: ClosureKind,\n    /// The signature of the function that the closure implements, e.g. `fn(A, B, C) -> D`.\n    pub fn_sig: PolyFnSig,\n    /// The set of captured variables. Together they form the state of the closure.\n    pub upvar_tys: Vec<Ty>,\n}\n\nimpl ClosureArgs {\n    /// Iterate over the upvars that are borrows with erased regions. 
These may require allocating\n    /// fresh regions.\n    pub fn iter_upvar_borrows(&self) -> impl Iterator<Item = &Ty> {\n        self.upvar_tys.iter().filter(|ty| {\n            matches!(\n                ty.kind(),\n                TyKind::Ref(\n                    Region {\n                        kind: RegionKind::ReErased\n                    },\n                    ..\n                )\n            )\n        })\n    }\n}\n\n#[cfg(feature = \"rustc\")]\nimpl ClosureArgs {\n    // Manual implementation because we need the `def_id` of the closure.\n    pub(crate) fn sfrom<'tcx, S>(\n        s: &S,\n        def_id: RDefId,\n        from: ty::ClosureArgs<ty::TyCtxt<'tcx>>,\n    ) -> Self\n    where\n        S: UnderOwnerState<'tcx>,\n    {\n        let tcx = s.base().tcx;\n        let sig = from.sig();\n        let item = {\n            // The closure has no generics of its own: it inherits its parent generics and could\n            // have late-bound args but these are part of the signature.\n            let parent_args = tcx.mk_args(from.parent_args());\n            translate_item_ref(s, def_id, parent_args)\n        };\n        ClosureArgs {\n            item,\n            kind: from.kind().sinto(s),\n            fn_sig: tcx\n                .signature_unclosure(sig, rustc_hir::Safety::Safe)\n                .sinto(s),\n            upvar_tys: from.upvar_tys().sinto(s),\n        }\n    }\n}\n\n/// Reflects [`ty::ClosureKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderOwnerState<'tcx>>, from: ty::ClosureKind, state: S as _tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum ClosureKind {\n    Fn,\n    FnMut,\n    FnOnce,\n}\n\nsinto_todo!(rustc_middle::ty, NormalizesTo<'tcx>);\n\n/// Reflects [`ty::PredicateKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: UnderBinderState<'tcx>>, from: ty::PredicateKind<'tcx>, state: S as tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, 
Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum PredicateKind {\n    Clause(ClauseKind),\n    DynCompatible(DefId),\n    Subtype(SubtypePredicate),\n    Coerce(CoercePredicate),\n    ConstEquate(ConstantExpr, ConstantExpr),\n    Ambiguous,\n    AliasRelate(Term, Term, AliasRelationDirection),\n    NormalizesTo(NormalizesTo),\n}\n\n/// Reflects [`ty::AssocItem`]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub struct AssocItem {\n    pub def_id: DefId,\n    /// This is `None` for RPTITs.\n    pub name: Option<Symbol>,\n    pub kind: AssocKind,\n    pub container: AssocItemContainer,\n    /// Whether this item has a value (e.g. this is `false` for trait methods without default\n    /// implementations).\n    pub has_value: bool,\n}\n\n#[cfg(feature = \"rustc\")]\nimpl AssocItem {\n    pub fn sfrom<'tcx, S: BaseState<'tcx>>(s: &S, item: &ty::AssocItem) -> AssocItem {\n        Self::sfrom_instantiated(s, item, None)\n    }\n\n    /// Translate an `AssocItem` and optionally instantiate it with the provided arguments.\n    pub fn sfrom_instantiated<'tcx, S: BaseState<'tcx>>(\n        s: &S,\n        item: &ty::AssocItem,\n        item_args: Option<ty::GenericArgsRef<'tcx>>,\n    ) -> AssocItem {\n        let tcx = s.base().tcx;\n        // We want to solve traits in the context of this item.\n        let s = &s.with_owner_id(item.def_id);\n        let item_args =\n            item_args.unwrap_or_else(|| ty::GenericArgs::identity_for_item(tcx, item.def_id));\n        let container_id = item.container_id(tcx);\n        let container_args = item_args.truncate_to(tcx, tcx.generics_of(container_id));\n        let container = match item.container {\n            ty::AssocContainer::Trait => {\n                let trait_ref =\n                    ty::TraitRef::new_from_args(tcx, container_id, container_args).sinto(s);\n                AssocItemContainer::TraitContainer { trait_ref }\n            }\n            
ty::AssocContainer::TraitImpl(implemented_item_id) => {\n                let implemented_item_id = implemented_item_id.unwrap();\n                let item = translate_item_ref(s, container_id, container_args);\n                let implemented_trait_ref = tcx\n                    .impl_trait_ref(container_id)\n                    .instantiate(tcx, container_args);\n                let implemented_trait_item = translate_item_ref(\n                    s,\n                    implemented_item_id,\n                    item_args.rebase_onto(tcx, container_id, implemented_trait_ref.args),\n                );\n                AssocItemContainer::TraitImplContainer {\n                    impl_: item,\n                    implemented_trait_ref: implemented_trait_ref.sinto(s),\n                    implemented_trait_item,\n                    overrides_default: tcx.defaultness(implemented_item_id).has_value(),\n                }\n            }\n            ty::AssocContainer::InherentImpl => AssocItemContainer::InherentImplContainer {\n                impl_id: container_id.sinto(s),\n            },\n        };\n        AssocItem {\n            def_id: item.def_id.sinto(s),\n            name: item.opt_name().sinto(s),\n            kind: item.kind.sinto(s),\n            container,\n            has_value: item.defaultness(tcx).has_value(),\n        }\n    }\n}\n\n/// Reflects [`ty::AssocKind`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: ty::AssocKind, state: S as _tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum AssocKind {\n    Const { name: Symbol },\n    Fn { name: Symbol, has_self: bool },\n    Type { data: AssocTypeData },\n}\n\n/// Reflects [`ty::AssocTypeData`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: ty::AssocTypeData, state: S as _tcx)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum 
AssocTypeData {\n    Normal(Symbol),\n    Rpitit(ImplTraitInTraitData),\n}\n\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum AssocItemContainer {\n    TraitContainer {\n        trait_ref: TraitRef,\n    },\n    TraitImplContainer {\n        /// Reference to the def_id of the impl block.\n        impl_: ItemRef,\n        /// The trait ref implemented by the impl block.\n        implemented_trait_ref: TraitRef,\n        /// The the associated item (in the trait declaration) that is being implemented.\n        implemented_trait_item: ItemRef,\n        /// Whether the corresponding trait item had a default (and therefore this one overrides\n        /// it).\n        overrides_default: bool,\n    },\n    InherentImplContainer {\n        impl_id: DefId,\n    },\n}\n\n/// Reflects [`ty::ImplTraitInTraitData`]\n#[derive(AdtInto)]\n#[args(<'tcx, S: BaseState<'tcx>>, from: ty::ImplTraitInTraitData, state: S as _s)]\n#[derive_group(Serializers)]\n#[derive(Clone, Debug, JsonSchema, Hash, PartialEq, Eq, PartialOrd, Ord)]\npub enum ImplTraitInTraitData {\n    Trait {\n        fn_def_id: DefId,\n        opaque_def_id: DefId,\n    },\n    Impl {\n        fn_def_id: DefId,\n    },\n}\n"
  },
  {
    "path": "frontend/exporter/src/utils/error_macros.rs",
    "content": "macro_rules! format_with_context {\n    ($format_str:expr $(,$arg:expr)* $(; {$($x:expr),*})?) => {\n        format!(\n            concat!(\n                $format_str\n                $(, \"\\n\\nContext:\\n\", $(concat!(\" - \", stringify!($x), \": \"), \"{:#?}\", \"\\n\",)*)?\n            ),\n            $($arg,)*\n            $($($x,)*)?\n        )\n    };\n    ($($tt:tt)*) => {format!($($tt)*)};\n}\n\nmod internal_helpers {\n    macro_rules! _verb {\n        (fatal, $o:expr, $message:expr) => {\n            $o.struct_fatal($message)\n        };\n        (error, $o:expr, $message:expr) => {\n            $o.struct_err($message)\n        };\n        (warn, $o:expr, $message:expr) => {\n            $o.struct_warn($message)\n        };\n    }\n    macro_rules! _span_verb_base {\n        ($verb:ident, $s:ident, $span:expr, $message:expr) => {{\n            let backtrace = std::backtrace::Backtrace::capture();\n            eprintln!(\"{}\", backtrace);\n            let mut builder = $crate::utils::_verb!($verb, $s.base().tcx.dcx(), $message);\n            if let Some(span) = $span {\n                builder.span(span.clone());\n            }\n            builder.code(rustc_errors::codes::ErrCode::MAX);\n            builder.note(\n                \"⚠️ This is a bug in Hax's frontend.\nPlease report this error to https://github.com/hacspec/hax/issues with some context (e.g. the current crate)!\",\n            );\n            builder.emit()\n        }};\n    }\n\n    pub(crate) use _span_verb_base;\n    pub(crate) use _verb;\n}\n\nmacro_rules! 
report {\n    ($verb:ident, $s:ident [$span:expr], $($tt:tt)*) => {\n        $crate::utils::_span_verb_base!($verb, $s, Some($span), $crate::utils::format_with_context!($($tt)*))\n    };\n    ($verb:ident, $s:ident, $($tt:tt)*) => {\n        $crate::utils::_span_verb_base!(\n            $verb,\n            $s,\n            $s.base().opt_def_id.map(|did| $s.base().tcx.def_span(did)),\n            $crate::utils::format_with_context!($($tt)*)\n        )\n    };\n}\n\nmacro_rules! error { ($($tt:tt)*) => {$crate::utils::report!(error, $($tt)*)} }\n#[allow(unused_macros)]\nmacro_rules! warning { ($($tt:tt)*) => {$crate::utils::report!(warn, $($tt)*)} }\nmacro_rules! fatal { ($($tt:tt)*) => {$crate::utils::report!(fatal, $($tt)*)} }\n\npub(crate) use format_with_context;\npub(crate) use internal_helpers::_span_verb_base;\npub(crate) use internal_helpers::_verb;\npub(crate) use report;\n\nmacro_rules! supposely_unreachable_message {\n    ($label:literal) => {\n        concat!(\n            \"Supposely unreachable place in the Rust AST. The label is \",\n            stringify!($label),\n            \".\\nThis error report happend because some assumption about the Rust AST was broken.\"\n        )\n    };\n}\n\nmacro_rules! supposely_unreachable {\n    ($s:ident $([$span:expr])?, $label:literal $($tt:tt)*) => {\n        {\n            $crate::utils::error!($s$([$span])?, $crate::utils::supposely_unreachable_message!($label) $($tt)+)\n        }\n    };\n}\nmacro_rules! 
supposely_unreachable_fatal {\n    ($s:ident $([$span:expr])?, $label:literal $($tt:tt)*) => {\n        $crate::utils::fatal!($s$([$span])?, $crate::utils::supposely_unreachable_message!($label) $($tt)+)\n    };\n}\n\npub(crate) use error;\npub(crate) use fatal;\npub(crate) use supposely_unreachable;\npub(crate) use supposely_unreachable_fatal;\npub(crate) use supposely_unreachable_message;\n#[allow(unused_imports)]\npub(crate) use warning;\n\npub trait SExpect: Sized {\n    type Output;\n    fn s_expect<'tcx, S: crate::BaseState<'tcx>>(self, s: &S, message: &str) -> Self::Output;\n\n    fn s_unwrap<'tcx, S: crate::BaseState<'tcx>>(self, s: &S) -> Self::Output {\n        self.s_expect(s, \"\")\n    }\n}\n\nmod s_expect_impls {\n    use super::*;\n    struct Dummy;\n    impl std::fmt::Debug for Dummy {\n        fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n            write!(f, \"...\")\n        }\n    }\n\n    fn s_expect_error<'tcx>(\n        s: &impl crate::BaseState<'tcx>,\n        expected: impl std::fmt::Debug,\n        got: impl std::fmt::Debug,\n        message: &str,\n    ) -> ! {\n        fatal!(\n            s,\n            \"s_expect: expected {:?}, got {:?}. {}\",\n            expected,\n            got,\n            message\n        )\n    }\n\n    impl<T: std::fmt::Debug> SExpect for Option<T> {\n        type Output = T;\n        fn s_expect<'tcx, S: crate::BaseState<'tcx>>(self, s: &S, message: &str) -> Self::Output {\n            self.unwrap_or_else(|| s_expect_error(s, Some(Dummy), None::<()>, message))\n        }\n    }\n\n    impl<T: std::fmt::Debug, E: std::fmt::Debug> SExpect for Result<T, E> {\n        type Output = T;\n        fn s_expect<'tcx, S: crate::BaseState<'tcx>>(self, s: &S, message: &str) -> Self::Output {\n            self.unwrap_or_else(|e| s_expect_error(s, Ok::<_, ()>(Dummy), Err::<(), _>(e), message))\n        }\n    }\n}\n\nmacro_rules! 
s_assert {\n    ($s:ident, $assertion:expr) => {{\n        if !($assertion) {\n            fatal!($s, \"assertion failed: {}\", stringify!($assertion))\n        }\n    }};\n}\npub(crate) use s_assert;\n"
  },
  {
    "path": "frontend/exporter/src/utils/mod.rs",
    "content": "mod error_macros;\nmod type_map;\n\npub use error_macros::*;\npub use type_map::*;\n"
  },
  {
    "path": "frontend/exporter/src/utils/type_map.rs",
    "content": "use std::{\n    any::{Any, TypeId},\n    collections::HashMap,\n    marker::PhantomData,\n};\n\npub trait TypeMappable = Any + Send + Sync;\n\n/// Defines a mapping from types to types.\npub trait TypeMapper {\n    type Value<T: TypeMappable>: TypeMappable;\n}\n\n/// A map that maps types to values in a generic manner: we store for each type `T` a value of\n/// type `M::Value<T>`.\npub struct TypeMap<M> {\n    data: HashMap<TypeId, Box<dyn TypeMappable>>,\n    phantom: PhantomData<M>,\n}\n\nimpl<M: TypeMapper> TypeMap<M> {\n    pub fn get<T: TypeMappable>(&self) -> Option<&M::Value<T>> {\n        self.data\n            .get(&TypeId::of::<T>())\n            // We must be careful to not accidentally cast the box itself as `dyn Any`.\n            .map(|val: &Box<dyn TypeMappable>| &**val)\n            .and_then(|val: &dyn TypeMappable| (val as &dyn Any).downcast_ref())\n    }\n\n    pub fn get_mut<T: TypeMappable>(&mut self) -> Option<&mut M::Value<T>> {\n        self.data\n            .get_mut(&TypeId::of::<T>())\n            // We must be careful to not accidentally cast the box itself as `dyn Any`.\n            .map(|val: &mut Box<dyn TypeMappable>| &mut **val)\n            .and_then(|val: &mut dyn TypeMappable| (val as &mut dyn Any).downcast_mut())\n    }\n    pub fn or_default<T: TypeMappable>(&mut self) -> &mut M::Value<T>\n    where\n        M::Value<T>: Default,\n    {\n        if self.get::<T>().is_none() {\n            self.insert::<T>(Default::default());\n        }\n        self.get_mut().unwrap()\n    }\n\n    pub fn insert<T: TypeMappable>(&mut self, val: M::Value<T>) -> Option<Box<M::Value<T>>> {\n        self.data\n            .insert(TypeId::of::<T>(), Box::new(val))\n            .and_then(|val: Box<dyn TypeMappable>| (val as Box<dyn Any>).downcast().ok())\n    }\n}\n\nimpl<M> Default for TypeMap<M> {\n    fn default() -> Self {\n        Self {\n            data: Default::default(),\n            phantom: Default::default(),\n        
}\n    }\n}\n"
  },
  {
    "path": "hax-bounded-integers/Cargo.toml",
    "content": "[package]\nname = \"hax-bounded-integers\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\ndescription = \"Newtypes for working with bounded integers with hax\"\n\n[dependencies]\nduplicate = \"1.0.0\"\nhax-lib.workspace = true\npaste = \"1.0.15\"\n"
  },
  {
    "path": "hax-bounded-integers/proofs/fstar/extraction/Hax_bounded_integers.Num_traits.fst",
    "content": "module Hax_bounded_integers.Num_traits\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen Core_models\nopen FStar.Mul\n\nclass t_BitOps (v_Self: Type0) = {\n  f_Output:Type0;\n  f_count_ones_pre:v_Self -> bool;\n  f_count_ones_post:v_Self -> u32 -> bool;\n  f_count_ones:x0: v_Self\n    -> Prims.Pure u32 (f_count_ones_pre x0) (fun result -> f_count_ones_post x0 result);\n  f_count_zeros_pre:v_Self -> bool;\n  f_count_zeros_post:v_Self -> u32 -> bool;\n  f_count_zeros:x0: v_Self\n    -> Prims.Pure u32 (f_count_zeros_pre x0) (fun result -> f_count_zeros_post x0 result);\n  f_leading_ones_pre:v_Self -> bool;\n  f_leading_ones_post:v_Self -> u32 -> bool;\n  f_leading_ones:x0: v_Self\n    -> Prims.Pure u32 (f_leading_ones_pre x0) (fun result -> f_leading_ones_post x0 result);\n  f_leading_zeros_pre:v_Self -> bool;\n  f_leading_zeros_post:v_Self -> u32 -> bool;\n  f_leading_zeros:x0: v_Self\n    -> Prims.Pure u32 (f_leading_zeros_pre x0) (fun result -> f_leading_zeros_post x0 result);\n  f_trailing_ones_pre:v_Self -> bool;\n  f_trailing_ones_post:v_Self -> u32 -> bool;\n  f_trailing_ones:x0: v_Self\n    -> Prims.Pure u32 (f_trailing_ones_pre x0) (fun result -> f_trailing_ones_post x0 result);\n  f_trailing_zeros_pre:v_Self -> bool;\n  f_trailing_zeros_post:v_Self -> u32 -> bool;\n  f_trailing_zeros:x0: v_Self\n    -> Prims.Pure u32 (f_trailing_zeros_pre x0) (fun result -> f_trailing_zeros_post x0 result);\n  f_rotate_left_pre:v_Self -> u32 -> bool;\n  f_rotate_left_post:v_Self -> u32 -> f_Output -> bool;\n  f_rotate_left:x0: v_Self -> x1: u32\n    -> Prims.Pure f_Output (f_rotate_left_pre x0 x1) (fun result -> f_rotate_left_post x0 x1 result);\n  f_rotate_right_pre:v_Self -> u32 -> bool;\n  f_rotate_right_post:v_Self -> u32 -> f_Output -> bool;\n  f_rotate_right:x0: v_Self -> x1: u32\n    -> Prims.Pure f_Output\n        (f_rotate_right_pre x0 x1)\n        (fun result -> f_rotate_right_post x0 x1 result);\n  f_from_be_pre:v_Self -> bool;\n  
f_from_be_post:v_Self -> f_Output -> bool;\n  f_from_be:x0: v_Self\n    -> Prims.Pure f_Output (f_from_be_pre x0) (fun result -> f_from_be_post x0 result);\n  f_from_le_pre:v_Self -> bool;\n  f_from_le_post:v_Self -> f_Output -> bool;\n  f_from_le:x0: v_Self\n    -> Prims.Pure f_Output (f_from_le_pre x0) (fun result -> f_from_le_post x0 result);\n  f_to_be_pre:v_Self -> bool;\n  f_to_be_post:v_Self -> f_Output -> bool;\n  f_to_be:x0: v_Self -> Prims.Pure f_Output (f_to_be_pre x0) (fun result -> f_to_be_post x0 result);\n  f_to_le_pre:v_Self -> bool;\n  f_to_le_post:v_Self -> f_Output -> bool;\n  f_to_le:x0: v_Self -> Prims.Pure f_Output (f_to_le_pre x0) (fun result -> f_to_le_post x0 result);\n  f_pow_pre:v_Self -> u32 -> bool;\n  f_pow_post:v_Self -> u32 -> f_Output -> bool;\n  f_pow:x0: v_Self -> x1: u32\n    -> Prims.Pure f_Output (f_pow_pre x0 x1) (fun result -> f_pow_post x0 x1 result)\n}\n\nclass t_CheckedAdd (v_Self: Type0) (v_Rhs: Type0) = {\n  f_Output:Type0;\n  f_checked_add_pre:v_Self -> v_Rhs -> bool;\n  f_checked_add_post:v_Self -> v_Rhs -> Core_models.Option.t_Option f_Output -> bool;\n  f_checked_add:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure (Core_models.Option.t_Option f_Output)\n        (f_checked_add_pre x0 x1)\n        (fun result -> f_checked_add_post x0 x1 result)\n}\n\nclass t_CheckedDiv (v_Self: Type0) (v_Rhs: Type0) = {\n  f_Output:Type0;\n  f_checked_div_pre:v_Self -> v_Rhs -> bool;\n  f_checked_div_post:v_Self -> v_Rhs -> Core_models.Option.t_Option f_Output -> bool;\n  f_checked_div:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure (Core_models.Option.t_Option f_Output)\n        (f_checked_div_pre x0 x1)\n        (fun result -> f_checked_div_post x0 x1 result)\n}\n\nclass t_CheckedMul (v_Self: Type0) (v_Rhs: Type0) = {\n  f_Output:Type0;\n  f_checked_mul_pre:v_Self -> v_Rhs -> bool;\n  f_checked_mul_post:v_Self -> v_Rhs -> Core_models.Option.t_Option f_Output -> bool;\n  f_checked_mul:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure 
(Core_models.Option.t_Option f_Output)\n        (f_checked_mul_pre x0 x1)\n        (fun result -> f_checked_mul_post x0 x1 result)\n}\n\nclass t_CheckedNeg (v_Self: Type0) = {\n  f_Output:Type0;\n  f_checked_neg_pre:v_Self -> bool;\n  f_checked_neg_post:v_Self -> Core_models.Option.t_Option f_Output -> bool;\n  f_checked_neg:x0: v_Self\n    -> Prims.Pure (Core_models.Option.t_Option f_Output)\n        (f_checked_neg_pre x0)\n        (fun result -> f_checked_neg_post x0 result)\n}\n\nclass t_CheckedSub (v_Self: Type0) (v_Rhs: Type0) = {\n  f_Output:Type0;\n  f_checked_sub_pre:v_Self -> v_Rhs -> bool;\n  f_checked_sub_post:v_Self -> v_Rhs -> Core_models.Option.t_Option f_Output -> bool;\n  f_checked_sub:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure (Core_models.Option.t_Option f_Output)\n        (f_checked_sub_pre x0 x1)\n        (fun result -> f_checked_sub_post x0 x1 result)\n}\n\nclass t_FromBytes (v_Self: Type0) = {\n  f_BYTES:Type0;\n  f_from_le_bytes_pre:f_BYTES -> bool;\n  f_from_le_bytes_post:f_BYTES -> v_Self -> bool;\n  f_from_le_bytes:x0: f_BYTES\n    -> Prims.Pure v_Self (f_from_le_bytes_pre x0) (fun result -> f_from_le_bytes_post x0 result);\n  f_from_be_bytes_pre:f_BYTES -> bool;\n  f_from_be_bytes_post:f_BYTES -> v_Self -> bool;\n  f_from_be_bytes:x0: f_BYTES\n    -> Prims.Pure v_Self (f_from_be_bytes_pre x0) (fun result -> f_from_be_bytes_post x0 result)\n}\n\nclass t_NumOps (v_Self: Type0) (v_Rhs: Type0) (v_Output: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_9126539072073536218:Core_models.Ops.Arith.t_Add v_Self\n    v_Rhs;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_9784678892199232396:Core_models.Ops.Arith.t_Sub v_Self\n    v_Rhs;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_7005199110250618039:Core_models.Ops.Arith.t_Mul v_Self\n    v_Rhs;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_12366019628759357413:Core_models.Ops.Arith.t_Div v_Self\n    v_Rhs;\n  [@@@ 
FStar.Tactics.Typeclasses.no_method]_super_11859756759858186302:Core_models.Ops.Arith.t_Rem v_Self\n    v_Rhs\n}\n\nclass t_One (v_Self: Type0) = {\n  f_one_pre:Prims.unit -> bool;\n  f_one_post:Prims.unit -> v_Self -> bool;\n  f_one:x0: Prims.unit -> Prims.Pure v_Self (f_one_pre x0) (fun result -> f_one_post x0 result)\n}\n\nclass t_ToBytes (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_3732703090464998751:t_FromBytes v_Self;\n  f_to_le_bytes_pre:v_Self -> bool;\n  f_to_le_bytes_post:v_Self -> v_3732703090464998751.f_BYTES -> bool;\n  f_to_le_bytes:x0: v_Self\n    -> Prims.Pure v_3732703090464998751.f_BYTES\n        (f_to_le_bytes_pre x0)\n        (fun result -> f_to_le_bytes_post x0 result);\n  f_to_be_bytes_pre:v_Self -> bool;\n  f_to_be_bytes_post:v_Self -> v_3732703090464998751.f_BYTES -> bool;\n  f_to_be_bytes:x0: v_Self\n    -> Prims.Pure v_3732703090464998751.f_BYTES\n        (f_to_be_bytes_pre x0)\n        (fun result -> f_to_be_bytes_post x0 result)\n}\n\nclass t_WrappingAdd (v_Self: Type0) (v_Rhs: Type0) = {\n  f_Output:Type0;\n  f_wrapping_add_pre:v_Self -> v_Rhs -> bool;\n  f_wrapping_add_post:v_Self -> v_Rhs -> f_Output -> bool;\n  f_wrapping_add:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output\n        (f_wrapping_add_pre x0 x1)\n        (fun result -> f_wrapping_add_post x0 x1 result)\n}\n\nclass t_WrappingDiv (v_Self: Type0) (v_Rhs: Type0) = {\n  f_Output:Type0;\n  f_wrapping_div_pre:v_Self -> v_Rhs -> bool;\n  f_wrapping_div_post:v_Self -> v_Rhs -> f_Output -> bool;\n  f_wrapping_div:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output\n        (f_wrapping_div_pre x0 x1)\n        (fun result -> f_wrapping_div_post x0 x1 result)\n}\n\nclass t_WrappingMul (v_Self: Type0) (v_Rhs: Type0) = {\n  f_Output:Type0;\n  f_wrapping_mul_pre:v_Self -> v_Rhs -> bool;\n  f_wrapping_mul_post:v_Self -> v_Rhs -> f_Output -> bool;\n  f_wrapping_mul:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output\n        (f_wrapping_mul_pre x0 x1)\n      
  (fun result -> f_wrapping_mul_post x0 x1 result)\n}\n\nclass t_WrappingSub (v_Self: Type0) (v_Rhs: Type0) = {\n  f_Output:Type0;\n  f_wrapping_sub_pre:v_Self -> v_Rhs -> bool;\n  f_wrapping_sub_post:v_Self -> v_Rhs -> f_Output -> bool;\n  f_wrapping_sub:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output\n        (f_wrapping_sub_pre x0 x1)\n        (fun result -> f_wrapping_sub_post x0 x1 result)\n}\n\nclass t_Zero (v_Self: Type0) = {\n  f_zero_pre:Prims.unit -> bool;\n  f_zero_post:Prims.unit -> v_Self -> bool;\n  f_zero:x0: Prims.unit -> Prims.Pure v_Self (f_zero_pre x0) (fun result -> f_zero_post x0 result)\n}\n\nclass t_MachineInt (v_Self: Type0) (v_Output: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_11581440318597584651:Core_models.Marker.t_Copy v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_12866954522599331834:Core_models.Cmp.t_PartialOrd v_Self\n    v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_13035911912416111195:Core_models.Cmp.t_Ord v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_12632649257025169145:Core_models.Cmp.t_PartialEq v_Self\n    v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_8099741844003281729:Core_models.Cmp.t_Eq v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_9841570312332416173:t_Zero v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_12668241202577409386:t_One v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_9487321769118300762:Core_models.Ops.Bit.t_Not v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_1980884762883925305:t_NumOps v_Self\n    v_Self\n    v_Output;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_13929479875548649875:Core_models.Ops.Bit.t_BitAnd v_Self\n    v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_1708325062211865233:Core_models.Ops.Bit.t_BitOr v_Self\n    v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_1501688608269502122:Core_models.Ops.Bit.t_BitXor v_Self\n    v_Self;\n  [@@@ 
FStar.Tactics.Typeclasses.no_method]_super_15083490293093561556:Core_models.Ops.Bit.t_Shl v_Self\n    v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_9065931548762825726:Core_models.Ops.Bit.t_Shr v_Self\n    v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_5052970308637232515:t_CheckedAdd v_Self v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_739902999637339236:t_CheckedSub v_Self v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_15323401662629887609:t_CheckedMul v_Self v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_8119502507145032897:t_CheckedDiv v_Self v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_12846047806852469117:t_WrappingAdd v_Self v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_12408554086330550784:t_WrappingSub v_Self v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_8633193508996485932:t_WrappingMul v_Self v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_16339457892016115661:t_WrappingDiv v_Self v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_12348120774285878195:t_BitOps v_Self\n}\n"
  },
  {
    "path": "hax-bounded-integers/proofs/fstar/extraction/Hax_bounded_integers.fst",
    "content": "module Hax_bounded_integers\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen Core_models\nopen FStar.Mul\n\n///Bounded i128 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\nlet t_BoundedI128 (v_MIN v_MAX: i128) = x: i128{x >=. v_MIN && x <=. v_MAX}\n\n///Bounded i16 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\nlet t_BoundedI16 (v_MIN v_MAX: i16) = x: i16{x >=. v_MIN && x <=. v_MAX}\n\n///Bounded i32 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\nlet t_BoundedI32 (v_MIN v_MAX: i32) = x: i32{x >=. v_MIN && x <=. v_MAX}\n\n///Bounded i64 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\nlet t_BoundedI64 (v_MIN v_MAX: i64) = x: i64{x >=. v_MIN && x <=. v_MAX}\n\n///Bounded i8 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\nlet t_BoundedI8 (v_MIN v_MAX: i8) = x: i8{x >=. v_MIN && x <=. v_MAX}\n\n///Bounded isize integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\nlet t_BoundedIsize (v_MIN v_MAX: isize) = x: isize{x >=. v_MIN && x <=. v_MAX}\n\n///Bounded u128 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\nlet t_BoundedU128 (v_MIN v_MAX: u128) = x: u128{x >=. v_MIN && x <=. v_MAX}\n\n///Bounded u16 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\nlet t_BoundedU16 (v_MIN v_MAX: u16) = x: u16{x >=. v_MIN && x <=. v_MAX}\n\n///Bounded u32 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\nlet t_BoundedU32 (v_MIN v_MAX: u32) = x: u32{x >=. v_MIN && x <=. 
v_MAX}\n\n///Bounded u64 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\nlet t_BoundedU64 (v_MIN v_MAX: u64) = x: u64{x >=. v_MIN && x <=. v_MAX}\n\n///Bounded u8 integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\nlet t_BoundedU8 (v_MIN v_MAX: u8) = x: u8{x >=. v_MIN && x <=. v_MAX}\n\n///Bounded usize integers. This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\nlet t_BoundedUsize (v_MIN v_MAX: usize) = x: usize{x >=. v_MIN && x <=. v_MAX}\n"
  },
  {
    "path": "hax-bounded-integers/src/lib.rs",
    "content": "use hax_lib::Refinement;\npub mod num_traits;\n\npub mod _macro_utils {\n    pub use duplicate;\n    pub use paste;\n}\n\n#[doc(hidden)]\n#[macro_export]\nmacro_rules! derivate_binop_for_bounded {\n    ($(<$(const $cst_name:ident : $cst_ty:ty),*>)?{$t:ident, $bounded_t:ident}; $($tt:tt)*) => {\n        $crate::derivate_binop_for_bounded!($(<$(const $cst_name:$cst_ty),*>)?{$t, $bounded_t, get, Self::Output}; $($tt)*) ;\n    };\n    ($(<$(const $cst_name:ident : $cst_ty:ty),*>)?{$t:ident, $bounded_t:ident, $get:ident, $out:ty};) => {};\n    ($(<$(const $cst_name:ident : $cst_ty:ty),*>)?{$t:ident, $bounded_t:ident, $get:ident, $out:ty}; ($trait:ident, $meth:ident), $($tt:tt)*) => {\n        $crate::derivate_binop_for_bounded!(@$t, $bounded_t, $trait, $meth, $get, $out, $(<$(const $cst_name:$cst_ty),*>)?);\n        $crate::derivate_binop_for_bounded!($(<$(const $cst_name:$cst_ty),*>)?{$t, $bounded_t, $get, $out}; $($tt)*);\n    };\n    (@$t:ident, $bounded_t:ident, $trait:ident, $meth:ident, $get:ident, $out:ty$(,)?) 
=> {\n        $crate::derivate_binop_for_bounded!(\n            @$t, $bounded_t, $trait, $meth, $get, $out,\n            <const MIN: $t, const MAX: $t>\n        );\n    };\n    (@$t:ident, $bounded_t:ident, $trait:ident, $meth:ident, $get:ident, $out:ty,\n     <$(const $cst_name:ident : $cst_ty:ty),*>\n    ) => {\n        $crate::_macro_utils::paste::paste!{\n            // BoundedT<A, B> <OP> BoundedT<C, D>\n            impl<$(const [< $cst_name _LHS >]: $cst_ty,)* $(const [< $cst_name _RHS >]: $cst_ty,)*>\n                $trait<$bounded_t<$([< $cst_name _RHS >],)*>> for $bounded_t<$([< $cst_name _LHS >],)*>\n            {\n                type Output = $t;\n                #[inline(always)]\n                fn $meth(self, other: $bounded_t<$([< $cst_name _RHS >],)*>) -> $out {\n                    (self.$get()).$meth(other.$get())\n                }\n            }\n\n            // BoundedT<A, B> <OP> T\n            impl<$(const $cst_name: $cst_ty,)*> $trait<$t> for $bounded_t<$($cst_name,)*> {\n                type Output = $t;\n                #[inline(always)]\n                fn $meth(self, other: $t) -> $out {\n                    (self.$get()).$meth(other)\n                }\n            }\n\n\n            // T <OP> BoundedT<A, B>\n            impl<$(const $cst_name: $cst_ty,)*> $trait<$bounded_t<$($cst_name,)*>> for $t {\n                type Output = $t;\n                #[inline(always)]\n                fn $meth(self, other: $bounded_t<$($cst_name,)*>) -> $out {\n                    (self).$meth(other.$get())\n                }\n            }\n        }\n    };\n}\n\n#[doc(hidden)]\n#[macro_export]\nmacro_rules! 
derivate_assign_binop_for_bounded {\n    ($(<$(const $cst_name:ident : $cst_ty:ty),*>)?{$t:ident, $bounded_t:ident}; $($tt:tt)*) => {\n        $crate::derivate_assign_binop_for_bounded!($(<$(const $cst_name:$cst_ty),*>)?{$t, $bounded_t, get, Self::Output}; $($tt)*) ;\n    };\n    ($(<$(const $cst_name:ident : $cst_ty:ty),*>)?{$t:ident, $bounded_t:ident, $get:ident, $out:ty};) => {};\n    ($(<$(const $cst_name:ident : $cst_ty:ty),*>)?{$t:ident, $bounded_t:ident, $get:ident, $out:ty}; ($trait:ident, $meth:ident), $($tt:tt)*) => {\n        $crate::derivate_assign_binop_for_bounded!(@$t, $bounded_t, $trait, $meth, $get, $out, $(<$(const $cst_name:$cst_ty),*>)?);\n        $crate::derivate_assign_binop_for_bounded!($(<$(const $cst_name:$cst_ty),*>)?{$t, $bounded_t, $get, $out}; $($tt)*);\n    };\n    (@$t:ident, $bounded_t:ident, $trait:ident, $meth:ident, $get:ident, $out:ty$(,)?) => {\n        $crate::derivate_assign_binop_for_bounded!(\n            @$t, $bounded_t, $trait, $meth, $get, $out,\n            <const MIN: $t, const MAX: $t>\n        );\n    };\n    (@$t:ident, $bounded_t:ident, $trait:ident, $meth:ident, $get:ident, $out:ty,\n     <$(const $cst_name:ident : $cst_ty:ty),*>\n    ) => {\n        $crate::_macro_utils::paste::paste!{\n            // BoundedT<A, B> <OP> BoundedT<C, D>\n            impl<$(const [< $cst_name _LHS >]: $cst_ty,)* $(const [< $cst_name _RHS >]: $cst_ty,)*>\n                $trait<$bounded_t<$([< $cst_name _RHS >],)*>> for $bounded_t<$([< $cst_name _LHS >],)*>\n            {\n                #[inline(always)]\n                fn $meth(&mut self, other: $bounded_t<$([< $cst_name _RHS >],)*>) {\n                    self.get_mut().$meth(other.$get())\n                }\n            }\n\n            // BoundedT<A, B> <OP> $t\n            impl<$(const [< $cst_name _LHS >]: $cst_ty,)*>\n                $trait<$t> for $bounded_t<$([< $cst_name _LHS >],)*>\n            {\n                #[inline(always)]\n                fn $meth(&mut self, 
other: $t) {\n                    self.get_mut().$meth(other)\n                }\n            }\n\n            // $t <OP> BoundedT<A, B>\n            impl<$(const [< $cst_name _RHS >]: $cst_ty,)*>\n                $trait<$bounded_t<$([< $cst_name _RHS >],)*>> for $t\n            {\n                #[inline(always)]\n                fn $meth(&mut self, other: $bounded_t<$([< $cst_name _RHS >],)*>) {\n                    self.$meth(other.get())\n                }\n            }\n        }\n    };\n}\n\n#[doc(hidden)]\n#[macro_export]\nmacro_rules! derivate_operations_for_bounded {\n    ($bounded_t:ident($t: ident $($bytes:expr)?)$(,)?\n     <$(const $cst_name:ident : $cst_ty:ty),*>\n    ) => {\n          #[$crate::_macro_utils::duplicate::duplicate_item(\n              INTRO_CONSTANTS USE_CONSTANTS;\n              [ $(const $cst_name:$cst_ty),* ] [ $($cst_name),* ];\n          )]\n          #[hax_lib::exclude]\n        const _: () = {\n            use ::core::ops::*;\n            use $crate::num_traits::*;\n            use ::hax_lib::Refinement;\n\n            $crate::derivate_assign_binop_for_bounded!(\n                <INTRO_CONSTANTS>\n                {$t, $bounded_t};\n                (AddAssign, add_assign),\n                (SubAssign, sub_assign),\n                (MulAssign, mul_assign),\n                (DivAssign, div_assign),\n                (RemAssign, rem_assign),\n                (ShlAssign, shl_assign),\n                (ShrAssign, shr_assign),\n                (BitAndAssign, bitand_assign),\n                (BitOrAssign, bitor_assign),\n                (BitXorAssign, bitxor_assign),\n            );\n\n            $crate::derivate_binop_for_bounded!(\n                <INTRO_CONSTANTS>\n                {$t, $bounded_t};\n                (Add, add), (Sub, sub), (Mul, mul), (Div, div), (Rem, rem),\n                (BitOr, bitor), (BitAnd, bitand), (BitXor, bitxor),\n                (Shl, shl), (Shr, shr),\n                (WrappingAdd, wrapping_add), 
(WrappingSub, wrapping_sub),\n                (WrappingMul, wrapping_mul), (WrappingDiv, wrapping_div),\n            );\n\n            $crate::derivate_binop_for_bounded!(\n                <INTRO_CONSTANTS>\n                {$t, $bounded_t, get, Option<Self::Output>};\n                (CheckedAdd, checked_add), (CheckedSub, checked_sub),\n                (CheckedMul, checked_mul), (CheckedDiv, checked_div),\n            );\n\n            impl<INTRO_CONSTANTS> CheckedNeg for $bounded_t<USE_CONSTANTS> {\n                type Output = $t;\n                #[inline(always)]\n                fn checked_neg(&self) -> Option<$t> {\n                    self.deref().checked_neg()\n                }\n            }\n\n            impl<INTRO_CONSTANTS> Not for $bounded_t<USE_CONSTANTS> {\n                type Output = $t;\n                #[inline(always)]\n                fn not(self) -> Self::Output {\n                    self.deref().not()\n                }\n            }\n\n            impl<INTRO_CONSTANTS> NumOps<Self, $t> for $bounded_t<USE_CONSTANTS> {}\n\n            // impl<INTRO_CONSTANTS> Bounded for $bounded_t<USE_CONSTANTS> {\n            //     #[inline(always)]\n            //     fn min_value() -> Self {\n            //         Self::new(MIN)\n            //     }\n            //     #[inline(always)]\n            //     fn max_value() -> Self {\n            //         Self::new(MAX)\n            //     }\n            // }\n\n            $(\n                impl<INTRO_CONSTANTS> FromBytes for $bounded_t<USE_CONSTANTS> {\n                    type BYTES = [u8; $bytes];\n\n                    #[inline(always)]\n                    fn from_le_bytes(bytes: Self::BYTES) -> Self {\n                        Self::new($t::from_le_bytes(bytes))\n                    }\n                    #[inline(always)]\n                    fn from_be_bytes(bytes: Self::BYTES) -> Self {\n                        Self::new($t::from_be_bytes(bytes))\n                    }\n                
}\n\n                impl<INTRO_CONSTANTS> ToBytes for $bounded_t<USE_CONSTANTS> {\n                    #[inline(always)]\n                    fn to_le_bytes(self) -> Self::BYTES {\n                        self.get().to_le_bytes()\n                    }\n                    #[inline(always)]\n                    fn to_be_bytes(self) -> Self::BYTES {\n                        self.get().to_be_bytes()\n                    }\n                }\n            )?\n\n            impl<INTRO_CONSTANTS> Zero for $bounded_t<USE_CONSTANTS> {\n                #[inline(always)]\n                fn zero() -> Self {\n                    Self::new(0)\n                }\n            }\n\n            impl<INTRO_CONSTANTS> One for $bounded_t<USE_CONSTANTS> {\n                #[inline(always)]\n                fn one() -> Self {\n                    Self::new(1)\n                }\n            }\n\n            impl<INTRO_CONSTANTS> MachineInt<$t> for $bounded_t<USE_CONSTANTS> { }\n\n            impl<INTRO_CONSTANTS> BitOps for $bounded_t<USE_CONSTANTS> {\n                type Output = $t;\n\n                #[inline(always)]\n                fn count_ones(self) -> u32 {\n                    self.get().count_ones()\n                }\n                #[inline(always)]\n                fn count_zeros(self) -> u32 {\n                    self.get().count_zeros()\n                }\n                #[inline(always)]\n                fn leading_ones(self) -> u32 {\n                    self.get().leading_ones()\n                }\n                #[inline(always)]\n                fn leading_zeros(self) -> u32 {\n                    self.get().leading_zeros()\n                }\n                #[inline(always)]\n                fn trailing_ones(self) -> u32 {\n                    self.get().trailing_ones()\n                }\n                #[inline(always)]\n                fn trailing_zeros(self) -> u32 {\n                    self.get().trailing_zeros()\n                }\n                
#[inline(always)]\n                fn rotate_left(self, n: u32) -> Self::Output {\n                    self.get().rotate_left(n)\n                }\n                #[inline(always)]\n                fn rotate_right(self, n: u32) -> Self::Output {\n                    self.get().rotate_right(n)\n                }\n                #[inline(always)]\n                fn from_be(x: Self) -> Self::Output {\n                    Self::Output::from_be(x.get())\n                }\n                #[inline(always)]\n                fn from_le(x: Self) -> Self::Output {\n                    Self::Output::from_le(x.get())\n                }\n                #[inline(always)]\n                fn to_be(self) -> Self::Output {\n                    Self::Output::to_be(self.get())\n                }\n                #[inline(always)]\n                fn to_le(self) -> Self::Output {\n                    Self::Output::to_le(self.get())\n                }\n                #[inline(always)]\n                fn pow(self, exp: u32) -> Self::Output {\n                    Self::Output::pow(self.get(), exp)\n                }\n            }\n        };\n    }\n}\n\n#[doc(hidden)]\n#[macro_export]\nmacro_rules! mk_bounded {\n    ($(#$attr:tt)* $bounded_t:ident<$(const $cst_name:ident : $cst_ty:ty),*>($t: ident $($bytes:expr)?, |$x:ident| $body:expr)$(,)?) => {\n        #[hax_lib::refinement_type(|$x| $body)]\n        #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]\n        $(#$attr)*\n        pub struct $bounded_t<$(const $cst_name : $cst_ty),*>($t);\n        $crate::derivate_operations_for_bounded!($bounded_t($t$($bytes)?)<$(const $cst_name : $cst_ty),*>);\n    };\n    ($bounded_t:ident($t: ident $($bytes:expr)?)$(,)?) => {\n        $crate::mk_bounded!(\n            #[doc = concat!(\"Bounded \", stringify!($t),\" integers. 
This struct enforces the invariant that values are greater or equal to `MIN` and less or equal to `MAX`.\")]\n            $bounded_t<const MIN: $t, const MAX: $t>($t $($bytes)?, |x| x >= MIN && x <= MAX)\n        );\n    };\n    ($bounded_t:ident($t: ident $($bytes:expr)?), $($tt:tt)+) => {\n        $crate::mk_bounded!($bounded_t($t $($bytes)?));\n        $crate::mk_bounded!($($tt)+);\n    };\n}\n\nmk_bounded!(\n    BoundedI8(i8 1),\n    BoundedI16(i16 2),\n    BoundedI32(i32 4),\n    BoundedI64(i64 8),\n    BoundedI128(i128 16),\n    BoundedIsize(isize),\n    BoundedU8(u8 1),\n    BoundedU16(u16 2),\n    BoundedU32(u32 4),\n    BoundedU64(u64 8),\n    BoundedU128(u128 16),\n    BoundedUsize(usize),\n);\n\n/// Makes a refined new type in a very similar way to\n/// `hax_lib::refinement_type`, but derives the various traits an\n/// integer type is expected to implement.\n///\n/// Examples:\n/// ```rust\n/// # use hax_bounded_integers::refinement_int;\n/// refinement_int!(BoundedAbsI16<const B: usize>(i16, 2, |x| x >= -(B as i16) && x <= (B as i16)));\n/// refinement_int!(BoundedAbsIsize<const B: usize>(isize, |x| x >= -(B as isize) && x <= (B as isize)));\n/// ```\n#[macro_export]\nmacro_rules! refinement_int {\n    ($(#$attr:tt)* $bounded_t:ident$(<$(const $cst_name:ident : $cst_ty:ty),*$(,)?>)?($t: ident, $($bytes:literal,)? |$x:ident| $body:expr)$(,)?) 
=> {\n        $crate::mk_bounded!($(#$attr)* $bounded_t<$($(const $cst_name:$cst_ty),*)?>($t $($bytes)?, |$x| $body));\n    };\n}\n\n#[hax_lib::exclude]\nconst _: () = {\n    impl<const MIN: usize, const MAX: usize, T> core::ops::Index<BoundedUsize<MIN, MAX>> for [T] {\n        type Output = T;\n        #[inline(always)]\n        fn index(&self, index: BoundedUsize<MIN, MAX>) -> &Self::Output {\n            &self[index.get()]\n        }\n    }\n\n    impl<const MIN: usize, const MAX: usize, T> core::ops::IndexMut<BoundedUsize<MIN, MAX>> for [T] {\n        #[inline(always)]\n        fn index_mut(&mut self, index: BoundedUsize<MIN, MAX>) -> &mut Self::Output {\n            &mut self[index.get()]\n        }\n    }\n};\n\n#[test]\nfn tests() {\n    refinement_int!(\n        Test<const B: usize>(i16, 2, |x| B < 32768 && x >= -(B as i16) && x <= (B as i16))\n    );\n\n    use hax_lib::*;\n\n    let mut zzz: Test<123> = (-122).into_checked();\n    zzz += 32;\n\n    let x: BoundedU8<0, 5> = 2.into_checked();\n    let y: BoundedU8<5, 10> = (x + x).into_checked();\n\n    let _ = x >> 3;\n    let _ = x >> BoundedU8::<0, 5>::new(3);\n\n    let _ = x / y;\n    let _ = x * y;\n    let _ = x + y;\n    let _ = y - x;\n\n    let _ = x / 1;\n    let _ = x * 1;\n    let _ = x + 1;\n    let _ = x - 1;\n    let _ = 4 / y;\n    let _ = 4 * y;\n    let _ = 4 + y;\n    let _ = 4 - y;\n}\n"
  },
  {
    "path": "hax-bounded-integers/src/num_traits.rs",
    "content": "//! This module provides traits for generic mathematics. This is a\n//! smaller and more opinionated version of\n//! [num_traits](https://docs.rs/num-traits/latest/num_traits/).\n//!\n//! This module is designed to make bounded integers ergonomic to use:\n//! virtually every operation on bounded integers maps to their\n//! underlying type. We also want binary operators to be sufficiently\n//! polymorphic to allow any combination: for instance, we want the\n//! addition of differently bounded u8, or bounded u8 with u8 or vice\n//! versa to be possible.\n//!\n//! Also, the traits in this module are designed to work with types\n//! that implement `Copy`.\n\nuse core::ops::*;\n\npub trait Zero: Sized {\n    fn zero() -> Self;\n}\n\npub trait One: Sized {\n    fn one() -> Self;\n}\n\npub trait NumOps<Rhs = Self, Output = Self>:\n    Add<Rhs, Output = Output>\n    + Sub<Rhs, Output = Output>\n    + Mul<Rhs, Output = Output>\n    + Div<Rhs, Output = Output>\n    + Rem<Rhs, Output = Output>\n{\n}\n\n// pub trait Bounded {\n//     fn min_value() -> Self;\n//     fn max_value() -> Self;\n// }\n\npub trait WrappingAdd<Rhs = Self> {\n    type Output;\n    fn wrapping_add(self, v: Rhs) -> Self::Output;\n}\n\npub trait WrappingSub<Rhs = Self> {\n    type Output;\n    fn wrapping_sub(self, v: Rhs) -> Self::Output;\n}\n\npub trait WrappingMul<Rhs = Self> {\n    type Output;\n    fn wrapping_mul(self, v: Rhs) -> Self::Output;\n}\n\npub trait WrappingDiv<Rhs = Self> {\n    type Output;\n    fn wrapping_div(self, v: Rhs) -> Self::Output;\n}\n\npub trait CheckedAdd<Rhs = Self> {\n    type Output;\n    fn checked_add(self, v: Rhs) -> Option<Self::Output>;\n}\n\npub trait CheckedSub<Rhs = Self> {\n    type Output;\n    fn checked_sub(self, v: Rhs) -> Option<Self::Output>;\n}\n\npub trait CheckedMul<Rhs = Self> {\n    type Output;\n    fn checked_mul(self, v: Rhs) -> Option<Self::Output>;\n}\n\npub trait CheckedDiv<Rhs = Self> {\n    type Output;\n    fn checked_div(self, 
v: Rhs) -> Option<Self::Output>;\n}\n\npub trait CheckedNeg {\n    type Output;\n    fn checked_neg(&self) -> Option<Self::Output>;\n}\n\npub trait FromBytes {\n    type BYTES;\n\n    fn from_le_bytes(bytes: Self::BYTES) -> Self;\n    fn from_be_bytes(bytes: Self::BYTES) -> Self;\n}\n\npub trait ToBytes: FromBytes {\n    fn to_le_bytes(self) -> Self::BYTES;\n    fn to_be_bytes(self) -> Self::BYTES;\n}\n\npub trait MachineInt<Output>:\n    Copy\n    // + Bounded\n    + PartialOrd\n    + Ord\n    + PartialEq\n    + Eq\n    + Zero\n    + One\n    + Not\n    + NumOps<Self, Output>\n    + BitAnd<Output = Output>\n    + BitOr<Output = Output>\n    + BitXor<Output = Output>\n    + Shl<Self, Output = Output>\n    + Shr<Self, Output = Output>\n    + CheckedAdd<Output = Output>\n    + CheckedSub<Output = Output>\n    + CheckedMul<Output = Output>\n    + CheckedDiv<Output = Output>\n    + WrappingAdd<Output = Output>\n    + WrappingSub<Output = Output>\n    + WrappingMul<Output = Output>\n    + WrappingDiv<Output = Output>\n    + BitOps<Output = Output>\n{\n}\n\npub trait BitOps {\n    type Output;\n\n    fn count_ones(self) -> u32;\n    fn count_zeros(self) -> u32;\n    fn leading_ones(self) -> u32;\n    fn leading_zeros(self) -> u32;\n    fn trailing_ones(self) -> u32;\n    fn trailing_zeros(self) -> u32;\n    fn rotate_left(self, n: u32) -> Self::Output;\n    fn rotate_right(self, n: u32) -> Self::Output;\n    fn from_be(x: Self) -> Self::Output;\n    fn from_le(x: Self) -> Self::Output;\n    fn to_be(self) -> Self::Output;\n    fn to_le(self) -> Self::Output;\n\n    fn pow(self, exp: u32) -> Self::Output;\n}\n"
  },
  {
    "path": "hax-lib/Cargo.toml",
    "content": "[package]\nname = \"hax-lib\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition = \"2021\"\nrepository.workspace = true\nreadme = \"README.md\"\ndescription = \"Hax-specific helpers for Rust programs\"\n\n\n[target.'cfg(hax)'.dependencies]\nnum-bigint = { version = \"0.4\", default-features = false }\nnum-traits = { version = \"0.2\", default-features = false }\n\n[dependencies]\nhax-lib-macros = { workspace = true, optional = true }\n\n[features]\ndefault = [\"macros\"]\nmacros = [\"dep:hax-lib-macros\"]\n\n[lints.rust]\nunexpected_cfgs = { level = \"warn\", check-cfg = ['cfg(hax)'] }\n\n[package.metadata.\"docs.rs\"]\nrustdoc-args = [\"--cfg\", \"doc_cfg\", \"--cfg\", \"hax\"]\n"
  },
  {
    "path": "hax-lib/README.md",
    "content": "# hax library\n\nThis crate contains helpers that can be used when writing Rust code that is proven\nthrough the hax toolchain.\n\n**⚠️ The code in this crate has no effect when compiled without the `--cfg hax`.**\n\n## Examples:\n\n```rust\nfn sum(x: Vec<u32>, y: Vec<u32>) -> Vec<u32> {\n  hax_lib::assume!(x.len() == y.len());\n  hax_lib::assert!(hax_lib::forall(|i: usize| hax_lib::implies(i < x.len(), || x[i] < 4242)));\n  hax_lib::debug_assert!(hax_lib::exists(|i: usize| hax_lib::implies(i < x.len(), || x[i] > 123)));\n  x.into_iter().zip(y.into_iter()).map(|(x, y)| x + y).collect()\n}\n```\n"
  },
  {
    "path": "hax-lib/build.rs",
    "content": "use std::env;\nuse std::fs;\nuse std::path::Path;\n\nconst FSTAR_EXTRA: &str = r\"\npub use hax_lib_macros::fstar_options as options;\npub use hax_lib_macros::fstar_verification_status as verification_status;\npub use hax_lib_macros::fstar_smt_pat as smt_pat;\npub use hax_lib_macros::fstar_postprocess_with as postprocess_with;\n\";\n\nconst LEAN_EXTRA: &str = r\"\npub use hax_lib_macros::lean_proof as proof;\npub use hax_lib_macros::lean_pure_requires_proof as pure_requires_proof;\npub use hax_lib_macros::lean_pure_ensures_proof as pure_ensures_proof;\npub mod proof_method {\n    pub use hax_lib_macros::lean_proof_method_grind as grind;\n    pub use hax_lib_macros::lean_proof_method_bv_decide as bv_decide;\n}\n\";\n\nfn main() {\n    let code = |backend: &str, extra: &str| {\n        format!(\n            r#\"\npub use hax_lib_macros::{backend}_expr as {backend};\n#[doc(hidden)]\npub use hax_lib_macros::{backend}_unsafe_expr;\n#[doc(hidden)]\npub use hax_lib_macros::{backend}_prop_expr;\n\n/// Procedular macros that have an effect only for the backend {backend}.\npub mod {backend} {{\n    #[doc(hidden)]\n    pub use hax_lib_macros::{backend}_unsafe_expr as unsafe_expr;\n    pub use hax_lib_macros::{backend}_prop_expr as prop;\n    pub use hax_lib_macros::{backend}_after as after;\n    pub use hax_lib_macros::{backend}_before as before;\n    pub use hax_lib_macros::{backend}_replace as replace;\n    pub use hax_lib_macros::{backend}_replace_body as replace_body;\n\n    {extra}\n}}\n\"#\n        )\n    };\n\n    let out_dir = env::var_os(\"OUT_DIR\").unwrap();\n    let dest_path = Path::new(&out_dir).join(\"proc_macros_generated.rs\");\n    fs::write(\n        &dest_path,\n        [\n            code(\"fstar\", FSTAR_EXTRA),\n            code(\"proverif\", \"\"),\n            code(\"coq\", \"\"),\n            code(\"lean\", LEAN_EXTRA),\n        ]\n        .join(\"\\n\"),\n    )\n    .unwrap();\n\n    
println!(\"cargo::rerun-if-changed=build.rs\");\n}\n"
  },
  {
    "path": "hax-lib/core-models/.gitignore",
    "content": "proofs\n"
  },
  {
    "path": "hax-lib/core-models/Cargo.toml",
    "content": "[package]\nname = \"core-models\"\nversion = \"0.1.0\"\nedition = \"2024\"\n\n[dependencies]\npastey = \"0.1.1\"\nrust_primitives = { \"path\" = \"rust_primitives\" }\nhax-lib.workspace = true\n\n[workspace.dependencies]\nhax-lib = { \"path\" = \"..\" }\n\n# Workaround for https://github.com/rust-lang/cargo/issues/6745\n[workspace]\nmembers = [\"alloc\", \"rand_core\", \"rust_primitives\", \"std\"]\n\n[lints.rust]\nunexpected_cfgs = { level = \"warn\", check-cfg = ['cfg(hax)', 'cfg(hax_compilation)', 'cfg(hax_backend_fstar)'] }\n"
  },
  {
    "path": "hax-lib/core-models/README.md",
    "content": "This crate contains a partial model of Rust core that should preserve the same type signatures and behaviour as the original Rust core library. It only contains code that can be extracted with hax and used in `proof-libs` to give a model to Rust core items in the different hax backends.\n\n## Contributing\n\nCurrently the only backend supported is F*, and the extracted models coexist with hand-written F* models. When a new module is added, the hand-written version should be deleted and replaced by the generated one. `./hax.sh extract` takes care of extracting and placing the result in `proof-libs`.\n\n## Style considerations\n\nHere is a list of things to pay attention to when contributing to the models:\n* When using the `Fn` traits, the syntax shortcuts `Fn(T) -> U` are not available for the model traits. We need to write `Fn<T, Output = U>`\n* The `core::mem::take`, `core::mem::swap`, etc. functions cannot be given a good model that fits the Rust interface, we can only use unsafe or the original version, or change the interface to something corresponding to the interface of translated code (state passing instead of `&mut`).\n\n## Adding new models\n\nTo add new models, you should place yourself in the right module (create it if it doesn't already exist) corresponding to where it is located in Rust core. Then create the items with the same interface as in Rust (the Rust documentation is a good source of information, or sometimes the actual code). The interface can be slightly modified sometimes (removing `const`, or traits that we erase with hax). The code you write for the body can also be based on the real code if it is simple enough, or you can write something new that models the behaviour.\n\n## Tests\n\nThis is a work in progress. All models should be executable, then the test strategy will be to test the model against its reference (probably with a property-based testing framework). 
Once the infrastructure is in place, all new models should come with tests. The extracted code should also be tested in each backend (to make sure the naming is correct, and basic proofs using the items can work).\n\n## Relying on primitives\n\nSome primitive operations are easier to model directly using the backend's language (integers, arithmetic, sequence-like data structures, etc.). This can happen in two different ways:\n- Implicitly: integer types and arithmetic operations, array and slice types can be used directly. Hax has a special treatment of them, so any use in the core models implicitly refers to their implementation in Rust primitives (implemented manually for each backend)\n- Explicitly: some more specific arithmetic operations, sequences, etc. are available in the rust_primitives crate. This crate provides all the other definitions that need a manual model in each backend. The definitions from this crate can be used in core models, but the crate itself is not extracted.\n\n## Example\n\nThe `core::options` module is a good example. It mostly contains the definition of the `Option` enum which can be copied:\n\n```Rust\npub enum Option<T> {\n    Some(T),\n    None,\n}\n```\n\nMost functions can be defined in a very similar way to the original versions like:\n```Rust\npub fn is_some(&self) -> bool {\n    matches!(*self, Some(_))\n}\n```\nThe definition is exactly the same except that it is not `const`, and the attributes have been removed.\n\nWhenever we take functions/closures as argument there is a bit more modification to be done. Indeed, we must use the `FnOnce` trait from our models and not the original one. 
For example:\n\n```rust\npub const fn is_some_and(self, f: impl [const] FnOnce(T) -> bool + [const] Destruct) -> bool {\n    match self {\n        None => false,\n        Some(x) => f(x),\n    }\n}\n```\nbecomes\n```rust\npub fn is_some_and<F: FnOnce<T, Output = bool>>(self, f: F) -> bool {\n    match self {\n        None => false,\n        Some(x) => f.call_once(x),\n    }\n}\n```\n"
  },
  {
    "path": "hax-lib/core-models/alloc/Cargo.toml",
    "content": "[package]\nname = \"alloc\"\nversion = \"0.1.0\"\nedition = \"2024\"\n\n[dependencies]\n\nrust_primitives = {path = \"../rust_primitives\"}\nhax-lib.workspace = true\n"
  },
  {
    "path": "hax-lib/core-models/alloc/src/lib.rs",
    "content": "mod alloc {\n    pub struct Global;\n}\n\nmod borrow {\n    struct Cow<T>(T);\n\n    pub trait ToOwned {\n        fn to_owned(self) -> Self;\n    }\n    impl<T> ToOwned for T {\n        fn to_owned(self) -> Self {\n            self\n        }\n    }\n}\n\nmod boxed {\n    pub struct Box<T>(pub T);\n    impl<T> Box<T> {\n        // Hax removes boxes, so this should be the identity\n        fn new(v: T) -> T {\n            v\n        }\n    }\n}\n\nmod collections {\n    // All implementations are dummy (for interfaces only)\n\n    mod binary_heap {\n        #[hax_lib::fstar::before(\"open Rust_primitives.Notations\")]\n        use crate::vec::*;\n        struct BinaryHeap<T, A>(Vec<T, A>);\n\n        impl BinaryHeap<(), ()> {}\n        impl BinaryHeap<(), ()> {}\n        impl BinaryHeap<(), ()> {}\n        impl BinaryHeap<(), ()> {}\n        impl BinaryHeap<(), ()> {}\n        impl BinaryHeap<(), ()> {}\n        impl BinaryHeap<(), ()> {}\n        impl BinaryHeap<(), ()> {}\n        impl BinaryHeap<(), ()> {}\n        impl BinaryHeap<(), ()> {}\n\n        #[hax_lib::attributes]\n        impl<T: Ord, A> BinaryHeap<T, A> {\n            fn new() -> BinaryHeap<T, A> {\n                BinaryHeap(Vec(\n                    rust_primitives::sequence::seq_empty(),\n                    std::marker::PhantomData::<A>,\n                ))\n            }\n            #[hax_lib::requires(self.len() < core::primitive::usize::MAX)]\n            fn push(&mut self, v: T) {\n                self.0.push(v)\n            }\n            #[hax_lib::ensures(|res| (self.len() > 0) == res.is_some())]\n            fn pop(&mut self) -> Option<T> {\n                let mut max: Option<&T> = None;\n                let mut index = 0;\n                for i in 0..self.len() {\n                    hax_lib::loop_invariant!(|i: usize| (i > 0) == max.is_some());\n                    if max.is_none_or(|max| self.0[i] > *max) {\n                        max = Some(&self.0[i]);\n            
            index = i;\n                    }\n                }\n                if max.is_some() {\n                    Some(self.0.remove(index))\n                } else {\n                    None\n                }\n            }\n        }\n\n        #[hax_lib::attributes]\n        impl<T: Ord, A> BinaryHeap<T, A> {\n            fn len(&self) -> usize {\n                self.0.len()\n            }\n\n            #[hax_lib::ensures(|res| (self.len() > 0) == res.is_some())]\n            fn peek(&self) -> Option<&T> {\n                let mut max: Option<&T> = None;\n                for i in 0..self.len() {\n                    hax_lib::loop_invariant!(|i: usize| (i > 0) == max.is_some());\n                    if max.is_none_or(|max| self.0[i] > *max) {\n                        max = Some(&self.0[i]);\n                    }\n                }\n                max\n            }\n        }\n\n        #[hax_lib::fstar::after(\"\nassume val lemma_peek_pop: #t:Type -> (#a: Type) -> (#i: Core_models.Cmp.t_Ord t) -> h: t_BinaryHeap t a\n  -> Lemma (impl_11__peek h == snd (impl_10__pop h))\n          [SMTPat (impl_11__peek #t #a h)]\n        \")]\n        use core::*;\n    }\n    mod btree {\n        mod set {\n            #[hax_lib::opaque]\n            struct BTreeSet<T, U>(Option<T>, Option<U>);\n\n            impl BTreeSet<(), ()> {}\n            impl BTreeSet<(), ()> {}\n            impl BTreeSet<(), ()> {}\n            impl BTreeSet<(), ()> {}\n            impl BTreeSet<(), ()> {}\n            impl BTreeSet<(), ()> {}\n            impl BTreeSet<(), ()> {}\n            impl BTreeSet<(), ()> {}\n            impl BTreeSet<(), ()> {}\n            impl BTreeSet<(), ()> {}\n            impl BTreeSet<(), ()> {}\n\n            impl<T, U> BTreeSet<T, U> {\n                fn new() -> BTreeSet<T, U> {\n                    BTreeSet(None, None)\n                }\n            }\n        }\n    }\n    mod vec_deque {\n        use rust_primitives::sequence::*;\n        pub 
struct VecDeque<T, A>(pub Seq<T>, std::marker::PhantomData<A>);\n\n        impl VecDeque<(), ()> {}\n        impl VecDeque<(), ()> {}\n        impl VecDeque<(), ()> {}\n        impl VecDeque<(), ()> {}\n        impl VecDeque<(), ()> {}\n\n        impl<T, A> VecDeque<T, A> {\n            #[hax_lib::opaque]\n            fn push_back(&mut self, x: T) {}\n            fn len(&self) -> usize {\n                seq_len(&self.0)\n            }\n            fn pop_front(&mut self) -> Option<T> {\n                if self.len() == 0 {\n                    None\n                } else {\n                    Some(seq_last(&self.0))\n                }\n            }\n        }\n\n        impl<T, A> std::ops::Index<usize> for VecDeque<T, A> {\n            type Output = T;\n            fn index(&self, i: usize) -> &T {\n                seq_index(&self.0, i)\n            }\n        }\n    }\n}\n\nmod fmt {\n    #[hax_lib::opaque]\n    fn format(args: core::fmt::Arguments) -> String {\n        String::new()\n    }\n}\n\nmod slice {\n    #[hax_lib::exclude]\n    struct Dummy<T>(T);\n\n    use super::vec::Vec;\n    use rust_primitives::sequence::*;\n\n    impl<T> Dummy<T> {\n        fn to_vec(s: &[T]) -> Vec<T, crate::alloc::Global> {\n            Vec(\n                seq_from_slice(s),\n                std::marker::PhantomData::<crate::alloc::Global>,\n            )\n        }\n        fn into_vec<A>(s: Box<&[T]>) -> Vec<T, A> {\n            Vec(seq_from_slice(*s), std::marker::PhantomData::<A>)\n        }\n        #[hax_lib::opaque]\n        fn sort_by<F: Fn(&T, &T) -> core::cmp::Ordering>(s: &mut [T], compare: F) {}\n    }\n}\n\nmod string {\n    use rust_primitives::string::*;\n\n    struct String(&'static str);\n    impl String {\n        fn new() -> Self {\n            String(\"\")\n        }\n        fn push_str(&mut self, other: &'static str) {\n            *self = String(str_concat(self.0, other))\n        }\n        fn push(&mut self, c: char) {\n            *self = 
String(str_concat(self.0, str_of_char(c)))\n        }\n        fn pop(&mut self) -> Option<char> {\n            let l = self.0.len();\n            if l > 0 {\n                *self = String(str_sub(self.0, 0, l - 1));\n                Some(str_index(self.0, l - 1))\n            } else {\n                None\n            }\n        }\n    }\n}\n\npub mod vec {\n    // TODO drain (to be done with iterators)\n    use hax_lib::ToInt;\n    use rust_primitives::sequence::*;\n\n    pub struct Vec<T, A>(pub Seq<T>, pub std::marker::PhantomData<A>);\n\n    fn from_elem<T: Clone>(item: T, len: usize) -> Vec<T, crate::alloc::Global> {\n        Vec(\n            seq_create(item, len),\n            std::marker::PhantomData::<crate::alloc::Global>,\n        )\n    }\n\n    #[hax_lib::attributes]\n    impl<T> Vec<T, crate::alloc::Global> {\n        pub fn new() -> Vec<T, crate::alloc::Global> {\n            Vec(\n                seq_empty(),\n                std::marker::PhantomData::<crate::alloc::Global>,\n            )\n        }\n        pub fn with_capacity(_c: usize) -> Vec<T, crate::alloc::Global> {\n            Vec::new()\n        }\n    }\n\n    #[hax_lib::attributes]\n    impl<T, A> Vec<T, A> {\n        pub fn len(&self) -> usize {\n            seq_len(&self.0)\n        }\n        #[hax_lib::requires(seq_len(&self.0) < usize::MAX)]\n        pub fn push(&mut self, x: T) {\n            seq_concat(&mut self.0, &seq_one(x))\n        }\n        pub fn pop(&mut self) -> Option<T> {\n            if seq_len(&self.0) > 0 {\n                let last = seq_last(&self.0);\n                self.0 = seq_slice(&self.0, 0, seq_len(&self.0) - 1);\n                Some(last)\n            } else {\n                None\n            }\n        }\n        pub fn is_empty(&self) -> bool {\n            seq_len(&self.0) == 0\n        }\n        #[hax_lib::requires(index <= seq_len(&self.0) && seq_len(&self.0) < usize::MAX)]\n        pub fn insert(&mut self, index: usize, element: T) {\n       
     let mut left = seq_slice(&self.0, 0, index);\n            let right = seq_slice(&self.0, index, seq_len(&self.0));\n            seq_concat(&mut left, &seq_one(element));\n            seq_concat(&mut left, &right);\n            self.0 = left;\n        }\n        pub fn as_slice(&self) -> &[T] {\n            seq_to_slice(&self.0)\n        }\n        #[hax_lib::opaque]\n        pub fn truncate(&mut self, n: usize) {}\n        #[hax_lib::opaque]\n        pub fn swap_remove(&mut self, n: usize) -> T {\n            seq_last(&self.0)\n        }\n        #[hax_lib::opaque]\n        #[hax_lib::ensures(|_| future(self).len() == new_size)]\n        pub fn resize(&mut self, new_size: usize, value: &T) {}\n        #[hax_lib::opaque]\n        pub fn remove(&mut self, index: usize) -> T {\n            seq_last(&self.0)\n        }\n        #[hax_lib::opaque]\n        pub fn clear(&mut self) {}\n        #[hax_lib::requires(self.len().to_int() + other.len().to_int() <= usize::MAX.to_int())]\n        pub fn append(&mut self, other: &mut Vec<T, A>) {\n            seq_concat(&mut self.0, &other.0);\n            other.0 = seq_empty()\n        }\n        #[hax_lib::opaque]\n        pub fn drain<R /* : RangeBounds<usize> */>(&mut self, _range: R) -> drain::Drain<T, A> {\n            drain::Drain(\n                seq_slice(&self.0, 0, self.len()),\n                std::marker::PhantomData::<A>,\n            ) // TODO use range bounds\n        }\n    }\n    pub mod drain {\n        use rust_primitives::sequence::*;\n        pub struct Drain<T, A>(pub Seq<T>, pub std::marker::PhantomData<A>);\n        impl<T, A> Iterator for Drain<T, A> {\n            type Item = T;\n            fn next(&mut self) -> Option<Self::Item> {\n                if seq_len(&self.0) == 0 {\n                    Option::None\n                } else {\n                    let res = seq_first(&self.0);\n                    self.0 = seq_slice(&self.0, 1, seq_len(&self.0));\n                    Option::Some(res)\n    
            }\n            }\n        }\n    }\n\n    #[hax_lib::attributes]\n    impl<T, A> Vec<T, A> {\n        #[hax_lib::requires(seq_len(&s.0).to_int() + other.len().to_int() <= usize::MAX.to_int())]\n        fn extend_from_slice(s: &mut Vec<T, A>, other: &[T]) {\n            seq_concat(&mut s.0, &seq_from_slice(other))\n        }\n    }\n\n    #[hax_lib::attributes]\n    impl<T, A> std::ops::Index<usize> for Vec<T, A> {\n        type Output = T;\n        #[hax_lib::requires(i < self.len())]\n        fn index(&self, i: usize) -> &T {\n            seq_index(&self.0, i)\n        }\n    }\n\n    #[hax_lib::attributes]\n    impl<T, A> core::ops::Deref for Vec<T, A> {\n        type Target = [T];\n\n        fn deref(&self) -> &[T] {\n            self.as_slice()\n        }\n    }\n\n    #[hax_lib::attributes]\n    #[hax_lib::opaque]\n    impl<T> std::iter::FromIterator<T> for Vec<T, crate::alloc::Global> {\n        fn from_iter<I>(iter: I) -> Self\n        where\n            I: IntoIterator<Item = T>,\n        {\n            let mut res = Vec::new();\n            for el in iter {\n                res.push(el)\n            }\n            res\n        }\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/hax.sh",
    "content": "#!/usr/bin/env bash\nset -e\n\nfunction extract_fstar() {\n    go_to \"./\"\n    HAX_CORE_MODELS_EXTRACTION_MODE=on cargo hax into fstar --interfaces '+!core_models::str::* +!**::num::error +!**::panicking::internal +!core_models::borrow +!core_models::default +!core_models::error +!core_models::hash +!core_models::hint +!core_models::ops::bit +!core_models::ops::arith +!core_models::fmt +!core_models::fmt::rt +!core_models::mem +!core_models::mem::*'\n    cp proofs/fstar/extraction/*.fst* ../proof-libs/fstar/core\n    HAX_CORE_MODELS_EXTRACTION_MODE=on cargo hax -C -p std \\; into -i '-core_models::**' fstar --interfaces '+!**' \n    cp std/proofs/fstar/extraction/*.fst* ../proof-libs/fstar/core\n    HAX_CORE_MODELS_EXTRACTION_MODE=on cargo hax -C -p alloc \\; into fstar --interfaces '+!**::collections::btree::** +!**::collections::vec_deque::**' \n    cp alloc/proofs/fstar/extraction/*.fst* ../proof-libs/fstar/core\n    HAX_CORE_MODELS_EXTRACTION_MODE=on cargo hax -C -p rand_core \\; into fstar --interfaces '+!**' \n    cp rand_core/proofs/fstar/extraction/*.fst* ../proof-libs/fstar/core\n}\n\nfunction extract_lean() {\n    go_to \"./\"\n    LEAN_FILTERS=\"\"\n    LEAN_FILTERS+=\" -core_models::result::**::unwrap\" # Issue #1818\n    LEAN_FILTERS+=\" -core_models::result::**::expect\" # Issue #1818\n    LEAN_FILTERS+=\" -core_models::option::**::expect\" # Issue #1818\n    LEAN_FILTERS+=\" -core_models::option::**::unwrap\" # Issue #1818\n    LEAN_FILTERS+=\" -core_models::num::**::saturating_add\"\n    LEAN_FILTERS+=\" -core_models::num::**::overflowing_add\"\n    LEAN_FILTERS+=\" -core_models::num::**::saturating_sub\"\n    LEAN_FILTERS+=\" -core_models::num::**::overflowing_sub\"\n    LEAN_FILTERS+=\" -core_models::num::**::saturating_mul\"\n    LEAN_FILTERS+=\" -core_models::num::**::overflowing_mul\"\n    LEAN_FILTERS+=\" -core_models::num::**::count_ones\"\n    LEAN_FILTERS+=\" -core_models::num::**::rem_euclid\"\n    LEAN_FILTERS+=\" 
-core_models::num::**::abs\"\n    LEAN_FILTERS+=\" -core_models::num::**::checked_add\"\n    LEAN_FILTERS+=\" -core_models::num::**::checked_sub\"\n    LEAN_FILTERS+=\" -core_models::num::**::checked_mul\"\n    LEAN_FILTERS+=\" -core_models::num::**::MIN\"\n    LEAN_FILTERS+=\" -core_models::num::**::MAX\"\n    LEAN_FILTERS+=\" -core_models::num::**::BITS\"\n    LEAN_FILTERS+=\" -core_models::num::**::from_be_bytes\"\n    LEAN_FILTERS+=\" -core_models::num::**::from_le_bytes\"\n    LEAN_FILTERS+=\" -core_models::num::**::to_be_bytes\"\n    LEAN_FILTERS+=\" -core_models::num::**::to_le_bytes\"\n    LEAN_FILTERS+=\" -core_models::num::**::rotate_left\"\n    LEAN_FILTERS+=\" -core_models::num::**::rotate_right\"\n    \n    LEAN_FILTERS=\"$(echo \"$LEAN_FILTERS\" | xargs)\"\n    HAX_CORE_MODELS_EXTRACTION_MODE=on cargo hax into -i \"$LEAN_FILTERS\" lean\n    OUT=\"proofs/lean/extraction/core_models.lean\"\n\n    sed -i 's/import Hax/import Hax.core_models.prologue\\nimport Hax.Tactic.HaxSpec/g' \"$OUT\"\n\n    cp \"$OUT\" ../proof-libs/lean/Hax/core_models/core_models.lean\n}\n\nfunction init_vars() {\n    SCRIPT_DIR=\"$(cd \"$(dirname \"${BASH_SOURCE[0]}\")\" && pwd)\"\n    SCRIPT_NAME=\"$(basename \"${BASH_SOURCE[0]}\")\"\n    SCRIPT_PATH=\"${SCRIPT_DIR}/${SCRIPT_NAME}\"\n\n    if [ -t 1 ]; then\n        BLUE='\\033[34m'\n        GREEN='\\033[32m'\n        BOLD='\\033[1m'\n        RESET='\\033[0m'\n    else\n        BLUE=''\n        GREEN=''\n        BOLD=''\n        RESET=''\n    fi\n}\n\nfunction go_to() {\n    ROOT=\"$SCRIPT_DIR\"\n    cd \"$ROOT\"\n    cd \"$1\"\n}\n\nfunction msg() {\n    echo -e \"$1[$SCRIPT_NAME]$RESET $2\"\n}\n\n\nfunction help() {\n    echo \"Script to extract to F* or Lean and place the result in proof-libs\"\n    echo \"\"\n    echo \"Usage: $0 [COMMAND]\"\n    echo \"\"\n    echo \"Commands:\"\n    echo \"\"\n    grep '[#]>' \"$SCRIPT_PATH\" | sed 's/[)] #[>]/\\t/g'\n    echo \"\"\n}\n\nfunction cli() {\n    if [ -z \"$1\" ]; then\n       
 help\n        exit 1\n    fi\n    # Check if an argument was provided\n\n    case \"$1\" in\n        --help) #> Show help message\n            help;;\n        extract) #> Extract the F* code and copy it to proof-libs. Use `extract fstar` for F*, `extract lean` for Lean, or `extract` for both\n            case \"$2\" in\n                \"\")  # no subcommand -> run both\n                    extract_fstar\n                    extract_lean\n                    msg \"$GREEN\" \"done\"\n                    ;;\n                fstar)\n                    extract_fstar\n                    msg \"$GREEN\" \"done\"\n                    ;;\n                lean)\n                    extract_lean\n                    msg \"$GREEN\" \"done\"\n                    ;;\n                *)\n                    echo \"Invalid option for extract: $2\"\n                    help\n                    exit 1\n                    ;;\n            esac\n            ;;\n        *)\n            echo \"Invalid option: $1\"\n            help\n            exit 1;;\n    esac\n}\n\ninit_vars\n\ncli \"$@\"\n"
  },
  {
    "path": "hax-lib/core-models/rand_core/Cargo.toml",
    "content": "[package]\nname = \"rand_core\"\nversion = \"0.1.0\"\nedition = \"2024\"\n\n[dependencies]\n"
  },
  {
    "path": "hax-lib/core-models/rand_core/src/lib.rs",
    "content": "pub trait RngCore {\n    // Required methods\n    fn next_u32(&mut self) -> u32;\n    fn next_u64(&mut self) -> u64;\n    fn fill_bytes(&mut self, dst: &mut [u8]);\n}\n\npub trait CryptoRng: RngCore {}\n\nmod os {\n    pub struct OsRng;\n    // Dummy impl\n    impl super::RngCore for OsRng {\n        fn next_u32(&mut self) -> u32 {\n            0\n        }\n        fn next_u64(&mut self) -> u64 {\n            0\n        }\n        fn fill_bytes(&mut self, dst: &mut [u8]) {}\n    }\n    impl super::CryptoRng for OsRng {}\n}\n"
  },
  {
    "path": "hax-lib/core-models/rust_primitives/Cargo.toml",
    "content": "[package]\nname = \"rust_primitives\"\nversion = \"0.1.0\"\nedition = \"2024\"\n\n[dependencies]\npastey = \"0.1.1\"\nhax-lib.workspace = true\n"
  },
  {
    "path": "hax-lib/core-models/rust_primitives/src/lib.rs",
    "content": "#![allow(unused_variables)]\n\npub mod slice {\n    pub fn slice_length<T>(s: &[T]) -> usize {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    #[hax_lib::requires(mid <= slice_length(s))]\n    pub fn slice_split_at<T>(s: &[T], mid: usize) -> (&[T], &[T]) {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn slice_contains<T>(s: &[T], v: T) -> bool {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    #[hax_lib::requires(i < slice_length(s))]\n    pub fn slice_index<T>(s: &[T], i: usize) -> &T {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn slice_slice<T>(s: &[T], b: usize, e: usize) -> &[T] {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    // In the following two functions, F is actually a function type.\n    // Not constraining that here allows to call it with closures,\n    // or to pass parameters that implement the `Fn` trait for core_models.\n    // Each backend can type `f` as needed.\n    pub fn array_from_fn<T, const N: usize, F>(f: F) -> [T; N] {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn array_map<T, U, const N: usize, F>(s: [T; N], f: F) -> [U; N] {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn array_as_slice<T, const N: usize>(s: &[T; N]) -> &[T] {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn array_slice<T, const N: usize>(a: &[T; N], b: usize, e: usize) -> &[T] {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn array_index<T, const N: usize>(a: &[T; N], i: usize) -> &T {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n}\n\npub mod sequence {\n    pub struct 
Seq<T>(Option<T>);\n    pub fn seq_empty<T>() -> Seq<T> {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn seq_from_slice<T>(_s: &[T]) -> Seq<T> {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn seq_from_array<T, const N: usize>(_s: [T; N]) -> Seq<T> {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn seq_to_slice<T>(_s: &Seq<T>) -> &[T] {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn seq_concat<T>(s1: &mut Seq<T>, s2: &Seq<T>) {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn seq_one<T>(x: T) -> Seq<T> {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn seq_create<T>(x: T, n: usize) -> Seq<T> {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn seq_len<T>(s: &Seq<T>) -> usize {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn seq_slice<T>(s: &Seq<T>, b: usize, e: usize) -> Seq<T> {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn seq_last<T>(s: &Seq<T>) -> T {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn seq_first<T>(s: &Seq<T>) -> T {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn seq_index<T>(s: &Seq<T>, i: usize) -> &T {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n}\n\npub mod string {\n    pub fn str_concat(s1: &'static str, s2: &'static str) -> &'static str {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn str_of_char(c: char) -> &'static str {\n        unimplemented!(\"This is a stub that is implemented in each 
backend\")\n    }\n    pub fn str_sub(s: &'static str, b: usize, e: usize) -> &'static str {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn str_index(s: &'static str, i: usize) -> char {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n}\n\npub mod mem {\n    pub fn replace<'a, T: ?Sized>(dest: &'a mut T, src: &'a T) -> &'a T {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n    pub fn copy<T>(x: &T) -> T {\n        unimplemented!(\"This is a stub that is implemented in each backend\")\n    }\n}\n\npub mod arithmetic {\n    use pastey::paste;\n\n    macro_rules! arithmetic_ops {\n        (\n            types: $t:ident,\n            ops: $($op:ident)*,\n            overflowing_ops: $($ov_op:ident)*,\n        ) => {\n            paste!{\n                $(pub fn [<$op _ $t>](x: $t, y: $t) -> $t {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                })*\n                $(pub fn [<$ov_op _ $t>](x: $t, y: $t) -> ($t, bool) {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                })*\n            }\n        };\n\n        (\n            types: $first_t:ident $($t:ident)+,\n            ops: $($op:ident)*,\n            overflowing_ops: $($ov_op:ident)*,\n        ) => {\n            arithmetic_ops!(types: $first_t, ops: $($op)*, overflowing_ops: $($ov_op)*,);\n            arithmetic_ops!(types: $($t)*, ops: $($op)*, overflowing_ops: $($ov_op)*,);\n        };\n\n    }\n\n    macro_rules! all_ops {\n        (\n            $($Self: ident)*,\n            $($Bytes: expr)*,\n        ) => {\n            paste! 
{\n                $(\n                pub fn [<pow_ $Self>](x: $Self, exp: u32) -> $Self {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                }\n                pub fn [<count_ones_ $Self>](x: $Self) -> u32 {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                }\n                pub fn [<rotate_right_ $Self>](x: $Self, n: u32) -> $Self {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                }\n                pub fn [<rotate_left_ $Self>](x: $Self, n: u32) -> $Self {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                }\n                pub fn [<leading_zeros_ $Self>](x: $Self) -> u32 {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                }\n                pub fn [<ilog2_ $Self>](x: $Self) -> u32 {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                }\n                pub fn [<from_be_bytes_ $Self>](bytes: [u8; $Bytes]) -> $Self {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                }\n                pub fn [<from_le_bytes_ $Self>](bytes: [u8; $Bytes]) -> $Self {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                }\n                pub fn [<to_be_bytes_ $Self>](bytes: $Self) -> [u8; $Bytes] {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                }\n                pub fn [<to_le_bytes_ $Self>](bytes: $Self) -> [u8; $Bytes] {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                })*\n            }\n        }\n    }\n\n    macro_rules! signed_ops {\n        ($($Self: ident)*) => {\n            paste! 
{\n                $(\n                    pub fn [<abs_ $Self>](x: $Self) -> $Self {\n                    unimplemented!(\"This is a stub that is implemented in each backend\")\n                }\n                )*\n            }\n        }\n    }\n\n    // Rust inlines these values, for now we model usize by u64\n    // eventually we could try to define in the backend as 32 or 64\n    pub const SIZE_BYTES: usize = 8;\n    pub const SIZE_BITS: u32 = 64;\n    pub const USIZE_MAX: usize = u64::MAX as usize;\n    pub const ISIZE_MAX: isize = i64::MAX as isize;\n    pub const ISIZE_MIN: isize = i64::MIN as isize;\n\n    arithmetic_ops! {\n        types: u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize,\n        ops: wrapping_add saturating_add wrapping_sub saturating_sub wrapping_mul saturating_mul rem_euclid,\n        overflowing_ops: overflowing_add overflowing_sub overflowing_mul,\n    }\n\n    all_ops! {\n        u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize,\n        1 2 4 8 16 SIZE_BYTES 1 2 4 8 16 SIZE_BYTES,\n    }\n\n    signed_ops! {\n        i8 i16 i32 i64 i128 isize\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/array.rs",
    "content": "use rust_primitives::{sequence::*, slice::*};\n\npub struct TryFromSliceError;\n\n// Dummy type to allow impls\n#[hax_lib::exclude]\nstruct Dummy<T, const N: usize>([T; N]);\n\n// Dummy impls to get the right disambiguator (https://github.com/cryspen/hax/issues/828)\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\nimpl<T> Dummy<T, 0> {}\n\nimpl<T, const N: usize> Dummy<T, N> {\n    pub fn map<F: crate::ops::function::FnOnce<T, Output = U>, U>(\n        s: [T; N],\n        f: fn(T) -> U, // We cannot use type `F` because it is incompatible with `array_map`\n    ) -> [U; N] {\n        array_map(s, f)\n    }\n    pub fn as_slice(s: &[T; N]) -> &[T] {\n        array_as_slice(s)\n    }\n}\n\npub fn from_fn<T, const N: usize, F: crate::ops::function::FnOnce<usize, Output = T>>(\n    f: fn(usize) -> T, // We cannot use type `F` because it is incompatible with `array_from_fn`\n) -> [T; N] {\n    array_from_fn(f)\n}\n\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T, const N: usize> crate::iter::traits::collect::IntoIterator for [T; N] {\n    type IntoIter = iter::IntoIter<T, N>;\n    fn into_iter(self) -> iter::IntoIter<T, N> {\n        iter::IntoIter(seq_from_array(self))\n    }\n}\n\nuse crate::ops::{\n    index::Index,\n    range::{Range, RangeFrom, RangeFull, RangeTo},\n};\n\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T, const N: usize> Index<usize> for [T; N] {\n    type Output = T;\n    #[hax_lib::requires(i < self.len())]\n    fn 
index(&self, i: usize) -> &T {\n        rust_primitives::slice::array_index(self, i)\n    }\n}\n\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T, const N: usize> Index<Range<usize>> for [T; N] {\n    type Output = [T];\n    #[hax_lib::requires(i.start <= i.end && i.end <= self.len())]\n    fn index(&self, i: Range<usize>) -> &[T] {\n        array_slice(self, i.start, i.end)\n    }\n}\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T, const N: usize> Index<RangeTo<usize>> for [T; N] {\n    type Output = [T];\n    #[hax_lib::requires(i.end <= self.len())]\n    fn index(&self, i: RangeTo<usize>) -> &[T] {\n        array_slice(self, 0, i.end)\n    }\n}\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T, const N: usize> Index<RangeFrom<usize>> for [T; N] {\n    type Output = [T];\n    #[hax_lib::requires(i.start <= self.len())]\n    fn index(&self, i: RangeFrom<usize>) -> &[T] {\n        array_slice(self, i.start, N)\n    }\n}\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T, const N: usize> Index<RangeFull> for [T; N] {\n    type Output = [T];\n    fn index(&self, i: RangeFull) -> &[T] {\n        array_slice(self, 0, N)\n    }\n}\n\nmod iter {\n    use crate::option::Option;\n    use rust_primitives::sequence::*;\n    pub struct IntoIter<T, const N: usize>(pub Seq<T>);\n    #[cfg_attr(hax_backend_lean, hax_lib::exclude)]\n    impl<T, const N: usize> crate::iter::traits::iterator::Iterator for IntoIter<T, N> {\n        type Item = T;\n        fn next(&mut self) -> Option<T> {\n            if seq_len(&self.0) == 0 {\n                Option::None\n            } else {\n                let res = seq_first(&self.0);\n                self.0 = seq_slice(&self.0, 1, seq_len(&self.0));\n                Option::Some(res)\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/borrow.rs",
    "content": "trait Borrow<Borrowed> {\n    fn borrow(&self) -> Borrowed;\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/clone.rs",
    "content": "// In F* we replace the definition to have the equality a value\n// and its clone.\n// We need to consume self, instead of taking a reference, otherwise Rust would\n// not allow returning an owned Self. This is the same after going through hax.\n#[hax_lib::fstar::replace(\n    \"class t_Clone self = {\n  f_clone_pre: self -> Type0;\n  f_clone_post: self -> self -> Type0;\n  f_clone: x:self -> r:self {x == r}\n}\"\n)]\npub trait Clone {\n    fn clone(self) -> Self;\n}\n\n// In our model, everything is clonable\nimpl<T> Clone for T {\n    fn clone(self) -> Self {\n        self\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/cmp.rs",
    "content": "use crate::option::Option;\n\n#[hax_lib::attributes]\npub trait PartialEq<Rhs>\nwhere\n    Rhs: ?Sized,\n{\n    #[hax_lib::requires(true)]\n    fn eq(&self, other: &Rhs) -> bool;\n}\n\npub trait Eq: PartialEq<Self> {}\n\npub enum Ordering {\n    Less = -1,\n    Equal = 0,\n    Greater = 1,\n}\n\n#[hax_lib::attributes]\npub trait PartialOrd<Rhs>: PartialEq<Rhs>\nwhere\n    Rhs: ?Sized,\n{\n    #[hax_lib::requires(true)]\n    fn partial_cmp(&self, other: &Rhs) -> Option<Ordering>;\n}\n\n// These methods in core are provided using trait defaults, but this is not supported by hax\n// so we have to define them in a different way.\n#[hax_lib::attributes]\ntrait Neq<Rhs> {\n    #[hax_lib::requires(true)]\n    fn neq(&self, y: &Rhs) -> bool;\n}\n\nimpl<T: PartialEq<T>> Neq<T> for T {\n    fn neq(&self, y: &T) -> bool {\n        // Not using negation is a workaround for the F* lib\n        self.eq(y) == false\n    }\n}\n\n#[hax_lib::attributes]\ntrait PartialOrdDefaults<Rhs> {\n    #[hax_lib::requires(true)]\n    fn lt(&self, y: &Rhs) -> bool\n    where\n        Self: PartialOrd<Rhs>;\n    #[hax_lib::requires(true)]\n    fn le(&self, y: &Rhs) -> bool\n    where\n        Self: PartialOrd<Rhs>;\n    #[hax_lib::requires(true)]\n    fn gt(&self, y: &Rhs) -> bool\n    where\n        Self: PartialOrd<Rhs>;\n    #[hax_lib::requires(true)]\n    fn ge(&self, y: &Rhs) -> bool\n    where\n        Self: PartialOrd<Rhs>;\n}\n\nimpl<T: PartialOrd<T>> PartialOrdDefaults<T> for T {\n    fn lt(&self, y: &T) -> bool\n    where\n        T: PartialOrd<T>,\n    {\n        matches!(self.partial_cmp(y), Option::Some(Ordering::Less))\n    }\n    fn le(&self, y: &T) -> bool\n    where\n        T: PartialOrd<T>,\n    {\n        matches!(\n            self.partial_cmp(y),\n            Option::Some(Ordering::Less | Ordering::Equal)\n        )\n    }\n    fn gt(&self, y: &T) -> bool\n    where\n        T: PartialOrd<T>,\n    {\n        matches!(self.partial_cmp(y), 
Option::Some(Ordering::Greater))\n    }\n    fn ge(&self, y: &T) -> bool\n    where\n        T: PartialOrd<T>,\n    {\n        matches!(\n            self.partial_cmp(y),\n            Option::Some(Ordering::Greater | Ordering::Equal)\n        )\n    }\n}\n\n#[hax_lib::attributes]\npub trait Ord: Eq + PartialOrd<Self> {\n    #[hax_lib::requires(true)]\n    fn cmp(&self, other: &Self) -> Ordering;\n}\n\npub fn max<T: Ord>(v1: T, v2: T) -> T {\n    match v1.cmp(&v2) {\n        Ordering::Greater => v1,\n        _ => v2,\n    }\n}\n\npub fn min<T: Ord>(v1: T, v2: T) -> T {\n    match v1.cmp(&v2) {\n        Ordering::Greater => v2,\n        _ => v1,\n    }\n}\n\npub struct Reverse<T>(pub T);\n\nimpl<T: PartialOrd<T>> PartialOrd<Reverse<T>> for Reverse<T> {\n    fn partial_cmp(&self, other: &Reverse<T>) -> Option<Ordering> {\n        other.0.partial_cmp(&self.0)\n    }\n}\n\nimpl<T: PartialEq<T>> PartialEq<Reverse<T>> for Reverse<T> {\n    fn eq(&self, other: &Reverse<T>) -> bool {\n        other.0.eq(&self.0)\n    }\n}\n\nimpl<T: Eq> Eq for Reverse<T> {}\n\nimpl<T: Ord> Ord for Reverse<T> {\n    fn cmp(&self, other: &Reverse<T>) -> Ordering {\n        other.0.cmp(&self.0)\n    }\n}\n\nmacro_rules! 
int_impls {\n    ($($t:ty)*) => ($(\n        #[hax_lib::attributes]\n        impl PartialOrd<$t> for $t {\n            #[hax_lib::ensures(|res| {\n                match res {\n                    Option::Some(Ordering::Less) => self < other,\n                    Option::Some(Ordering::Equal) => self == other,\n                    Option::Some(Ordering::Greater) => self > other,\n                    Option::None => false\n                }\n            })]\n            fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n                if self < other {Option::Some(Ordering::Less)}\n                else if self > other {Option::Some(Ordering::Greater)}\n                else {Option::Some(Ordering::Equal)}\n            }\n        }\n        #[hax_lib::attributes]\n        impl Ord for $t {\n            #[hax_lib::ensures(|res| {\n                match res {\n                    Ordering::Less => self < other,\n                    Ordering::Equal => self == other,\n                    Ordering::Greater => self > other,\n                }\n            })]\n            fn cmp(&self, other: &Self) -> Ordering {\n                if self < other {Ordering::Less}\n                else if self > other {Ordering::Greater}\n                else {Ordering::Equal}\n            }\n        }\n        impl PartialEq<$t> for $t {\n            fn eq(&self, other: &Self) -> bool {\n                self == other\n            }\n        }\n        impl Eq for $t {}\n    )*)\n}\n\nint_impls! { u8 i8 u16 i16 u32 i32 u64 i64 u128 i128 usize isize }\n"
  },
  {
    "path": "hax-lib/core-models/src/core/convert.rs",
    "content": "use super::result::Result;\n\n#[hax_lib::attributes]\ntrait TryInto<T> {\n    type Error;\n    #[hax_lib::requires(true)]\n    fn try_into(self) -> Result<T, Self::Error>;\n}\n\n#[hax_lib::attributes]\ntrait Into<T> {\n    #[hax_lib::requires(true)]\n    fn into(self) -> T;\n}\n\n#[hax_lib::attributes]\ntrait From<T> {\n    #[hax_lib::requires(true)]\n    fn from(x: T) -> Self;\n}\n\n#[hax_lib::attributes]\ntrait TryFrom<T>: Sized {\n    type Error;\n    #[hax_lib::requires(true)]\n    fn try_from(x: T) -> Result<Self, Self::Error>;\n}\n\nimpl<T, U: From<T>> Into<U> for T {\n    fn into(self) -> U {\n        U::from(self)\n    }\n}\n\npub struct Infallible;\n\nimpl<T, U: From<T>> TryFrom<T> for U {\n    type Error = Infallible;\n    fn try_from(x: T) -> Result<Self, Self::Error> {\n        Result::Ok(U::from(x))\n    }\n}\n\nuse crate::array::TryFromSliceError;\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T: Copy, const N: usize> TryFrom<&[T]> for [T; N] {\n    type Error = TryFromSliceError;\n    fn try_from(x: &[T]) -> Result<[T; N], TryFromSliceError> {\n        if x.len() == N {\n            Result::Ok(rust_primitives::slice::array_from_fn(|i| {\n                *rust_primitives::slice::slice_index(x, i)\n            }))\n        } else {\n            Result::Err(TryFromSliceError)\n        }\n    }\n}\n\nimpl<T, U: TryFrom<T>> TryInto<U> for T {\n    type Error = U::Error;\n    fn try_into(self) -> Result<U, Self::Error> {\n        U::try_from(self)\n    }\n}\n\nimpl<T> From<T> for T {\n    fn from(x: T) -> Self {\n        x\n    }\n}\n\n#[hax_lib::attributes]\ntrait AsRef<T> {\n    #[hax_lib::requires(true)]\n    fn as_ref(self) -> T;\n}\n\nimpl<T> AsRef<T> for T {\n    fn as_ref(self) -> T {\n        self\n    }\n}\n\nmacro_rules! 
int_from {\n    (\n        $($From_t: ident)*,\n        $($To_t: ident)*,\n    ) => {\n        $(\n            #[cfg_attr(hax_backend_lean, hax_lib::exclude)]\n            impl From<$From_t> for $To_t {\n                fn from(x: $From_t) -> $To_t {\n                    x as $To_t\n                }\n            }\n        )*\n    }\n}\n\nuse super::num::error::TryFromIntError;\n\nmacro_rules! int_try_from {\n    (\n        $($From_t: ident)*,\n        $($To_t: ident)*,\n    ) => {\n        $(\n            #[cfg_attr(hax_backend_lean, hax_lib::exclude)]\n            impl TryFrom<$From_t> for $To_t {\n                type Error = TryFromIntError;\n                fn try_from(x: $From_t) -> Result<$To_t, TryFromIntError> {\n                    if x > ($To_t::MAX as $From_t) || x < ($To_t::MIN as $From_t) {\n                        Result::Err(TryFromIntError(()))\n                    } else {\n                        Result::Ok(x as $To_t)\n                    }\n                }\n            }\n        )*\n    }\n}\n\nint_from! {\n    u8  u8  u16 u8  u16 u32 u8   u16  u32  u64  usize u8    u16,\n    u16 u32 u32 u64 u64 u64 u128 u128 u128 u128 u128  usize usize,\n}\n\nint_from! {\n    i8  i8  i16 i8  i16 i32 i8   i16  i32  i64  isize i8    i16,\n    i16 i32 i32 i64 i64 i64 i128 i128 i128 i128 i128  isize isize,\n}\n\nint_try_from! {\n    u16 u32 u32 u32   u64 u64 u64 u64   u128 u128 u128 u128 u128  usize usize usize usize,\n    u8  u8  u16 usize u8  u16 u32 usize u8   u16  u32  u64  usize u8    u16   u32   u64,\n}\n\nint_try_from! {\n    i16 i32 i32 i32   i64 i64 i64 i64   i128 i128 i128 i128 i128  isize isize isize isize,\n    i8  i8  i16 isize i8  i16 i32 isize i8   i16  i32  i64  isize i8    i16   i32   i64,\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/default.rs",
    "content": "#[hax_lib::attributes]\npub trait Default {\n    #[hax_lib::requires(true)]\n    fn default() -> Self;\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/error.rs",
    "content": "use super::fmt::{Debug, Display};\n\npub trait Error: Display + Debug {}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/f32.rs",
    "content": "#[allow(non_camel_case_types)]\n#[hax_lib::exclude]\nstruct f32;\n\nimpl f32 {\n    #[hax_lib::opaque]\n    fn abs(x: f64) -> f64 {\n        panic!()\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/fmt.rs",
    "content": "#![allow(unused_variables)]\n\npub struct Error;\n\npub type Result = super::result::Result<(), Error>;\n\npub struct Formatter;\n\npub trait Display {\n    fn fmt(&self, f: &mut Formatter) -> Result;\n}\n\npub trait Debug {\n    fn dbg_fmt(&self, f: &mut Formatter) -> Result;\n}\n\npub struct Arguments<'a>(&'a ());\n\nimpl<T> Debug for T {\n    fn dbg_fmt(&self, f: &mut Formatter) -> Result {\n        Result::Ok(())\n    }\n}\n\nimpl<'a> Arguments<'a> {}\nimpl<'a> Arguments<'a> {}\nimpl<'a> Arguments<'a> {}\nimpl<'a> Arguments<'a> {}\nimpl<'a> Arguments<'a> {}\nimpl<'a> Arguments<'a> {}\nimpl<'a> Arguments<'a> {}\nimpl<'a> Arguments<'a> {}\nimpl<'a> Arguments<'a> {}\nimpl<'a> Arguments<'a> {}\nimpl<'a> Arguments<'a> {\n    fn write_fmt(f: &mut Formatter, args: Arguments) -> Result {\n        Result::Ok(())\n    }\n}\n\nmod rt {\n    #[hax_lib::opaque]\n    // The internals of this are not important in this model\n    enum ArgumentType<'a> {\n        Placeholder {\n            /* value: NonNull<()>,\n            formatter: unsafe fn(NonNull<()>, &mut Formatter<'_>) -> Result, */\n            _lifetime: std::marker::PhantomData<&'a ()>,\n        },\n        /* Count(u16), */\n    }\n\n    pub struct Argument<'a> {\n        ty: ArgumentType<'a>,\n    }\n\n    impl Argument<'_> {\n        #[hax_lib::opaque]\n        fn new_display<T>(x: &T) -> Self {\n            crate::panicking::internal::panic()\n        }\n        #[hax_lib::opaque]\n        fn new_debug<T>(x: &T) -> Self {\n            crate::panicking::internal::panic()\n        }\n        #[hax_lib::opaque]\n        fn new_lower_hex<T>(x: &T) -> Self {\n            crate::panicking::internal::panic()\n        }\n    }\n    impl<'a> Argument<'a> {\n        #[hax_lib::opaque]\n        fn new_binary<T>(x: &T) -> Self {\n            crate::panicking::internal::panic()\n        }\n        #[hax_lib::opaque]\n        fn new_const<T, U>(x: &T, y: &U) -> super::Arguments<'a> {\n            
crate::panicking::internal::panic()\n        }\n        #[hax_lib::opaque]\n        fn new_v1<T, U, V, W>(x: &T, y: &U, z: &V, t: &W) -> super::Arguments<'a> {\n            crate::panicking::internal::panic()\n        }\n        fn none() -> [Self; 0] {\n            []\n        }\n        #[hax_lib::opaque]\n        fn new_v1_formatted<T, U, V>(x: &T, y: &U, z: &V) -> super::Arguments<'a> {\n            crate::panicking::internal::panic()\n        }\n    }\n\n    enum Count {\n        Is(u16),\n        Param(u16),\n        Implied,\n    }\n\n    struct Placeholder {\n        position: usize,\n        flags: u32,\n        precision: Count,\n        width: Count,\n    }\n\n    struct UnsafeArg;\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/hash.rs",
    "content": "pub trait Hasher {}\n\n#[hax_lib::attributes]\npub trait Hash {\n    #[hax_lib::requires(true)]\n    fn hash<H: Hasher>(&self, h: H) -> H;\n}\n\n// Temporary\nimpl<T> Hash for T {\n    fn hash<H: Hasher>(&self, h: H) -> H {\n        crate::panicking::internal::panic()\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/hint.rs",
    "content": "#[hax_lib::ensures(|res| fstar!(\"$res == $dummy\"))]\npub fn black_box<T>(dummy: T) -> T {\n    dummy\n}\n\n#[hax_lib::ensures(|res| fstar!(\"$res == $value\"))]\npub fn must_use<T>(value: T) -> T {\n    value\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/iter.rs",
    "content": "// This model of iterators doesn't respect the signatures of the original definitions in Rust core.\n// We avoid default implementations for trait methods, and instead provide them as external to the trait.\n// This means overriding them is not possible.\n// We also avoid the coinductivity between `IntoIter` and `Iterator`.\n\npub mod traits {\n    pub mod iterator {\n        use super::super::adapters::{\n            enumerate::Enumerate, flat_map::FlatMap, flatten::Flatten, map::Map, step_by::StepBy,\n            take::Take, zip::Zip,\n        };\n        use crate::ops::function::*;\n        use crate::option::Option;\n        #[hax_lib::attributes]\n        pub trait Iterator {\n            type Item;\n            #[hax_lib::requires(true)]\n            fn next(&mut self) -> Option<Self::Item>;\n        }\n\n        // This trait is an addition to deal with the default methods that the F* backend doesn't handle\n        trait IteratorMethods: Iterator {\n            fn fold<B, F: FnOnce<(B, Self::Item), Output = B>>(self, init: B, f: F) -> B;\n            fn enumerate(self) -> Enumerate<Self>\n            where\n                Self: Sized;\n            fn step_by(self, step: usize) -> StepBy<Self>\n            where\n                Self: Sized;\n            fn map<O, F: FnOnce<Self::Item, Output = O>>(self, f: F) -> Map<Self, F>\n            where\n                Self: Sized;\n            fn all<F: FnOnce<Self::Item, Output = bool>>(self, f: F) -> bool;\n            fn take(self, n: usize) -> Take<Self>\n            where\n                Self: Sized;\n            fn flat_map<U: Iterator, F: FnOnce<Self::Item, Output = U>>(\n                self,\n                f: F,\n            ) -> FlatMap<Self, U, F>\n            where\n                Self: Sized;\n            fn flatten(self) -> Flatten<Self>\n            where\n                Self::Item: Iterator,\n                Self: Sized;\n            fn zip<I2: Iterator>(self, it2: I2) -> 
Zip<Self, I2>\n            where\n                Self: Sized;\n        }\n\n        impl<I: Iterator> IteratorMethods for I {\n            fn fold<B, F: FnOnce<(B, I::Item), Output = B>>(mut self, init: B, f: F) -> B {\n                let mut accum = init;\n                /* while let Option::Some(x) = self.next() {\n                    accum = f.call_once((accum, x));\n                } */\n                accum\n            }\n\n            fn enumerate(self) -> Enumerate<I> {\n                Enumerate::new(self)\n            }\n\n            fn step_by(self, step: usize) -> StepBy<I> {\n                StepBy::new(self, step)\n            }\n\n            fn map<O, F: FnOnce<I::Item, Output = O>>(self, f: F) -> Map<I, F> {\n                Map::new(self, f)\n            }\n\n            fn all<F: FnOnce<I::Item, Output = bool>>(mut self, f: F) -> bool {\n                /* while let Option::Some(x) = self.next() {\n                    if !f.call_once(x) {\n                        return false;\n                    }\n                } */\n                true\n            }\n\n            fn take(self, n: usize) -> Take<I> {\n                Take::new(self, n)\n            }\n\n            fn flat_map<U: Iterator, F: FnOnce<I::Item, Output = U>>(\n                self,\n                f: F,\n            ) -> FlatMap<I, U, F> {\n                FlatMap::new(self, f)\n            }\n\n            fn flatten(self) -> Flatten<I>\n            where\n                I::Item: Iterator,\n            {\n                Flatten::new(self)\n            }\n\n            fn zip<I2: Iterator>(self, it2: I2) -> Zip<Self, I2> {\n                Zip::new(self, it2)\n            }\n        }\n\n        impl<I: Iterator> super::collect::IntoIterator for I {\n            type IntoIter = Self;\n            fn into_iter(self) -> Self {\n                self\n            }\n        }\n\n        // TODO rev: DoubleEndedIterator?\n    }\n    pub mod collect {\n        pub trait 
IntoIterator {\n            // Ignoring type Item, and trait bound Iterator to avoid coinduction\n            // type Item;\n            type IntoIter; //: Iterator<Item = Self::Item>\n            fn into_iter(self) -> Self::IntoIter;\n        }\n        #[hax_lib::attributes]\n        pub trait FromIterator<A>: Sized {\n            #[hax_lib::requires(true)]\n            fn from_iter<T: IntoIterator>(iter: T) -> Self;\n        }\n    }\n}\n\npub mod adapters {\n    pub mod enumerate {\n        use super::super::traits::iterator::Iterator;\n        use crate::option::Option;\n        pub struct Enumerate<I> {\n            iter: I,\n            count: usize,\n        }\n        impl<I> Enumerate<I> {\n            pub fn new(iter: I) -> Enumerate<I> {\n                Enumerate { iter, count: 0 }\n            }\n        }\n        impl<I: Iterator> Iterator for Enumerate<I> {\n            type Item = (usize, <I as Iterator>::Item);\n\n            fn next(&mut self) -> Option<(usize, <I as Iterator>::Item)> {\n                match self.iter.next() {\n                    Option::Some(a) => {\n                        let i = self.count;\n                        // TODO check what to do here. 
It would be bad to have an iterator with\n                        // more than usize::MAX elements, this could be a requirement (but hard to formulate).\n                        hax_lib::assume!(self.count < usize::MAX);\n                        self.count += 1;\n                        Option::Some((i, a))\n                    }\n                    Option::None => Option::None,\n                }\n            }\n        }\n    }\n    pub mod step_by {\n        use super::super::traits::iterator::Iterator;\n        use crate::option::Option;\n        pub struct StepBy<I> {\n            iter: I,\n            step: usize,\n        }\n        impl<I> StepBy<I> {\n            pub fn new(iter: I, step: usize) -> Self {\n                StepBy { iter, step }\n            }\n        }\n\n        #[hax_lib::opaque]\n        impl<I: Iterator> Iterator for StepBy<I> {\n            type Item = <I as Iterator>::Item;\n\n            fn next(&mut self) -> Option<<I as Iterator>::Item> {\n                for _ in 1..self.step {\n                    if let Option::None = self.iter.next() {\n                        return Option::None;\n                    }\n                }\n                self.iter.next()\n            }\n        }\n    }\n    pub mod map {\n        pub struct Map<I, F> {\n            iter: I,\n            f: F,\n        }\n        impl<I, F> Map<I, F> {\n            pub fn new(iter: I, f: F) -> Self {\n                Self { iter, f }\n            }\n        }\n        use super::super::traits::iterator::Iterator;\n        use crate::ops::function::*;\n        use crate::option::Option;\n        impl<I: Iterator, O, F: FnOnce<I::Item, Output = O>> Iterator for Map<I, F> {\n            type Item = O;\n\n            fn next(&mut self) -> Option<O> {\n                match self.iter.next() {\n                    Option::Some(v) => Option::Some(self.f.call_once(v)),\n                    Option::None => Option::None,\n                }\n            }\n        }\n  
  }\n    pub mod take {\n        use super::super::traits::iterator::Iterator;\n        use crate::option::Option;\n        pub struct Take<I> {\n            iter: I,\n            n: usize,\n        }\n        impl<I> Take<I> {\n            pub fn new(iter: I, n: usize) -> Take<I> {\n                Take { iter, n }\n            }\n        }\n        impl<I: Iterator> Iterator for Take<I> {\n            type Item = <I as Iterator>::Item;\n\n            fn next(&mut self) -> Option<<I as Iterator>::Item> {\n                if self.n != 0 {\n                    self.n -= 1;\n                    self.iter.next()\n                } else {\n                    Option::None\n                }\n            }\n        }\n    }\n    pub mod flat_map {\n        use super::super::traits::iterator::Iterator;\n        use crate::option::Option;\n        pub struct FlatMap<I, U, F> {\n            it: I,\n            f: F,\n            current: Option<U>,\n        }\n        impl<I: Iterator, U: Iterator, F: FnOnce<I::Item, Output = U>> FlatMap<I, U, F> {\n            pub fn new(it: I, f: F) -> Self {\n                Self {\n                    it,\n                    f,\n                    current: Option::None,\n                }\n            }\n        }\n        use crate::ops::function::*;\n        #[hax_lib::opaque]\n        impl<I: Iterator, U: Iterator, F: FnOnce<I::Item, Output = U>> Iterator for FlatMap<I, U, F> {\n            type Item = U::Item;\n            fn next(&mut self) -> Option<U::Item> {\n                loop {\n                    if let Option::Some(current_it) = &mut self.current\n                        && let Option::Some(v) = current_it.next()\n                    {\n                        return Option::Some(v);\n                    } else {\n                        match self.it.next() {\n                            Option::Some(c) => self.current = Option::Some(self.f.call_once(c)),\n                            Option::None => return 
Option::None,\n                        }\n                    }\n                }\n            }\n        }\n    }\n    pub mod flatten {\n        use super::super::traits::iterator::Iterator;\n        use crate::option::Option;\n        #[hax_lib::fstar::before(\"noeq\")] // https://github.com/cryspen/hax/issues/1810\n        pub struct Flatten<I: Iterator>\n        where\n            I::Item: Iterator,\n        {\n            it: I,\n            current: Option<I::Item>,\n        }\n        impl<I: Iterator> Flatten<I>\n        where\n            I::Item: Iterator,\n        {\n            pub fn new(it: I) -> Self {\n                Self {\n                    it,\n                    current: Option::None,\n                }\n            }\n        }\n        #[hax_lib::opaque]\n        impl<I: Iterator> Iterator for Flatten<I>\n        where\n            I::Item: Iterator,\n        {\n            type Item = <<I as Iterator>::Item as Iterator>::Item;\n            fn next(&mut self) -> Option<<<I as Iterator>::Item as Iterator>::Item> {\n                loop {\n                    if let Option::Some(current_it) = &mut self.current\n                        && let Option::Some(v) = current_it.next()\n                    {\n                        return Option::Some(v);\n                    } else {\n                        match self.it.next() {\n                            Option::Some(c) => self.current = Option::Some(c),\n                            Option::None => return Option::None,\n                        }\n                    }\n                }\n            }\n        }\n    }\n    pub mod zip {\n        use super::super::traits::iterator::Iterator;\n        use crate::option::Option;\n        pub struct Zip<I1, I2> {\n            it1: I1,\n            it2: I2,\n        }\n        impl<I1: Iterator, I2: Iterator> Zip<I1, I2> {\n            pub fn new(it1: I1, it2: I2) -> Self {\n                Self { it1, it2 }\n            }\n        }\n        
#[hax_lib::opaque]\n        impl<I1: Iterator, I2: Iterator> Iterator for Zip<I1, I2> {\n            type Item = (I1::Item, I2::Item);\n            fn next(&mut self) -> Option<Self::Item> {\n                match self.it1.next() {\n                    Option::Some(v1) => match self.it2.next() {\n                        Option::Some(v2) => Option::Some((v1, v2)),\n                        Option::None => Option::None,\n                    },\n                    Option::None => Option::None,\n                }\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/marker.rs",
    "content": "use super::clone::Clone;\n\npub trait Copy: Clone {}\npub trait Send {}\npub trait Sync {}\npub trait Sized {}\npub trait StructuralPartialEq {}\n\n// In our models, all types implement those marker traits\nimpl<T> Send for T {}\nimpl<T> Sync for T {}\nimpl<T> Sized for T {}\nimpl<T: Clone> Copy for T {}\n\n#[hax_lib::fstar::replace(\"type t_PhantomData (v_T: Type0) = | PhantomData : t_PhantomData v_T\")]\n#[hax_lib::lean::replace(\"structure PhantomData (T : Type) where\")]\nstruct PhantomData<T>(T);\n"
  },
  {
    "path": "hax-lib/core-models/src/core/mem.rs",
    "content": "#![allow(unused_variables)]\n\nuse super::marker::Copy;\n\n#[hax_lib::opaque]\npub fn forget<T>(t: T) {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub fn forget_unsized<T>(t: T) {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub fn size_of<T>() -> usize {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub fn size_of_val<T: ?Sized>(val: &T) -> usize {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub fn min_align_of<T>() -> usize {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub fn align_of<T>() -> usize {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub fn align_of_val<T: ?Sized>(val: &T) -> usize {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub unsafe fn align_of_val_raw<T>(val: T) -> usize {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub fn needs_drop<T: ?Sized>() -> bool {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub unsafe fn uninitialized<T>() -> T {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub fn swap<T>(x: &mut T, y: &mut T) {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub fn replace<T>(dest: &mut T, src: T) -> T {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub fn drop<T>(_x: T) {}\n\npub fn copy<T: Copy>(x: &T) -> T {\n    rust_primitives::mem::copy(x)\n}\n\n#[hax_lib::opaque]\npub unsafe fn take<T>(x: &mut T) -> T {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub unsafe fn transmute_copy<Src, Dst>(src: &Src) -> Dst {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub fn variant_count<T>() -> usize {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub unsafe fn zeroed<T>() -> T {\n    panic!()\n}\n\n#[hax_lib::opaque]\npub unsafe fn transmute<Src, Dst>(src: Src) -> Dst {\n    panic!()\n}\n\nmod manually_drop {\n    pub struct ManuallyDrop<T: ?Sized> {\n        value: T,\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/num/error.rs",
    "content": "//! Error types for conversion to integral types.\n#![allow(unused_variables)]\n\npub struct TryFromIntError(pub(crate) ());\n\npub struct ParseIntError {\n    pub(super) kind: IntErrorKind,\n}\n\n// Because of representations, enums bring a dependency to isize.\n// TODO Fix the dependency issue and add `IntErrorKind`\n/* pub enum IntErrorKind {\n    Empty,\n    InvalidDigit,\n    PosOverflow,\n    NegOverflow,\n    Zero,\n} */\n\npub struct IntErrorKind;\n"
  },
  {
    "path": "hax-lib/core-models/src/core/num/mod.rs",
    "content": "#![allow(non_camel_case_types, unused_variables)]\n\nuse crate::result::Result;\nuse pastey::paste;\n\npub mod error;\n\nuse rust_primitives::arithmetic::*;\n\nmacro_rules! uint_impl {\n    (\n        $Self: ty,\n        $Name: ty,\n        $Max: expr,\n        $Bits: expr,\n        $Bytes: expr,\n    ) => {\n        #[hax_lib::attributes]\n        impl $Name {\n            pub const MIN: $Self = 0;\n            pub const MAX: $Self = $Max;\n            pub const BITS: core::primitive::u32 = $Bits;\n            fn wrapping_add(x: $Self, y: $Self) -> $Self {\n                paste! { [<wrapping_add_ $Name>](x, y) }\n            }\n            fn saturating_add(x: $Self, y: $Self) -> $Self {\n                paste! { [<saturating_add_ $Name>](x, y) }\n            }\n            fn overflowing_add(x: $Self, y: $Self) -> ($Self, bool) {\n                paste! { [<overflowing_add_ $Name>](x, y) }\n            }\n            fn checked_add(x: $Self, y: $Self) -> Option<$Self> {\n                if Self::MIN.to_int() <= x.to_int() + y.to_int()\n                    && x.to_int() + y.to_int() <= Self::MAX.to_int()\n                {\n                    Option::Some(x + y)\n                } else {\n                    Option::None\n                }\n            }\n            fn wrapping_sub(x: $Self, y: $Self) -> $Self {\n                paste! { [<wrapping_sub_ $Name>](x, y) }\n            }\n            fn saturating_sub(x: $Self, y: $Self) -> $Self {\n                paste! { [<saturating_sub_ $Name>](x, y) }\n            }\n            fn overflowing_sub(x: $Self, y: $Self) -> ($Self, bool) {\n                paste! 
{ [<overflowing_sub_ $Name>](x, y) }\n            }\n            fn checked_sub(x: $Self, y: $Self) -> Option<$Self> {\n                if Self::MIN.to_int() <= x.to_int() - y.to_int()\n                    && x.to_int() - y.to_int() <= Self::MAX.to_int()\n                {\n                    Option::Some(x - y)\n                } else {\n                    Option::None\n                }\n            }\n            fn wrapping_mul(x: $Self, y: $Self) -> $Self {\n                paste! { [<wrapping_mul_ $Name>](x, y) }\n            }\n            fn saturating_mul(x: $Self, y: $Self) -> $Self {\n                paste! { [<saturating_mul_ $Name>](x, y) }\n            }\n            fn overflowing_mul(x: $Self, y: $Self) -> ($Self, bool) {\n                paste! { [<overflowing_mul_ $Name>](x, y) }\n            }\n            fn checked_mul(x: $Self, y: $Self) -> Option<$Self> {\n                if Self::MIN.to_int() <= x.to_int() * y.to_int()\n                    && x.to_int() * y.to_int() <= Self::MAX.to_int()\n                {\n                    Option::Some(x * y)\n                } else {\n                    Option::None\n                }\n            }\n            #[hax_lib::requires(y != 0)]\n            fn rem_euclid(x: $Self, y: $Self) -> $Self {\n                paste! { [<rem_euclid_ $Name>](x, y) }\n            }\n            fn pow(x: $Self, exp: core::primitive::u32) -> $Self {\n                paste! { [<pow_ $Name>](x, exp) }\n            }\n            fn count_ones(x: $Self) -> core::primitive::u32 {\n                paste! { [<count_ones_ $Name>](x) }\n            }\n            #[hax_lib::opaque]\n            fn rotate_right(x: $Self, n: core::primitive::u32) -> $Self {\n                paste! { [<rotate_right_ $Name>](x, n) }\n            }\n            #[hax_lib::opaque]\n            fn rotate_left(x: $Self, n: core::primitive::u32) -> $Self {\n                paste! 
{ [<rotate_left_ $Name>](x, n) }\n            }\n            #[hax_lib::opaque]\n            fn leading_zeros(x: $Self) -> core::primitive::u32 {\n                paste! { [<leading_zeros_ $Name>](x) }\n            }\n            #[hax_lib::opaque]\n            fn ilog2(x: $Self) -> core::primitive::u32 {\n                paste! { [<ilog2_ $Name>](x) }\n            }\n            #[hax_lib::opaque]\n            fn from_str_radix(\n                src: &str,\n                radix: core::primitive::u32,\n            ) -> Result<$Self, error::ParseIntError> {\n                crate::panicking::internal::panic()\n            }\n            #[hax_lib::opaque]\n            fn from_be_bytes(bytes: [core::primitive::u8; $Bytes]) -> $Self {\n                paste! { [<from_be_bytes_ $Name>](bytes) }\n            }\n            #[hax_lib::opaque]\n            fn from_le_bytes(bytes: [core::primitive::u8; $Bytes]) -> $Self {\n                paste! { [<from_le_bytes_ $Name>](bytes) }\n            }\n            #[hax_lib::opaque]\n            fn to_be_bytes(bytes: $Self) -> [core::primitive::u8; $Bytes] {\n                paste! { [<to_be_bytes_ $Name>](bytes) }\n            }\n            #[hax_lib::opaque]\n            fn to_le_bytes(bytes: $Self) -> [core::primitive::u8; $Bytes] {\n                paste! { [<to_le_bytes_ $Name>](bytes) }\n            }\n        }\n    };\n}\n\nuse crate::option::Option;\nuse hax_lib::int::ToInt;\n\nmacro_rules! iint_impl {\n    (\n        $Self: ty,\n        $Name: ty,\n        $Max: expr,\n        $Min: expr,\n        $Bits: expr,\n        $Bytes: expr,\n    ) => {\n        #[hax_lib::attributes]\n        impl $Name {\n            pub const MIN: $Self = $Min;\n            pub const MAX: $Self = $Max;\n            pub const BITS: core::primitive::u32 = $Bits;\n            fn wrapping_add(x: $Self, y: $Self) -> $Self {\n                paste! 
{ [<wrapping_add_ $Name>](x, y) }\n            }\n            fn saturating_add(x: $Self, y: $Self) -> $Self {\n                paste! { [<saturating_add_ $Name>](x, y) }\n            }\n            fn overflowing_add(x: $Self, y: $Self) -> ($Self, bool) {\n                paste! { [<overflowing_add_ $Name>](x, y) }\n            }\n            fn checked_add(x: $Self, y: $Self) -> Option<$Self> {\n                if Self::MIN.to_int() <= x.to_int() + y.to_int()\n                    && x.to_int() + y.to_int() <= Self::MAX.to_int()\n                {\n                    Option::Some(x + y)\n                } else {\n                    Option::None\n                }\n            }\n            fn wrapping_sub(x: $Self, y: $Self) -> $Self {\n                paste! { [<wrapping_sub_ $Name>](x, y) }\n            }\n            fn saturating_sub(x: $Self, y: $Self) -> $Self {\n                paste! { [<saturating_sub_ $Name>](x, y) }\n            }\n            fn overflowing_sub(x: $Self, y: $Self) -> ($Self, bool) {\n                paste! { [<overflowing_sub_ $Name>](x, y) }\n            }\n            fn checked_sub(x: $Self, y: $Self) -> Option<$Self> {\n                if Self::MIN.to_int() <= x.to_int() - y.to_int()\n                    && x.to_int() - y.to_int() <= Self::MAX.to_int()\n                {\n                    Option::Some(x - y)\n                } else {\n                    Option::None\n                }\n            }\n            fn wrapping_mul(x: $Self, y: $Self) -> $Self {\n                paste! { [<wrapping_mul_ $Name>](x, y) }\n            }\n            fn saturating_mul(x: $Self, y: $Self) -> $Self {\n                paste! { [<saturating_mul_ $Name>](x, y) }\n            }\n            fn overflowing_mul(x: $Self, y: $Self) -> ($Self, bool) {\n                paste! 
{ [<overflowing_mul_ $Name>](x, y) }\n            }\n            fn checked_mul(x: $Self, y: $Self) -> Option<$Self> {\n                if Self::MIN.to_int() <= x.to_int() * y.to_int()\n                    && x.to_int() * y.to_int() <= Self::MAX.to_int()\n                {\n                    Option::Some(x * y)\n                } else {\n                    Option::None\n                }\n            }\n            #[hax_lib::requires(y != 0)]\n            fn rem_euclid(x: $Self, y: $Self) -> $Self {\n                paste! { [<rem_euclid_ $Name>](x, y) }\n            }\n            fn pow(x: $Self, exp: core::primitive::u32) -> $Self {\n                paste! { [<pow_ $Name>](x, exp) }\n            }\n            fn count_ones(x: $Self) -> core::primitive::u32 {\n                paste! { [<count_ones_ $Name>](x) }\n            }\n            #[hax_lib::requires(x > $Self::MIN)]\n            fn abs(x: $Self) -> $Self {\n                paste! { [<abs_ $Name>](x) }\n            }\n            #[hax_lib::opaque]\n            fn rotate_right(x: $Self, n: core::primitive::u32) -> $Self {\n                paste! { [<rotate_right_ $Name>](x, n) }\n            }\n            #[hax_lib::opaque]\n            fn rotate_left(x: $Self, n: core::primitive::u32) -> $Self {\n                paste! { [<rotate_left_ $Name>](x, n) }\n            }\n            #[hax_lib::opaque]\n            fn leading_zeros(x: $Self) -> core::primitive::u32 {\n                paste! { [<leading_zeros_ $Name>](x) }\n            }\n            #[hax_lib::opaque]\n            fn ilog2(x: $Self) -> core::primitive::u32 {\n                paste! 
{ [<ilog2_ $Name>](x) }\n            }\n            #[hax_lib::opaque]\n            fn from_str_radix(\n                src: &str,\n                radix: core::primitive::u32,\n            ) -> Result<$Self, error::ParseIntError> {\n                crate::panicking::internal::panic()\n            }\n            #[hax_lib::opaque]\n            fn from_be_bytes(bytes: [core::primitive::u8; $Bytes]) -> $Self {\n                paste! { [<from_be_bytes_ $Name>](bytes) }\n            }\n            #[hax_lib::opaque]\n            fn from_le_bytes(bytes: [core::primitive::u8; $Bytes]) -> $Self {\n                paste! { [<from_le_bytes_ $Name>](bytes) }\n            }\n            #[hax_lib::opaque]\n            fn to_be_bytes(bytes: $Self) -> [core::primitive::u8; $Bytes] {\n                paste! { [<to_be_bytes_ $Name>](bytes) }\n            }\n            #[hax_lib::opaque]\n            fn to_le_bytes(bytes: $Self) -> [core::primitive::u8; $Bytes] {\n                paste! { [<to_le_bytes_ $Name>](bytes) }\n            }\n        }\n    };\n}\n\n// These types are a trick to define impls on the right names as\n// it is forbidden to do it on primitive types\n#[hax_lib::exclude]\npub struct u8;\n#[hax_lib::exclude]\npub struct u16;\n#[hax_lib::exclude]\npub struct u32;\n#[hax_lib::exclude]\npub struct u64;\n#[hax_lib::exclude]\npub struct u128;\n#[hax_lib::exclude]\npub struct usize;\n#[hax_lib::exclude]\npub struct i8;\n#[hax_lib::exclude]\npub struct i16;\n#[hax_lib::exclude]\npub struct i32;\n#[hax_lib::exclude]\npub struct i64;\n#[hax_lib::exclude]\npub struct i128;\n#[hax_lib::exclude]\npub struct isize;\n\n// Placeholders to get the same impl numbering as in core:\n#[hax_lib::attributes]\nimpl i8 {}\n#[hax_lib::attributes]\nimpl i16 {}\n#[hax_lib::attributes]\nimpl i32 {}\n#[hax_lib::attributes]\nimpl i64 {}\n#[hax_lib::attributes]\nimpl i128 {}\n#[hax_lib::attributes]\nimpl isize {}\n\nuint_impl! 
{\n    core::primitive::u8,\n    u8,\n    255,\n    8,\n    1,\n}\n\nuint_impl! {\n    core::primitive::u16,\n    u16,\n    65535,\n    16,\n    2,\n}\n\nuint_impl! {\n    core::primitive::u32,\n    u32,\n    4294967295,\n    32,\n    4,\n}\n\nuint_impl! {\n    core::primitive::u64,\n    u64,\n    18446744073709551615,\n    64,\n    8,\n}\n\nuint_impl! {\n    core::primitive::u128,\n    u128,\n    340282366920938463463374607431768211455,\n    128,\n    16,\n}\n\nuint_impl! {\n    core::primitive::usize,\n    usize,\n    USIZE_MAX,\n    SIZE_BITS,\n    SIZE_BYTES,\n}\n\niint_impl! {\n    core::primitive::i8,\n    i8,\n    127,\n    -128,\n    8,\n    1,\n}\n\niint_impl! {\n    core::primitive::i16,\n    i16,\n    32767,\n    -32768,\n    16,\n    2,\n}\n\niint_impl! {\n    core::primitive::i32,\n    i32,\n    2147483647,\n    -2147483648,\n    32,\n    4,\n}\n\niint_impl! {\n    core::primitive::i64,\n    i64,\n    9223372036854775807,\n    -9223372036854775808,\n    64,\n    8,\n}\n\niint_impl! {\n    core::primitive::i128,\n    i128,\n    170141183460469231731687303715884105727,\n    -170141183460469231731687303715884105728,\n    128,\n    16,\n}\n\niint_impl! {\n    core::primitive::isize,\n    isize,\n    ISIZE_MAX,\n    ISIZE_MIN,\n    SIZE_BITS,\n    SIZE_BYTES,\n}\n\nmacro_rules! impl_default_for_int {\n    ($($t:ty),*) => {\n        $(\n            #[hax_lib::attributes]\n            impl crate::default::Default for $t {\n                fn default() -> $t {\n                    0\n                }\n            }\n        )*\n    };\n}\n\nimpl_default_for_int!(\n    core::primitive::u8,\n    core::primitive::u16,\n    core::primitive::u32,\n    core::primitive::u64,\n    core::primitive::u128,\n    core::primitive::usize,\n    core::primitive::i8,\n    core::primitive::i16,\n    core::primitive::i32,\n    core::primitive::i64,\n    core::primitive::i128,\n    core::primitive::isize\n);\n"
  },
  {
    "path": "hax-lib/core-models/src/core/ops.rs",
    "content": "pub mod arith {\n    pub trait Add<Rhs = Self> {\n        type Output;\n        fn add(self, rhs: Rhs) -> Self::Output;\n    }\n    pub trait Sub<Rhs = Self> {\n        type Output;\n        fn sub(self, rhs: Rhs) -> Self::Output;\n    }\n    pub trait Mul<Rhs = Self> {\n        type Output;\n        fn mul(self, rhs: Rhs) -> Self::Output;\n    }\n    pub trait Div<Rhs = Self> {\n        type Output;\n        fn div(self, rhs: Rhs) -> Self::Output;\n    }\n    pub trait Neg {\n        type Output;\n        fn neg(self) -> Self::Output;\n    }\n    pub trait Rem<Rhs = Self> {\n        type Output;\n        fn rem(self, rhs: Rhs) -> Self::Output;\n    }\n    pub trait AddAssign<Rhs = Self> {\n        fn add_assign(&mut self, rhs: Rhs);\n    }\n    pub trait SubAssign<Rhs = Self> {\n        fn sub_assign(&mut self, rhs: Rhs);\n    }\n    pub trait MulAssign<Rhs = Self> {\n        fn mul_assign(&mut self, rhs: Rhs);\n    }\n    pub trait DivAssign<Rhs = Self> {\n        fn div_assign(&mut self, rhs: Rhs);\n    }\n    pub trait RemAssign<Rhs = Self> {\n        fn rem_assign(&mut self, rhs: Rhs);\n    }\n\n    macro_rules! 
int_trait_impls {\n        ($($Self:ty)*) => {\n            use hax_lib::ToInt;\n            $(\n            #[hax_lib::attributes]\n            #[cfg_attr(hax_backend_lean, hax_lib::exclude)]\n            impl crate::ops::arith::AddAssign<$Self> for $Self {\n                #[hax_lib::requires(self.to_int() + rhs.to_int() <= $Self::MAX.to_int())]\n                fn add_assign(&mut self, rhs: $Self) {\n                    *self = *self + rhs\n                }\n            }\n            #[hax_lib::attributes]\n            #[cfg_attr(hax_backend_lean, hax_lib::exclude)]\n            impl crate::ops::arith::SubAssign<$Self> for $Self {\n                #[hax_lib::requires(self.to_int() - rhs.to_int() >= 0.to_int())]\n                fn sub_assign(&mut self, rhs: $Self) {\n                    *self = *self - rhs\n                }\n            })*\n        }\n    }\n\n    int_trait_impls!(u8 u16 u32 u64);\n}\n\npub mod bit {\n    trait Shr<Rhs = Self> {\n        type Output;\n        fn shr(self, rhs: Rhs) -> Self::Output;\n    }\n    trait Shl<Rhs = Self> {\n        type Output;\n        fn shl(self, rhs: Rhs) -> Self::Output;\n    }\n    trait BitXor<Rhs = Self> {\n        type Output;\n        fn bitxor(self, rhs: Rhs) -> Self::Output;\n    }\n    trait BitAnd<Rhs = Self> {\n        type Output;\n        fn bitand(self, rhs: Rhs) -> Self::Output;\n    }\n    trait BitOr<Rhs = Self> {\n        type Output;\n        fn bitor(self, rhs: Rhs) -> Self::Output;\n    }\n}\n\npub mod control_flow {\n    pub enum ControlFlow<B, C> {\n        Continue(C),\n        Break(B),\n    }\n}\n\npub mod index {\n    pub trait Index<Idx> {\n        type Output: ?Sized;\n        fn index(&self, i: Idx) -> &Self::Output;\n    }\n}\n\npub mod function {\n    #[hax_lib::attributes]\n    pub trait FnOnce<Args> {\n        type Output;\n        #[hax_lib::requires(true)]\n        fn call_once(&self, args: Args) -> Self::Output;\n    }\n    #[hax_lib::attributes]\n    pub trait Fn<Args>: 
FnOnce<Args> {\n        #[hax_lib::requires(true)]\n        fn call(&self, args: Args) -> Self::Output;\n    }\n\n    /* These instances provide implementations of the F* type classes corresponding to Fn traits for anonymous functions.\n    This ensures that passing a closure where something implementing Fn works when translated to F* */\n    #[hax_lib::fstar::after(\n        \"unfold instance fnonce_arrow_binder t u\n  : t_FnOnce (_:t -> u) t = {\n    f_Output = u;\n    f_call_once_pre = (fun _ _ -> true);\n    f_call_once_post = (fun (x0: (_:t -> u)) (x1: t) (res: u) -> res == x0 x1);\n    f_call_once = (fun (x0: (_:t -> u)) (x1: t) -> x0 x1);\n  }\"\n    )]\n    impl<Arg, Out> FnOnce<Arg> for fn(Arg) -> Out {\n        type Output = Out;\n        fn call_once(&self, arg: Arg) -> Out {\n            self(arg)\n        }\n    }\n    impl<Arg1, Arg2, Out> FnOnce<(Arg1, Arg2)> for fn(Arg1, Arg2) -> Out {\n        type Output = Out;\n        fn call_once(&self, arg: (Arg1, Arg2)) -> Out {\n            self(arg.0, arg.1)\n        }\n    }\n    impl<Arg1, Arg2, Arg3, Out> FnOnce<(Arg1, Arg2, Arg3)> for fn(Arg1, Arg2, Arg3) -> Out {\n        type Output = Out;\n        fn call_once(&self, arg: (Arg1, Arg2, Arg3)) -> Out {\n            self(arg.0, arg.1, arg.2)\n        }\n    }\n}\n\nmod try_trait {\n    trait FromResidual<R> {\n        fn from_residual(x: R) -> Self;\n    }\n\n    trait Try {\n        type Output;\n        type Residual;\n        fn from_output(x: Self::Output) -> Self;\n        fn branch(&self) -> super::control_flow::ControlFlow<Self::Residual, Self::Output>;\n    }\n}\n\nmod deref {\n    pub trait Deref {\n        type Target: ?Sized;\n\n        fn deref(&self) -> &Self::Target;\n    }\n\n    impl<T> Deref for &T {\n        type Target = T;\n        fn deref(&self) -> &T {\n            &self\n        }\n    }\n}\n\nmod drop {\n    trait Drop {\n        fn drop(&mut self);\n    }\n}\n\npub mod range {\n    pub struct RangeTo<T> {\n        pub end: T,\n 
   }\n    pub struct RangeFrom<T> {\n        pub start: T,\n    }\n    pub struct Range<T> {\n        pub start: T,\n        pub end: T,\n    }\n    pub struct RangeFull;\n\n    macro_rules! impl_iterator_range_int {\n        ($($int_type: ident)*) => {\n            use crate::option::Option;\n            $(\n                #[cfg_attr(hax_backend_lean, hax_lib::exclude)]\n                impl crate::iter::traits::iterator::Iterator for Range<$int_type> {\n                    type Item = $int_type;\n                    fn next(&mut self) -> Option<$int_type> {\n                        if self.start >= self.end {\n                            Option::None\n                        } else {\n                            let res = self.start;\n                            self.start += 1;\n                            Option::Some(res)\n                        }\n                    }\n                }\n            )*\n        }\n    }\n\n    impl_iterator_range_int!(u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize);\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/option.rs",
    "content": "pub enum Option<T> {\n    Some(T),\n    None,\n}\n\nuse super::default::Default;\nuse super::ops::function::*;\nuse super::result::Result::*;\nuse super::result::*;\nuse Option::*;\n\n#[hax_lib::attributes]\nimpl<T> Option<T> {\n    #[hax_lib::ensures(|res| hax_lib::Prop::implies(res.into(), fstar!(\"Option_Some? self\")))]\n    pub fn is_some(&self) -> bool {\n        matches!(*self, Some(_))\n    }\n\n    pub fn is_some_and<F: FnOnce<T, Output = bool>>(self, f: F) -> bool {\n        match self {\n            None => false,\n            Some(x) => f.call_once(x),\n        }\n    }\n\n    pub fn is_none(&self) -> bool {\n        self.is_some() == false\n    }\n\n    pub fn is_none_or<F: FnOnce<T, Output = bool>>(self, f: F) -> bool {\n        match self {\n            None => true,\n            Some(x) => f.call_once(x),\n        }\n    }\n    pub const fn as_ref(&self) -> Option<&T> {\n        match *self {\n            Some(ref x) => Some(x),\n            None => None,\n        }\n    }\n\n    #[hax_lib::requires(self.is_some())]\n    pub fn expect(self, _msg: &str) -> T {\n        match self {\n            Some(val) => val,\n            None => super::panicking::internal::panic(),\n        }\n    }\n\n    #[hax_lib::requires(self.is_some())]\n    pub fn unwrap(self) -> T {\n        match self {\n            Some(val) => val,\n            None => super::panicking::internal::panic(),\n        }\n    }\n\n    pub fn unwrap_or(self, default: T) -> T {\n        match self {\n            Some(x) => x,\n            None => default,\n        }\n    }\n\n    pub fn unwrap_or_else<F: FnOnce<(), Output = T>>(self, f: F) -> T {\n        match self {\n            Some(x) => x,\n            None => f.call_once(()),\n        }\n    }\n\n    pub fn unwrap_or_default(self) -> T\n    where\n        T: Default,\n    {\n        match self {\n            Some(x) => x,\n            None => T::default(),\n        }\n    }\n\n    pub fn map<U, F>(self, f: F) -> 
Option<U>\n    where\n        F: FnOnce<T, Output = U>,\n    {\n        match self {\n            Some(x) => Some(f.call_once(x)),\n            None => None,\n        }\n    }\n\n    pub fn map_or<U, F>(self, default: U, f: F) -> U\n    where\n        F: FnOnce<T, Output = U>,\n    {\n        match self {\n            Some(t) => f.call_once(t),\n            None => default,\n        }\n    }\n\n    pub fn map_or_else<U, D, F>(self, default: D, f: F) -> U\n    where\n        F: FnOnce<T, Output = U>,\n        D: FnOnce<(), Output = U>,\n    {\n        match self {\n            Some(t) => f.call_once(t),\n            None => default.call_once(()),\n        }\n    }\n\n    pub fn map_or_default<U, F>(self, f: F) -> U\n    where\n        F: FnOnce<T, Output = U>,\n        U: Default,\n    {\n        match self {\n            Some(t) => f.call_once(t),\n            None => U::default(),\n        }\n    }\n    pub fn ok_or<E>(self, err: E) -> Result<T, E> {\n        match self {\n            Some(v) => Ok(v),\n            None => Err(err),\n        }\n    }\n\n    pub fn ok_or_else<E, F: FnOnce<(), Output = E>>(self, err: F) -> Result<T, E> {\n        match self {\n            Some(v) => Ok(v),\n            None => Err(err.call_once(())),\n        }\n    }\n\n    pub fn and_then<U, F>(self, f: F) -> Option<U>\n    where\n        F: FnOnce<T, Output = Option<U>>,\n    {\n        match self {\n            Some(x) => f.call_once(x),\n            None => None,\n        }\n    }\n\n    // The interface in Rust is wrong. but is good after extraction.\n    // We cannot make a useful model with the right interface so we loose the executability.\n    pub fn take(self) -> (Option<T>, Option<T>) {\n        (None, self)\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/panicking.rs",
    "content": "#[hax_lib::opaque]\n#[hax_lib::requires(false)]\npub fn panic_explicit() -> ! {\n    panic!()\n}\n\n#[hax_lib::opaque]\n#[hax_lib::requires(false)]\npub fn panic(_msg: &str) -> ! {\n    panic!()\n}\n\n#[hax_lib::opaque]\n#[hax_lib::requires(false)]\npub fn panic_fmt(_fmt: super::fmt::Arguments) -> ! {\n    panic!()\n}\n\npub mod internal {\n    // This module is used to break a dependency cycle (other core modules have\n    // panics and this brings a dependency on core::fmt that we need to avoid)\n    #[hax_lib::opaque]\n    #[hax_lib::requires(false)]\n    pub fn panic<T>() -> T {\n        panic!(\"\")\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/result.rs",
    "content": "pub enum Result<T, E> {\n    Ok(T),\n    Err(E),\n}\n\nuse super::ops::function::*;\nuse super::option::Option;\nuse Result::*;\n\n#[hax_lib::attributes]\nimpl<T, E> Result<T, E> {\n    #[hax_lib::requires(self.is_ok())]\n    pub fn unwrap(self) -> T {\n        match self {\n            Ok(t) => t,\n            Err(_) => super::panicking::internal::panic(),\n        }\n    }\n    pub fn unwrap_or(self, default: T) -> T {\n        match self {\n            Ok(t) => t,\n            Err(_) => default,\n        }\n    }\n    #[hax_lib::requires(self.is_ok())]\n    pub fn expect(self, _msg: &str) -> T {\n        match self {\n            Ok(t) => t,\n            Err(_) => super::panicking::internal::panic(),\n        }\n    }\n    pub fn map<U, F>(self, op: F) -> Result<U, E>\n    where\n        F: FnOnce<T, Output = U>,\n    {\n        match self {\n            Ok(t) => Ok(op.call_once(t)),\n            Err(e) => Err(e),\n        }\n    }\n    pub fn map_or<U, F>(self, default: U, f: F) -> U\n    where\n        F: FnOnce<T, Output = U>,\n    {\n        match self {\n            Ok(t) => f.call_once(t),\n            Err(_e) => default,\n        }\n    }\n    pub fn map_or_else<U, D, F>(self, default: D, f: F) -> U\n    where\n        F: FnOnce<T, Output = U>,\n        D: FnOnce<E, Output = U>,\n    {\n        match self {\n            Ok(t) => f.call_once(t),\n            Err(e) => default.call_once(e),\n        }\n    }\n    pub fn map_err<F, O>(self, op: O) -> Result<T, F>\n    where\n        O: FnOnce<E, Output = F>,\n    {\n        match self {\n            Ok(t) => Ok(t),\n            Err(e) => Err(op.call_once(e)),\n        }\n    }\n\n    pub fn is_ok(&self) -> bool {\n        matches!(*self, Ok(_))\n    }\n    pub fn and_then<U, F>(self, op: F) -> Result<U, E>\n    where\n        F: FnOnce<T, Output = Result<U, E>>,\n    {\n        match self {\n            Ok(t) => op.call_once(t),\n            Err(e) => Err(e),\n        }\n    }\n    pub fn 
ok(self) -> Option<T> {\n        match self {\n            Ok(x) => Option::Some(x),\n            Err(_) => Option::None,\n        }\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/slice.rs",
    "content": "use crate::result::Result;\n\n// Dummy type to allow impls\n#[hax_lib::exclude]\nstruct Slice<T>(T);\n\npub mod iter {\n    use crate::option::Option;\n    use rust_primitives::{sequence::*, slice::*};\n\n    pub struct Chunks<'a, T> {\n        cs: usize,\n        elements: &'a [T],\n    }\n    impl<'a, T> Chunks<'a, T> {\n        pub fn new(cs: usize, elements: &'a [T]) -> Chunks<'a, T> {\n            Chunks { cs, elements }\n        }\n    }\n    pub struct ChunksExact<'a, T> {\n        cs: usize,\n        elements: &'a [T],\n    }\n    impl<'a, T> ChunksExact<'a, T> {\n        pub fn new(cs: usize, elements: &'a [T]) -> ChunksExact<'a, T> {\n            ChunksExact { cs, elements }\n        }\n    }\n    pub struct Iter<T>(pub Seq<T>);\n\n    impl<T> crate::iter::traits::iterator::Iterator for Iter<T> {\n        type Item = T;\n        fn next(&mut self) -> Option<Self::Item> {\n            if seq_len(&self.0) == 0 {\n                Option::None\n            } else {\n                let res = seq_first(&self.0);\n                self.0 = seq_slice(&self.0, 1, seq_len(&self.0));\n                Option::Some(res)\n            }\n        }\n    }\n\n    impl<'a, T> crate::iter::traits::iterator::Iterator for Chunks<'a, T> {\n        type Item = &'a [T];\n        fn next(&mut self) -> Option<Self::Item> {\n            if slice_length(self.elements) == 0 {\n                Option::None\n            } else if slice_length(self.elements) < self.cs {\n                let res = self.elements;\n                self.elements = slice_slice(self.elements, 0, 0);\n                Option::Some(res)\n            } else {\n                let (res, new_elements) = slice_split_at(self.elements, self.cs);\n                self.elements = new_elements;\n                Option::Some(res)\n            }\n        }\n    }\n\n    impl<'a, T> crate::iter::traits::iterator::Iterator for ChunksExact<'a, T> {\n        type Item = &'a [T];\n        fn next(&mut self) -> 
Option<Self::Item> {\n            if slice_length(self.elements) < self.cs {\n                Option::None\n            } else {\n                let (res, new_elements) = slice_split_at(self.elements, self.cs);\n                self.elements = new_elements;\n                Option::Some(res)\n            }\n        }\n    }\n}\n\n#[hax_lib::attributes]\nimpl<T> Slice<T> {\n    fn len(s: &[T]) -> usize {\n        rust_primitives::slice::slice_length(s)\n    }\n    fn chunks<'a>(s: &'a [T], cs: usize) -> iter::Chunks<'a, T> {\n        iter::Chunks::new(cs, s)\n    }\n    fn iter(s: &[T]) -> iter::Iter<T> {\n        iter::Iter(rust_primitives::sequence::seq_from_slice(s))\n    }\n    fn chunks_exact<'a>(s: &'a [T], cs: usize) -> iter::ChunksExact<'a, T> {\n        iter::ChunksExact::new(cs, s)\n    }\n    #[hax_lib::requires(Slice::len(s) == Slice::len(src))]\n    fn copy_from_slice(s: &mut [T], src: &[T])\n    where\n        T: crate::marker::Copy,\n    {\n        rust_primitives::mem::replace(s, src);\n    }\n    #[hax_lib::requires(Slice::len(s) == Slice::len(src))]\n    fn clone_from_slice(s: &mut [T], src: &[T])\n    where\n        T: crate::clone::Clone,\n    {\n        rust_primitives::mem::replace(s, src);\n    }\n    #[hax_lib::requires(mid <= Slice::len(s))]\n    fn split_at(s: &[T], mid: usize) -> (&[T], &[T]) {\n        rust_primitives::slice::slice_split_at(s, mid)\n    }\n    fn split_at_checked(s: &[T], mid: usize) -> Option<(&[T], &[T])> {\n        if mid <= Slice::len(s) {\n            Option::Some(Self::split_at(s, mid))\n        } else {\n            Option::None\n        }\n    }\n    fn is_empty(s: &[T]) -> bool {\n        Self::len(s) == 0\n    }\n    #[hax_lib::opaque]\n    fn contains(s: &[T], v: T) -> bool {\n        rust_primitives::slice::slice_contains(s, v)\n    }\n    #[hax_lib::opaque]\n    fn copy_within<R>(s: &[T], src: R, dest: usize) -> &[T]\n    where\n        T: Copy,\n    {\n        todo!()\n    }\n    #[hax_lib::opaque]\n    fn 
binary_search(s: &[T], x: &T) -> Result<usize, usize> /* where T: super::ops::Ord */ {\n        todo!()\n    }\n    fn get<I: SliceIndex<[T]>>(s: &[T], index: I) -> Option<&<I as SliceIndex<[T]>>::Output> {\n        index.get(s)\n    }\n}\n\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T> crate::iter::traits::collect::IntoIterator for &[T] {\n    type IntoIter = iter::Iter<T>;\n    fn into_iter(self) -> Self::IntoIter {\n        Slice::iter(self)\n    }\n}\nuse crate::option::Option;\nuse rust_primitives::slice::*;\n\n#[hax_lib::attributes]\npub trait SliceIndex<T: ?Sized> {\n    type Output: ?Sized;\n\n    #[hax_lib::requires(true)]\n    fn get(self, slice: &T) -> Option<&Self::Output>;\n    /* fn get_mut(self, slice: &mut T) -> Option<&mut Self::Output>;\n    unsafe fn get_unchecked(self, slice: *const T) -> *const Self::Output;\n    unsafe fn get_unchecked_mut(self, slice: *mut T) -> *mut Self::Output;\n    fn index(self, slice: &T) -> &Self::Output;\n    fn index_mut(self, slice: &mut T) -> &mut Self::Output; */\n}\n\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T> SliceIndex<[T]> for usize {\n    type Output = T;\n    fn get(self, slice: &[T]) -> Option<&T> {\n        if self < slice.len() {\n            Option::Some(slice_index(slice, self))\n        } else {\n            Option::None\n        }\n    }\n}\n\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T> SliceIndex<[T]> for crate::ops::range::RangeFull {\n    type Output = [T];\n    fn get(self, slice: &[T]) -> Option<&[T]> {\n        Option::Some(slice)\n    }\n}\n\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T> SliceIndex<[T]> for crate::ops::range::RangeFrom<usize> {\n    type Output = [T];\n    fn get(self, slice: &[T]) -> Option<&[T]> {\n        if self.start < slice.len() {\n            Option::Some(slice_slice(slice, self.start, slice.len()))\n        } else {\n          
  Option::None\n        }\n    }\n}\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T> SliceIndex<[T]> for crate::ops::range::RangeTo<usize> {\n    type Output = [T];\n    fn get(self, slice: &[T]) -> Option<&[T]> {\n        if self.end <= slice.len() {\n            Option::Some(slice_slice(slice, 0, self.end))\n        } else {\n            Option::None\n        }\n    }\n}\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T> SliceIndex<[T]> for crate::ops::range::Range<usize> {\n    type Output = [T];\n    fn get(self, slice: &[T]) -> Option<&[T]> {\n        if self.start < self.end && self.end <= slice.len() {\n            Option::Some(slice_slice(slice, self.start, self.end))\n        } else {\n            Option::None\n        }\n    }\n}\n\nuse crate::ops::{\n    index::Index,\n    range::{Range, RangeFrom, RangeFull, RangeTo},\n};\n\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T> Index<Range<usize>> for &[T] {\n    type Output = [T];\n    #[hax_lib::requires(i.start <= i.end && i.end <= self.len())]\n    fn index(&self, i: Range<usize>) -> &[T] {\n        slice_slice(self, i.start, i.end)\n    }\n}\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T> Index<RangeTo<usize>> for &[T] {\n    type Output = [T];\n    #[hax_lib::requires(i.end <= self.len())]\n    fn index(&self, i: RangeTo<usize>) -> &[T] {\n        slice_slice(self, 0, i.end)\n    }\n}\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T> Index<RangeFrom<usize>> for &[T] {\n    type Output = [T];\n    #[hax_lib::requires(i.start <= self.len())]\n    fn index(&self, i: RangeFrom<usize>) -> &[T] {\n        slice_slice(self, i.start, slice_length(self))\n    }\n}\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T> Index<RangeFull> for &[T] {\n    type Output = [T];\n    fn index(&self, i: RangeFull) -> &[T] {\n        
slice_slice(self, 0, slice_length(self))\n    }\n}\n\n#[hax_lib::attributes]\n#[cfg_attr(hax_backend_lean, hax_lib::exclude)]\nimpl<T> crate::ops::index::Index<usize> for &[T] {\n    type Output = T;\n    #[hax_lib::requires(i < self.len())]\n    fn index(&self, i: usize) -> &T {\n        rust_primitives::slice::slice_index(self, i)\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/core/str.rs",
    "content": "mod converts {\n    #[hax_lib::opaque]\n    fn from_utf8(s: &[u8]) -> crate::result::Result<&str, super::error::Utf8Error> {\n        panic!()\n    }\n}\n\nmod error {\n    pub struct Utf8Error;\n}\n\nmod iter {\n    struct Split<T>(T);\n}\n\nmod traits {\n    trait FromStr: Sized {\n        type Err;\n        fn from_str(s: &str) -> crate::result::Result<Self, Self::Err>;\n    }\n\n    #[hax_lib::opaque]\n    #[cfg_attr(hax_backend_lean, hax_lib::exclude)]\n    impl FromStr for u64 {\n        type Err = u64;\n        fn from_str(s: &str) -> crate::result::Result<Self, Self::Err> {\n            panic!()\n        }\n    }\n}\n"
  },
  {
    "path": "hax-lib/core-models/src/lib.rs",
    "content": "//! `core-models`: A Rust Model for the `core` Library\n//!\n//! `core-models` is a simplified, self-contained model of Rust’s `core` library. It aims to provide\n//! a purely Rust-based specification of `core`'s fundamental operations, making them easier to\n//! understand, analyze, and formally verify. Unlike `core`, which may rely on platform-specific\n//! intrinsics and compiler magic, `core-models` expresses everything in plain Rust, prioritizing\n//! clarity and explicitness over efficiency.\n//!\n//! ## Key Features\n//!\n//! - **Partial Modeling**: `core-models` includes only a subset of `core`, focusing on modeling\n//!   fundamental operations rather than providing a complete replacement.\n//! - **Exact Signatures**: Any item that exists in both `core-models` and `core` has the same type signature,\n//!   ensuring compatibility with formal verification efforts.\n//! - **Purely Functional Approach**: Where possible, `core-models` favors functional programming principles,\n//!   avoiding unnecessary mutation and side effects to facilitate formal reasoning.\n//! - **Explicit Implementations**: Even low-level operations, such as SIMD, are modeled explicitly using\n//!   Rust constructs like bit arrays and partial maps.\n//! - **Extra Abstractions**: `core-models` includes additional helper types and functions to support\n//!   modeling. These extra items are marked appropriately to distinguish them from `core` definitions.\n//!\n//! ## Intended Use\n//!\n//! `core-models` is designed as a reference model for formal verification and reasoning about Rust programs.\n//! By providing a readable, well-specified version of `core`'s behavior, it serves as a foundation for\n//! 
proof assistants and other verification tools.\n\n#![allow(dead_code)]\n\n#[path = \"core/array.rs\"]\npub mod array;\n#[path = \"core/borrow.rs\"]\npub mod borrow;\n#[path = \"core/clone.rs\"]\npub mod clone;\n#[path = \"core/cmp.rs\"]\npub mod cmp;\n#[path = \"core/convert.rs\"]\npub mod convert;\n#[path = \"core/default.rs\"]\npub mod default;\n#[path = \"core/error.rs\"]\npub mod error;\n#[path = \"core/f32.rs\"]\npub mod f32;\n#[path = \"core/fmt.rs\"]\npub mod fmt;\n#[path = \"core/hash.rs\"]\npub mod hash;\n#[path = \"core/hint.rs\"]\npub mod hint;\n#[path = \"core/iter.rs\"]\npub mod iter;\n#[path = \"core/marker.rs\"]\npub mod marker;\n#[path = \"core/mem.rs\"]\npub mod mem;\n#[path = \"core/num/mod.rs\"]\npub mod num;\n#[path = \"core/ops.rs\"]\npub mod ops;\n#[path = \"core/option.rs\"]\npub mod option;\n#[path = \"core/panicking.rs\"]\npub mod panicking;\n#[path = \"core/result.rs\"]\npub mod result;\n#[path = \"core/slice.rs\"]\npub mod slice;\n#[path = \"core/str.rs\"]\npub mod str;\n"
  },
  {
    "path": "hax-lib/core-models/std/Cargo.toml",
    "content": "[package]\nname = \"std\"\nversion = \"0.1.0\"\nedition = \"2024\"\n\n[dependencies]\nhax-lib.workspace = true\ncore-models = {path = \"..\"}\n"
  },
  {
    "path": "hax-lib/core-models/std/src/lib.rs",
    "content": "mod collections {\n    mod hash {\n        mod map {\n            #[hax_lib::opaque]\n            struct HashMap<K, V, S>(Option<K>, Option<V>, Option<S>);\n            impl<K, V> HashMap<K, V, crate::hash::random::RandomState> {\n                fn new() -> HashMap<K, V, crate::hash::random::RandomState> {\n                    HashMap(None, None, None)\n                }\n            }\n            // Dummy impl for disambiguator (https://github.com/cryspen/hax/issues/828)\n            impl HashMap<usize, usize, usize> {}\n            impl<K, V, S> HashMap<K, V, S> {\n                fn get<Y>(m: HashMap<K, V, S>, k: K) -> core_models::option::Option<V> {\n                    core_models::panicking::internal::panic()\n                }\n                fn insert(\n                    m: HashMap<K, V, S>,\n                    k: K,\n                    v: V,\n                ) -> (HashMap<K, V, S>, core_models::option::Option<V>) {\n                    core_models::panicking::internal::panic()\n                }\n            }\n        }\n    }\n}\n\nmod f64 {\n    #[hax_lib::exclude]\n    #[allow(non_camel_case_types)]\n    struct f64;\n    impl f64 {\n        fn powf(x: core::primitive::f64, y: core::primitive::f64) -> core::primitive::f64 {\n            core_models::panicking::internal::panic()\n        }\n    }\n}\n\npub mod hash {\n    pub mod random {\n        pub struct RandomState;\n    }\n}\n\nmod io {\n    #[hax_lib::attributes]\n    pub trait Read {\n        // Required method\n        #[hax_lib::requires(true)]\n        #[hax_lib::ensures(|_| future(buf).len() == buf.len())]\n        fn read(&mut self, buf: &mut [u8]) -> Result<usize, error::Error>;\n\n        // Provided methods (not provided in this model as hax doesn't support default methods)\n        /* fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> Result<usize>;\n        fn is_read_vectored(&self) -> bool;\n        fn read_to_end(&mut self, buf: &mut Vec<u8>) -> 
Result<usize>;\n        fn read_to_string(&mut self, buf: &mut String) -> Result<usize>; */\n        #[hax_lib::requires(true)]\n        #[hax_lib::ensures(|_| future(buf).len() == buf.len())]\n        fn read_exact(&mut self, buf: &mut [u8]) -> Result<(), error::Error>;\n        /* fn read_buf(&mut self, buf: BorrowedCursor<'_>) -> Result<()>;\n        fn read_buf_exact(&mut self, cursor: BorrowedCursor<'_>) -> Result<()>;\n        fn by_ref(&mut self) -> &mut Self\n        where Self: Sized;\n        fn bytes(self) -> Bytes<Self>\n        where Self: Sized;\n        fn chain<R: Read>(self, next: R) -> Chain<Self, R>\n        where Self: Sized;\n        fn take(self, limit: u64) -> Take<Self>\n        where Self: Sized; */\n    }\n    #[hax_lib::attributes]\n    pub trait Write {\n        // Required methods\n        #[hax_lib::requires(true)]\n        fn write(&mut self, buf: &[u8]) -> Result<usize, error::Error>;\n        #[hax_lib::requires(true)]\n        fn flush(&mut self) -> Result<(), error::Error>;\n\n        // Provided methods (not provided in this model as hax doesn't support default methods)\n        /* fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> Result<usize>;\n        fn is_write_vectored(&self) -> bool; */\n        #[hax_lib::requires(true)]\n        fn write_all(&mut self, buf: &[u8]) -> Result<(), error::Error>;\n        /* fn write_all_vectored(&mut self, bufs: &mut [IoSlice<'_>]) -> Result<()>;\n        fn write_fmt(&mut self, args: Arguments<'_>) -> Result<()>;\n        fn by_ref(&mut self) -> &mut Self\n        where Self: Sized; */\n    }\n    pub mod error {\n        pub struct Error;\n        pub enum ErrorKind {\n            NotFound,\n            PermissionDenied,\n            ConnectionRefused,\n            ConnectionReset,\n            HostUnreachable,\n            NetworkUnreachable,\n            ConnectionAborted,\n            NotConnected,\n            AddrInUse,\n            AddrNotAvailable,\n            NetworkDown,\n   
         BrokenPipe,\n            AlreadyExists,\n            WouldBlock,\n            NotADirectory,\n            IsADirectory,\n            DirectoryNotEmpty,\n            ReadOnlyFilesystem,\n            FilesystemLoop,\n            StaleNetworkFileHandle,\n            InvalidInput,\n            InvalidData,\n            TimedOut,\n            WriteZero,\n            StorageFull,\n            NotSeekable,\n            QuotaExceeded,\n            FileTooLarge,\n            ResourceBusy,\n            ExecutableFileBusy,\n            Deadlock,\n            CrossesDevices,\n            TooManyLinks,\n            InvalidFilename,\n            ArgumentListTooLong,\n            Interrupted,\n            Unsupported,\n            UnexpectedEof,\n            OutOfMemory,\n            InProgress,\n            Other,\n        }\n        impl Error {\n            #[hax_lib::opaque]\n            fn kind(&self) -> ErrorKind {\n                core_models::panicking::internal::panic()\n            }\n        }\n    }\n    mod impls {\n        impl super::Read for &[u8] {\n            fn read(&mut self, buf: &mut [u8]) -> Result<usize, super::error::Error> {\n                let amt = core::cmp::min(buf.len(), self.len());\n                let (a, b) = self.split_at(amt);\n\n                buf[..amt].copy_from_slice(a);\n\n                *self = b;\n                Ok(amt)\n            }\n            fn read_exact(&mut self, buf: &mut [u8]) -> Result<(), super::error::Error> {\n                if buf.len() > self.len() {\n                    // `read_exact` makes no promise about the content of `buf` if it\n                    // fails so don't bother about that.\n                    *self = &self[self.len()..];\n                    return Err(super::error::Error);\n                }\n                let (a, b) = self.split_at(buf.len());\n\n                buf.copy_from_slice(a);\n\n                *self = b;\n                Ok(())\n            }\n        }\n        impl 
super::Write for Vec<u8> {\n            fn write(&mut self, buf: &[u8]) -> Result<usize, super::error::Error> {\n                self.extend_from_slice(buf);\n                Ok(buf.len())\n            }\n            fn write_all(&mut self, buf: &[u8]) -> Result<(), super::error::Error> {\n                self.extend_from_slice(buf);\n                Ok(())\n            }\n            fn flush(&mut self) -> Result<(), super::error::Error> {\n                Ok(())\n            }\n        }\n    }\n    mod stdio {\n        fn e_print(args: core::fmt::Arguments) {}\n    }\n}\n"
  },
  {
    "path": "hax-lib/macros/Cargo.toml",
    "content": "[package]\nname = \"hax-lib-macros\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition = \"2021\"\nrepository.workspace = true\nreadme = \"README.md\"\ndescription = \"Hax-specific proc-macros for Rust programs\"\n\n[lib]\nproc-macro = true\n\n[target.'cfg(hax)'.dependencies]\nproc-macro-error2 = { version = \"2.0\" }\nhax-lib-macros-types = { workspace = true }\nsyn = { version = \"2.0\", features = [\"full\", \"visit-mut\", \"visit\"] }\n\n[dependencies]\nsyn = { version = \"2.0\", features = [\"full\", \"visit\", \"visit-mut\"] }\nproc-macro2 = { workspace = true }\nquote = { workspace = true }\n\n[dev-dependencies]\nhax-lib = { path = \"..\" }\n\n[lints.rust]\nunexpected_cfgs = { level = \"warn\", check-cfg = ['cfg(hax)', 'cfg(doc_cfg)'] }\n"
  },
  {
    "path": "hax-lib/macros/README.md",
    "content": "# hax proc macros\n\nHax-specific proc-macros for Rust programs.\n\nThis crate defines proc macros to be used in Rust programs that are extracted with\nhax.\nIt provides proc macros such as `requires` and `ensures` to define pre- and post-conditions\nfor functions.\n"
  },
  {
    "path": "hax-lib/macros/src/dummy.rs",
    "content": "mod hax_paths;\n\nuse hax_paths::*;\nuse proc_macro::{TokenStream, TokenTree};\nuse quote::quote;\nuse syn::{visit_mut::VisitMut, *};\n\nmacro_rules! identity_proc_macro_attribute {\n    ($($name:ident),*$(,)?) => {\n        $(\n            #[proc_macro_attribute]\n            pub fn $name(_attr: TokenStream, item: TokenStream) -> TokenStream {\n                item\n            }\n        )*\n    }\n}\n\nidentity_proc_macro_attribute!(\n    fstar_options,\n    fstar_verification_status,\n    include,\n    exclude,\n    requires,\n    ensures,\n    decreases,\n    pv_handwritten,\n    pv_constructor,\n    protocol_messages,\n    process_init,\n    process_write,\n    process_read,\n    opaque,\n    opaque_type,\n    transparent,\n    refinement_type,\n    fstar_replace,\n    coq_replace,\n    lean_replace,\n    proverif_replace,\n    fstar_replace_body,\n    coq_replace_body,\n    lean_replace_body,\n    proverif_replace_body,\n    fstar_before,\n    coq_before,\n    lean_before,\n    proverif_before,\n    fstar_after,\n    coq_after,\n    lean_after,\n    proverif_after,\n    fstar_smt_pat,\n    fstar_postprocess_with,\n    lean_proof,\n    lean_pure_requires_proof,\n    lean_pure_ensures_proof,\n    lean_proof_method_grind,\n    lean_proof_method_bv_decide,\n);\n\n#[proc_macro]\npub fn fstar_expr(_payload: TokenStream) -> TokenStream {\n    quote! { () }.into()\n}\n#[proc_macro]\npub fn coq_expr(_payload: TokenStream) -> TokenStream {\n    quote! { () }.into()\n}\n#[proc_macro]\npub fn lean_expr(_payload: TokenStream) -> TokenStream {\n    quote! { () }.into()\n}\n#[proc_macro]\npub fn proverif_expr(_payload: TokenStream) -> TokenStream {\n    quote! { () }.into()\n}\n\n#[proc_macro_attribute]\npub fn lemma(_attr: TokenStream, _item: TokenStream) -> TokenStream {\n    quote! 
{}.into()\n}\n\nfn unsafe_expr() -> TokenStream {\n    // `*_unsafe_expr(\"<code>\")` are macro generating a Rust expression of any type, that will be replaced by `<code>` in the backends.\n    // This should be used solely in hax-only contextes.\n    // If this macro is used, that means the user broke this rule.\n    quote! { ::std::compile_error!(\"`hax_lib::unsafe_expr` has no meaning outside of hax extraction, please use it solely on hax-only places.\") }.into()\n}\n\n#[proc_macro]\npub fn fstar_unsafe_expr(_payload: TokenStream) -> TokenStream {\n    unsafe_expr()\n}\n#[proc_macro]\npub fn coq_unsafe_expr(_payload: TokenStream) -> TokenStream {\n    unsafe_expr()\n}\n#[proc_macro]\npub fn lean_unsafe_expr(_payload: TokenStream) -> TokenStream {\n    unsafe_expr()\n}\n#[proc_macro]\npub fn proverif_unsafe_expr(_payload: TokenStream) -> TokenStream {\n    unsafe_expr()\n}\n\n#[proc_macro]\npub fn fstar_prop_expr(_payload: TokenStream) -> TokenStream {\n    quote! {::hax_lib::Prop::from_bool(true)}.into()\n}\n#[proc_macro]\npub fn coq_prop_expr(_payload: TokenStream) -> TokenStream {\n    quote! {::hax_lib::Prop::from_bool(true)}.into()\n}\n#[proc_macro]\npub fn lean_prop_expr(_payload: TokenStream) -> TokenStream {\n    quote! {::hax_lib::Prop::from_bool(true)}.into()\n}\n#[proc_macro]\npub fn proverif_prop_expr(_payload: TokenStream) -> TokenStream {\n    quote! 
{::hax_lib::Prop::from_bool(true)}.into()\n}\n\nfn not_hax_attribute(attr: &syn::Attribute) -> bool {\n    if let Meta::List(ml) = &attr.meta {\n        !matches!(expects_path_decoration(&ml.path), Ok(Some(_)))\n    } else {\n        true\n    }\n}\n\nfn not_field_attribute(attr: &syn::Attribute) -> bool {\n    if let Meta::List(ml) = &attr.meta {\n        !(matches!(expects_refine(&ml.path), Ok(Some(_)))\n            || matches!(expects_order(&ml.path), Ok(Some(_))))\n    } else {\n        true\n    }\n}\n\n#[proc_macro_attribute]\npub fn attributes(_attr: TokenStream, item: TokenStream) -> TokenStream {\n    let item: Item = parse_macro_input!(item);\n\n    struct AttrVisitor;\n\n    use syn::visit_mut;\n    impl VisitMut for AttrVisitor {\n        fn visit_item_trait_mut(&mut self, item: &mut ItemTrait) {\n            for ti in item.items.iter_mut() {\n                if let TraitItem::Fn(fun) = ti {\n                    fun.attrs.retain(not_hax_attribute)\n                }\n            }\n            visit_mut::visit_item_trait_mut(self, item);\n        }\n        fn visit_type_mut(&mut self, _type: &mut Type) {}\n        fn visit_item_impl_mut(&mut self, item: &mut ItemImpl) {\n            for ii in item.items.iter_mut() {\n                if let ImplItem::Fn(fun) = ii {\n                    fun.attrs.retain(not_hax_attribute)\n                }\n            }\n            visit_mut::visit_item_impl_mut(self, item);\n        }\n        fn visit_item_mut(&mut self, item: &mut Item) {\n            visit_mut::visit_item_mut(self, item);\n\n            match item {\n                Item::Struct(s) => {\n                    for field in s.fields.iter_mut() {\n                        field.attrs.retain(not_field_attribute)\n                    }\n                }\n                _ => (),\n            }\n        }\n    }\n\n    let mut item = item;\n    AttrVisitor.visit_item_mut(&mut item);\n\n    quote! 
{ #item }.into()\n}\n\n#[proc_macro]\npub fn int(payload: TokenStream) -> TokenStream {\n    let mut tokens = payload.into_iter().peekable();\n    let negative = matches!(tokens.peek(), Some(TokenTree::Punct(p)) if p.as_char() == '-');\n    if negative {\n        tokens.next();\n    }\n    let [lit @ TokenTree::Literal(_)] = &tokens.collect::<Vec<_>>()[..] else {\n        return quote! { ::std::compile_error!(\"Expected exactly one numeric literal\") }.into();\n    };\n    let lit: proc_macro2::TokenStream = TokenStream::from(lit.clone()).into();\n    quote! {::hax_lib::int::Int(#lit)}.into()\n}\n\n#[proc_macro_attribute]\npub fn impl_fn_decoration(_attr: TokenStream, _item: TokenStream) -> TokenStream {\n    quote! { ::std::compile_error!(\"`impl_fn_decoration` is an internal macro and should never be used directly.\") }.into()\n}\n\n#[proc_macro_attribute]\npub fn trait_fn_decoration(_attr: TokenStream, _item: TokenStream) -> TokenStream {\n    quote! { ::std::compile_error!(\"`trait_fn_decoration` is an internal macro and should never be used directly.\") }.into()\n}\n\n#[proc_macro]\npub fn loop_invariant(_predicate: TokenStream) -> TokenStream {\n    quote! {}.into()\n}\n\n#[proc_macro]\npub fn loop_decreases(_predicate: TokenStream) -> TokenStream {\n    quote! {}.into()\n}\n"
  },
  {
    "path": "hax-lib/macros/src/hax_paths.rs",
    "content": "//! This module defines the `ImplFnDecoration` structure and utils\n//! around it.\n\nuse syn::spanned::Spanned;\nuse syn::*;\n\nfn expect_simple_path(path: &Path) -> Option<Vec<String>> {\n    let mut chunks = vec![];\n    if path.leading_colon.is_some() {\n        chunks.push(String::new())\n    }\n    for segment in &path.segments {\n        chunks.push(format!(\"{}\", segment.ident));\n        if !matches!(segment.arguments, PathArguments::None) {\n            return None;\n        }\n    }\n    Some(chunks)\n}\n\n/// The various strings allowed as decoration kinds.\npub const DECORATION_KINDS: &[&str] = &[\"decreases\", \"ensures\", \"requires\"];\n\n/// Expects a `Path` to be a decoration kind: `::hax_lib::<KIND>`,\n/// `hax_lib::<KIND>` or `<KIND>` in (with `KIND` in\n/// `DECORATION_KINDS`).\npub fn expects_path_decoration(path: &Path) -> Result<Option<String>> {\n    expects_hax_path(DECORATION_KINDS, path)\n}\n\n/// Expects a path to be `[[::]hax_lib]::refine`\npub fn expects_refine(path: &Path) -> Result<Option<String>> {\n    expects_hax_path(&[\"refine\"], path)\n}\n\n/// Expects a path to be `[[::]hax_lib]::order`\npub fn expects_order(path: &Path) -> Result<Option<String>> {\n    expects_hax_path(&[\"order\"], path)\n}\n\n/// Expects a `Path` to be a hax path: `::hax_lib::<KW>`,\n/// `hax_lib::<KW>` or `<KW>` in (with `KW` in `allowlist`).\npub fn expects_hax_path(allowlist: &[&str], path: &Path) -> Result<Option<String>> {\n    let path_span = path.span();\n    let path = expect_simple_path(path)\n        .ok_or_else(|| Error::new(path_span, \"Expected a simple path, with no `<...>`.\"))?;\n    Ok(\n        match path\n            .iter()\n            .map(|x| x.as_str())\n            .collect::<Vec<_>>()\n            .as_slice()\n        {\n            [kw] | [\"\", \"hax_lib\", kw] | [\"hax_lib\", kw] if allowlist.contains(kw) => {\n                Some(kw.to_string())\n            }\n            _ => None,\n        },\n    )\n}\n"
  },
  {
    "path": "hax-lib/macros/src/impl_fn_decoration.rs",
    "content": "//! This module defines the `ImplFnDecoration` structure and utils\n//! around it.\n\nuse crate::prelude::*;\nuse crate::utils::*;\n\n/// Supporting structure that holds the data required by the internal\n/// macro `impl_fn_decoration`.\npub struct ImplFnDecoration {\n    pub kind: FnDecorationKind,\n    pub phi: Expr,\n    pub generics: Generics,\n    pub self_ty: Type,\n}\n\nimpl parse::Parse for ImplFnDecoration {\n    fn parse(input: parse::ParseStream) -> Result<Self> {\n        let parse_next = || -> Result<_> {\n            input.parse::<Token![,]>()?;\n            let mut generics = input.parse::<Generics>()?;\n            input.parse::<Token![,]>()?;\n            generics.where_clause = input.parse::<Option<WhereClause>>()?;\n            input.parse::<Token![,]>()?;\n            let self_ty = input.parse::<Type>()?;\n            input.parse::<Token![,]>()?;\n            Ok((generics, self_ty))\n        };\n\n        let path = input.parse::<Path>()?;\n        let path_span = path.span();\n        let kind = match expects_path_decoration(&path)? {\n            Some(s) => match s.as_str() {\n                \"decreases\" => FnDecorationKind::Decreases,\n                \"requires\" => FnDecorationKind::Requires,\n                \"ensures\" => {\n                    let (generics, self_ty) = parse_next()?;\n                    let ExprClosure1 { arg, body } = input.parse::<ExprClosure1>()?;\n                    input.parse::<syn::parse::Nothing>()?;\n                    return Ok(ImplFnDecoration {\n                        kind: FnDecorationKind::Ensures { ret_binder: arg },\n                        phi: body,\n                        generics,\n                        self_ty,\n                    });\n                }\n                _ => unreachable!(),\n            }\n            None => Err(Error::new(path_span, \"Expected `::hax_lib::<KIND>`, `hax_lib::<KIND>` or `<KIND>` with `KIND` in {DECORATION_KINDS:?}\"))?,\n        };\n\n        let (generics, self_ty) = parse_next()?;\n        let phi = input.parse::<Expr>()?;\n        input.parse::<syn::parse::Nothing>()?;\n        Ok(ImplFnDecoration {\n            kind,\n            phi,\n            generics,\n            self_ty,\n        })\n    }\n}\n"
  },
  {
    "path": "hax-lib/macros/src/implementation.rs",
    "content": "mod hax_paths;\nmod impl_fn_decoration;\nmod quote;\nmod rewrite_self;\nmod syn_ext;\nmod utils;\n\nmod prelude {\n    pub use crate::hax_paths::*;\n    pub use crate::syn_ext::*;\n    pub use proc_macro as pm;\n    pub use proc_macro_error2::*;\n    pub use proc_macro2::*;\n    pub use quote::*;\n    pub use std::collections::HashSet;\n    pub use syn::spanned::Spanned;\n    pub use syn::{visit_mut::VisitMut, *};\n\n    pub use AttrPayload::Language as AttrHaxLang;\n    pub use hax_lib_macros_types::*;\n    pub type FnLike = syn::ImplItemFn;\n}\n\nuse impl_fn_decoration::*;\nuse prelude::*;\nuse utils::*;\n\n/// When extracting to F*, wrap this item in `#push-options \"...\"` and\n/// `#pop-options`.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn fstar_options(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: TokenStream = item.into();\n    let lit_str = parse_macro_input!(attr as LitStr);\n    let payload = format!(r#\"#push-options \"{}\"\"#, lit_str.value());\n    let payload = LitStr::new(&payload, lit_str.span());\n    quote! {\n        #[::hax_lib::fstar::before(#payload)]\n        #[::hax_lib::fstar::after(r#\"#pop-options\"#)]\n        #item\n    }\n    .into()\n}\n\n/// Add an invariant to a loop which deals with an index. The\n/// invariant cannot refer to any variable introduced within the\n/// loop. 
An invariant is a closure that takes one argument, the\n/// index, and returns a proposition.\n///\n/// Note that loop invariants are unstable (this will be handled in a\n/// better way in the future, see\n/// https://github.com/hacspec/hax/issues/858) and only supported on\n/// specific `for` loops with specific iterators:\n///\n///  - `for i in start..end {...}`\n///  - `for i in (start..end).step_by(n) {...}`\n///  - `for i in slice.enumerate() {...}`\n///  - `for i in slice.chunks_exact(n).enumerate() {...}`\n///\n/// This function must be called on the first line of a loop body to\n/// be effective. Note that in the invariant expression, `forall`,\n/// `exists`, and `BACKEND!` (`BACKEND` can be `fstar`, `proverif`,\n/// `coq`...) are in scope.\n#[proc_macro]\npub fn loop_invariant(predicate: pm::TokenStream) -> pm::TokenStream {\n    let predicate2: TokenStream = predicate.clone().into();\n    let predicate_expr: syn::Expr = parse_macro_input!(predicate);\n\n    let (invariant_f, predicate) = match predicate_expr {\n        syn::Expr::Closure(_) => (quote!(hax_lib::_internal_loop_invariant), predicate2),\n        _ => (\n            quote!(hax_lib::_internal_while_loop_invariant),\n            quote!(::hax_lib::Prop::from(#predicate2)),\n        ),\n    };\n    let ts: pm::TokenStream = quote! {\n        #[cfg(#HaxCfgOptionName)]\n        {\n            #invariant_f({\n                #HaxQuantifiers\n                #predicate\n            })\n        }\n    }\n    .into();\n    ts\n}\n\n/// Must be used to prove termination of while loops. This takes an\n/// expression that should be a usize that decreases at every iteration\n///\n/// This function must be called just after `loop_invariant`, or at the first\n/// line of the loop if there is no invariant.\n#[proc_macro]\npub fn loop_decreases(predicate: pm::TokenStream) -> pm::TokenStream {\n    let predicate: TokenStream = predicate.into();\n    let ts: pm::TokenStream = quote! 
{\n        #[cfg(#HaxCfgOptionName)]\n        {\n            hax_lib::_internal_loop_decreases({\n                #HaxQuantifiers\n                use ::hax_lib::int::ToInt;\n                (#predicate).to_int()\n            })\n        }\n    }\n    .into();\n    ts\n}\n\n/// When extracting to F*, inform about what is the current\n/// verification status for an item. It can either be `lax` or\n/// `panic_free`.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn fstar_verification_status(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let action = format!(\"{}\", parse_macro_input!(attr as Ident));\n    match action.as_str() {\n        \"lax\" => {\n            let item: TokenStream = item.into();\n            quote! {\n                #[::hax_lib::fstar::options(\"--admit_smt_queries true\")]\n                #item\n            }\n        }\n        \"panic_free\" => {\n            let mut item = parse_macro_input!(item as FnLike);\n            if let Some(last) = item\n                .block\n                .stmts\n                .iter_mut()\n                .rev()\n                .find(|stmt| matches!(stmt, syn::Stmt::Expr(_, None)))\n                .as_mut()\n            {\n                **last = syn::Stmt::Expr(\n                    parse_quote! {\n                        {let result = #last;\n                        ::hax_lib::fstar!(\"_hax_panic_freedom_admit_\");\n                         result}\n                    },\n                    None,\n                );\n            } else {\n                item.block.stmts.push(syn::Stmt::Expr(\n                    parse_quote! {::hax_lib::fstar!(\"_hax_panic_freedom_admit_\")},\n                    None,\n                ));\n            }\n            quote! {\n                #item\n            }\n        }\n        _ => abort_call_site!(format!(\"Expected `lax` or `panic_free`\")),\n    }\n    .into()\n}\n\n/// Postprocess an item with a given tactic. 
This macro takes the tactic in\n/// parameter: this may be a Rust identifier or a raw snippet of F* code as a\n/// string literal.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn fstar_postprocess_with(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: TokenStream = item.into();\n    let payload: String = if let Ok(s) = syn::parse::<LitStr>(attr.clone()) {\n        s.value()\n    } else {\n        let e = parse_macro_input!(attr as Expr);\n        format!(\" ${{ {} }} \", e.to_token_stream())\n    };\n    let payload = format!(\"[@@FStar.Tactics.postprocess_with ({payload})]\");\n    let payload: Lit = Lit::Str(syn::LitStr::new(&payload, Span::call_site()));\n    quote! {#[::hax_lib::fstar::before(#payload)] #item}.into()\n}\n\n/// Include this item in the Hax translation. This overrides any exclusion resulting of `-i` flag.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn include(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: TokenStream = item.into();\n    let _ = parse_macro_input!(attr as parse::Nothing);\n    let attr = AttrPayload::ItemStatus(ItemStatus::Included { late_skip: false });\n    quote! {#attr #item}.into()\n}\n\n/// Exclude this item from the Hax translation.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn exclude(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: TokenStream = item.into();\n    let _ = parse_macro_input!(attr as parse::Nothing);\n    let attr = AttrPayload::ItemStatus(ItemStatus::Excluded { modeled_by: None });\n    quote! {#attr #item}.into()\n}\n\n/*\nTODO: no support in any backends (see #297)\n\n/// Exclude this item from the Hax translation, and replace it with a\n/// axiomatized model in each backends. 
The path of the axiomatized\n/// model should be given in Rust syntax.\n///\n/// # Example\n///\n/// ```\n/// use hax_lib_macros::*;\n/// #[modeled_by(FStar::IO::debug_print_string)]\n/// fn f(line: String) {\n///   println!(\"{}\", line)\n/// }\n/// ```\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn modeled_by(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    use quote::ToTokens;\n    let model_path = parse_macro_input!(attr as syn::Path).to_token_stream();\n    let item: TokenStream = item.into();\n    let attr = AttrPayload::ItemStatus(ItemStatus::Excluded {\n        modeled_by: Some(model_path.to_string()),\n    });\n    quote! {#attr #item}.into()\n}\n*/\n\n/// Mark a `Proof<{STATEMENT}>`-returning function as a lemma, where\n/// `STATEMENT` is a `Prop` expression capturing any input\n/// variable.\n/// In the backends, this will generate a lemma with an empty proof.\n///\n/// # Example\n///\n/// ```\n/// use hax_lib_macros::*;\n// #[decreases((m, n))] (TODO: see #297)\n/// pub fn ackermann(m: u64, n: u64) -> u64 {\n///     match (m, n) {\n///         (0, _) => n + 1,\n///         (_, 0) => ackermann(m - 1, 1),\n///         _ => ackermann(m - 1, ackermann(m, n - 1)),\n///     }\n/// }\n///\n/// #[lemma]\n/// /// $`\\forall n \\in \\mathbb{N}, \\textrm{ackermann}(2, n) = 2 (n + 3) - 3`$\n/// pub fn ackermann_property_m1(n: u64) -> Proof<{ ackermann(2, n) == 2 * (n + 3) - 3 }> {}\n/// ```\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn lemma(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let mut item: syn::ItemFn = parse_macro_input!(item as ItemFn);\n    use syn::{GenericArgument, PathArguments, ReturnType, spanned::Spanned};\n\n    fn add_allow_unused_variables_to_args(func: &mut syn::ItemFn) {\n        let attr: syn::Attribute = parse_quote!(#[allow(unused_variables)]);\n\n        for input in &mut func.sig.inputs {\n            if let FnArg::Typed(pat_type) = input {\n                
pat_type.attrs.push(attr.clone());\n            }\n        }\n    }\n\n    /// Parses a `syn::Type` of the shape `Proof<{FORMULA}>`.\n    fn parse_proof_type(r#type: syn::Type) -> Option<syn::Expr> {\n        let syn::Type::Path(syn::TypePath {\n            qself: None,\n            path:\n                syn::Path {\n                    leading_colon: None,\n                    segments,\n                },\n        }) = r#type\n        else {\n            return None;\n        };\n        let ps = (segments.len() == 1).then_some(()).and(segments.first())?;\n        (ps.ident == \"Proof\").then_some(())?;\n        let PathArguments::AngleBracketed(args) = &ps.arguments else {\n            None?\n        };\n        let args = args.args.clone();\n        let GenericArgument::Const(e) = (args.len() == 1).then_some(()).and(args.first())? else {\n            None?\n        };\n        Some(e.clone())\n    }\n    let _ = parse_macro_input!(attr as parse::Nothing);\n    let attr = &AttrPayload::Lemma;\n    add_allow_unused_variables_to_args(&mut item);\n    if let ReturnType::Type(_, r#type) = &item.sig.output {\n        if let Some(ensures_clause) = parse_proof_type(*r#type.clone()) {\n            use AttrPayload::NeverErased;\n            item.sig.output = ReturnType::Default;\n            return ensures(\n                quote! {|_| #ensures_clause}.into(),\n                quote! { #attr #NeverErased #item }.into(),\n            );\n        }\n    }\n\n    abort!(\n        item.sig.output.span(),\n        \"A lemma is expected to return a `Proof<{STATEMENT}>`, where {STATEMENT} is a `Prop` expression.\"\n    )\n}\n\n/// Provide a measure for a function: this measure will be used once\n/// extracted in a backend for checking termination. The expression\n/// that decreases can be of any type. 
(TODO: this is probably as it\n/// is true only for F*, see #297)\n///\n/// # Example\n///\n/// ```\n/// use hax_lib_macros::*;\n/// #[decreases((m, n))]\n/// pub fn ackermann(m: u64, n: u64) -> u64 {\n///     match (m, n) {\n///         (0, _) => n + 1,\n///         (_, 0) => ackermann(m - 1, 1),\n///         _ => ackermann(m - 1, ackermann(m, n - 1)),\n///     }\n/// }\n/// ```\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn decreases(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let phi: syn::Expr = parse_macro_input!(attr);\n    let item: FnLike = parse_macro_input!(item);\n    let (requires, attr) = make_fn_decoration(\n        phi,\n        item.sig.clone(),\n        FnDecorationKind::Decreases,\n        None,\n        None,\n    );\n    quote! {#requires #attr #item}.into()\n}\n\n/// Allows to add SMT patterns to a lemma.\n/// For more informations about SMT patterns, please take a look here: https://fstar-lang.org/tutorial/book/under_the_hood/uth_smt.html#designing-a-library-with-smt-patterns.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn fstar_smt_pat(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let phi: syn::Expr = parse_macro_input!(attr);\n    let item: FnLike = parse_macro_input!(item);\n    let (requires, attr) =\n        make_fn_decoration(phi, item.sig.clone(), FnDecorationKind::SMTPat, None, None);\n    quote! {#requires #attr #item}.into()\n}\n\n/// Add a logical precondition to a function.\n// Note you can use the `forall` and `exists` operators. (TODO: commented out for now, see #297)\n/// In the case of a function that has one or more `&mut` inputs, in\n/// the `ensures` clause, you can refer to such an `&mut` input `x` as\n/// `x` for its \"past\" value and `future(x)` for its \"future\" value.\n///\n/// You can use the (unqualified) macro `fstar!` (`BACKEND!` for any\n/// backend `BACKEND`) to inline F* (or Coq, ProVerif, etc.) code in\n/// the precondition, e.g. 
`fstar!(\"true\")`.\n///\n/// # Example\n///\n/// ```\n/// use hax_lib_macros::*;\n/// #[requires(x.len() == y.len())]\n// #[requires(x.len() == y.len() && forall(|i: usize| i >= x.len() || y[i] > 0))] (TODO: commented out for now, see #297)\n/// pub fn div_pairwise(x: Vec<u64>, y: Vec<u64>) -> Vec<u64> {\n///     x.iter()\n///         .copied()\n///         .zip(y.iter().copied())\n///         .map(|(x, y)| x / y)\n///         .collect()\n/// }\n/// ```\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn requires(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let phi: syn::Expr = parse_macro_input!(attr);\n    let item: FnLike = parse_macro_input!(item);\n    let (requires, attr) = make_fn_decoration(\n        phi.clone(),\n        item.sig.clone(),\n        FnDecorationKind::Requires,\n        None,\n        None,\n    );\n    let mut item_with_debug = item.clone();\n    item_with_debug\n        .block\n        .stmts\n        .insert(0, parse_quote! {debug_assert!(#phi);});\n    quote! {\n        #requires #attr\n        // TODO: disable `assert!`s for now (see #297)\n        #item\n        // #[cfg(    all(not(#HaxCfgOptionName),     debug_assertions )) ] #item_with_debug\n        // #[cfg(not(all(not(#HaxCfgOptionName),     debug_assertions )))] #item\n    }\n    .into()\n}\n\n/// Add a logical postcondition to a function. Note you can use the\n/// `forall` and `exists` operators.\n///\n/// You can use the (unqualified) macro `fstar!` (`BACKEND!` for any\n/// backend `BACKEND`) to inline F* (or Coq, ProVerif, etc.) code in\n/// the postcondition, e.g. 
`fstar!(\"true\")`.\n///\n/// # Example\n///\n/// ```\n/// use hax_lib_macros::*;\n/// #[ensures(|result| result == x * 2)]\n/// pub fn twice(x: u64) -> u64 {\n///     x + x\n/// }\n/// ```\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn ensures(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let ExprClosure1 {\n        arg: ret_binder,\n        body: phi,\n    } = parse_macro_input!(attr);\n    let item: FnLike = parse_macro_input!(item);\n    let kind = FnDecorationKind::Ensures {\n        ret_binder: ret_binder.clone(),\n    };\n    let (ensures, attr) = make_fn_decoration(phi.clone(), item.sig.clone(), kind, None, None);\n    let mut item_with_debug = item.clone();\n    let body = item.block.clone();\n    item_with_debug.block.stmts =\n        parse_quote!(let #ret_binder = #body; debug_assert!(#phi); #ret_binder);\n    quote! {\n        #ensures #attr\n        // TODO: disable `assert!`s for now (see #297)\n        #item\n        // #[cfg(    all(not(#HaxCfgOptionName),     debug_assertions )) ] #item_with_debug\n        // #[cfg(not(all(not(#HaxCfgOptionName),     debug_assertions )))] #item\n    }\n    .into()\n}\n\nmod kw {\n    syn::custom_keyword!(hax_lib);\n    syn::custom_keyword!(decreases);\n    syn::custom_keyword!(ensures);\n    syn::custom_keyword!(requires);\n    syn::custom_keyword!(refine);\n}\n\n/// Internal macro for dealing with function decorations\n/// (`#[decreases(...)]`, `#[ensures(...)]`, `#[requires(...)]`) on\n/// `fn` items within an `impl` block. 
There is special handling since\n/// such functions might have a `self` argument: in such cases, we\n/// rewrite function decorations as `#[impl_fn_decoration(<KIND>,\n/// <GENERICS>, <WHERE CLAUSE>, <SELF TYPE>, <BODY>)]`.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn impl_fn_decoration(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let ImplFnDecoration {\n        kind,\n        phi,\n        generics,\n        self_ty,\n    } = parse_macro_input!(attr);\n    let mut item: FnLike = parse_macro_input!(item);\n    let (decoration, attr) =\n        make_fn_decoration(phi, item.sig.clone(), kind, Some(generics), Some(self_ty));\n    let decoration = Stmt::Item(Item::Verbatim(decoration));\n    item.block.stmts.insert(0, decoration);\n    quote! {#attr #item}.into()\n}\n\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn trait_fn_decoration(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let ImplFnDecoration {\n        kind,\n        phi,\n        generics,\n        self_ty,\n    } = parse_macro_input!(attr);\n    let mut item: syn::TraitItemFn = parse_macro_input!(item);\n    let (decoration, attr) =\n        make_fn_decoration(phi, item.sig.clone(), kind, Some(generics), Some(self_ty));\n    let decoration = Stmt::Item(Item::Verbatim(decoration));\n    item.sig\n        .generics\n        .where_clause\n        .get_or_insert(parse_quote! {where})\n        .predicates\n        .push(parse_quote! {[(); {#decoration 0}]:});\n    quote! {#attr #item}.into()\n}\n\n/// Enable the following attrubutes in the annotated item and sub-items.\n///\n/// ### `refine` (on a field in a struct)\n/// Refine a type with a logical formula.\n///\n/// ### `order` (on a field in a struct or an enum)\n/// Reorders a field in the extracted code.\n///\n/// Rust fields order matters for bit-level representation. 
Similarly, in some\n/// situations, fields order matters in the backends: for instance in F*, one\n/// may refine a field with a formula referring to a later field.\n///\n/// Those two orders may conflict. Adding `#[hax_lib::order(n)]` on a field with\n/// override its order at extraction time.\n///\n/// By default, the order of a field is its index, e.g. the first field has\n/// order 0, the i-th field has order i+1.\n///\n/// ### `decreases`, `ensures` and `requires` (on a `fn` in an `impl`)\n/// `decreases`, `ensures`, `requires`: behave exactly as documented above on\n/// the proc attributes of the same name.\n///\n/// # Example\n///\n/// ```\n/// #[hax_lib_macros::attributes]\n/// mod foo {\n///     pub struct Hello {\n///         pub x: u32,\n///         #[refine(y > 3)]\n///         pub y: u32,\n///         #[refine(y + x + z > 3)]\n///         pub z: u32,\n///     }\n///     impl Hello {\n///         fn sum(&self) -> u32 {\n///             self.x + self.y + self.z\n///         }\n///         #[ensures(|result| result - n == self.sum())]\n///         fn plus(self, n: u32) -> u32 {\n///             self.sum() + n\n///         }\n///     }\n/// }\n/// ```\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn attributes(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: Item = parse_macro_input!(item);\n\n    #[derive(Default)]\n    struct AttrVisitor {\n        extra_items: Vec<TokenStream>,\n    }\n\n    use syn::visit_mut;\n    impl VisitMut for AttrVisitor {\n        fn visit_item_trait_mut(&mut self, item: &mut ItemTrait) {\n            let span = item.span();\n            for ti in item.items.iter_mut() {\n                if let TraitItem::Fn(fun) = ti {\n                    for attr in &mut fun.attrs {\n                        let Meta::List(ml) = attr.meta.clone() else {\n                            continue;\n                        };\n                        let Ok(Some(decoration)) = expects_path_decoration(&ml.path) 
else {\n                            continue;\n                        };\n                        let decoration = syn::Ident::new(&decoration, ml.path.span());\n\n                        let mut generics = item.generics.clone();\n                        let predicate = WherePredicate::Type(PredicateType {\n                            lifetimes: None,\n                            bounded_ty: parse_quote! {Self_},\n                            colon_token: Token![:](span),\n                            bounds: item.supertraits.clone(),\n                        });\n                        let mut where_clause = generics\n                            .where_clause\n                            .clone()\n                            .unwrap_or(parse_quote! {where});\n                        where_clause.predicates.push(predicate.clone());\n                        generics.where_clause = Some(where_clause.clone());\n                        let self_ty: Type = parse_quote! {Self_};\n                        let tokens = ml.tokens.clone();\n                        let generics = merge_generics(parse_quote! {<Self_>}, generics);\n                        let ImplFnDecoration {\n                            kind, phi, self_ty, ..\n                        } = parse_quote! {#decoration, #generics, where, #self_ty, #tokens};\n                        let (decoration, relation_attr) = make_fn_decoration(\n                            phi,\n                            fun.sig.clone(),\n                            kind,\n                            Some(generics),\n                            Some(self_ty),\n                        );\n                        *attr = parse_quote! 
{#relation_attr};\n                        self.extra_items.push(decoration);\n                    }\n                }\n            }\n            visit_mut::visit_item_trait_mut(self, item);\n        }\n        fn visit_type_mut(&mut self, _type: &mut Type) {}\n        fn visit_item_impl_mut(&mut self, item: &mut ItemImpl) {\n            for ii in item.items.iter_mut() {\n                if let ImplItem::Fn(fun) = ii {\n                    for attr in fun.attrs.iter_mut() {\n                        if let Meta::List(ml) = &mut attr.meta {\n                            let Ok(Some(decoration)) = expects_path_decoration(&ml.path) else {\n                                continue;\n                            };\n                            let decoration = syn::Ident::new(&decoration, ml.path.span());\n                            let tokens = ml.tokens.clone();\n                            let (generics, self_ty) = (&item.generics, &item.self_ty);\n                            let where_clause = &generics.where_clause;\n                            ml.tokens =\n                                quote! {#decoration, #generics, #where_clause, #self_ty, #tokens};\n                            ml.path = parse_quote! 
{::hax_lib::impl_fn_decoration};\n                        }\n                    }\n                }\n            }\n            visit_mut::visit_item_impl_mut(self, item);\n        }\n        fn visit_fields_named_mut(&mut self, fields_named: &mut FieldsNamed) {\n            visit_mut::visit_fields_named_mut(self, fields_named);\n\n            fn handle_reorder_attribute(attrs: &mut [Attribute], errors: &mut Vec<TokenStream>) {\n                let Some((attr, order)) = attrs.iter_mut().find_map(|attr| {\n                    if let Ok(Some(_)) = expects_order(attr.path()) {\n                        let lit: LitInt = attr.parse_args().ok()?;\n                        Some((attr, lit))\n                    } else {\n                        None\n                    }\n                }) else {\n                    return;\n                };\n\n                let Ok(n) = order.base10_parse() else {\n                    errors.push(parse_quote!{const _: () = {compile_error!(\"Expected a (base 10) i32 literal.\")};});\n                    return;\n                };\n                let payload = AttrPayload::Order(n);\n                *attr = parse_quote!(#payload);\n            }\n\n            for field in &mut fields_named.named {\n                handle_reorder_attribute(&mut field.attrs, &mut self.extra_items);\n            }\n        }\n        fn visit_item_mut(&mut self, item: &mut Item) {\n            visit_mut::visit_item_mut(self, item);\n\n            let mut extra: Vec<Item> = vec![];\n            match item {\n                Item::Struct(s) => {\n                    let only_one_field = s.fields.len() == 1;\n                    let idents: Vec<_> = s\n                        .fields\n                        .iter()\n                        .enumerate()\n                        .map(|(i, field)| {\n                            let ident = field.ident.clone().unwrap_or(if only_one_field {\n                                format_ident!(\"x\")\n            
                } else {\n                                format_ident!(\"x{}\", i)\n                            });\n                            (ident, field.ty.clone())\n                        })\n                        .collect();\n                    for (i, field) in s.fields.iter_mut().enumerate() {\n                        let prev = &idents[0..=i];\n                        let refine: Option<(&mut Attribute, Expr)> =\n                            field.attrs.iter_mut().find_map(|attr| {\n                                if let Ok(Some(_)) = expects_refine(attr.path()) {\n                                    let payload = attr.parse_args().ok()?;\n                                    Some((attr, payload))\n                                } else {\n                                    None\n                                }\n                            });\n                        if let Some((attr, refine)) = refine {\n                            let binders: TokenStream = prev\n                                .iter()\n                                .map(|(name, ty)| quote! {#name: #ty, })\n                                .collect();\n                            let uid = ItemUid::fresh();\n                            let uid_attr = AttrPayload::Uid(uid.clone());\n                            let assoc_attr = AttrPayload::AssociatedItem {\n                                role: AssociationRole::Refine,\n                                item: uid,\n                            };\n                            *attr = syn::parse_quote! { #assoc_attr };\n                            let status_attr =\n                                &AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true });\n                            extra.push(syn::parse_quote! 
{\n                                #[cfg(#HaxCfgOptionName)]\n                                #status_attr\n                                const _: () = {\n                                    #uid_attr\n                                    #status_attr\n                                    fn refinement(#binders) -> ::hax_lib::Prop { ::hax_lib::Prop::from(#refine) }\n                                };\n                            })\n                        }\n                    }\n                }\n                _ => (),\n            }\n            let extra: TokenStream = extra.iter().map(|extra| quote! {#extra}).collect();\n            *item = Item::Verbatim(quote! {#extra #item});\n        }\n    }\n\n    let mut v = AttrVisitor::default();\n    let mut item = item;\n    v.visit_item_mut(&mut item);\n    let extra_items = v.extra_items;\n\n    quote! { #item #(#extra_items)* }.into()\n}\n\n/// Mark an item opaque: the extraction will assume the\n/// type without revealing its definition.\n#[proc_macro_error]\n#[proc_macro_attribute]\n#[deprecated(note = \"Please use 'opaque' instead\")]\npub fn opaque_type(attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    opaque(attr, item)\n}\n\n/// Mark an item opaque: the extraction will assume the\n/// type without revealing its definition.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn opaque(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: Item = parse_macro_input!(item);\n    let attr = AttrPayload::Erased;\n    quote! {#attr #item}.into()\n}\n\n/// Mark an item transparent: the extraction will not\n/// make it opaque regardless of the `-i` flag default.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn transparent(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: Item = parse_macro_input!(item);\n    let attr = AttrPayload::NeverErased;\n    quote! 
{#attr #item}.into()\n}\n\n/// A marker indicating a `fn` as a ProVerif process read.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn process_read(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: ItemFn = parse_macro_input!(item);\n    let attr = AttrPayload::ProcessRead;\n    quote! {#attr #item}.into()\n}\n\n/// A marker indicating a `fn` as a ProVerif process write.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn process_write(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: ItemFn = parse_macro_input!(item);\n    let attr = AttrPayload::ProcessWrite;\n    quote! {#attr #item}.into()\n}\n\n/// A marker indicating a `fn` as a ProVerif process initialization.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn process_init(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: ItemFn = parse_macro_input!(item);\n    let attr = AttrPayload::ProcessInit;\n    quote! {#attr #item}.into()\n}\n\n/// A marker indicating an `enum` as describing the protocol messages.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn protocol_messages(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: ItemEnum = parse_macro_input!(item);\n    let attr = AttrPayload::ProtocolMessages;\n    quote! {#attr #item}.into()\n}\n\n/// A marker indicating a `fn` should be automatically translated to a ProVerif constructor.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn pv_constructor(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: ItemFn = parse_macro_input!(item);\n    let attr = AttrPayload::PVConstructor;\n    quote! 
{#attr #item}.into()\n}\n\n/// A marker indicating a `fn` requires manual modelling in ProVerif.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn pv_handwritten(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: ItemFn = parse_macro_input!(item);\n    let attr = AttrPayload::PVHandwritten;\n    quote! {#attr #item}.into()\n}\n\n/// Create a mathematical integer. This macro expects a Rust integer\n/// literal without suffix.\n///\n/// ## Examples:\n/// - `int!(0x101010)`\n/// - `int!(42)`\n/// - `int!(0o52)`\n/// - `int!(0h2A)`\n#[proc_macro_error]\n#[proc_macro]\npub fn int(payload: pm::TokenStream) -> pm::TokenStream {\n    let n: LitInt = parse_macro_input!(payload);\n    let suffix = n.suffix();\n    if !suffix.is_empty() {\n        abort_call_site!(\"The literal suffix `{suffix}` was unexpected.\")\n    }\n    let digits = n.base10_digits();\n    quote! {::hax_lib::int::Int::_unsafe_from_str(#digits)}.into()\n}\n\n/// This macro inserts a verbatim Lean proof into the extracted code.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn lean_proof(payload: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: ItemFn = parse_macro_input!(item);\n    let payload = parse_macro_input!(payload as LitStr).value();\n    let attr = AttrPayload::Proof(payload);\n    quote! {#attr #item}.into()\n}\n\n/// This macro inserts a verbatim Lean proof showing that the `requires`-condition is panic-free.\n/// The proof is inserted into the `pureRequires` field of the Lean spec.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn lean_pure_requires_proof(\n    payload: pm::TokenStream,\n    item: pm::TokenStream,\n) -> pm::TokenStream {\n    let item: ItemFn = parse_macro_input!(item);\n    let payload = parse_macro_input!(payload as LitStr).value();\n    let attr = AttrPayload::PureRequiresProof(payload);\n    quote! 
{#attr #item}.into()\n}\n\n/// This macro inserts a verbatim Lean proof showing that the `ensures`-condition is panic-free.\n/// The proof is inserted into the `pureEnsures` field of the Lean spec.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn lean_pure_ensures_proof(payload: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: ItemFn = parse_macro_input!(item);\n    let payload = parse_macro_input!(payload as LitStr).value();\n    let attr = AttrPayload::PureEnsuresProof(payload);\n    quote! {#attr #item}.into()\n}\n\n/// Use the proof method `grind`. This influences the tactic and spec set used by Lean.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn lean_proof_method_grind(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let item: ItemFn = parse_macro_input!(item);\n    let attr = AttrPayload::ProofMethod(hax_lib_macros_types::ProofMethod::Grind);\n    quote! {#attr #item}.into()\n}\n\n/// Use the proof method `bv_decide`. This influences the tactic and spec set used by Lean.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn lean_proof_method_bv_decide(\n    _attr: pm::TokenStream,\n    item: pm::TokenStream,\n) -> pm::TokenStream {\n    let item: ItemFn = parse_macro_input!(item);\n    let attr = AttrPayload::ProofMethod(hax_lib_macros_types::ProofMethod::BvDecide);\n    quote! {#attr #item}.into()\n}\n\nmacro_rules! make_quoting_item_proc_macro {\n    ($backend:ident, $macro_name:ident, $position:expr, $cfg_name:ident) => {\n        #[doc = concat!(\"This macro inlines verbatim \", stringify!($backend),\" code before a Rust item.\")]\n        ///\n        /// This macro takes a string literal containing backend\n        /// code. Just as backend expression macros, this literal can\n        /// contains dollar-prefixed Rust names.\n        ///\n        /// Note: when targetting F*, you can prepend a first\n        /// comma-separated argument: `interface`, `impl` or\n        /// `both`. 
This controls where the code will apprear: in the\n        /// `fst` or `fsti` files or both.\n        #[proc_macro_error]\n        #[proc_macro_attribute]\n        pub fn $macro_name(payload: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n            let mut fstar_options = None;\n            let item: TokenStream = item.into();\n            let payload = {\n                let mut tokens = payload.into_iter().peekable();\n                if let Some(pm::TokenTree::Ident(ident)) = tokens.peek() {\n                    let ident_str = format!(\"{}\", ident);\n                    fstar_options = Some(ItemQuoteFStarOpts {\n                        intf: ident_str == \"interface\" || ident_str == \"both\",\n                        r#impl: ident_str == \"impl\" || ident_str == \"both\",\n                    });\n                    if !matches!(ident_str.as_str(), \"impl\" | \"both\" | \"interface\") {\n                        proc_macro_error2::abort!(\n                            ident.span(),\n                            \"Expected `impl`, `both` or `interface`\"\n                        );\n                    }\n                    // Consume the ident\n                    let _ = tokens.next();\n                    // Expect a comma, fail otherwise\n                    let comma = pm::TokenStream::from_iter(tokens.next().into_iter());\n                    let _: syn::token::Comma = parse_macro_input!(comma);\n                }\n                pm::TokenStream::from_iter(tokens)\n            };\n\n            let ts: TokenStream = quote::item(\n                ItemQuote {\n                    position: $position,\n                    fstar_options,\n                },\n                quote! {#[cfg($cfg_name)]},\n                payload,\n                quote! {#item}.into(),\n            )\n            .into();\n            ts.into()\n        }\n    };\n}\n\nmacro_rules! 
make_quoting_proc_macro {\n    ($backend:ident) => {\n        #[doc = concat!(\"Embed \", stringify!($backend), \" expression inside a Rust expression. This macro takes only one argument: some raw \", stringify!($backend), \" code as a string literal.\")]\n        ///\n\n        /// While it is possible to directly write raw backend code,\n        /// sometimes it can be inconvenient. For example, referencing\n        /// Rust names can be a bit cumbersome: for example, the name\n        /// `my_crate::my_module::CONSTANT` might be translated\n        /// differently in a backend (e.g. in the F* backend, it will\n        /// probably be `My_crate.My_module.v_CONSTANT`).\n        ///\n\n        /// To facilitate this, you can write Rust names directly,\n        /// using the prefix `$`: `f $my_crate::my_module__CONSTANT + 3`\n        /// will be replaced with `f My_crate.My_module.v_CONSTANT + 3`\n        /// in the F* backend for instance.\n\n        /// If you want to refer to the Rust constructor\n        /// `Enum::Variant`, you should write `$$Enum::Variant` (note\n        /// the double dollar).\n\n        /// If the name refers to something polymorphic, you need to\n        /// signal it by adding _any_ type informations,\n        /// e.g. `${my_module::function<()>}`. The curly braces are\n        /// needed for such more complex expressions.\n\n        /// You can also write Rust patterns with the `$?{SYNTAX}`\n        /// syntax, where `SYNTAX` is a Rust pattern. 
The syntax\n        /// `${EXPR}` also allows any Rust expressions\n        /// `EXPR` to be embedded.\n\n        /// Types can be refered to with the syntax `$:{TYPE}`.\n        #[proc_macro]\n        pub fn ${concat($backend, _expr)}(payload: pm::TokenStream) -> pm::TokenStream {\n            let ts: TokenStream = quote::expression(quote::InlineExprType::Unit, payload).into();\n            quote!{{\n                #[cfg(${concat(hax_backend_, $backend)})]\n                {\n                    #ts\n                }\n            }}.into()\n        }\n\n        #[doc = concat!(\"The `Prop` version of `\", stringify!($backend), \"_expr`.\")]\n        #[proc_macro]\n        pub fn ${concat($backend, _prop_expr)}(payload: pm::TokenStream) -> pm::TokenStream {\n            let ts: TokenStream = quote::expression(quote::InlineExprType::Prop, payload).into();\n            quote!{{\n                #[cfg(${concat(hax_backend_, $backend)})]\n                {\n                    #ts\n                }\n                #[cfg(not(${concat(hax_backend_, $backend)}))]\n                {\n                    ::hax_lib::Prop::from_bool(true)\n                }\n            }}.into()\n        }\n\n        #[doc = concat!(\"The unsafe (because polymorphic: even computationally relevant code can be inlined!) 
version of `\", stringify!($backend), \"_expr`.\")]\n        #[proc_macro]\n        #[doc(hidden)]\n        pub fn ${concat($backend, _unsafe_expr)}(payload: pm::TokenStream) -> pm::TokenStream {\n            let ts: TokenStream = quote::expression(quote::InlineExprType::Anything, payload).into();\n            quote!{{\n                #[cfg(${concat(hax_backend_, $backend)})]\n                {\n                    #ts\n                }\n            }}.into()\n        }\n\n        make_quoting_item_proc_macro!($backend, ${concat($backend, _before)}, ItemQuotePosition::Before, ${concat(hax_backend_, $backend)});\n        make_quoting_item_proc_macro!($backend, ${concat($backend, _after)}, ItemQuotePosition::After, ${concat(hax_backend_, $backend)});\n\n        #[doc = concat!(\"Replaces a Rust item with some verbatim \", stringify!($backend),\" code.\")]\n        #[proc_macro_error]\n        #[proc_macro_attribute]\n        pub fn ${concat($backend, _replace)}(payload: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n            let item: TokenStream = item.into();\n            let payload: TokenStream = payload.into();\n            let attr = AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true });\n            quote! 
{\n                #[cfg(${concat(hax_backend_, $backend)})]\n                #[::hax_lib::$backend::before(#payload)]\n                #attr\n                #item\n\n                #[cfg(not(${concat(hax_backend_, $backend)}))]\n                #item\n            }\n            .into()\n        }\n\n        #[doc = concat!(\"Replaces the body of a Rust function with some verbatim \", stringify!($backend),\" code.\")]\n        #[proc_macro_error]\n        #[proc_macro_attribute]\n        pub fn ${concat($backend, _replace_body)}(payload: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n            let payload: TokenStream = payload.into();\n            let item: ItemFn = parse_macro_input!(item);\n            let mut hax_item = item.clone();\n            *hax_item.block.as_mut() = parse_quote!{\n                {\n                    ::hax_lib::$backend::unsafe_expr!(#payload)\n                }\n            };\n            quote!{\n                #[cfg(${concat(hax_backend_, $backend)})]\n                #hax_item\n\n                #[cfg(not(${concat(hax_backend_, $backend)}))]\n                #item\n            }.into()\n        }\n    };\n    ($($backend:ident)*) => {\n        $(make_quoting_proc_macro!($backend);)*\n    }\n}\n\nmake_quoting_proc_macro!(fstar coq proverif lean);\n\n/// Marks a newtype `struct RefinedT(T);` as a refinement type. The\n/// struct should have exactly one unnamed private field.\n///\n/// This macro takes one argument: a `Prop` proposition that refines\n/// values of type `SomeType`.\n///\n/// For example, the following type defines bounded `u64` integers.\n///\n/// ```\n/// #[hax_lib::refinement_type(|x| x >= MIN && x <= MAX)]\n/// pub struct BoundedU64<const MIN: u64, const MAX: u64>(u64);\n/// ```\n///\n/// This macro will generate an implementation of the [`Deref`] trait\n/// and of the [`hax_lib::Refinement`] type. 
Those two traits are\n/// the only interface to this newtype: one is allowed only to\n/// construct or destruct refined type via those smart constructors\n/// and destructors, ensuring the abstraction.\n///\n/// A refinement of a type `T` with a formula `f` can be seen as a box\n/// that contains a value of type `T` and a proof that this value\n/// satisfies the formula `f`.\n///\n/// In debug mode, the refinement will be checked at run-time. This\n/// requires the base type `T` to implement `Clone`. Pass a first\n/// parameter `no_debug_runtime_check` to disable this behavior.\n///\n/// When extracted via hax, this is interpreted in the backend as a\n/// refinement type: the use of such a type yields static proof\n/// obligations.\n#[proc_macro_error]\n#[proc_macro_attribute]\npub fn refinement_type(mut attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream {\n    let mut item = parse_macro_input!(item as syn::ItemStruct);\n\n    let syn::Fields::Unnamed(fields) = &item.fields else {\n        proc_macro_error2::abort!(\n            item.generics.span(),\n            \"Expected a newtype (a struct with one unnamed field), got one or more named field\"\n        );\n    };\n    let paren_token = fields.paren_token;\n    let fields = fields.unnamed.iter().collect::<Vec<_>>();\n    let [field] = &fields[..] 
else {\n        proc_macro_error2::abort!(\n            item.generics.span(),\n            \"Expected a newtype (a struct with one unnamed field), got {} fields\",\n            fields.len()\n        );\n    };\n    if !matches!(field.vis, syn::Visibility::Inherited) {\n        proc_macro_error2::abort!(field.vis.span(), \"This field was expected to be private\");\n    }\n\n    let no_debug_assert = {\n        let mut tokens = attr.clone().into_iter();\n        if let (Some(pm::TokenTree::Ident(ident)), Some(pm::TokenTree::Punct(comma))) =\n            (tokens.next(), tokens.next())\n        {\n            if ident.to_string() != \"no_debug_runtime_check\" {\n                proc_macro_error2::abort!(ident.span(), \"Expected 'no_debug_runtime_check'\");\n            }\n            if comma.as_char() != ',' {\n                proc_macro_error2::abort!(ident.span(), \"Expected a comma\");\n            }\n            attr = pm::TokenStream::from_iter(tokens);\n            true\n        } else {\n            false\n        }\n    };\n\n    let ExprClosure1 {\n        arg: ret_binder,\n        body: phi,\n    } = parse_macro_input!(attr);\n\n    let kind = FnDecorationKind::Ensures {\n        ret_binder: ret_binder.clone(),\n    };\n    let sig = syn::Signature {\n        constness: None,\n        asyncness: None,\n        unsafety: None,\n        abi: None,\n        variadic: None,\n        fn_token: syn::Token![fn](item.span()),\n        ident: parse_quote! {dummy},\n        generics: item.generics.clone(),\n        paren_token,\n        inputs: syn::punctuated::Punctuated::new(),\n        output: syn::ReturnType::Type(parse_quote! 
{->}, Box::new(field.ty.clone())),\n    };\n    let ident = &item.ident;\n    let generics = &item.generics;\n    let vis = item.vis.clone();\n    let generics_args: syn::punctuated::Punctuated<_, syn::token::Comma> = item\n        .generics\n        .params\n        .iter()\n        .map(|g| match g {\n            syn::GenericParam::Lifetime(p) => {\n                let i = &p.lifetime;\n                quote! { #i }\n            }\n            syn::GenericParam::Type(p) => {\n                let i = &p.ident;\n                quote! { #i }\n            }\n            syn::GenericParam::Const(p) => {\n                let i = &p.ident;\n                quote! { #i }\n            }\n        })\n        .collect();\n    let inner_ty = &field.ty;\n    let (refinement_item, refinement_attr) = make_fn_decoration(phi.clone(), sig, kind, None, None);\n    let module_ident = syn::Ident::new(\n        &format!(\"hax__autogenerated_refinement__{}\", ident),\n        ident.span(),\n    );\n\n    item.vis = parse_quote! {pub};\n    let debug_assert =\n        no_debug_assert.then_some(quote! {::core::debug_assert!(Self::invariant(x.clone()));});\n    let newtype_as_ref_attr = AttrPayload::NewtypeAsRefinement;\n    quote! 
{\n        #[allow(non_snake_case)]\n        mod #module_ident {\n            #[allow(unused_imports)]\n            use super::*;\n\n            #refinement_item\n\n            #newtype_as_ref_attr\n            #refinement_attr\n            #item\n\n            #[::hax_lib::exclude]\n            impl #generics ::hax_lib::Refinement for #ident <#generics_args> {\n\n                type InnerType = #inner_ty;\n\n                fn new(x: Self::InnerType) -> Self {\n                    #debug_assert\n                    Self(x)\n                }\n                fn get(self) -> Self::InnerType {\n                    self.0\n                }\n                fn get_mut(&mut self) -> &mut Self::InnerType {\n                    &mut self.0\n                }\n                fn invariant(#ret_binder: Self::InnerType) -> ::hax_lib::Prop {\n                    ::hax_lib::Prop::from(#phi)\n                }\n            }\n\n            #[::hax_lib::exclude]\n            impl #generics ::std::ops::Deref for #ident <#generics_args> {\n                type Target = #inner_ty;\n                fn deref(&self) -> &Self::Target {\n                    &self.0\n                }\n            }\n\n            #[::hax_lib::exclude]\n            impl #generics ::hax_lib::RefineAs<#ident <#generics_args>> for #inner_ty {\n                fn into_checked(self) -> #ident <#generics_args> {\n                    use ::hax_lib::Refinement;\n                    #ident::new(self)\n                }\n            }\n        }\n        #vis use #module_ident::#ident;\n\n    }\n    .into()\n}\n"
  },
  {
    "path": "hax-lib/macros/src/lib.rs",
    "content": "// Proc-macros must \"reside in the root of the crate\": whence the use\n// of `std::include!` instead of proper module declaration.\n\n#![cfg_attr(hax, feature(macro_metavar_expr_concat))]\n\n#[cfg(hax)]\nstd::include!(\"implementation.rs\");\n\n#[cfg(not(hax))]\nstd::include!(\"dummy.rs\");\n"
  },
  {
    "path": "hax-lib/macros/src/quote.rs",
    "content": "//! This module provides the logic for the quotation macros, which\n//! allow for quoting F*/Coq/... code directly from Rust.\n//!\n//! In a F*/Coq/... quote, one can write antiquotations, that is,\n//! embedded Rust snippets. The syntax is `$<PREFIX><PAYLOAD>`. The\n//! payload `<PAYLOAD>` should be a Rust path, or a group with\n//! arbitrary contents `{...contents...}`.\n//!\n//! The `<PREFIX>` describes the kind of the antiquotation:\n//!  - empty prefix, the antiquotation is an expression;\n//!  - `?`, the antiquotation is a pattern;\n//!  - `$`, the antiquotation is a constructor name;\n//!  - `:`, the antiquotation is a type.\n\nuse crate::prelude::*;\n\n/// Marker that indicates a place where a antiquotation will be inserted\nconst SPLIT_MARK: &str = \"SPLIT_QUOTE\";\n\n/// The different kinds of antiquotations\nenum AntiquoteKind {\n    Expr,\n    Constructor,\n    Pat,\n    Ty,\n}\n\nimpl ToTokens for AntiquoteKind {\n    fn to_tokens(&self, tokens: &mut TokenStream) {\n        tokens.extend([match self {\n            Self::Expr => quote! {_expr},\n            Self::Constructor => quote! {_constructor},\n            Self::Pat => quote! {_pat},\n            Self::Ty => quote! {_ty},\n        }])\n    }\n}\n\n/// An antiquotation\nstruct Antiquote {\n    ts: pm::TokenStream,\n    kind: AntiquoteKind,\n}\n\nimpl ToTokens for Antiquote {\n    fn to_tokens(&self, tokens: &mut TokenStream) {\n        let ts = TokenStream::from(self.ts.clone());\n        fn wrap_pattern(pat: TokenStream) -> TokenStream {\n            quote! {{#[allow(unreachable_code)]\n                 match None { Some(#pat) => (), _ => () }\n            }}\n        }\n        let ts = match self.kind {\n            AntiquoteKind::Expr => ts,\n            AntiquoteKind::Constructor => wrap_pattern(quote! {#ts {..}}),\n            AntiquoteKind::Pat => wrap_pattern(ts),\n            AntiquoteKind::Ty => quote! 
{None::<#ts>},\n        };\n        tokens.extend([ts])\n    }\n}\n\n/// Extract antiquotations (`$[?][$][:]...`, `$[?][$][:]{...}`) and parses them.\nfn process_string(s: &str) -> std::result::Result<(String, Vec<Antiquote>), String> {\n    let mut chars = s.chars().peekable();\n    let mut antiquotations = vec![];\n    let mut output = String::new();\n    while let Some(ch) = chars.next() {\n        match ch {\n            '$' => {\n                let mut s = String::new();\n                let mut kind = AntiquoteKind::Expr;\n                if let Some(prefix) = chars.next_if(|ch| *ch == '?' || *ch == '$' || *ch == ':') {\n                    kind = match prefix {\n                        '?' => AntiquoteKind::Pat,\n                        '$' => AntiquoteKind::Constructor,\n                        ':' => AntiquoteKind::Ty,\n                        _ => unreachable!(),\n                    };\n                }\n                // If the first character is `{`, we parse the block\n                if let Some('{') = chars.peek() {\n                    chars.next(); // Consume `{`\n                    let mut level = 0;\n                    for ch in chars.by_ref() {\n                        level += match ch {\n                            '{' => 1,\n                            '}' => -1,\n                            _ => 0,\n                        };\n                        if level < 0 {\n                            break;\n                        }\n                        s.push(ch);\n                    }\n                } else {\n                    while let Some(ch) = chars.next_if(|ch| {\n                        !matches!(ch, ' ' | '\\t' | '\\n' | '(' | '{' | ')' | ';' | '!' 
| '?')\n                    }) {\n                        s.push(ch)\n                    }\n                }\n                if s.is_empty() {\n                    return Err(format!(\n                        \"Empty antiquotation just before `{}`\",\n                        chars.collect::<String>()\n                    ));\n                }\n                output += SPLIT_MARK;\n                // See https://github.com/rust-lang/rust/issues/58736\n                let ts: std::result::Result<TokenStream, _> = syn::parse_str(&s)\n                    .map_err(|err| format!(\"Could not parse antiquotation `{s}`: got error {err}\"));\n                if let Err(message) = &ts {\n                    // If we don't panic, the error won't show up,\n                    // this is because `parse_str` is not only\n                    // panicking, but also makes rustc to exit earlier.\n                    panic!(\"{message}\");\n                }\n                let ts: pm::TokenStream = ts?.into();\n                antiquotations.push(Antiquote { ts, kind })\n            }\n            _ => output.push(ch),\n        }\n    }\n    Ok((output, antiquotations))\n}\n\npub(super) fn item(\n    kind: ItemQuote,\n    attribute_to_inject: TokenStream,\n    payload: pm::TokenStream,\n    item: pm::TokenStream,\n) -> pm::TokenStream {\n    let expr = TokenStream::from(expression(InlineExprType::Unit, payload));\n    let item = TokenStream::from(item);\n    let uid = ItemUid::fresh();\n    let uid_attr = AttrPayload::Uid(uid.clone());\n    let assoc_attr = AttrPayload::AssociatedItem {\n        role: AssociationRole::ItemQuote,\n        item: uid,\n    };\n    let kind_attr = AttrPayload::ItemQuote(kind);\n    let status_attr = AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true });\n    use AttrPayload::NeverErased;\n    quote! 
{\n        #assoc_attr\n        #item\n        #attribute_to_inject\n        #status_attr\n        const _: () = {\n            #NeverErased\n            #uid_attr\n            #kind_attr\n            fn quote_contents() {\n                #expr\n            }\n        };\n    }\n    .into()\n}\n\npub(super) fn detect_future_node_in_expression(e: &syn::Expr) -> bool {\n    struct Visitor(bool);\n    use syn::visit::*;\n    impl<'a> Visit<'a> for Visitor {\n        fn visit_expr(&mut self, e: &'a Expr) {\n            if let Some(Ok(_)) = crate::utils::expect_future_expr(e) {\n                self.0 = true;\n            }\n        }\n    }\n    let mut visitor = Visitor(false);\n    visitor.visit_expr(e);\n    visitor.0\n}\n\npub(super) enum InlineExprType {\n    Unit,\n    Prop,\n    Anything,\n}\n\npub(super) fn expression(typ: InlineExprType, payload: pm::TokenStream) -> pm::TokenStream {\n    let (mut backend_code, antiquotes) = {\n        let payload = parse_macro_input!(payload as LitStr).value();\n        if payload.contains(SPLIT_MARK) {\n            return quote! {std::compile_error!(std::concat!($SPLIT_MARK, \" is reserved\"))}.into();\n        }\n        let (string, antiquotes) = match process_string(&payload) {\n            Ok(x) => x,\n            Err(message) => return quote! {std::compile_error!(#message)}.into(),\n        };\n        let string = proc_macro2::Literal::string(&string);\n        let string: TokenStream = [proc_macro2::TokenTree::Literal(string)]\n            .into_iter()\n            .collect();\n        (quote! {#string}, antiquotes)\n    };\n    for user in antiquotes.iter().rev() {\n        if !matches!(typ, InlineExprType::Unit)\n            && syn::parse(user.ts.clone())\n                .as_ref()\n                .map(detect_future_node_in_expression)\n                .unwrap_or(false)\n        {\n            let ts: proc_macro2::TokenStream = user.ts.clone().into();\n            return quote! 
{\n                ::std::compile_error!(concat!(\"The `future` operator cannot be used within a quote. Hint: move `\", stringify!(#ts), \"` to a let binding and use the binding name instead.\"))\n            }.into();\n        }\n        let kind = &user.kind;\n        backend_code = quote! {\n            let #kind = #user;\n            #backend_code\n        };\n    }\n\n    let function = match typ {\n        InlineExprType::Unit => quote! {inline},\n        InlineExprType::Prop => quote! {inline_unsafe::<::hax_lib::Prop>},\n        InlineExprType::Anything => quote! {inline_unsafe},\n    };\n\n    quote! {\n        ::hax_lib::#function(#[allow(unused_variables)]{#backend_code})\n    }\n    .into()\n}\n"
  },
  {
    "path": "hax-lib/macros/src/rewrite_self.rs",
    "content": "use crate::syn_ext::*;\nuse proc_macro2::Span;\nuse syn::spanned::Spanned;\nuse syn::*;\n\n/// The `RewriteSelf` structure is hidden in a module so that only its\n/// method can mutate its fields.\nmod rewrite_self {\n    use super::*;\n    use std::collections::HashSet;\n\n    /// Small & dirty wrapper around spans to make them `Eq`,\n    /// `PartialEq` and `Hash`\n    #[derive(Clone, Debug)]\n    struct SpanWrapper(Span);\n    const _: () = {\n        impl Eq for SpanWrapper {}\n        impl PartialEq for SpanWrapper {\n            fn eq(&self, other: &Self) -> bool {\n                format!(\"{self:?}\") == format!(\"{other:?}\")\n            }\n        }\n        use std::hash::*;\n        impl Hash for SpanWrapper {\n            fn hash<H: Hasher>(&self, state: &mut H) {\n                format!(\"{self:?}\").hash(state)\n            }\n        }\n    };\n\n    /// A struct that carries informations for substituting `self` and\n    /// `Self`. Note `typ` is an option:\n    #[must_use]\n    pub struct RewriteSelf {\n        typ: Option<Type>,\n        ident: Ident,\n        self_spans: HashSet<SpanWrapper>,\n    }\n\n    impl RewriteSelf {\n        /// Consumes `RewriteSelf`, optionally outputing errors.\n        pub fn get_error(self) -> Option<proc_macro2::TokenStream> {\n            if self.typ.is_some() || self.self_spans.is_empty() {\n                return None;\n            }\n\n            let mut error = Error::new(Span::call_site(), \"This macro doesn't work on trait or impl items: you need to add a `#[hax_lib::attributes]` on the enclosing impl block or trait.\");\n            for SpanWrapper(span) in self.self_spans {\n                let use_site = Error::new(\n                    span,\n                    \"Here, the function you are trying to annotate has a `Self`.\",\n                );\n                error.combine(use_site);\n            }\n            Some(error.to_compile_error())\n        }\n\n        fn 
self_detected(&mut self, span: Span) {\n            self.self_spans.insert(SpanWrapper(span));\n        }\n\n        /// Requests the ident with which `self` should be substituted.\n        pub fn self_ident(&mut self, span: Span) -> &Ident {\n            self.self_detected(span);\n            &self.ident\n        }\n        /// Requests the type with which `Self` should be substituted with.\n        pub fn self_ty(&mut self, span: Span) -> Type {\n            self.self_detected(span);\n            self.typ.clone().unwrap_or_else(|| {\n                parse_quote! {Self}\n            })\n        }\n        /// Construct a rewritter\n        pub fn new(ident: Ident, typ: Option<Type>) -> Self {\n            Self {\n                typ,\n                ident,\n                self_spans: HashSet::new(),\n            }\n        }\n    }\n}\npub use rewrite_self::*;\n\nimpl visit_mut::VisitMut for RewriteSelf {\n    fn visit_expr_mut(&mut self, e: &mut Expr) {\n        visit_mut::visit_expr_mut(self, e);\n        if e.is_ident(\"self\") {\n            let into = self.self_ident(e.span()).clone();\n            *e = parse_quote! 
{#into}\n        }\n    }\n    fn visit_type_mut(&mut self, ty: &mut Type) {\n        visit_mut::visit_type_mut(self, ty);\n        if ty.is_ident(\"Self\") {\n            *ty = self.self_ty(ty.span())\n        }\n    }\n    fn visit_fn_arg_mut(&mut self, arg: &mut FnArg) {\n        visit_mut::visit_fn_arg_mut(self, arg);\n        let arg_span = arg.span();\n        if let FnArg::Receiver(r) = arg {\n            let span = r.self_token.span();\n            *arg = FnArg::Typed(PatType {\n                attrs: r.attrs.clone(),\n                pat: Box::new(Pat::Ident(PatIdent {\n                    attrs: vec![],\n                    by_ref: None,\n                    mutability: None,\n                    ident: self.self_ident(span).clone(),\n                    subpat: None,\n                })),\n                colon_token: token::Colon(arg_span),\n                ty: Box::new({\n                    let ty = self.self_ty(span);\n                    let (reference, lt) = r\n                        .reference\n                        .clone()\n                        .map(|(r, lt)| (Some(r), lt))\n                        .unwrap_or((None, None));\n                    let mutability = reference.and(r.mutability.clone());\n                    parse_quote! {#reference #lt #mutability #ty}\n                }),\n            });\n        }\n    }\n    fn visit_item_impl_mut(&mut self, _i: &mut ItemImpl) {\n        // Do nothing! We allow user to write self if it's nested in a impl block\n    }\n}\n"
  },
  {
    "path": "hax-lib/macros/src/syn_ext.rs",
    "content": "use crate::prelude::*;\nuse syn::parse::*;\nuse syn::punctuated::Punctuated;\n\n/// A closure expression of arity 1, e.g. `|x| x + 3`\npub struct ExprClosure1 {\n    pub arg: Pat,\n    pub body: Expr,\n}\n\nimpl Parse for ExprClosure1 {\n    fn parse(ps: ParseStream) -> Result<Self> {\n        let closure: ExprClosure = Parse::parse(ps as ParseStream)?;\n        let inputs = closure.inputs;\n        if inputs.len() != 1 {\n            Err(Error::new(inputs.span(), \"Expected exactly one argument\"))?;\n        }\n        Ok(ExprClosure1 {\n            arg: inputs[0].clone(),\n            body: *closure.body.clone(),\n        })\n    }\n}\n\n/// Utility trait to extract an `Ident` from various syn types\npub trait ExpectIdent {\n    /// Is `self` an `Ident`?\n    fn expect_ident(&self) -> Option<Ident>;\n    /// Is `self` a specific ident named `name`?\n    fn is_ident(&self, name: &str) -> bool {\n        self.expect_ident()\n            .filter(|ident| &ident.to_string() == name)\n            .is_some()\n    }\n}\n\nimpl<T: ExpectIdent> ExpectIdent for Box<T> {\n    fn expect_ident(&self) -> Option<Ident> {\n        let this: &T = self;\n        this.expect_ident()\n    }\n}\n\nfn expect_punctuated_1<T: Clone, S>(x: &Punctuated<T, S>) -> Option<T> {\n    (x.len() == 1).then(|| x.first().unwrap().clone())\n}\n\nimpl ExpectIdent for Path {\n    fn expect_ident(&self) -> Option<Ident> {\n        expect_punctuated_1(&self.segments).map(|s| s.ident)\n    }\n}\n\nimpl ExpectIdent for Expr {\n    fn expect_ident(&self) -> Option<Ident> {\n        match self {\n            Expr::Path(ExprPath {\n                qself: None, path, ..\n            }) => path.expect_ident(),\n            _ => None,\n        }\n    }\n}\n\nimpl ExpectIdent for Type {\n    fn expect_ident(&self) -> Option<Ident> {\n        match self {\n            Type::Path(TypePath {\n                qself: None, path, ..\n            }) => path.expect_ident(),\n            _ => None,\n      
  }\n    }\n}\n\nimpl ExpectIdent for Pat {\n    fn expect_ident(&self) -> Option<Ident> {\n        match self {\n            Pat::Ident(PatIdent {\n                by_ref: None,\n                mutability: None,\n                ident,\n                subpat: None,\n                ..\n            }) => Some(ident.clone()),\n            _ => None,\n        }\n    }\n}\n"
  },
  {
    "path": "hax-lib/macros/src/utils.rs",
    "content": "use syn::visit::Visit;\n\nuse crate::prelude::*;\nuse crate::rewrite_self::*;\n\n/// `HaxQuantifiers` makes polymorphic expression inlining functions available\npub struct HaxQuantifiers;\nimpl ToTokens for HaxQuantifiers {\n    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {\n        quote! {\n            use ::hax_lib::fstar::prop as fstar;\n            use ::hax_lib::coq::prop as coq;\n            use ::hax_lib::lean::prop as lean;\n            use ::hax_lib::proverif::prop as proverif;\n        }\n        .to_tokens(tokens)\n    }\n}\n\n/// Meta informations about functions decorations\npub enum FnDecorationKind {\n    Requires,\n    Ensures { ret_binder: Pat },\n    Decreases,\n    SMTPat,\n}\n\nimpl ToString for FnDecorationKind {\n    fn to_string(&self) -> String {\n        match self {\n            FnDecorationKind::Requires => \"requires\".to_string(),\n            FnDecorationKind::Ensures { .. } => \"ensures\".to_string(),\n            FnDecorationKind::Decreases { .. } => \"decreases\".to_string(),\n            FnDecorationKind::SMTPat { .. } => \"SMTPat\".to_string(),\n        }\n    }\n}\n\nimpl From<FnDecorationKind> for AssociationRole {\n    fn from(kind: FnDecorationKind) -> Self {\n        match &kind {\n            FnDecorationKind::Requires => AssociationRole::Requires,\n            FnDecorationKind::Ensures { .. 
} => AssociationRole::Ensures,\n            FnDecorationKind::Decreases => AssociationRole::Decreases,\n            FnDecorationKind::SMTPat => AssociationRole::SMTPat,\n        }\n    }\n}\n\n/// Merge two `syn::Generics`, respecting lifetime orders\npub(crate) fn merge_generics(x: Generics, y: Generics) -> Generics {\n    Generics {\n        lt_token: x.lt_token.or(y.lt_token),\n        gt_token: x.gt_token.or(y.gt_token),\n        params: {\n            let lts = x\n                .lifetimes()\n                .chain(y.lifetimes())\n                .cloned()\n                .map(GenericParam::Lifetime);\n            let not_lts = x\n                .params\n                .clone()\n                .into_iter()\n                .filter(|p| !matches!(p, GenericParam::Lifetime(_)))\n                .chain(\n                    y.params\n                        .clone()\n                        .into_iter()\n                        .filter(|p| !matches!(p, GenericParam::Lifetime(_))),\n                );\n            lts.chain(not_lts).collect()\n        },\n        where_clause: match (x.where_clause, y.where_clause) {\n            (Some(wx), Some(wy)) => Some(syn::WhereClause {\n                where_token: wx.where_token,\n                predicates: wx.predicates.into_iter().chain(wy.predicates).collect(),\n            }),\n            (Some(w), None) | (None, Some(w)) => Some(w),\n            (None, None) => None,\n        },\n    }\n}\n\n/// Transform every `x: &mut T` input into `x: &T` in a signature, and\n/// returns a list of such transformed `x: &T` inputs\nfn unmut_references_in_inputs(sig: &mut Signature) -> Vec<FnArg> {\n    let mut mutable_inputs = vec![];\n    for input in &mut sig.inputs {\n        if let Some(mutability) = match input {\n            FnArg::Receiver(syn::Receiver {\n                reference: Some(_),\n                mutability,\n                ..\n            }) => Some(mutability),\n            FnArg::Typed(syn::PatType { ty, 
.. }) => {\n                use std::borrow::BorrowMut;\n                if let syn::Type::Reference(syn::TypeReference { mutability, .. }) = ty.borrow_mut()\n                {\n                    Some(mutability)\n                } else {\n                    None\n                }\n            }\n            _ => None,\n        } {\n            if mutability.is_some() {\n                *mutability = None;\n                mutable_inputs.push(input.clone());\n            }\n        }\n    }\n    mutable_inputs\n}\n\n/// Expects a `FnArg` to be a simple variable pattern\nfn expect_fn_arg_var_pat(arg: &FnArg) -> Option<(String, syn::Type)> {\n    match arg {\n        FnArg::Receiver(recv) => Some((\"self\".into(), *recv.ty.clone())),\n        FnArg::Typed(pat_type) => match &*pat_type.pat {\n            syn::Pat::Wild(_) => Some((\"\".into(), *pat_type.ty.clone())),\n            syn::Pat::Ident(pat_ident) => {\n                Some((format!(\"{}\", pat_ident.ident), *pat_type.ty.clone()))\n            }\n            _ => None,\n        },\n    }\n}\n\npub(crate) enum NotFutureExpr {\n    BadNumberOfArgs,\n    ArgNotIdent,\n}\n\n/// `expect_future_expr(e)` tries to match the pattern\n/// `future(<syn::Ident>)` in expression `e`\npub(crate) fn expect_future_expr(e: &Expr) -> Option<std::result::Result<Ident, NotFutureExpr>> {\n    if let Expr::Call(call) = e {\n        if call.func.is_ident(\"future\") {\n            return Some(match call.args.iter().collect::<Vec<_>>().as_slice() {\n                [arg] => arg.expect_ident().ok_or(NotFutureExpr::ArgNotIdent),\n                _ => Err(NotFutureExpr::BadNumberOfArgs),\n            });\n        }\n    }\n    None\n}\n\n#[derive(Default)]\npub struct IdentCollector {\n    pub idents: Vec<Ident>,\n}\n\nimpl<'ast> syn::visit::Visit<'ast> for IdentCollector {\n    fn visit_ident(&mut self, ident: &'ast Ident) {\n        self.idents.push(ident.clone());\n    }\n}\n\nimpl IdentCollector {\n    /// Returns a fresh 
identifier with the given prefix that is not in the collected identifiers.\n    pub fn fresh_ident(&self, prefix: &str) -> Ident {\n        let idents: HashSet<&Ident> = HashSet::from_iter(self.idents.iter());\n        let mk = |s| Ident::new(s, Span::call_site());\n        std::iter::once(mk(prefix))\n            .chain((0u64..).map(|i| Ident::new(&format!(\"{}{}\", prefix, i), Span::call_site())))\n            .find(|ident| !idents.contains(ident))\n            .unwrap()\n    }\n}\n\n/// Rewrites `future(x)` nodes in an expression when (1) `x` is an\n/// ident and (2) the ident `x` is contained in the HashSet.\nstruct RewriteFuture(HashSet<String>);\nimpl VisitMut for RewriteFuture {\n    fn visit_expr_mut(&mut self, e: &mut Expr) {\n        syn::visit_mut::visit_expr_mut(self, e);\n        let error = match expect_future_expr(e) {\n            Some(Ok(arg)) => {\n                let arg = format!(\"{}\", arg);\n                if self.0.contains(&arg) {\n                    let arg = create_future_ident(&arg);\n                    *e = parse_quote! {#arg};\n                    return;\n                }\n                Some(format!(\"Cannot find an input `{arg}` of type `&mut _`. 
In the context, `future` can be called on the following inputs: {:?}.\", self.0))\n            }\n            Some(Err(error_kind)) => {\n                let message = match error_kind {\n                    NotFutureExpr::BadNumberOfArgs => {\n                        \"`future` can only be called with one argument: a `&mut` input name\"\n                    }\n                    NotFutureExpr::ArgNotIdent => {\n                        \"`future` can only be called with an `&mut` input name\"\n                    }\n                };\n                let help_message = match self.0.iter().next() {\n                    None => \" In the context, there is no `&mut` input.\".to_string(),\n                    Some(var) => {\n                        format!(\" For example, in the context you can write `future({var})`.\")\n                    }\n                };\n                Some(format!(\"{message}.{}\", help_message))\n            }\n            None => None,\n        };\n        if let Some(error) = error {\n            *e = parse_quote! {::std::compile_error!(#error)};\n        }\n    }\n}\n\nfn create_future_ident(name: &str) -> syn::Ident {\n    proc_macro2::Ident::new(&format!(\"{name}_future\"), proc_macro2::Span::call_site())\n}\n\n/// The engine translates functions of arity zero to functions that\n/// takes exactly one unit argument. The zero-arity functions we\n/// generate are translated correctly as well. But in the case of a\n/// `ensures` clause, that's an issue: we produce a function of arity\n/// one, whose first argument is the result of the function. Instead,\n/// we need a function of arity two.\n/// `fix_signature_arity` adds a `unit` if needed.\nfn add_unit_to_sig_if_needed(signature: &mut Signature) {\n    if signature.inputs.is_empty() {\n        signature.inputs.push(parse_quote! 
{_: ()})\n    }\n}\n\n/// Common logic when generating a function decoration\npub fn make_fn_decoration(\n    mut phi: Expr,\n    mut signature: Signature,\n    kind: FnDecorationKind,\n    mut generics: Option<Generics>,\n    self_type: Option<Type>,\n) -> (TokenStream, AttrPayload) {\n    let self_ident: Ident = {\n        let mut idents = IdentCollector::default();\n        idents.visit_expr(&phi);\n        idents.visit_signature(&signature);\n        idents.fresh_ident(\"self_\")\n    };\n    let error = {\n        let mut rewriter = RewriteSelf::new(self_ident, self_type);\n        rewriter.visit_expr_mut(&mut phi);\n        rewriter.visit_signature_mut(&mut signature);\n        if let Some(generics) = generics.as_mut() {\n            rewriter.visit_generics_mut(generics);\n        }\n        rewriter.get_error()\n    };\n    let uid = ItemUid::fresh();\n    let mut_ref_inputs = unmut_references_in_inputs(&mut signature);\n    let decoration = {\n        let decoration_sig = {\n            let mut sig = signature.clone();\n            sig.ident = format_ident!(\"{}\", kind.to_string());\n            if let FnDecorationKind::Ensures { ret_binder } = &kind {\n                add_unit_to_sig_if_needed(&mut sig);\n                let output_typ = match sig.output {\n                    syn::ReturnType::Default => parse_quote! 
{()},\n                    syn::ReturnType::Type(_, t) => t,\n                };\n                let mut_ref_inputs = mut_ref_inputs\n                    .iter()\n                    .map(|mut_ref_input| {\n                        expect_fn_arg_var_pat(mut_ref_input).expect(\n                            \"Every `&mut` input of a function annotated with a `ensures` clause is expected to be a simple variable pattern.\",\n                        )\n                    });\n                let mut rewrite_future =\n                    RewriteFuture(mut_ref_inputs.clone().map(|x| x.0).collect());\n                rewrite_future.visit_expr_mut(&mut phi);\n                let (mut pats, mut tys): (Vec<_>, Vec<_>) = mut_ref_inputs\n                    .map(|(name, ty)| {\n                        (\n                            create_future_ident(&name).to_token_stream(),\n                            ty.to_token_stream(),\n                        )\n                    })\n                    .unzip();\n\n                let is_output_typ_unit = if let syn::Type::Tuple(tuple) = &*output_typ {\n                    tuple.elems.is_empty()\n                } else {\n                    false\n                };\n\n                if !is_output_typ_unit || pats.is_empty() {\n                    pats.push(ret_binder.to_token_stream());\n                    tys.push(quote! {#output_typ});\n                }\n\n                sig.inputs\n                    .push(syn::parse_quote! {(#(#pats),*): (#(#tys),*)});\n            }\n            if let Some(generics) = generics {\n                sig.generics = merge_generics(generics, sig.generics);\n            }\n            sig.output = match &kind {\n                FnDecorationKind::Decreases | FnDecorationKind::SMTPat => {\n                    syn::parse_quote! { -> () }\n                }\n                _ => syn::parse_quote! 
{ -> impl core::convert::Into<::hax_lib::Prop> },\n            };\n            sig\n        };\n        let uid_attr = AttrPayload::Uid(uid.clone());\n        let late_skip = &AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true });\n        if let FnDecorationKind::Decreases | FnDecorationKind::SMTPat = &kind {\n            phi = parse_quote! {::hax_lib::any_to_unit(#phi)};\n        };\n        let quantifiers = if let FnDecorationKind::Decreases = &kind {\n            None\n        } else {\n            Some(HaxQuantifiers)\n        };\n        let future = if let FnDecorationKind::Ensures { .. } = &kind {\n            quote! { #late_skip #AttrHaxLang fn future<T>(x: &mut T) -> &T { x } }\n        } else {\n            quote! {}\n        };\n        use AttrPayload::NeverErased;\n        quote! {\n            #[cfg(#DebugOrHaxCfgExpr)]\n            #late_skip\n            const _: () = {\n                #quantifiers\n                #future\n                #uid_attr\n                #late_skip\n                #[allow(unused)]\n                #NeverErased\n                #decoration_sig {\n                    #phi\n                }\n            };\n        }\n    };\n\n    let assoc_attr = AttrPayload::AssociatedItem {\n        role: kind.into(),\n        item: uid,\n    };\n    (quote! {#error #decoration}, assoc_attr)\n}\n"
  },
  {
    "path": "hax-lib/macros/types/Cargo.toml",
    "content": "[package]\nname = \"hax-lib-macros-types\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition = \"2021\"\nrepository.workspace = true\nreadme = \"README.md\"\ndescription = \"Hax-internal types\"\n\n[dependencies]\nserde.workspace = true\nserde_json.workspace = true\nschemars = {workspace = true, optional = true}\nquote.workspace = true\nproc-macro2.workspace = true\nuuid = { version = \"1.5\", features = [\"v4\"] }\n\n"
  },
  {
    "path": "hax-lib/macros/types/README.md",
    "content": "# hax internal types\n\nA crate that defines the types of the various payloads of the attributes produced by the crate `hax-lib-macros` and consumed internally by the engine of hax.\n"
  },
  {
    "path": "hax-lib/macros/types/src/lib.rs",
    "content": "use serde::{Deserialize, Serialize};\n\n/// Each item can be marked with a *u*nique *id*entifier. This is\n/// useful whenever the payload of an attribute is a piece of Rust code\n/// (an expression, a path, a type...). We don't want to retrieve those\n/// pieces of Rust code as raw token stream: we want to let Rustc give\n/// meaning to those. For instance, we want Rustc to type expressions\n/// and to resolve paths.\n///\n/// Thus, we expand attributes with Rust-code-payloads as top-level\n/// items marked with an `ItemUid`. The attributes are then replaced\n/// in place with a simple reference (the `ItemUid` in stake).\n///\n/// Morally, we expand `struct Foo { #[refine(x > 3)] x: u32 }` to:\n///  1. `#[uuid(A_UNIQUE_ID_123)] fn refinement(x: u32) -> hax_lib::Prop {x > 3}`;\n///  2. `struct Foo { #[refined_by(A_UNIQUE_ID_123)] x: u32 }`.\n#[derive(Debug, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]\n#[cfg_attr(feature = \"schemars\", derive(schemars::JsonSchema))]\n#[serde(rename = \"HaUid\")]\npub struct ItemUid {\n    /// Currently, this is a UUID.\n    pub uid: String,\n}\n\nimpl std::fmt::Display for ItemUid {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        std::fmt::Display::fmt(&self.uid, f)\n    }\n}\n\nimpl ItemUid {\n    pub fn fresh() -> Self {\n        use uuid::Uuid;\n        let uid = format!(\"{}\", Uuid::new_v4().simple());\n        ItemUid { uid }\n    }\n}\n\n/// What shall Hax do with an item?\n#[derive(Debug, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]\n#[cfg_attr(feature = \"schemars\", derive(schemars::JsonSchema))]\n#[serde(rename = \"HaItemStatus\")]\npub enum ItemStatus {\n    /// Include this item in the translation\n    Included {\n        /// Should Hax drop this item just before code generation?\n        late_skip: bool,\n    },\n    /// Exclude this item from the translation, optionally replacing it in the backends\n    Excluded { 
modeled_by: Option<String> },\n}\n\n/// An item can be associated to another one for multiple reasons:\n/// `AssociationRole` capture the nature of the (directed) relation\n/// between two items\n#[derive(Debug, Copy, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]\n#[cfg_attr(feature = \"schemars\", derive(schemars::JsonSchema))]\n#[serde(rename = \"HaAssocRole\")]\npub enum AssociationRole {\n    Requires,\n    Ensures,\n    Decreases,\n    SMTPat,\n    Refine,\n    /// A quoted piece of backend code to place after or before the\n    /// extraction of the marked item\n    ItemQuote,\n    ProcessRead,\n    ProcessWrite,\n    ProcessInit,\n    ProtocolMessages,\n}\n\n/// Where should a item quote appear?\n#[derive(Debug, Copy, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]\n#[cfg_attr(feature = \"schemars\", derive(schemars::JsonSchema))]\n#[serde(rename = \"HaItemQuotePosition\")]\npub enum ItemQuotePosition {\n    /// Should appear just before the item in the extraction\n    Before,\n    /// Should appear right after the item in the extraction\n    After,\n}\n\n/// F*-specific options for item quotes\n#[derive(Debug, Copy, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]\n#[cfg_attr(feature = \"schemars\", derive(schemars::JsonSchema))]\n#[serde(rename = \"HaItemQuoteFStarOpts\")]\npub struct ItemQuoteFStarOpts {\n    /// Shall we output this in F* interfaces (`*.fsti` files)?\n    pub intf: bool,\n    /// Shall we output this in F* implementations (`*.fst` files)?\n    pub r#impl: bool,\n}\n\n/// An item quote is a verbatim piece of backend code included in\n/// Rust. 
[`ItemQuote`] encodes the various options a item quote can\n/// have.\n#[derive(Debug, Copy, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]\n#[cfg_attr(feature = \"schemars\", derive(schemars::JsonSchema))]\n#[serde(rename = \"HaItemQuote\")]\npub struct ItemQuote {\n    pub position: ItemQuotePosition,\n    pub fstar_options: Option<ItemQuoteFStarOpts>,\n}\n\n/// The proof method to use for verification condition generation and discharge.\n#[derive(Debug, Copy, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]\n#[cfg_attr(feature = \"schemars\", derive(schemars::JsonSchema))]\n#[serde(rename = \"HaProofMethod\")]\npub enum ProofMethod {\n    BvDecide,\n    Grind,\n}\n\n/// Hax only understands one attribute: `#[hax::json(PAYLOAD)]` where\n/// `PAYLOAD` is a JSON serialization of an inhabitant of\n/// `AttrPayload`.\n#[derive(Debug, Clone, Serialize, Deserialize, Hash, Eq, PartialEq, Ord, PartialOrd)]\n#[cfg_attr(feature = \"schemars\", derive(schemars::JsonSchema))]\n#[serde(rename = \"HaPayload\")]\npub enum AttrPayload {\n    ItemStatus(ItemStatus),\n    /// Mark an item as associated with another one\n    AssociatedItem {\n        /// What is the nature of the association?\n        role: AssociationRole,\n        /// What is the identifier of the target item?\n        item: ItemUid,\n    },\n    Uid(ItemUid),\n    /// Decides of the position of a item quote\n    ItemQuote(ItemQuote),\n    /// Mark an item so that hax never drop its body (this is useful\n    /// for pre- and post- conditions of a function we dropped the\n    /// body of: pre and post are part of type signature)\n    NeverErased,\n    NewtypeAsRefinement,\n    /// Mark an item as a lemma statement to prove in the backend\n    Lemma,\n    Language,\n    ProcessRead,\n    ProcessWrite,\n    ProcessInit,\n    Proof(String),\n    PureRequiresProof(String),\n    PureEnsuresProof(String),\n    ProofMethod(ProofMethod),\n    ProtocolMessages,\n    
PVConstructor,\n    PVHandwritten,\n    TraitMethodNoPrePost,\n    /// Make an item opaque\n    Erased,\n    /// In the context of a set of fields (e.g. on a `struct`), overrides its\n    /// order. By default, the order of a field is its index, e.g. the first\n    /// field has order 0, the i-th field has order i+1. Rust fields order\n    /// matters: it rules how bits are represented. Once extracted, the order\n    /// matters, but for different reasons, e.g. a field is refined with\n    /// another, requiring a specific order.\n    Order(i32),\n}\n\npub const HAX_TOOL: &str = \"_hax\";\npub const HAX_CFG_OPTION_NAME: &str = \"hax_compilation\";\n\npub struct HaxTool;\npub struct HaxCfgOptionName;\npub struct DebugOrHaxCfgExpr;\nimpl ToTokens for HaxTool {\n    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {\n        format_ident!(\"{}\", HAX_TOOL).to_tokens(tokens)\n    }\n}\nimpl ToTokens for HaxCfgOptionName {\n    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {\n        format_ident!(\"{}\", HAX_CFG_OPTION_NAME).to_tokens(tokens)\n    }\n}\nimpl ToTokens for DebugOrHaxCfgExpr {\n    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {\n        quote! {any(#HaxCfgOptionName, debug_assertions)}.to_tokens(tokens)\n    }\n}\n\nuse quote::*;\n\nimpl From<&AttrPayload> for proc_macro2::TokenStream {\n    fn from(payload: &AttrPayload) -> Self {\n        let payload: String = serde_json::to_string(payload).unwrap();\n        quote! {#[cfg_attr(#HaxCfgOptionName, #HaxTool::json(#payload))]}\n    }\n}\n\nimpl ToTokens for AttrPayload {\n    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {\n        proc_macro2::TokenStream::from(self).to_tokens(tokens)\n    }\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/.gitignore",
    "content": "*.vo*\n*.aux\n*.glob\n*.cache\n.Makefile.d\nMakefile\nMakefile.conf\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/default.nix",
    "content": "{ stdenv ? (import <nixpkgs> { }).stdenv\n, coqPackages ? (import <nixpkgs> { }).coqPackages_8_19, }:\nstdenv.mkDerivation {\n  name = \"hax-coq-generated-core\";\n  src = ./generated-core;\n  buildPhase = ''\n    coq_makefile -f _CoqProject -o Makefile\n    make\n  '';\n  installPhase = ''\n    export DESTDIR=$out\n    make install\n    mv $out/nix/store/*/lib $out\n    rm -rf $out/nix\n  '';\n  buildInputs = [\n    coqPackages.coq-record-update\n    coqPackages.coq\n  ];\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/_CoqProject",
    "content": "-R src/ Core\n-R spec/ Core\n-R phase_library/ Core\n-arg -w\n-arg all\n\n./src/Core_Clone.v\n./src/Core_Marker.v\n\n./src/Core_Panicking.v\n\n./src/Core_Ops_Function.v\n\n./src/Core_Option.v\n./src/Core_Cmp.v\n\n./spec/Core_Base_Spec_Haxint.v\n./spec/Core_Base_Spec_Unary.v\n\n./spec/Core_Base_Spec_Binary_Positive.v\n./spec/Core_Base_Spec_Binary_Pos.v\n\n./spec/Core_Base_Spec_Binary.v\n\n./spec/Core_Base_Spec_Z.v\n\n./spec/Core_Base_Spec_Seq.v\n\n./spec/Core_Base_Spec_Constants.v\n\n./spec/Core_Base_Spec.v\n\n./src/Core_Base_Binary.v\n./src/Core_Base_Pos.v\n./src/Core_Base_Z.v\n\n./src/Core_Base_Seq.v\n\n./src/Core_Base.v\n\n./src/Core_Convert.v\n\n./src/Core_Ops_Index.v\n\n./src/Core_Ops_Bit.v\n./src/Core_Ops_Arith.v\n\n./src/Core_Ops_Range.v\n\n./src/Core_Iter_Traits_Iterator.v\n\n./src/Core_Ops_Index_range.v\n\n./src/Core_Ops.v\n\n./src/Core_Base_interface_Coerce.v\n\n./src/Core_Base_interface_Int.v\n\n./src/Core_Base_interface.v\n\n./src/Core_Num_Uint_macros.v # Empty\n./src/Core_Num_Int_macros.v # Empty\n\n./src/Core_Result.v\n\n./phase_library/ControlFlow.v\n\n# Bundles: Core_Primitive.v,\n./src/Core_Array_Rec_bundle_579704328.v\n\n# ./src/Core_Primitive_Number_conversion.v\n# ./src/Core_Primitive_Number_conversion_i.v\n\n./src/Core_Primitive.v\n\n./phase_library/NumberNotation.v\n./phase_library/TODO.v\n\n./src/Core_Intrinsics.v\n\n./src/Core_Num.v # Broken?\n\n./src/Core_Slice_Iter.v\n./src/Core_Slice.v\n\n./src/Core_Array_Iter.v\n./src/Core_Array.v\n\n./src/Core.v\n\n# # Extra\n\n# Core_Slice_Iter_Macros.v\n# ----- Core_Slice_Iter.v\n# Core_Slice_Index_Private_slice_index.v\n# Core_Slice_Index.v\n# ----- Core_Slice.v\n# ----- Core_Result.v\n# ----- Core_Primitive_Number_conversion_i.v\n# ----- Core_Primitive_Number_conversion.v\n# ----- Core_Primitive.v\n# ----- Core_Panicking.v\n# ----- Core_Option.v\n# ----- Core_Ops_Range.v\n# Core_Ops_Index_range.v\n# ----- Core_Ops_Index.v\n# Core_Ops_Function.v\n# Core_Ops_Bit_Impls_for_prims.v\n# 
----- Core_Ops_Bit.v\n# Core_Ops_Arith_Impls_for_prims.v\n# ----- Core_Ops_Arith.v\n# ----- Core_Ops.v\n# ----- Core_Num_Uint_macros.v\n# ----- Core_Num_Int_macros.v\n# ----- Core_Num.v\n# ----- Core_Marker.v\n# Core_Iter_Traits_Marker.v\n# Core_Iter_Traits_Iterator.v\n# Core_Iter_Traits_Exact_size.v\n# Core_Iter_Traits_Collect.v\n# Core_Iter_Traits.v\n# Core_Iter_Range.v\n# Core_Iter.v\n# ----- Core_Intrinsics.v\n# Core_Fmt.v\n# ----- Core_Convert.v\n# ----- Core_Cmp.v\n# ----- Core_Clone.v\n# Core_Base_interface_Int_U8_proofs.v\n# Core_Base_interface_Int_U64_proofs.v\n# Core_Base_interface_Int_U32_proofs.v\n# Core_Base_interface_Int_U16_proofs.v\n# Core_Base_interface_Int_U128_proofs.v\n# Core_Base_interface_Int_I8_proofs.v\n# Core_Base_interface_Int_I64_proofs.v\n# Core_Base_interface_Int_I32_proofs.v\n# Core_Base_interface_Int_I16_proofs.v\n# Core_Base_interface_Int_I128_proofs.v\n# ----- Core_Base_interface_Int.v\n# ----- Core_Base_interface_Coerce.v\n# ----- Core_Base_interface.v\n# ----- Core_Base_Z.v\n# ----- Core_Base_Spec_Z.v\n# ----- Core_Base_Spec_Unary.v\n# ----- Core_Base_Spec_Seq.v\n# ----- Core_Base_Spec_Haxint.v\n# ----- Core_Base_Spec_Constants.v\n# ----- Core_Base_Spec_Binary_Positive.v\n# ----- Core_Base_Spec_Binary_Pos.v\n# ----- Core_Base_Spec_Binary.v\n# ----- Core_Base_Spec.v\n# ----- Core_Base_Seq.v\n# ----- Core_Base_Pos.v\n# Core_Base_Number_conversion.v\n# ----- Core_Base_Binary.v\n# ----- Core_Base.v\n# ----- Core_Array_Rec_bundle_579704328.v\n# ----- Core_Array_Iter.v\n# ----- Core_Array.v\n# ----- Core.v"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/phase_library/ControlFlow.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Marker.\n\nFrom Core Require Import Core_Convert.\n\nFrom Core Require Import Core_Base_interface_Int.\n\nFrom Core Require Import Core_Result.\n\nInductive t_ControlFlow a b :=\n| ControlFlow_Continue : a -> t_ControlFlow a b\n| ControlFlow_Break : b -> t_ControlFlow a b.\nArguments ControlFlow_Continue {a} {b}.\nArguments ControlFlow_Break {a} {b}.\n\n(* Run exception *)\nDefinition run {a} (x : t_ControlFlow a a) : a :=\n  match x with\n  | ControlFlow_Continue x => x\n  | ControlFlow_Break x => x\n  end.\n\nDefinition bind_exception {a c}\n  (x : t_ControlFlow a c)\n  (f : forall (k : a) `{x = ControlFlow_Continue k}, t_ControlFlow a c) : t_ControlFlow a c :=\n  match x as k return x = k -> _ with\n  | ControlFlow_Continue o => fun k => f (H := k) o\n  | ControlFlow_Break o => fun _ => ControlFlow_Break o\n  end eq_refl.\n\nNotation \"'letb' p ':=' e 'in' rhs\" :=\n  (bind_exception e (fun p _ => rhs)) (at level 100).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/phase_library/NumberNotation.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\nRequire Import Core_Primitive.\nExport Core_Primitive.\n\n(* Handwritten *)\n\nCoercion Build_t_i8 : t_I8 >-> t_i8.\nCoercion Build_t_I8 : Z >-> t_I8.\n\nCoercion Build_t_i16 : t_I16 >-> t_i16.\nCoercion Build_t_I16 : Z >-> t_I16.\n\nCoercion Build_t_i32 : t_I32 >-> t_i32.\nCoercion Build_t_I32 : Z >-> t_I32.\n\nCoercion Build_t_i64 : t_I64 >-> t_i64.\nCoercion Build_t_I64 : Z >-> t_I64.\n\nCoercion Build_t_i128 : t_I128 >-> t_i128.\nCoercion Build_t_I128 : Z >-> t_I128.\n\nCoercion Build_t_isize : t_I64 >-> t_isize.\n\nCoercion Build_t_u8 : t_U8 >-> t_u8.\nCoercion Build_t_U8 : N >-> t_U8.\n\nCoercion Build_t_u16 : t_U16 >-> t_u16.\nCoercion Build_t_U16 : N >-> t_U16.\n\nCoercion Build_t_u32 : t_U32 >-> t_u32.\nCoercion Build_t_U32 : N >-> t_U32.\n\nCoercion Build_t_u64 : t_U64 >-> t_u64.\nCoercion Build_t_U64 : N >-> t_U64.\n\nCoercion Build_t_u128 : t_U128 >-> t_u128.\nCoercion Build_t_U128 : N >-> t_U128.\n\nCoercion Build_t_usize : t_U64 >-> t_usize.\n\nCoercion Z.to_N : Z >-> N.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/phase_library/TODO.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\nRequire Import Core_Primitive.\nExport Core_Primitive.\n\n(* Array coercions *)\nCoercion Build_t_Array : t_Slice >-> t_Array.\nCoercion Build_t_Slice : list >-> t_Slice.\n\nDefinition unsize {A} (x : A) := x.\nDefinition repeat {v_T} (a : v_T) b : t_Array v_T b := List.repeat a (N.to_nat (U64_f_v (usize_0 b))).\n\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) : string := x.\n\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\n(* Inductive globality := | t_Global. *)\n(* Definition t_Vec T (_ : globality) : Type := list T. *)\n(* Definition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2). *)\n(* Definition impl_1__len {A} (l : list A) := Z.of_nat (List.length l). *)\n(* Definition impl__new {A} (_ : Datatypes.unit) : list A := nil. *)\n(* Definition impl__with_capacity {A} (_ : Z)  : list A := nil. *)\n(* Definition impl_1__push {A} l (x : A) := cons l x. *)\n(* Definition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := {| x |}. *)\n(* Definition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l). *)\n\nFixpoint build_range (l : nat) (f : nat) (a : list t_usize) : list t_usize :=\n  match f with\n  | 0%nat => a\n  | (S n)%nat => build_range (S l) n (cons a (Build_t_usize (Build_t_U64 (unary_to_int l))))\n  end.\n\nDefinition fold_range {A : Type} (l : t_usize) (u : t_usize) (_ : A -> t_usize -> bool) (x : A) (f : A -> t_usize -> A) : A := List.fold_left f (build_range (unary_from_int (U64_f_v (usize_0 l))) (unary_from_int (U64_f_v (usize_0 (Sub_f_sub u l)))) nil) x.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec_Haxint.\nExport Core_Base_Spec_Haxint.\n\nFrom Core Require Import Core_Base_Spec_Unary.\nExport Core_Base_Spec_Unary.\n\nFrom Core Require Import Core_Base_Spec_Binary.\nExport Core_Base_Spec_Binary.\n\nFrom Core Require Import Core_Base_Spec_Z.\nExport Core_Base_Spec_Z.\n\nFrom Core Require Import Core_Base_Spec_Seq.\nExport Core_Base_Spec_Seq.\n\nFrom Core Require Import Core_Base_Spec_Constants.\nExport Core_Base_Spec_Constants.\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Binary.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec_Binary_Pos.\nExport Core_Base_Spec_Binary_Pos.\n\nFrom Core Require Import Core_Base_Spec_Binary_Positive.\nExport Core_Base_Spec_Binary_Positive.\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Binary_Pos.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec_Haxint.\nExport Core_Base_Spec_Haxint.\n\nFrom Core Require Import Core_Base_Spec_Binary_Positive.\nExport Core_Base_Spec_Binary_Positive.\n\nNotation \"'t_POS'\" := N.\nNotation \"'POS_ZERO'\" := N0.\nNotation \"'POS_POS'\" := Npos.\n\nDefinition match_pos (s : t_HaxInt) : t_POS := s.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Binary_Positive.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec_Haxint.\nExport Core_Base_Spec_Haxint.\n\nFrom Core Require Import Core_Clone.\nExport Core_Clone.\n\nNotation \"'t_Positive'\" := positive.\nNotation \"'t_POSITIVE'\" := positive.\nNotation \"'POSITIVE_XH'\" := xH.\nNotation \"'POSITIVE_XO'\" := xO.\nNotation \"'POSITIVE_XI'\" := xI.\n\nDefinition positive_from_int (x : t_HaxInt) `{Hpos : x <> N0} : t_Positive :=\n  match x return x <> N0 -> _ with | N0 => fun Hpos => False_rect _ (Hpos eq_refl) | Npos p => fun _ => p end Hpos.\n\nDefinition positive_to_int (s : t_Positive) : t_HaxInt := Npos s.\n\nDefinition xH : t_Positive := xH.\nDefinition xI (s : t_Positive) : t_Positive := xI s.\nDefinition xO (s : t_Positive) : t_Positive := xO s.\n\nDefinition match_positive (s : t_Positive) : t_POSITIVE := s.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Constants.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec_Haxint.\nExport Core_Base_Spec_Haxint.\n\nDefinition v_BITS_128_ : t_HaxInt := 128.\nDefinition v_BITS_16_ : t_HaxInt := 16.\nDefinition v_BITS_32_ : t_HaxInt := 32.\nDefinition v_BITS_64_ : t_HaxInt := 64.\nDefinition v_BITS_8_ : t_HaxInt := 8.\n\nDefinition v_WORDSIZE_128_ : t_HaxInt := N.pow 2 128.\nDefinition v_WORDSIZE_128_SUB_1_ : t_HaxInt := N.pow 2 128 - 1.\n\nDefinition v_WORDSIZE_16_ : t_HaxInt := N.pow 2 16.\nDefinition v_WORDSIZE_16_SUB_1_ : t_HaxInt := N.pow 2 16.\n\nDefinition v_WORDSIZE_32_ : t_HaxInt := N.pow 2 32.\nDefinition v_WORDSIZE_32_SUB_1_ : t_HaxInt := N.pow 2 32 - 1.\n\nDefinition v_WORDSIZE_4_ : t_HaxInt := N.pow 2 4.\nDefinition v_WORDSIZE_4_SUB_1_ : t_HaxInt := N.pow 2 4 - 1.\n\nDefinition v_WORDSIZE_64_ : t_HaxInt := N.pow 2 64.\nDefinition v_WORDSIZE_64_SUB_1_ : t_HaxInt := N.pow 2 64 - 1.\n\nDefinition v_WORDSIZE_8_ : t_HaxInt := N.pow 2 8.\nDefinition v_WORDSIZE_8_SUB_1_ : t_HaxInt := N.pow 2 8 - 1.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Haxint.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nNotation \"'t_HaxInt'\" := N.\n\nDefinition v_HaxInt_ONE : t_HaxInt := 1.\nDefinition v_HaxInt_TWO : t_HaxInt := 2.\nDefinition v_HaxInt_ZERO : t_HaxInt := 0.\n\nDefinition div2 (s : t_HaxInt) : t_HaxInt := s / 2.\n\nDefinition is_zero (s : t_HaxInt) : bool := match s with | N0 => true | _ => false end.\n\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Seq.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nNotation \"'t_Seq'\" := list.\n\nNotation \"'t_LIST'\" := list.\nNotation \"'LIST_NIL'\" := nil.\nNotation \"'LIST_CONS'\" := cons.\n\nNotation \"'nil'\" := nil.\nNotation \"'cons'\" := (fun x y => cons y x).\n\nDefinition match_list {T} (x : t_Seq T) : t_LIST T := x.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Unary.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec_Haxint.\nExport Core_Base_Spec_Haxint.\n\nNotation \"'t_Unary'\" := nat.\n\nNotation \"'t_UNARY'\" := nat.\nNotation \"'UNARY_ZERO'\" := O.\nNotation \"'UNARY_SUCC'\" := S.\n\nDefinition unary_from_int (x : t_HaxInt) : t_Unary := N.to_nat x.\nDefinition unary_to_int (s : t_Unary) : t_HaxInt := N.of_nat s.\n\nDefinition pred (x : t_Unary) : t_Unary := Nat.pred x.\n\nDefinition match_unary (s : t_Unary) : t_UNARY := s.\n\nDefinition succ (x : t_Unary) : t_Unary := S x.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/spec/Core_Base_Spec_Z.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec_Binary.\nExport Core_Base_Spec_Binary.\n\nNotation \"'t_Z'\" := Z.\nNotation \"'Z_NEG'\" := Zneg.\nNotation \"'Z_ZERO'\" := Z0.\nNotation \"'Z_POS'\" := Zpos.\n\nDefinition v_Z_ONE : t_Z := 1%Z.\nDefinition v_Z_TWO : t_Z := 2%Z.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Primitive.\nExport Core_Primitive.\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\nFrom Core Require Import Core_Option.\nExport Core_Option.\n\nFrom Core Require Import Core_Array_Rec_bundle_579704328.\nExport Core_Array_Rec_bundle_579704328.\n\nFrom Core Require Import Core_Ops.\nExport Core_Ops.\n\nFrom Core Require Import Core_Ops_Index.\nExport Core_Ops_Index.\n\nFrom Core Require Import NumberNotation.\nExport NumberNotation.\n\nFrom Core Require Import TODO.\nExport TODO.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Array.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Ops_Index.\nExport Core_Ops_Index.\n\n(* From Core Require Import Core_Ops_IndexMut. *)\n(* Export Core_Ops (t_IndexMut). *)\n\nFrom Core Require Import Core_Primitive.\nExport Core_Primitive.\n\nFrom Core Require Import Core_Array_Iter.\nExport Core_Array_Iter.\n\nNotation \"'t_TryFromSliceError'\" := (t_TryFromSliceError).\n\nNotation \"'TryFromSliceError_0'\" := (TryFromSliceError_0).\n\n(* NotImplementedYet *)\n\n(* Notation \"'impl_2'\" := (impl_2). *)\n\n(* Notation \"'impl_1'\" := (impl_1). *)\n\n(* Notation \"'impl'\" := (impl). *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Array_Iter.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Num.\nExport Core_Num.\n\n\n\nFrom Core Require Import Core_Ops_Index_range.\nExport Core_Ops_Index_range.\n\nFrom Core Require Import Core_Ops_Range.\nExport Core_Ops_Range.\n\nFrom Core Require Import Core_Primitive.\nExport Core_Primitive.\n\n(* From Core Require Import Core_Iter (t_IntoIterator). *)\n(* Export Core_Iter (t_IntoIterator). *)\n\nFrom Core Require Import Core_Clone.\nExport Core_Clone.\n\nFrom Core Require Import Core_Base.\nExport Core_Base.\n\n(* From Core Require Import hax_lib. *)\n(* Export hax_lib. *)\n\nRecord t_IntoIter (v_T : Type) (v_N : t_usize) `{t_Sized (v_T)} : Type :=\n  {\n    IntoIter_f_data : t_Array ((v_T)) (v_N);\n    IntoIter_f_alive : t_IndexRange;\n  }.\nArguments Build_t_IntoIter (_) (_) {_}.\nArguments IntoIter_f_data {_} {_} {_}.\nArguments IntoIter_f_alive {_} {_} {_}.\n#[export] Instance settable_t_IntoIter `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} : Settable _ :=\n  settable! (Build_t_IntoIter v_T v_N) <IntoIter_f_data; IntoIter_f_alive>.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Array_Rec_bundle_579704328.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Marker.\n\nFrom Core Require Import Core_Convert.\n\nFrom Core Require Import Core_Base_interface_Int.\n\nFrom Core Require Import ControlFlow.\n\nRecord t_i128 : Type :=\n  {\n    i128_0 : t_I128;\n  }.\nArguments Build_t_i128.\nArguments i128_0.\n#[export] Instance settable_t_i128 : Settable _ :=\n  settable! (Build_t_i128) <i128_0>.\nNotation \"'i128'\" := Build_t_i128.\n\n#[global] Instance t_Clone_173398349 : t_Clone ((t_i128)) :=\n  {\n    Clone_f_clone := fun  (self : t_i128)=> self;\n  }.\n\nRecord t_i16 : Type :=\n  {\n    i16_0 : t_I16;\n  }.\nArguments Build_t_i16.\nArguments i16_0.\n#[export] Instance settable_t_i16 : Settable _ :=\n  settable! (Build_t_i16) <i16_0>.\nNotation \"'i16'\" := Build_t_i16.\n\n#[global] Instance t_Clone_192670426 : t_Clone ((t_i16)) :=\n  {\n    Clone_f_clone := fun  (self : t_i16)=>\n      Build_t_i16 (Clone_f_clone (i16_0 self));\n  }.\n\nRecord t_i32 : Type :=\n  {\n    i32_0 : t_I32;\n  }.\nArguments Build_t_i32.\nArguments i32_0.\n#[export] Instance settable_t_i32 : Settable _ :=\n  settable! (Build_t_i32) <i32_0>.\nNotation \"'i32'\" := Build_t_i32.\n\n#[global] Instance t_Clone_502683757 : t_Clone ((t_i32)) :=\n  {\n    Clone_f_clone := fun  (self : t_i32)=>\n      Build_t_i32 (Clone_f_clone (i32_0 self));\n  }.\n\nRecord t_i64 : Type :=\n  {\n    i64_0 : t_I64;\n  }.\nArguments Build_t_i64.\nArguments i64_0.\n#[export] Instance settable_t_i64 : Settable _ :=\n  settable! 
(Build_t_i64) <i64_0>.\nNotation \"'i64'\" := Build_t_i64.\n\n#[global] Instance t_Clone_208076318 : t_Clone ((t_i64)) :=\n  {\n    Clone_f_clone := fun  (self : t_i64)=>\n      Build_t_i64 (Clone_f_clone (i64_0 self));\n  }.\n\nRecord t_i8 : Type :=\n  {\n    i8_0 : t_I8;\n  }.\nArguments Build_t_i8.\nArguments i8_0.\n#[export] Instance settable_t_i8 : Settable _ :=\n  settable! (Build_t_i8) <i8_0>.\nNotation \"'i8'\" := Build_t_i8.\n\n#[global] Instance t_Clone_654126073 : t_Clone ((t_i8)) :=\n  {\n    Clone_f_clone := fun  (self : t_i8)=>\n      Build_t_i8 (Clone_f_clone (i8_0 self));\n  }.\n\nRecord t_isize : Type :=\n  {\n    isize_0 : t_I64;\n  }.\nArguments Build_t_isize.\nArguments isize_0.\n#[export] Instance settable_t_isize : Settable _ :=\n  settable! (Build_t_isize) <isize_0>.\nNotation \"'isize'\" := Build_t_isize.\n\n#[global] Instance t_Clone_36465747 : t_Clone ((t_isize)) :=\n  {\n    Clone_f_clone := fun  (self : t_isize)=>\n      Build_t_isize (Clone_f_clone (isize_0 self));\n  }.\n\n#[global] Instance t_From_200584765 : t_From ((t_isize)) ((t_i64)) :=\n  {\n    From_f_from := fun  (x : t_i64)=>\n      Build_t_isize (Into_f_into (i64_0 x));\n  }.\n\n#[global] Instance t_From_705632684 : t_From ((t_i64)) ((t_isize)) :=\n  {\n    From_f_from := fun  (x : t_isize)=>\n      Build_t_i64 (Into_f_into (isize_0 x));\n  }.\n\nRecord t_u128 : Type :=\n  {\n    u128_0 : t_U128;\n  }.\nArguments Build_t_u128.\nArguments u128_0.\n#[export] Instance settable_t_u128 : Settable _ :=\n  settable! (Build_t_u128) <u128_0>.\nNotation \"'u128'\" := Build_t_u128.\n\nDefinition from_le715594649 (x : t_u128) : t_u128 :=\n  x.\n\nDefinition to_le902648378 (self : t_u128) : t_u128 :=\n  self.\n\nRecord t_u16 : Type :=\n  {\n    u16_0 : t_U16;\n  }.\nArguments Build_t_u16.\nArguments u16_0.\n#[export] Instance settable_t_u16 : Settable _ :=\n  settable! 
(Build_t_u16) <u16_0>.\nNotation \"'u16'\" := Build_t_u16.\n\nDefinition from_le793045973 (x : t_u16) : t_u16 :=\n  x.\n\nDefinition to_le1012469456 (self : t_u16) : t_u16 :=\n  self.\n\nRecord t_u32 : Type :=\n  {\n    u32_0 : t_U32;\n  }.\nArguments Build_t_u32.\nArguments u32_0.\n#[export] Instance settable_t_u32 : Settable _ :=\n  settable! (Build_t_u32) <u32_0>.\nNotation \"'u32'\" := Build_t_u32.\n\nDefinition from_le706338679 (x : t_u32) : t_u32 :=\n  x.\n\nDefinition to_le724624277 (self : t_u32) : t_u32 :=\n  self.\n\nRecord t_u64 : Type :=\n  {\n    u64_0 : t_U64;\n  }.\nArguments Build_t_u64.\nArguments u64_0.\n#[export] Instance settable_t_u64 : Settable _ :=\n  settable! (Build_t_u64) <u64_0>.\nNotation \"'u64'\" := Build_t_u64.\n\nDefinition from_le435089922 (x : t_u64) : t_u64 :=\n  x.\n\nDefinition to_le2703875 (self : t_u64) : t_u64 :=\n  self.\n\nRecord t_u8 : Type :=\n  {\n    u8_0 : t_U8;\n  }.\nArguments Build_t_u8.\nArguments u8_0.\n#[export] Instance settable_t_u8 : Settable _ :=\n  settable! (Build_t_u8) <u8_0>.\nNotation \"'u8'\" := Build_t_u8.\n\nDefinition from_le529489651 (x : t_u8) : t_u8 :=\n  x.\n\nDefinition to_le523556665 (self : t_u8) : t_u8 :=\n  self.\n\nRecord t_usize : Type :=\n  {\n    usize_0 : t_U64;\n  }.\nArguments Build_t_usize.\nArguments usize_0.\n#[export] Instance settable_t_usize : Settable _ :=\n  settable! 
(Build_t_usize) <usize_0>.\nNotation \"'usize'\" := Build_t_usize.\n\nDefinition from_le418743864 (x : t_usize) : t_usize :=\n  x.\n\nDefinition to_le946822077 (self : t_usize) : t_usize :=\n  self.\n\n#[global] Instance t_From_1035345737 : t_From ((t_usize)) ((t_u64)) :=\n  {\n    From_f_from := fun  (x : t_u64)=>\n      Build_t_usize (Into_f_into (u64_0 x));\n  }.\n\n#[global] Instance t_From_478985084 : t_From ((t_u64)) ((t_usize)) :=\n  {\n    From_f_from := fun  (x : t_usize)=>\n      Build_t_u64 (Into_f_into (usize_0 x));\n  }.\n\nClass v_Sealed (v_Self : Type) : Type :=\n  {\n  }.\nArguments v_Sealed (_).\n\n#[global] Instance v_Sealed_639968800 : v_Sealed ((t_usize)) :=\n  {\n  }.\n\n#[global] Instance v_Sealed_740757788 : v_Sealed ((t_Range ((t_usize)))) :=\n  {\n  }.\n\n(* Instance v_Sealed_1056036517 : v_Sealed ((t_RangeTo ((t_usize)))) := *)\n(*   { *)\n(*   }. *)\n\n(* Instance v_Sealed_277245654 : v_Sealed ((t_RangeFrom ((t_usize)))) := *)\n(*   { *)\n(*   }. *)\n\n(* Instance v_Sealed_1032594188 : v_Sealed ((t_RangeFull)) := *)\n(*   { *)\n(*   }. *)\n\n(* Instance v_Sealed_135080564 : v_Sealed ((t_RangeInclusive ((t_usize)))) := *)\n(*   { *)\n(*   }. *)\n\n(* Instance v_Sealed_919294089 : v_Sealed ((t_RangeToInclusive ((t_usize)))) := *)\n(*   { *)\n(*   }. *)\n\n(* Instance v_Sealed_254412259 : v_Sealed (((t_Bound ((t_usize))*t_Bound ((t_usize))))) := *)\n(*   { *)\n(*   }. *)\n\n(* Instance v_Sealed_463870686 : v_Sealed ((t_IndexRange)) := *)\n(*   { *)\n(*   }. 
*)\n\nDefinition v_BITS80497669 : t_u32 :=\n  Build_t_u32 (impl_97__BITS).\n\nDefinition v_MAX626626007 : t_i8 :=\n  Build_t_i8 (Constants_f_MAX).\n\nDefinition v_MIN19747349 : t_i8 :=\n  Build_t_i8 (Constants_f_MIN).\n\nDefinition v_BITS421056295 : t_u32 :=\n  Build_t_u32 (impl_83__BITS).\n\nDefinition v_MAX474501300 : t_i16 :=\n  Build_t_i16 (Constants_f_MAX).\n\nDefinition v_MIN776391606 : t_i16 :=\n  Build_t_i16 (Constants_f_MIN).\n\nDefinition v_BITS465526498 : t_u32 :=\n  Build_t_u32 (impl_69__BITS).\n\nDefinition v_MAX106630818 : t_i32 :=\n  Build_t_i32 (Constants_f_MAX).\n\nDefinition v_MIN682967538 : t_i32 :=\n  Build_t_i32 (Constants_f_MIN).\n\nDefinition v_BITS419886578 : t_u32 :=\n  Build_t_u32 (impl_55__BITS).\n\nDefinition v_MAX527043787 : t_i64 :=\n  Build_t_i64 (Constants_f_MAX).\n\nDefinition v_MIN654206259 : t_i64 :=\n  Build_t_i64 (Constants_f_MIN).\n\nDefinition v_BITS992667165 : t_u32 :=\n  Build_t_u32 (impl_41__BITS).\n\nDefinition v_MAX375377319 : t_i128 :=\n  Build_t_i128 (Constants_f_MAX).\n\nDefinition v_MIN79612531 : t_i128 :=\n  Build_t_i128 (Constants_f_MIN).\n\nDefinition v_BITS211584016 : t_u32 :=\n  Build_t_u32 (impl_55__BITS).\n\nDefinition v_MAX937003029 : t_isize :=\n  Build_t_isize (Constants_f_MAX).\n\nDefinition v_MIN1017039533 : t_isize :=\n  Build_t_isize (Constants_f_MIN).\n\nDefinition v_BITS690311813 : t_u32 :=\n  Build_t_u32 (impl_219__BITS).\n\nDefinition v_MAX310118176 : t_u8 :=\n  Build_t_u8 (Constants_f_MAX).\n\nDefinition v_MIN41851434 : t_u8 :=\n  Build_t_u8 (Constants_f_MIN).\n\nDefinition v_BITS277333551 : t_u32 :=\n  Build_t_u32 (impl_192__BITS).\n\nDefinition v_MAX487295910 : t_u16 :=\n  Build_t_u16 (Constants_f_MAX).\n\nDefinition v_MIN592300287 : t_u16 :=\n  Build_t_u16 (Constants_f_MIN).\n\nDefinition v_BITS473478051 : t_u32 :=\n  Build_t_u32 (impl_165__BITS).\n\nDefinition v_MAX826434525 : t_u32 :=\n  Build_t_u32 (Constants_f_MAX).\n\nDefinition v_MIN932777089 : t_u32 :=\n  Build_t_u32 
(Constants_f_MIN).\n\nDefinition v_BITS177666292 : t_u32 :=\n  Build_t_u32 (impl_138__BITS).\n\nDefinition v_MAX815180633 : t_u64 :=\n  Build_t_u64 (Constants_f_MAX).\n\nDefinition v_MIN631333594 : t_u64 :=\n  Build_t_u64 (Constants_f_MIN).\n\nDefinition v_BITS136999051 : t_u32 :=\n  Build_t_u32 (impl_111__BITS).\n\nDefinition v_MAX404543799 : t_u128 :=\n  Build_t_u128 (Constants_f_MAX).\n\nDefinition v_MIN668621698 : t_u128 :=\n  Build_t_u128 (Constants_f_MIN).\n\nDefinition v_BITS229952196 : t_u32 :=\n  Build_t_u32 (impl_138__BITS).\n\nDefinition v_MAX750570916 : t_usize :=\n  Build_t_usize (Constants_f_MAX).\n\nDefinition v_MIN861571008 : t_usize :=\n  Build_t_usize (Constants_f_MIN).\n\n#[global] Instance t_Clone_832469823 : t_Clone ((t_u8)) :=\n  {\n    Clone_f_clone := fun  (self : t_u8)=>\n      Build_t_u8 (Clone_f_clone (u8_0 self));\n  }.\n\n#[global] Instance t_Clone_562622454 : t_Clone ((t_u16)) :=\n  {\n    Clone_f_clone := fun  (self : t_u16)=>\n      Build_t_u16 (Clone_f_clone (u16_0 self));\n  }.\n\n#[global] Instance t_Clone_1034302141 : t_Clone ((t_u32)) :=\n  {\n    Clone_f_clone := fun  (self : t_u32)=>\n      Build_t_u32 (Clone_f_clone (u32_0 self));\n  }.\n\n#[global] Instance t_Clone_189576787 : t_Clone ((t_u64)) :=\n  {\n    Clone_f_clone := fun  (self : t_u64)=>\n      Build_t_u64 (Clone_f_clone (u64_0 self));\n  }.\n\n#[global] Instance t_Clone_296673181 : t_Clone ((t_u128)) :=\n  {\n    Clone_f_clone := fun  (self : t_u128)=>\n      Build_t_u128 (Clone_f_clone (u128_0 self));\n  }.\n\n#[global] Instance t_Clone_466142540 : t_Clone ((t_usize)) :=\n  {\n    Clone_f_clone := fun  (self : t_usize)=>\n      Build_t_usize (Clone_f_clone (usize_0 self));\n  }.\n\nClass v_SliceIndex (v_Self : Type) (v_T : Type) `{v_Sealed (v_Self)} : Type :=\n  {\n    SliceIndex_f_Output : Type;\n    SliceIndex_f_index : v_Self -> v_T -> SliceIndex_f_Output;\n  }.\nArguments v_SliceIndex (_) (_) {_}.\n\n#[global] Instance t_PartialEq_234431236 : t_PartialEq 
((t_u8)) ((t_u8)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_u8) (rhs : t_u8)=>\n      PartialEq_f_eq (u8_0 self) (u8_0 rhs);\n    PartialEq_f_ne := fun  (self : t_u8) (rhs : t_u8)=>\n      negb (PartialEq_f_eq (u8_0 self) (u8_0 rhs));\n  }.\n\n#[global] Instance t_PartialOrd_835131600 : t_PartialOrd ((t_u8)) ((t_u8)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_u8) (rhs : t_u8)=>\n      PartialOrd_f_partial_cmp (u8_0 self) (u8_0 rhs);\n    PartialOrd_f_lt := fun  (self : t_u8) (rhs : t_u8)=>\n      match PartialOrd_f_partial_cmp (u8_0 self) (u8_0 rhs) with\n      | Option_Some (Ordering_Less) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_u8) (rhs : t_u8)=>\n      match PartialOrd_f_partial_cmp (u8_0 self) (u8_0 rhs) with\n      | Option_Some (Ordering_Less\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_u8) (rhs : t_u8)=>\n      match PartialOrd_f_partial_cmp (u8_0 self) (u8_0 rhs) with\n      | Option_Some (Ordering_Greater) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_u8) (rhs : t_u8)=>\n      match PartialOrd_f_partial_cmp (u8_0 self) (u8_0 rhs) with\n      | Option_Some (Ordering_Greater\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_PartialEq_965259828 : t_PartialEq ((t_u16)) ((t_u16)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_u16) (rhs : t_u16)=>\n      PartialEq_f_eq (u16_0 self) (u16_0 rhs);\n    PartialEq_f_ne := fun  (self : t_u16) (rhs : t_u16)=>\n      negb (PartialEq_f_eq (u16_0 self) (u16_0 rhs));\n  }.\n\n#[global] Instance t_PartialOrd_116974173 : t_PartialOrd ((t_u16)) ((t_u16)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_u16) (rhs : t_u16)=>\n      PartialOrd_f_partial_cmp (u16_0 self) (u16_0 rhs);\n    PartialOrd_f_lt := fun  (self : t_u16) (rhs : t_u16)=>\n      match 
PartialOrd_f_partial_cmp (u16_0 self) (u16_0 rhs) with\n      | Option_Some (Ordering_Less) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_u16) (rhs : t_u16)=>\n      match PartialOrd_f_partial_cmp (u16_0 self) (u16_0 rhs) with\n      | Option_Some (Ordering_Less\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_u16) (rhs : t_u16)=>\n      match PartialOrd_f_partial_cmp (u16_0 self) (u16_0 rhs) with\n      | Option_Some (Ordering_Greater) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_u16) (rhs : t_u16)=>\n      match PartialOrd_f_partial_cmp (u16_0 self) (u16_0 rhs) with\n      | Option_Some (Ordering_Greater\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_PartialEq_739399974 : t_PartialEq ((t_u32)) ((t_u32)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_u32) (rhs : t_u32)=>\n      PartialEq_f_eq (u32_0 self) (u32_0 rhs);\n    PartialEq_f_ne := fun  (self : t_u32) (rhs : t_u32)=>\n      negb (PartialEq_f_eq (u32_0 self) (u32_0 rhs));\n  }.\n\n#[global] Instance t_PartialOrd_553141371 : t_PartialOrd ((t_u32)) ((t_u32)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_u32) (rhs : t_u32)=>\n      PartialOrd_f_partial_cmp (u32_0 self) (u32_0 rhs);\n    PartialOrd_f_lt := fun  (self : t_u32) (rhs : t_u32)=>\n      match PartialOrd_f_partial_cmp (u32_0 self) (u32_0 rhs) with\n      | Option_Some (Ordering_Less) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_u32) (rhs : t_u32)=>\n      match PartialOrd_f_partial_cmp (u32_0 self) (u32_0 rhs) with\n      | Option_Some (Ordering_Less\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_u32) (rhs : t_u32)=>\n      match PartialOrd_f_partial_cmp (u32_0 self) (u32_0 rhs) 
with\n      | Option_Some (Ordering_Greater) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_u32) (rhs : t_u32)=>\n      match PartialOrd_f_partial_cmp (u32_0 self) (u32_0 rhs) with\n      | Option_Some (Ordering_Greater\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_PartialEq_464367537 : t_PartialEq ((t_u64)) ((t_u64)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_u64) (rhs : t_u64)=>\n      PartialEq_f_eq (u64_0 self) (u64_0 rhs);\n    PartialEq_f_ne := fun  (self : t_u64) (rhs : t_u64)=>\n      negb (PartialEq_f_eq (u64_0 self) (u64_0 rhs));\n  }.\n\n#[global] Instance t_PartialOrd_207997255 : t_PartialOrd ((t_u64)) ((t_u64)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_u64) (rhs : t_u64)=>\n      PartialOrd_f_partial_cmp (u64_0 self) (u64_0 rhs);\n    PartialOrd_f_lt := fun  (self : t_u64) (rhs : t_u64)=>\n      match PartialOrd_f_partial_cmp (u64_0 self) (u64_0 rhs) with\n      | Option_Some (Ordering_Less) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_u64) (rhs : t_u64)=>\n      match PartialOrd_f_partial_cmp (u64_0 self) (u64_0 rhs) with\n      | Option_Some (Ordering_Less\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_u64) (rhs : t_u64)=>\n      match PartialOrd_f_partial_cmp (u64_0 self) (u64_0 rhs) with\n      | Option_Some (Ordering_Greater) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_u64) (rhs : t_u64)=>\n      match PartialOrd_f_partial_cmp (u64_0 self) (u64_0 rhs) with\n      | Option_Some (Ordering_Greater\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_PartialEq_876938738 : t_PartialEq ((t_u128)) ((t_u128)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_u128) (rhs : t_u128)=>\n      
PartialEq_f_eq (u128_0 self) (u128_0 rhs);\n    PartialEq_f_ne := fun  (self : t_u128) (rhs : t_u128)=>\n      negb (PartialEq_f_eq (u128_0 self) (u128_0 rhs));\n  }.\n\n#[global] Instance t_PartialOrd_566729496 : t_PartialOrd ((t_u128)) ((t_u128)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_u128) (rhs : t_u128)=>\n      PartialOrd_f_partial_cmp (u128_0 self) (u128_0 rhs);\n    PartialOrd_f_lt := fun  (self : t_u128) (rhs : t_u128)=>\n      match PartialOrd_f_partial_cmp (u128_0 self) (u128_0 rhs) with\n      | Option_Some (Ordering_Less) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_u128) (rhs : t_u128)=>\n      match PartialOrd_f_partial_cmp (u128_0 self) (u128_0 rhs) with\n      | Option_Some (Ordering_Less\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_u128) (rhs : t_u128)=>\n      match PartialOrd_f_partial_cmp (u128_0 self) (u128_0 rhs) with\n      | Option_Some (Ordering_Greater) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_u128) (rhs : t_u128)=>\n      match PartialOrd_f_partial_cmp (u128_0 self) (u128_0 rhs) with\n      | Option_Some (Ordering_Greater\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_PartialEq_1011013145 : t_PartialEq ((t_usize)) ((t_usize)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_usize) (rhs : t_usize)=>\n      PartialEq_f_eq (usize_0 self) (usize_0 rhs);\n    PartialEq_f_ne := fun  (self : t_usize) (rhs : t_usize)=>\n      negb (PartialEq_f_eq (usize_0 self) (usize_0 rhs));\n  }.\n\n#[global] Instance t_PartialOrd_917114071 : t_PartialOrd ((t_usize)) ((t_usize)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_usize) (rhs : t_usize)=>\n      PartialOrd_f_partial_cmp (usize_0 self) (usize_0 rhs);\n    PartialOrd_f_lt := fun  (self : t_usize) (rhs : t_usize)=>\n      match 
PartialOrd_f_partial_cmp (usize_0 self) (usize_0 rhs) with\n      | Option_Some (Ordering_Less) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_usize) (rhs : t_usize)=>\n      match PartialOrd_f_partial_cmp (usize_0 self) (usize_0 rhs) with\n      | Option_Some (Ordering_Less\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_usize) (rhs : t_usize)=>\n      match PartialOrd_f_partial_cmp (usize_0 self) (usize_0 rhs) with\n      | Option_Some (Ordering_Greater) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_usize) (rhs : t_usize)=>\n      match PartialOrd_f_partial_cmp (usize_0 self) (usize_0 rhs) with\n      | Option_Some (Ordering_Greater\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_PartialEq_515285814 : t_PartialEq ((t_i8)) ((t_i8)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_i8) (rhs : t_i8)=>\n      PartialEq_f_eq (i8_0 self) (i8_0 rhs);\n    PartialEq_f_ne := fun  (self : t_i8) (rhs : t_i8)=>\n      negb (PartialEq_f_eq (i8_0 self) (i8_0 rhs));\n  }.\n\n#[global] Instance t_PartialOrd_610141491 : t_PartialOrd ((t_i8)) ((t_i8)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_i8) (rhs : t_i8)=>\n      PartialOrd_f_partial_cmp (i8_0 self) (i8_0 rhs);\n    PartialOrd_f_lt := fun  (self : t_i8) (rhs : t_i8)=>\n      match PartialOrd_f_partial_cmp (i8_0 self) (i8_0 rhs) with\n      | Option_Some (Ordering_Less) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_i8) (rhs : t_i8)=>\n      match PartialOrd_f_partial_cmp (i8_0 self) (i8_0 rhs) with\n      | Option_Some (Ordering_Less\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_i8) (rhs : t_i8)=>\n      match PartialOrd_f_partial_cmp (i8_0 self) (i8_0 rhs) 
with\n      | Option_Some (Ordering_Greater) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_i8) (rhs : t_i8)=>\n      match PartialOrd_f_partial_cmp (i8_0 self) (i8_0 rhs) with\n      | Option_Some (Ordering_Greater\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_PartialEq_341364762 : t_PartialEq ((t_i16)) ((t_i16)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_i16) (rhs : t_i16)=>\n      PartialEq_f_eq (i16_0 self) (i16_0 rhs);\n    PartialEq_f_ne := fun  (self : t_i16) (rhs : t_i16)=>\n      negb (PartialEq_f_eq (i16_0 self) (i16_0 rhs));\n  }.\n\n#[global] Instance t_PartialOrd_685280672 : t_PartialOrd ((t_i16)) ((t_i16)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_i16) (rhs : t_i16)=>\n      PartialOrd_f_partial_cmp (i16_0 self) (i16_0 rhs);\n    PartialOrd_f_lt := fun  (self : t_i16) (rhs : t_i16)=>\n      match PartialOrd_f_partial_cmp (i16_0 self) (i16_0 rhs) with\n      | Option_Some (Ordering_Less) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_i16) (rhs : t_i16)=>\n      match PartialOrd_f_partial_cmp (i16_0 self) (i16_0 rhs) with\n      | Option_Some (Ordering_Less\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_i16) (rhs : t_i16)=>\n      match PartialOrd_f_partial_cmp (i16_0 self) (i16_0 rhs) with\n      | Option_Some (Ordering_Greater) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_i16) (rhs : t_i16)=>\n      match PartialOrd_f_partial_cmp (i16_0 self) (i16_0 rhs) with\n      | Option_Some (Ordering_Greater\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_PartialEq_335582486 : t_PartialEq ((t_i32)) ((t_i32)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_i32) (rhs : t_i32)=>\n      
PartialEq_f_eq (i32_0 self) (i32_0 rhs);\n    PartialEq_f_ne := fun  (self : t_i32) (rhs : t_i32)=>\n      negb (PartialEq_f_eq (i32_0 self) (i32_0 rhs));\n  }.\n\n#[global] Instance t_PartialOrd_776800970 : t_PartialOrd ((t_i32)) ((t_i32)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_i32) (rhs : t_i32)=>\n      PartialOrd_f_partial_cmp (i32_0 self) (i32_0 rhs);\n    PartialOrd_f_lt := fun  (self : t_i32) (rhs : t_i32)=>\n      match PartialOrd_f_partial_cmp (i32_0 self) (i32_0 rhs) with\n      | Option_Some (Ordering_Less) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_i32) (rhs : t_i32)=>\n      match PartialOrd_f_partial_cmp (i32_0 self) (i32_0 rhs) with\n      | Option_Some (Ordering_Less\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_i32) (rhs : t_i32)=>\n      match PartialOrd_f_partial_cmp (i32_0 self) (i32_0 rhs) with\n      | Option_Some (Ordering_Greater) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_i32) (rhs : t_i32)=>\n      match PartialOrd_f_partial_cmp (i32_0 self) (i32_0 rhs) with\n      | Option_Some (Ordering_Greater\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_PartialEq_1019995697 : t_PartialEq ((t_i64)) ((t_i64)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_i64) (rhs : t_i64)=>\n      PartialEq_f_eq (i64_0 self) (i64_0 rhs);\n    PartialEq_f_ne := fun  (self : t_i64) (rhs : t_i64)=>\n      negb (PartialEq_f_eq (i64_0 self) (i64_0 rhs));\n  }.\n\n#[global] Instance t_PartialOrd_354028907 : t_PartialOrd ((t_i64)) ((t_i64)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_i64) (rhs : t_i64)=>\n      PartialOrd_f_partial_cmp (i64_0 self) (i64_0 rhs);\n    PartialOrd_f_lt := fun  (self : t_i64) (rhs : t_i64)=>\n      match PartialOrd_f_partial_cmp (i64_0 self) (i64_0 rhs) with\n      | 
Option_Some (Ordering_Less) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_i64) (rhs : t_i64)=>\n      match PartialOrd_f_partial_cmp (i64_0 self) (i64_0 rhs) with\n      | Option_Some (Ordering_Less\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_i64) (rhs : t_i64)=>\n      match PartialOrd_f_partial_cmp (i64_0 self) (i64_0 rhs) with\n      | Option_Some (Ordering_Greater) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_i64) (rhs : t_i64)=>\n      match PartialOrd_f_partial_cmp (i64_0 self) (i64_0 rhs) with\n      | Option_Some (Ordering_Greater\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_PartialEq_476424898 : t_PartialEq ((t_i128)) ((t_i128)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_i128) (rhs : t_i128)=>\n      PartialEq_f_eq (i128_0 self) (i128_0 rhs);\n    PartialEq_f_ne := fun  (self : t_i128) (rhs : t_i128)=>\n      negb (PartialEq_f_eq (i128_0 self) (i128_0 rhs));\n  }.\n\n#[global] Instance t_PartialOrd_532073533 : t_PartialOrd ((t_i128)) ((t_i128)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_i128) (rhs : t_i128)=>\n      PartialOrd_f_partial_cmp (i128_0 self) (i128_0 rhs);\n    PartialOrd_f_lt := fun  (self : t_i128) (rhs : t_i128)=>\n      match PartialOrd_f_partial_cmp (i128_0 self) (i128_0 rhs) with\n      | Option_Some (Ordering_Less) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_i128) (rhs : t_i128)=>\n      match PartialOrd_f_partial_cmp (i128_0 self) (i128_0 rhs) with\n      | Option_Some (Ordering_Less\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_i128) (rhs : t_i128)=>\n      match PartialOrd_f_partial_cmp (i128_0 self) (i128_0 rhs) with\n      | Option_Some 
(Ordering_Greater) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_i128) (rhs : t_i128)=>\n      match PartialOrd_f_partial_cmp (i128_0 self) (i128_0 rhs) with\n      | Option_Some (Ordering_Greater\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_PartialEq_675022234 : t_PartialEq ((t_isize)) ((t_isize)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_isize) (rhs : t_isize)=>\n      PartialEq_f_eq (isize_0 self) (isize_0 rhs);\n    PartialEq_f_ne := fun  (self : t_isize) (rhs : t_isize)=>\n      negb (PartialEq_f_eq (isize_0 self) (isize_0 rhs));\n  }.\n\n#[global] Instance t_PartialOrd_661215608 : t_PartialOrd ((t_isize)) ((t_isize)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_isize) (rhs : t_isize)=>\n      PartialOrd_f_partial_cmp (isize_0 self) (isize_0 rhs);\n    PartialOrd_f_lt := fun  (self : t_isize) (rhs : t_isize)=>\n      match PartialOrd_f_partial_cmp (isize_0 self) (isize_0 rhs) with\n      | Option_Some (Ordering_Less) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_isize) (rhs : t_isize)=>\n      match PartialOrd_f_partial_cmp (isize_0 self) (isize_0 rhs) with\n      | Option_Some (Ordering_Less\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_isize) (rhs : t_isize)=>\n      match PartialOrd_f_partial_cmp (isize_0 self) (isize_0 rhs) with\n      | Option_Some (Ordering_Greater) =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_isize) (rhs : t_isize)=>\n      match PartialOrd_f_partial_cmp (isize_0 self) (isize_0 rhs) with\n      | Option_Some (Ordering_Greater\n      | Ordering_Equal) =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_From_number_i8 : t_From t_i8 Z :=\n  {\n    From_f_from (x : Z) := Build_t_i8 (Build_t_I8 x)\n 
 }.\n\n#[global] Instance t_From_number_i16 : t_From t_i16 Z :=\n  {\n    From_f_from (x : Z) := Build_t_i16 (Build_t_I16 x)\n  }.\n\n#[global] Instance t_From_number_i32 : t_From t_i32 Z :=\n  {\n    From_f_from (x : Z) := Build_t_i32 (Build_t_I32 x)\n  }.\n\n#[global] Instance t_From_number_i64 : t_From t_i64 Z :=\n  {\n    From_f_from (x : Z) := Build_t_i64 (Build_t_I64 x)\n  }.\n\n#[global] Instance t_From_number_i128 : t_From t_i128 Z :=\n  {\n    From_f_from (x : Z) := Build_t_i128 (Build_t_I128 x)\n  }.\n\n#[global] Instance t_From_number_isize : t_From t_isize Z :=\n  {\n    From_f_from (x : Z) := Build_t_isize (Build_t_I64 x)\n  }.\n\n#[global] Instance t_From_number_Zi8 : t_From Z t_i8 :=\n  {\n    From_f_from (x : t_i8) := I8_f_v (i8_0 x)\n  }.\n\n#[global] Instance t_From_number_Zi16 : t_From Z t_i16 :=\n  {\n    From_f_from (x : t_i16) := I16_f_v (i16_0 x)\n  }.\n\n#[global] Instance t_From_number_Zi32 : t_From Z t_i32 :=\n  {\n    From_f_from (x : t_i32) := I32_f_v (i32_0 x)\n  }.\n\n#[global] Instance t_From_number_Zi64 : t_From Z t_i64 :=\n  {\n    From_f_from (x : t_i64) := I64_f_v (i64_0 x)\n  }.\n\n#[global] Instance t_From_number_Zi128 : t_From Z t_i128 :=\n  {\n    From_f_from (x : t_i128) := I128_f_v (i128_0 x)\n  }.\n\n#[global] Instance t_From_number_Zisize : t_From Z t_isize :=\n  {\n    From_f_from (x : t_isize) := I64_f_v (isize_0 x)\n  }.\n\nDefinition is_negative350273175 (self : t_i8) : bool :=\n  PartialOrd_f_lt (self) (Into_f_into (0)).\n\nDefinition is_positive286955196 (self : t_i8) : bool :=\n  PartialOrd_f_gt (self) (Into_f_into (0)).\n\nDefinition signum721334203 (self : t_i8) : t_i8 :=\n  if\n    PartialOrd_f_lt (Clone_f_clone (self)) (Into_f_into (0))\n  then\n    Into_f_into (-1)\n  else\n    if\n      PartialEq_f_eq (self) (Into_f_into (0))\n    then\n      Into_f_into (0)\n    else\n      Into_f_into (1).\n\nInstance t_From_687588567 : t_From ((t_i8)) ((t_i8)) :=\n  {\n    From_f_from := fun  (x : t_i8)=>\n      Into_f_into 
(I8_f_v (i8_0 x));\n  }.\n\nInstance t_From_257005484 : t_From ((t_i16)) ((t_i16)) :=\n  {\n    From_f_from := fun  (x : t_i16)=>\n      Build_t_i16 (Build_t_I16 (Into_f_into (x)));\n  }.\n\nDefinition is_negative477067241 (self : t_i16) : bool :=\n  PartialOrd_f_lt (self) (Into_f_into (0)).\n\nDefinition is_positive821581438 (self : t_i16) : bool :=\n  PartialOrd_f_gt (self) (Into_f_into (0)).\n\nDefinition signum243706004 (self : t_i16) : t_i16 :=\n  if\n    PartialOrd_f_lt (Clone_f_clone (self)) (Into_f_into (0))\n  then\n    Into_f_into (-1)\n  else\n    if\n      PartialEq_f_eq (self) (Into_f_into (0))\n    then\n      Into_f_into (0)\n    else\n      Into_f_into (1).\n\n(* Instance t_From_560870163 : t_From ((t_i16)) ((t_i16)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_i16)=> *)\n(*       Into_f_into (I16_f_v (i16_0 x)); *)\n(*   }. *)\n\n(* Instance t_From_17641682 : t_From ((t_i32)) ((t_i32)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_i32)=> *)\n(*       t_i32 (Build_t_I32 (Into_f_into (x))); *)\n(*   }. *)\n\nDefinition is_negative1035644813 (self : t_i32) : bool :=\n  PartialOrd_f_lt (self) (Into_f_into (0)).\n\nDefinition is_positive401652342 (self : t_i32) : bool :=\n  PartialOrd_f_gt (self) (Into_f_into (0)).\n\nDefinition signum323641039 (self : t_i32) : t_i32 :=\n  if\n    PartialOrd_f_lt (Clone_f_clone (self)) (Into_f_into (0))\n  then\n    Into_f_into (-1)\n  else\n    if\n      PartialEq_f_eq (self) (Into_f_into (0))\n    then\n      Into_f_into (0)\n    else\n      Into_f_into (1).\n\n(* Instance t_From_865467252 : t_From ((t_i32)) ((t_i32)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_i32)=> *)\n(*       Into_f_into (I32_f_v (i32_0 x)); *)\n(*   }. *)\n\n(* Instance t_From_881024429 : t_From ((t_i64)) ((t_i64)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_i64)=> *)\n(*       t_i64 (Build_t_I64 (Into_f_into (x))); *)\n(*   }. 
*)\n\nDefinition is_negative1066124578 (self : t_i64) : bool :=\n  PartialOrd_f_lt (self) (Into_f_into (0)).\n\nDefinition is_positive16569358 (self : t_i64) : bool :=\n  PartialOrd_f_gt (self) (Into_f_into (0)).\n\nDefinition signum582963664 (self : t_i64) : t_i64 :=\n  if\n    PartialOrd_f_lt (Clone_f_clone (self)) (Into_f_into (0))\n  then\n    Into_f_into (-1)\n  else\n    if\n      PartialEq_f_eq (self) (Into_f_into (0))\n    then\n      Into_f_into (0)\n    else\n      Into_f_into (1).\n\n(* Instance t_From_101582575 : t_From ((t_i64)) ((t_i64)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_i64)=> *)\n(*       Into_f_into (I64_f_v i64_0 x); *)\n(*   }. *)\n\n(* Instance t_From_954204920 : t_From ((t_i128)) ((t_i128)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_i128)=> *)\n(*       t_i128 (Build_t_I128 (Into_f_into (x))); *)\n(*   }. *)\n\nDefinition is_negative221698470 (self : t_i128) : bool :=\n  PartialOrd_f_lt (self) (Into_f_into (0)).\n\nDefinition is_positive883218309 (self : t_i128) : bool :=\n  PartialOrd_f_gt (self) (Into_f_into (0)).\n\nDefinition signum408800799 (self : t_i128) : t_i128 :=\n  if\n    PartialOrd_f_lt (Clone_f_clone (self)) (Into_f_into (0))\n  then\n    Into_f_into (-1)\n  else\n    if\n      PartialEq_f_eq (self) (Into_f_into (0))\n    then\n      Into_f_into (0)\n    else\n      Into_f_into (1).\n\n(* Instance t_From_515435087 : t_From ((t_i128)) ((t_i128)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_i128)=> *)\n(*       Into_f_into (I128_f_v i128_0 x); *)\n(*   }. *)\n\n(* Instance t_From_1044036214 : t_From ((t_isize)) ((t_isize)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_isize)=> *)\n(*       t_isize (Build_t_I64 (Into_f_into (x))); *)\n(*   }. 
*)\n\nDefinition is_negative693446369 (self : t_isize) : bool :=\n  PartialOrd_f_lt (self) (Into_f_into (0)).\n\nDefinition is_positive169998680 (self : t_isize) : bool :=\n  PartialOrd_f_gt (self) (Into_f_into (0)).\n\nDefinition signum91486536 (self : t_isize) : t_isize :=\n  if\n    PartialOrd_f_lt (Clone_f_clone (self)) (Into_f_into (0))\n  then\n    Into_f_into (-1)\n  else\n    if\n      PartialEq_f_eq (self) (Into_f_into (0))\n    then\n      Into_f_into (0)\n    else\n      Into_f_into (1).\n\n#[global] Instance t_From_202441647 : t_From ((t_isize)) ((t_isize)) :=\n  {\n    From_f_from := fun  (x : t_isize)=>\n      Into_f_into (I64_f_v (isize_0 x));\n  }.\n\n#[global] Instance t_From_100016775 : t_From ((t_i16)) ((t_i8)) :=\n  {\n    From_f_from := fun  (x : t_i8)=>\n      Build_t_i16 (Into_f_into (i8_0 x));\n  }.\n\n#[global] Instance t_From_964712142 : t_From ((t_i32)) ((t_i8)) :=\n  {\n    From_f_from := fun  (x : t_i8)=>\n      Build_t_i32 (Into_f_into (i8_0 x));\n  }.\n\n#[global] Instance t_From_512166668 : t_From ((t_i64)) ((t_i8)) :=\n  {\n    From_f_from := fun  (x : t_i8)=>\n      Build_t_i64 (Into_f_into (i8_0 x));\n  }.\n\n#[global] Instance t_From_95828634 : t_From ((t_i128)) ((t_i8)) :=\n  {\n    From_f_from := fun  (x : t_i8)=>\n      Build_t_i128 (Into_f_into (i8_0 x));\n  }.\n\n#[global] Instance t_From_48986939 : t_From ((t_isize)) ((t_i8)) :=\n  {\n    From_f_from := fun  (x : t_i8)=>\n      Build_t_isize (Into_f_into (i8_0 x));\n  }.\n\n#[global] Instance t_From_325010041 : t_From ((t_i8)) ((t_i16)) :=\n  {\n    From_f_from := fun  (x : t_i16)=>\n      Build_t_i8 (Into_f_into (i16_0 x));\n  }.\n\n#[global] Instance t_From_64357194 : t_From ((t_i32)) ((t_i16)) :=\n  {\n    From_f_from := fun  (x : t_i16)=>\n       Build_t_i32 (Into_f_into (i16_0 x));\n  }.\n\n#[global] Instance t_From_840335964 : t_From ((t_i64)) ((t_i16)) :=\n  {\n    From_f_from := fun  (x : t_i16)=>\n      Build_t_i64 (Into_f_into (i16_0 x));\n  }.\n\n#[global] 
Instance t_From_601385454 : t_From ((t_i128)) ((t_i16)) :=\n  {\n    From_f_from := fun  (x : t_i16)=>\n      Build_t_i128 (Into_f_into (i16_0 x));\n  }.\n\n#[global] Instance t_From_755383497 : t_From ((t_isize)) ((t_i16)) :=\n  {\n    From_f_from := fun  (x : t_i16)=>\n      Build_t_isize (Into_f_into (i16_0 x));\n  }.\n\n#[global] Instance t_From_926112880 : t_From ((t_i8)) ((t_i32)) :=\n  {\n    From_f_from := fun  (x : t_i32)=>\n      Build_t_i8 (Into_f_into (i32_0 x));\n  }.\n\n#[global] Instance t_From_81353160 : t_From ((t_i16)) ((t_i32)) :=\n  {\n    From_f_from := fun  (x : t_i32)=>\n      Build_t_i16 (Into_f_into (i32_0 x));\n  }.\n\n#[global] Instance t_From_549703007 : t_From ((t_i64)) ((t_i32)) :=\n  {\n    From_f_from := fun  (x : t_i32)=>\n      Build_t_i64 (Into_f_into (i32_0 x));\n  }.\n\n#[global] Instance t_From_1001458175 : t_From ((t_i128)) ((t_i32)) :=\n  {\n    From_f_from := fun  (x : t_i32)=>\n      Build_t_i128 (Into_f_into (i32_0 x));\n  }.\n\n#[global] Instance t_From_329934859 : t_From ((t_isize)) ((t_i32)) :=\n  {\n    From_f_from := fun  (x : t_i32)=>\n      Build_t_isize (Into_f_into (i32_0 x));\n  }.\n\n#[global] Instance t_From_381441019 : t_From ((t_i8)) ((t_i64)) :=\n  {\n    From_f_from := fun  (x : t_i64)=>\n      Build_t_i8 (Into_f_into (i64_0 x));\n  }.\n\n#[global] Instance t_From_728811179 : t_From ((t_i16)) ((t_i64)) :=\n  {\n    From_f_from := fun  (x : t_i64)=>\n      Build_t_i16 (Into_f_into (i64_0 x));\n  }.\n\n#[global] Instance t_From_1003839356 : t_From ((t_i32)) ((t_i64)) :=\n  {\n    From_f_from := fun  (x : t_i64)=>\n      Build_t_i32 (Into_f_into (i64_0 x));\n  }.\n\n#[global] Instance t_From_625109732 : t_From ((t_i128)) ((t_i64)) :=\n  {\n    From_f_from := fun  (x : t_i64)=>\n      Build_t_i128 (Into_f_into (i64_0 x));\n  }.\n\n#[global] Instance t_From_34424521 : t_From ((t_i8)) ((t_i128)) :=\n  {\n    From_f_from := fun  (x : t_i128)=>\n      Build_t_i8 (Into_f_into (i128_0 x));\n  }.\n\n#[global] Instance 
t_From_603602239 : t_From ((t_i16)) ((t_i128)) :=\n  {\n    From_f_from := fun  (x : t_i128)=>\n      Build_t_i16 (Into_f_into (i128_0 x));\n  }.\n\n#[global] Instance t_From_479038908 : t_From ((t_i32)) ((t_i128)) :=\n  {\n    From_f_from := fun  (x : t_i128)=>\n      Build_t_i32 (Into_f_into (i128_0 x));\n  }.\n\n#[global] Instance t_From_299745195 : t_From ((t_i64)) ((t_i128)) :=\n  {\n    From_f_from := fun  (x : t_i128)=>\n      Build_t_i64 (Into_f_into (i128_0 x));\n  }.\n\n#[global] Instance t_From_615821455 : t_From ((t_isize)) ((t_i128)) :=\n  {\n    From_f_from := fun  (x : t_i128)=>\n      Build_t_isize (Into_f_into (i128_0 x));\n  }.\n\n#[global] Instance t_From_376191918 : t_From ((t_i8)) ((t_isize)) :=\n  {\n    From_f_from := fun  (x : t_isize)=>\n      Build_t_i8 (Into_f_into (isize_0 x));\n  }.\n\n#[global] Instance t_From_649927535 : t_From ((t_i16)) ((t_isize)) :=\n  {\n    From_f_from := fun  (x : t_isize)=>\n      Build_t_i16 (Into_f_into (isize_0 x));\n  }.\n\n#[global] Instance t_From_395262437 : t_From ((t_i32)) ((t_isize)) :=\n  {\n    From_f_from := fun  (x : t_isize)=>\n      Build_t_i32 (Into_f_into (isize_0 x));\n  }.\n\n#[global] Instance t_From_218237752 : t_From ((t_i128)) ((t_isize)) :=\n  {\n    From_f_from := fun  (x : t_isize)=>\n      Build_t_i128 (Into_f_into (isize_0 x));\n  }.\n\nDefinition add_with_overflow_i128 (x : t_i128) (y : t_i128) : (t_i128*bool) :=\n  let overflow := z_add (Abstraction_f_lift (i128_0 x)) (Abstraction_f_lift (i128_0 y)) in\n  let res : t_I128 := Concretization_f_concretize (Clone_f_clone (overflow)) in\n  (Build_t_i128 (Clone_f_clone (res)),z_lt (Abstraction_f_lift (res)) (overflow)).\n\nDefinition add_with_overflow_i16 (x : t_i16) (y : t_i16) : (t_i16*bool) :=\n  let overflow := z_add (Abstraction_f_lift (i16_0 x)) (Abstraction_f_lift (i16_0 y)) in\n  let res : t_I16 := Concretization_f_concretize (Clone_f_clone (overflow)) in\n  (Build_t_i16 (Clone_f_clone (res)),z_lt (Abstraction_f_lift (res)) 
(overflow)).\n\nDefinition add_with_overflow_i32 (x : t_i32) (y : t_i32) : (t_i32*bool) :=\n  let overflow := z_add (Abstraction_f_lift (i32_0 x)) (Abstraction_f_lift (i32_0 y)) in\n  let res : t_I32 := Concretization_f_concretize (Clone_f_clone (overflow)) in\n  (Build_t_i32 (Clone_f_clone (res)),z_lt (Abstraction_f_lift (res)) (overflow)).\n\nDefinition add_with_overflow_i64 (x : t_i64) (y : t_i64) : (t_i64*bool) :=\n  let overflow := z_add (Abstraction_f_lift (i64_0 x)) (Abstraction_f_lift (i64_0 y)) in\n  let res : t_I64 := Concretization_f_concretize (Clone_f_clone (overflow)) in\n  (Build_t_i64 (Clone_f_clone (res)),z_lt (Abstraction_f_lift (res)) (overflow)).\n\nDefinition add_with_overflow_i8 (x : t_i8) (y : t_i8) : (t_i8*bool) :=\n  let overflow := z_add (Abstraction_f_lift (i8_0 x)) (Abstraction_f_lift (i8_0 y)) in\n  let res : t_I8 := Concretization_f_concretize (Clone_f_clone (overflow)) in\n  (Build_t_i8 (Clone_f_clone (res)),z_lt (Abstraction_f_lift (res)) (overflow)).\n\nDefinition add_with_overflow_isize (x : t_isize) (y : t_isize) : (t_isize*bool) :=\n  let overflow := z_add (Abstraction_f_lift (isize_0 x)) (Abstraction_f_lift (isize_0 y)) in\n  let res : t_I64 := Concretization_f_concretize (Clone_f_clone (overflow)) in\n  (Build_t_isize (Clone_f_clone (res)),z_lt (Abstraction_f_lift (res)) (overflow)).\n\nDefinition unchecked_add_i128 (x : t_i128) (y : t_i128) : t_i128 :=\n  Build_t_i128 (Build_t_I128 (z_add (Abstraction_f_lift (i128_0 x)) (Abstraction_f_lift (i128_0 y)))).\n\nDefinition unchecked_add_i16 (x : t_i16) (y : t_i16) : t_i16 :=\n  Build_t_i16 (Build_t_I16 (z_add (Abstraction_f_lift (i16_0 x)) (Abstraction_f_lift (i16_0 y)))).\n\nDefinition unchecked_add_i32 (x : t_i32) (y : t_i32) : t_i32 :=\n  Build_t_i32 (Build_t_I32 (z_add (Abstraction_f_lift (i32_0 x)) (Abstraction_f_lift (i32_0 y)))).\n\nDefinition unchecked_add_i64 (x : t_i64) (y : t_i64) : t_i64 :=\n  Build_t_i64 (Build_t_I64 (z_add (Abstraction_f_lift (i64_0 x)) 
(Abstraction_f_lift (i64_0 y)))).\n\nDefinition unchecked_add_i8 (x : t_i8) (y : t_i8) : t_i8 :=\n  Build_t_i8 (Build_t_I8 (z_add (Abstraction_f_lift (i8_0 x)) (Abstraction_f_lift (i8_0 y)))).\n\nDefinition unchecked_add_isize (x : t_isize) (y : t_isize) : t_isize :=\n  Build_t_isize (Build_t_I64 (z_add (Abstraction_f_lift (isize_0 x)) (Abstraction_f_lift (isize_0 y)))).\n\nDefinition unchecked_add_u128 (x : t_u128) (y : t_u128) : t_u128 :=\n  Build_t_u128 (Build_t_U128 (haxint_add (Abstraction_f_lift (u128_0 x)) (Abstraction_f_lift (u128_0 y)))).\n\nDefinition unchecked_add_u16 (x : t_u16) (y : t_u16) : t_u16 :=\n  Build_t_u16 (Build_t_U16 (haxint_add (Abstraction_f_lift (u16_0 x)) (Abstraction_f_lift (u16_0 y)))).\n\nDefinition unchecked_add_u32 (x : t_u32) (y : t_u32) : t_u32 :=\n  Build_t_u32 (Build_t_U32 (haxint_add (Abstraction_f_lift (u32_0 x)) (Abstraction_f_lift (u32_0 y)))).\n\nDefinition unchecked_add_u64 (x : t_u64) (y : t_u64) : t_u64 :=\n  Build_t_u64 (Build_t_U64 (haxint_add (Abstraction_f_lift (u64_0 x)) (Abstraction_f_lift (u64_0 y)))).\n\nDefinition unchecked_add_u8 (x : t_u8) (y : t_u8) : t_u8 :=\n  Build_t_u8 (Build_t_U8 (haxint_add (Abstraction_f_lift (u8_0 x)) (Abstraction_f_lift (u8_0 y)))).\n\nDefinition unchecked_add_usize (x : t_usize) (y : t_usize) : t_usize :=\n  Build_t_usize (Build_t_U64 (haxint_add (Abstraction_f_lift (usize_0 x)) (Abstraction_f_lift (usize_0 y)))).\n\nDefinition checked_add268751055 (self : t_u8) (rhs : t_u8) : t_Option ((t_u8)) :=\n  Option_Some (unchecked_add_u8 (self) (rhs)).\n\nDefinition checked_add132377399 (self : t_u16) (rhs : t_u16) : t_Option ((t_u16)) :=\n  Option_Some (unchecked_add_u16 (self) (rhs)).\n\nDefinition checked_add985437730 (self : t_u32) (rhs : t_u32) : t_Option ((t_u32)) :=\n  Option_Some (unchecked_add_u32 (self) (rhs)).\n\nDefinition checked_add586246465 (self : t_u64) (rhs : t_u64) : t_Option ((t_u64)) :=\n  Option_Some (unchecked_add_u64 (self) (rhs)).\n\nDefinition checked_add218978451 
(self : t_u128) (rhs : t_u128) : t_Option ((t_u128)) :=\n  Option_Some (unchecked_add_u128 (self) (rhs)).\n\nDefinition checked_add984013567 (self : t_usize) (rhs : t_usize) : t_Option ((t_usize)) :=\n  Option_Some (unchecked_add_usize (self) (rhs)).\n\nDefinition add_with_overflow_u128 (x : t_u128) (y : t_u128) : (t_u128*bool) :=\n  let overflow := haxint_add (Abstraction_f_lift (u128_0 x)) (Abstraction_f_lift (u128_0 y)) in\n  let res : t_U128 := Concretization_f_concretize (Clone_f_clone (overflow)) in\n  (Build_t_u128 (Clone_f_clone (res)),haxint_lt (Abstraction_f_lift (res)) (overflow)).\n\nDefinition add_with_overflow_u16 (x : t_u16) (y : t_u16) : (t_u16*bool) :=\n  let overflow := haxint_add (Abstraction_f_lift (u16_0 x)) (Abstraction_f_lift (u16_0 y)) in\n  let res : t_U16 := Concretization_f_concretize (Clone_f_clone (overflow)) in\n  (Build_t_u16 (Clone_f_clone (res)),haxint_lt (Abstraction_f_lift (res)) (overflow)).\n\nDefinition add_with_overflow_u32 (x : t_u32) (y : t_u32) : (t_u32*bool) :=\n  let overflow := haxint_add (Abstraction_f_lift (u32_0 x)) (Abstraction_f_lift (u32_0 y)) in\n  let res : t_U32 := Concretization_f_concretize (Clone_f_clone (overflow)) in\n  (Build_t_u32 (Clone_f_clone (res)),haxint_lt (Abstraction_f_lift (res)) (overflow)).\n\nDefinition add_with_overflow_u64 (x : t_u64) (y : t_u64) : (t_u64*bool) :=\n  let overflow := haxint_add (Abstraction_f_lift (u64_0 x)) (Abstraction_f_lift (u64_0 y)) in\n  let res : t_U64 := Concretization_f_concretize (Clone_f_clone (overflow)) in\n  (Build_t_u64 (Clone_f_clone (res)),haxint_lt (Abstraction_f_lift (res)) (overflow)).\n\nDefinition add_with_overflow_u8 (x : t_u8) (y : t_u8) : (t_u8*bool) :=\n  let overflow := haxint_add (Abstraction_f_lift (u8_0 x)) (Abstraction_f_lift (u8_0 y)) in\n  let res : t_U8 := Concretization_f_concretize (Clone_f_clone (overflow)) in\n  (Build_t_u8 (Clone_f_clone (res)),haxint_lt (Abstraction_f_lift (res)) (overflow)).\n\nDefinition add_with_overflow_usize (x : 
t_usize) (y : t_usize) : (t_usize*bool) :=\n  let overflow := haxint_add (Abstraction_f_lift (usize_0 x)) (Abstraction_f_lift (usize_0 y)) in\n  let res : t_U64 := Concretization_f_concretize (Clone_f_clone (overflow)) in\n  (Build_t_usize (Clone_f_clone (res)),haxint_lt (Abstraction_f_lift (res)) (overflow)).\n\nDefinition unchecked_div_u128 (x : t_u128) (y : t_u128) : t_u128 :=\n  Build_t_u128 (Build_t_U128 (haxint_div (Abstraction_f_lift (u128_0 x)) (Abstraction_f_lift (u128_0 y)))).\n\nDefinition unchecked_div_u16 (x : t_u16) (y : t_u16) : t_u16 :=\n  Build_t_u16 (Build_t_U16 (haxint_div (Abstraction_f_lift (u16_0 x)) (Abstraction_f_lift (u16_0 y)))).\n\nDefinition unchecked_div_u32 (x : t_u32) (y : t_u32) : t_u32 :=\n  Build_t_u32 (Build_t_U32 (haxint_div (Abstraction_f_lift (u32_0 x)) (Abstraction_f_lift (u32_0 y)))).\n\nDefinition unchecked_div_u64 (x : t_u64) (y : t_u64) : t_u64 :=\n  Build_t_u64 (Build_t_U64 (haxint_div (Abstraction_f_lift (u64_0 x)) (Abstraction_f_lift (u64_0 y)))).\n\nDefinition unchecked_div_u8 (x : t_u8) (y : t_u8) : t_u8 :=\n  Build_t_u8 (Build_t_U8 (haxint_div (Abstraction_f_lift (u8_0 x)) (Abstraction_f_lift (u8_0 y)))).\n\nDefinition unchecked_div_usize (x : t_usize) (y : t_usize) : t_usize :=\n  Build_t_usize (Build_t_U64 (haxint_div (Abstraction_f_lift (usize_0 x)) (Abstraction_f_lift (usize_0 y)))).\n\nDefinition wrapping_add_i128 (a : t_i128) (b : t_i128) : t_i128 :=\n  Build_t_i128 (Add_f_add (i128_0 a) (i128_0 b)).\n\nDefinition wrapping_add_i16 (a : t_i16) (b : t_i16) : t_i16 :=\n  Build_t_i16 (Add_f_add (i16_0 a) (i16_0 b)).\n\nDefinition wrapping_add_i32 (a : t_i32) (b : t_i32) : t_i32 :=\n  Build_t_i32 (Add_f_add (i32_0 a) (i32_0 b)).\n\nDefinition wrapping_add_i64 (a : t_i64) (b : t_i64) : t_i64 :=\n  Build_t_i64 (Add_f_add (i64_0 a) (i64_0 b)).\n\nDefinition wrapping_add_i8 (a : t_i8) (b : t_i8) : t_i8 :=\n  Build_t_i8 (Add_f_add (i8_0 a) (i8_0 b)).\n\nDefinition wrapping_add_isize (a : t_isize) (b : t_isize) : t_isize 
:=\n  Build_t_isize (Add_f_add (isize_0 a) (isize_0 b)).\n\nDefinition wrapping_sub_i128 (a : t_i128) (b : t_i128) : t_i128 :=\n  Build_t_i128 (Sub_f_sub (i128_0 a) (i128_0 b)).\n\nDefinition wrapping_sub_i16 (a : t_i16) (b : t_i16) : t_i16 :=\n  Build_t_i16 (Sub_f_sub (i16_0 a) (i16_0 b)).\n\nDefinition wrapping_sub_i32 (a : t_i32) (b : t_i32) : t_i32 :=\n  Build_t_i32 (Sub_f_sub (i32_0 a) (i32_0 b)).\n\nDefinition wrapping_sub_i64 (a : t_i64) (b : t_i64) : t_i64 :=\n  Build_t_i64 (Sub_f_sub (i64_0 a) (i64_0 b)).\n\nDefinition wrapping_sub_i8 (a : t_i8) (b : t_i8) : t_i8 :=\n  Build_t_i8 (Sub_f_sub (i8_0 a) (i8_0 b)).\n\nDefinition wrapping_sub_isize (a : t_isize) (b : t_isize) : t_isize :=\n  Build_t_isize (Sub_f_sub (isize_0 a) (isize_0 b)).\n\nDefinition wrapping_add634491935 (self : t_i8) (rhs : t_i8) : t_i8 :=\n  wrapping_add_i8 (self) (rhs).\n\nDefinition wrapping_sub973428293 (self : t_i8) (rhs : t_i8) : t_i8 :=\n  wrapping_sub_i8 (self) (rhs).\n\nDefinition wrapping_neg400701205 (self : t_i8) : t_i8 :=\n  wrapping_sub973428293 (Into_f_into (0)) (self).\n\nDefinition wrapping_abs400396545 (self : t_i8) : t_i8 :=\n  if\n    is_negative350273175 (Clone_f_clone (self))\n  then\n    wrapping_neg400701205 (self)\n  else\n    self.\n\nDefinition wrapping_add868559108 (self : t_i16) (rhs : t_i16) : t_i16 :=\n  wrapping_add_i16 (self) (rhs).\n\nDefinition wrapping_sub189469152 (self : t_i16) (rhs : t_i16) : t_i16 :=\n  wrapping_sub_i16 (self) (rhs).\n\nDefinition wrapping_neg860505723 (self : t_i16) : t_i16 :=\n  wrapping_sub189469152 (Into_f_into (0)) (self).\n\nDefinition wrapping_abs229076826 (self : t_i16) : t_i16 :=\n  if\n    is_negative477067241 (Clone_f_clone (self))\n  then\n    wrapping_neg860505723 (self)\n  else\n    self.\n\nDefinition wrapping_add475006616 (self : t_i32) (rhs : t_i32) : t_i32 :=\n  wrapping_add_i32 (self) (rhs).\n\nDefinition wrapping_sub298337071 (self : t_i32) (rhs : t_i32) : t_i32 :=\n  wrapping_sub_i32 (self) (rhs).\n\nDefinition 
wrapping_neg636433078 (self : t_i32) : t_i32 :=\n  wrapping_sub298337071 (Into_f_into (0)) (self).\n\nDefinition wrapping_abs729536875 (self : t_i32) : t_i32 :=\n  if\n    is_negative1035644813 (Clone_f_clone (self))\n  then\n    wrapping_neg636433078 (self)\n  else\n    self.\n\nDefinition wrapping_add590074241 (self : t_i64) (rhs : t_i64) : t_i64 :=\n  wrapping_add_i64 (self) (rhs).\n\nDefinition wrapping_sub334584751 (self : t_i64) (rhs : t_i64) : t_i64 :=\n  wrapping_sub_i64 (self) (rhs).\n\nDefinition wrapping_neg868282938 (self : t_i64) : t_i64 :=\n  wrapping_sub334584751 (Into_f_into (0)) (self).\n\nDefinition wrapping_abs285829312 (self : t_i64) : t_i64 :=\n  if\n    is_negative1066124578 (Clone_f_clone (self))\n  then\n    wrapping_neg868282938 (self)\n  else\n    self.\n\nDefinition wrapping_add251385439 (self : t_i128) (rhs : t_i128) : t_i128 :=\n  wrapping_add_i128 (self) (rhs).\n\nDefinition wrapping_sub681598071 (self : t_i128) (rhs : t_i128) : t_i128 :=\n  wrapping_sub_i128 (self) (rhs).\n\nDefinition wrapping_neg446546984 (self : t_i128) : t_i128 :=\n  wrapping_sub681598071 (Into_f_into (0)) (self).\n\nDefinition wrapping_abs281925696 (self : t_i128) : t_i128 :=\n  if\n    is_negative221698470 (Clone_f_clone (self))\n  then\n    wrapping_neg446546984 (self)\n  else\n    self.\n\nDefinition wrapping_add226040243 (self : t_isize) (rhs : t_isize) : t_isize :=\n  wrapping_add_isize (self) (rhs).\n\nDefinition wrapping_sub698035192 (self : t_isize) (rhs : t_isize) : t_isize :=\n  wrapping_sub_isize (self) (rhs).\n\nDefinition wrapping_neg912291768 (self : t_isize) : t_isize :=\n  wrapping_sub698035192 (Into_f_into (0)) (self).\n\nDefinition wrapping_abs347300819 (self : t_isize) : t_isize :=\n  if\n    is_negative693446369 (Clone_f_clone (self))\n  then\n    wrapping_neg912291768 (self)\n  else\n    self.\n\n#[global] Instance f_into_t_u8 : t_From t_u8 N :=\n  {\n    From_f_from (x : N) := Build_t_u8 (Build_t_U8 x)\n  }.\n\n#[global] Instance 
f_into_t_u16 : t_From t_u16 N :=\n  {\n    From_f_from (x : N) := Build_t_u16 (Build_t_U16 x)\n  }.\n\n#[global] Instance f_into_t_u32 : t_From t_u32 N :=\n  {\n    From_f_from (x : N) := Build_t_u32 (Build_t_U32 x)\n  }.\n\n#[global] Instance f_into_t_u64 : t_From t_u64 N :=\n  {\n    From_f_from (x : N) := Build_t_u64 (Build_t_U64 x)\n  }.\n\n#[global] Instance f_into_t_u128 : t_From t_u128 N :=\n  {\n    From_f_from (x : N) := Build_t_u128 (Build_t_U128 x)\n  }.\n\n#[global] Instance f_into_t_usize : t_From t_usize N :=\n  {\n    From_f_from (x : N) := Build_t_usize (Build_t_U64 x)\n  }.\n\nDefinition checked_div508301931 (self : t_u8) (rhs : t_u8) : t_Option ((t_u8)) :=\n  if\n    PartialEq_f_eq (rhs) (Into_f_into 0%N)\n  then\n    Option_None\n  else\n    Option_Some (unchecked_div_u8 (self) (rhs)).\n\nDefinition overflowing_add708890057 (self : t_u8) (rhs : t_u8) : (t_u8*bool) :=\n  add_with_overflow_u8 (self) (rhs).\n\nDefinition checked_div614920780 (self : t_u16) (rhs : t_u16) : t_Option ((t_u16)) :=\n  if\n    PartialEq_f_eq (rhs) (Into_f_into (0%N))\n  then\n    Option_None\n  else\n    Option_Some (unchecked_div_u16 (self) (rhs)).\n\nDefinition overflowing_add1023344178 (self : t_u16) (rhs : t_u16) : (t_u16*bool) :=\n  add_with_overflow_u16 (self) (rhs).\n\nDefinition checked_div979383477 (self : t_u32) (rhs : t_u32) : t_Option ((t_u32)) :=\n  if\n    PartialEq_f_eq (rhs) (Into_f_into (0%N))\n  then\n    Option_None\n  else\n    Option_Some (unchecked_div_u32 (self) (rhs)).\n\nDefinition overflowing_add905744292 (self : t_u32) (rhs : t_u32) : (t_u32*bool) :=\n  add_with_overflow_u32 (self) (rhs).\n\nDefinition checked_div988689127 (self : t_u64) (rhs : t_u64) : t_Option ((t_u64)) :=\n  if\n    PartialEq_f_eq (rhs) (Into_f_into (0%N))\n  then\n    Option_None\n  else\n    Option_Some (unchecked_div_u64 (self) (rhs)).\n\nDefinition overflowing_add581983607 (self : t_u64) (rhs : t_u64) : (t_u64*bool) :=\n  add_with_overflow_u64 (self) (rhs).\n\nDefinition 
checked_div344106746 (self : t_u128) (rhs : t_u128) : t_Option ((t_u128)) :=\n  if\n    PartialEq_f_eq (rhs) (Into_f_into (0%N))\n  then\n    Option_None\n  else\n    Option_Some (unchecked_div_u128 (self) (rhs)).\n\nDefinition overflowing_add458293681 (self : t_u128) (rhs : t_u128) : (t_u128*bool) :=\n  add_with_overflow_u128 (self) (rhs).\n\nDefinition checked_div80223906 (self : t_usize) (rhs : t_usize) : t_Option ((t_usize)) :=\n  if\n    PartialEq_f_eq (rhs) (Into_f_into (0%N))\n  then\n    Option_None\n  else\n    Option_Some (unchecked_div_usize (self) (rhs)).\n\nDefinition overflowing_add682280407 (self : t_usize) (rhs : t_usize) : (t_usize*bool) :=\n  add_with_overflow_usize (self) (rhs).\n\nCheck t_Neg.\n#[global] Instance t_Neg_125588538 : t_Neg ((t_i8)) :=\n  {\n    Neg_f_Output := t_i8;\n    Neg_f_neg := fun  (self : t_i8)=>\n      Build_t_i8 (Neg_f_neg (i8_0 self));\n  }.\n\nDefinition abs945505614 (self : t_i8) : t_i8 :=\n  if\n    is_negative350273175 (Clone_f_clone (self))\n  then\n    Neg_f_neg (self)\n  else\n    self.\n\n#[global] Instance t_Neg_977573626 : t_Neg ((t_i16)) :=\n  {\n    Neg_f_Output := t_i16;\n    Neg_f_neg := fun  (self : t_i16)=>\n      Build_t_i16 (Neg_f_neg (i16_0 self));\n  }.\n\nDefinition abs581170970 (self : t_i16) : t_i16 :=\n  if\n    is_negative477067241 (Clone_f_clone (self))\n  then\n    Neg_f_neg (self)\n  else\n    self.\n\n#[global] Instance t_Neg_289824503 : t_Neg ((t_i32)) :=\n  {\n    Neg_f_Output := t_i32;\n    Neg_f_neg := fun  (self : t_i32)=>\n      Build_t_i32 (Neg_f_neg (i32_0 self));\n  }.\n\nDefinition abs590464694 (self : t_i32) : t_i32 :=\n  if\n    is_negative1035644813 (Clone_f_clone (self))\n  then\n    Neg_f_neg (self)\n  else\n    self.\n\n#[global] Instance t_Neg_895800448 : t_Neg ((t_i64)) :=\n  {\n    Neg_f_Output := t_i64;\n    Neg_f_neg := fun  (self : t_i64)=>\n      Build_t_i64 (Neg_f_neg (i64_0 self));\n  }.\n\nDefinition abs654781043 (self : t_i64) : t_i64 :=\n  if\n    
is_negative1066124578 (Clone_f_clone (self))\n  then\n    Neg_f_neg (self)\n  else\n    self.\n\n#[global] Instance t_Neg_830237431 : t_Neg ((t_i128)) :=\n  {\n    Neg_f_Output := t_i128;\n    Neg_f_neg := fun  (self : t_i128)=>\n      Build_t_i128 (Neg_f_neg (i128_0 self));\n  }.\n\nDefinition abs204417539 (self : t_i128) : t_i128 :=\n  if\n    is_negative221698470 (Clone_f_clone (self))\n  then\n    Neg_f_neg (self)\n  else\n    self.\n\n#[global] Instance t_Neg_693499423 : t_Neg ((t_isize)) :=\n  {\n    Neg_f_Output := t_isize;\n    Neg_f_neg := fun  (self : t_isize)=>\n      Build_t_isize (Neg_f_neg (isize_0 self));\n  }.\n\nDefinition abs220926056 (self : t_isize) : t_isize :=\n  if\n    is_negative693446369 (Clone_f_clone (self))\n  then\n    Neg_f_neg (self)\n  else\n    self.\n\n#[global] Instance t_BitOr_174929276 : t_BitOr ((t_i8)) ((t_i8)) :=\n  {\n    BitOr_f_Output := t_i8;\n    BitOr_f_bitor := fun  (self : t_i8) (other : t_i8)=>\n      Build_t_i8 (BitOr_f_bitor (i8_0 self) (i8_0 other));\n  }.\n\n#[global] Instance t_BitOr_162600380 : t_BitOr ((t_i16)) ((t_i16)) :=\n  {\n    BitOr_f_Output := t_i16;\n    BitOr_f_bitor := fun  (self : t_i16) (other : t_i16)=>\n      Build_t_i16 (BitOr_f_bitor (i16_0 self) (i16_0 other));\n  }.\n\n#[global] Instance t_BitOr_64689421 : t_BitOr ((t_i32)) ((t_i32)) :=\n  {\n    BitOr_f_Output := t_i32;\n    BitOr_f_bitor := fun  (self : t_i32) (other : t_i32)=>\n      Build_t_i32 (BitOr_f_bitor (i32_0 self) (i32_0 other));\n  }.\n\n#[global] Instance t_BitOr_348780956 : t_BitOr ((t_i64)) ((t_i64)) :=\n  {\n    BitOr_f_Output := t_i64;\n    BitOr_f_bitor := fun  (self : t_i64) (other : t_i64)=>\n      Build_t_i64 (BitOr_f_bitor (i64_0 self) (i64_0 other));\n  }.\n\n#[global] Instance t_BitOr_643690063 : t_BitOr ((t_i128)) ((t_i128)) :=\n  {\n    BitOr_f_Output := t_i128;\n    BitOr_f_bitor := fun  (self : t_i128) (other : t_i128)=>\n      Build_t_i128 (BitOr_f_bitor (i128_0 self) (i128_0 other));\n  }.\n\n#[global] 
Instance t_BitOr_1027404433 : t_BitOr ((t_isize)) ((t_isize)) :=\n  {\n    BitOr_f_Output := t_isize;\n    BitOr_f_bitor := fun  (self : t_isize) (other : t_isize)=>\n      Build_t_isize (BitOr_f_bitor (isize_0 self) (isize_0 other));\n  }.\n\n#[global] Instance t_From_124503227 : t_From ((t_u16)) ((t_u8)) :=\n  {\n    From_f_from := fun  (x : t_u8)=>\n      Build_t_u16 (Into_f_into (u8_0 x));\n  }.\n\n#[global] Instance t_From_499390246 : t_From ((t_u32)) ((t_u8)) :=\n  {\n    From_f_from := fun  (x : t_u8)=>\n      Build_t_u32 (Into_f_into (u8_0 x));\n  }.\n\n#[global] Instance t_From_1040523499 : t_From ((t_u64)) ((t_u8)) :=\n  {\n    From_f_from := fun  (x : t_u8)=>\n      Build_t_u64 (Into_f_into (u8_0 x));\n  }.\n\n#[global] Instance t_From_827336555 : t_From ((t_u128)) ((t_u8)) :=\n  {\n    From_f_from := fun  (x : t_u8)=>\n      Build_t_u128 (Into_f_into (u8_0 x));\n  }.\n\n#[global] Instance t_From_1002852925 : t_From ((t_usize)) ((t_u8)) :=\n  {\n    From_f_from := fun  (x : t_u8)=>\n      Build_t_usize (Into_f_into (u8_0 x));\n  }.\n\n#[global] Instance t_From_476851440 : t_From ((t_u8)) ((t_u16)) :=\n  {\n    From_f_from := fun  (x : t_u16)=>\n      Build_t_u8 (Into_f_into (u16_0 x));\n  }.\n\n#[global] Instance t_From_590504350 : t_From ((t_u32)) ((t_u16)) :=\n  {\n    From_f_from := fun  (x : t_u16)=>\n      Build_t_u32 (Into_f_into (u16_0 x));\n  }.\n\n#[global] Instance t_From_786143320 : t_From ((t_u64)) ((t_u16)) :=\n  {\n    From_f_from := fun  (x : t_u16)=>\n      Build_t_u64 (Into_f_into (u16_0 x));\n  }.\n\n#[global] Instance t_From_98507156 : t_From ((t_u128)) ((t_u16)) :=\n  {\n    From_f_from := fun  (x : t_u16)=>\n      Build_t_u128 (Into_f_into (u16_0 x));\n  }.\n\n#[global] Instance t_From_427149512 : t_From ((t_usize)) ((t_u16)) :=\n  {\n    From_f_from := fun  (x : t_u16)=>\n      Build_t_usize (Into_f_into (u16_0 x));\n  }.\n\n#[global] Instance t_From_306676060 : t_From ((t_u8)) ((t_u32)) :=\n  {\n    From_f_from := fun  (x : 
t_u32)=>\n      Build_t_u8 (Into_f_into (u32_0 x));\n  }.\n\n#[global] Instance t_From_55624543 : t_From ((t_u16)) ((t_u32)) :=\n  {\n    From_f_from := fun  (x : t_u32)=>\n      Build_t_u16 (Into_f_into (u32_0 x));\n  }.\n\n#[global] Instance t_From_863285405 : t_From ((t_u64)) ((t_u32)) :=\n  {\n    From_f_from := fun  (x : t_u32)=>\n      Build_t_u64 (Into_f_into (u32_0 x));\n  }.\n\n#[global] Instance t_From_675130423 : t_From ((t_u128)) ((t_u32)) :=\n  {\n    From_f_from := fun  (x : t_u32)=>\n      Build_t_u128 (Into_f_into (u32_0 x));\n  }.\n\n#[global] Instance t_From_295642421 : t_From ((t_usize)) ((t_u32)) :=\n  {\n    From_f_from := fun  (x : t_u32)=>\n      Build_t_usize (Into_f_into (u32_0 x));\n  }.\n\n#[global] Instance t_From_690942554 : t_From ((t_u8)) ((t_u64)) :=\n  {\n    From_f_from := fun  (x : t_u64)=>\n      Build_t_u8 (Into_f_into (u64_0 x));\n  }.\n\n#[global] Instance t_From_956877210 : t_From ((t_u16)) ((t_u64)) :=\n  {\n    From_f_from := fun  (x : t_u64)=>\n      Build_t_u16 (Into_f_into (u64_0 x));\n  }.\n\n#[global] Instance t_From_124072492 : t_From ((t_u32)) ((t_u64)) :=\n  {\n    From_f_from := fun  (x : t_u64)=>\n      Build_t_u32 (Into_f_into (u64_0 x));\n  }.\n\n#[global] Instance t_From_882228220 : t_From ((t_u128)) ((t_u64)) :=\n  {\n    From_f_from := fun  (x : t_u64)=>\n      Build_t_u128 (Into_f_into (u64_0 x));\n  }.\n\n#[global] Instance t_From_1060762174 : t_From ((t_u8)) ((t_u128)) :=\n  {\n    From_f_from := fun  (x : t_u128)=>\n      Build_t_u8 (Into_f_into (u128_0 x));\n  }.\n\n#[global] Instance t_From_437123664 : t_From ((t_u16)) ((t_u128)) :=\n  {\n    From_f_from := fun  (x : t_u128)=>\n      Build_t_u16 (Into_f_into (u128_0 x));\n  }.\n\n#[global] Instance t_From_685712174 : t_From ((t_u32)) ((t_u128)) :=\n  {\n    From_f_from := fun  (x : t_u128)=>\n      Build_t_u32 (Into_f_into (u128_0 x));\n  }.\n\n#[global] Instance t_From_239215567 : t_From ((t_u64)) ((t_u128)) :=\n  {\n    From_f_from := fun  (x : 
t_u128)=>\n      Build_t_u64 (Into_f_into (u128_0 x));\n  }.\n\n#[global] Instance t_From_583993496 : t_From ((t_usize)) ((t_u128)) :=\n  {\n    From_f_from := fun  (x : t_u128)=>\n      Build_t_usize (Into_f_into (u128_0 x));\n  }.\n\n#[global] Instance t_From_1069835847 : t_From ((t_u8)) ((t_usize)) :=\n  {\n    From_f_from := fun  (x : t_usize)=>\n      Build_t_u8 (Into_f_into (usize_0 x));\n  }.\n\n#[global] Instance t_From_976343396 : t_From ((t_u16)) ((t_usize)) :=\n  {\n    From_f_from := fun  (x : t_usize)=>\n      Build_t_u16 (Into_f_into (usize_0 x));\n  }.\n\n#[global] Instance t_From_448121712 : t_From ((t_u32)) ((t_usize)) :=\n  {\n    From_f_from := fun  (x : t_usize)=>\n      Build_t_u32 (Into_f_into (usize_0 x));\n  }.\n\n#[global] Instance t_From_448032498 : t_From ((t_u128)) ((t_usize)) :=\n  {\n    From_f_from := fun  (x : t_usize)=>\n      Build_t_u128 (Into_f_into (usize_0 x));\n  }.\n\nDefinition unchecked_div_i128 (x : t_i128) (y : t_i128) : t_i128 :=\n  Build_t_i128 (Build_t_I128 (z_div (Abstraction_f_lift (i128_0 x)) (Abstraction_f_lift (i128_0 y)))).\n\nDefinition unchecked_div_i16 (x : t_i16) (y : t_i16) : t_i16 :=\n  Build_t_i16 (Build_t_I16 (z_div (Abstraction_f_lift (i16_0 x)) (Abstraction_f_lift (i16_0 y)))).\n\nDefinition unchecked_div_i32 (x : t_i32) (y : t_i32) : t_i32 :=\n  Build_t_i32 (Build_t_I32 (z_div (Abstraction_f_lift (i32_0 x)) (Abstraction_f_lift (i32_0 y)))).\n\nDefinition unchecked_div_i64 (x : t_i64) (y : t_i64) : t_i64 :=\n  Build_t_i64 (Build_t_I64 (z_div (Abstraction_f_lift (i64_0 x)) (Abstraction_f_lift (i64_0 y)))).\n\nDefinition unchecked_div_i8 (x : t_i8) (y : t_i8) : t_i8 :=\n  Build_t_i8 (Build_t_I8 (z_div (Abstraction_f_lift (i8_0 x)) (Abstraction_f_lift (i8_0 y)))).\n\nDefinition unchecked_div_isize (x : t_isize) (y : t_isize) : t_isize :=\n  Build_t_isize (Build_t_I64 (z_div (Abstraction_f_lift (isize_0 x)) (Abstraction_f_lift (isize_0 y)))).\n\nDefinition wrapping_add_u128 (a : t_u128) (b : t_u128) : 
t_u128 :=\n  Build_t_u128 (Add_f_add (u128_0 a) (u128_0 b)).\n\nDefinition wrapping_add_u16 (a : t_u16) (b : t_u16) : t_u16 :=\n  Build_t_u16 (Add_f_add (u16_0 a) (u16_0 b)).\n\nDefinition wrapping_add_u32 (a : t_u32) (b : t_u32) : t_u32 :=\n  Build_t_u32 (Add_f_add (u32_0 a) (u32_0 b)).\n\nDefinition wrapping_add_u64 (a : t_u64) (b : t_u64) : t_u64 :=\n  Build_t_u64 (Add_f_add (u64_0 a) (u64_0 b)).\n\nDefinition wrapping_add_u8 (a : t_u8) (b : t_u8) : t_u8 :=\n  Build_t_u8 (Add_f_add (u8_0 a) (u8_0 b)).\n\nDefinition wrapping_add_usize (a : t_usize) (b : t_usize) : t_usize :=\n  Build_t_usize (Add_f_add (usize_0 a) (usize_0 b)).\n\nDefinition wrapping_mul_i128 (a : t_i128) (b : t_i128) : t_i128 :=\n  Build_t_i128 (Mul_f_mul (i128_0 a) (i128_0 b)).\n\nDefinition wrapping_mul_i16 (a : t_i16) (b : t_i16) : t_i16 :=\n  Build_t_i16 (Mul_f_mul (i16_0 a) (i16_0 b)).\n\nDefinition wrapping_mul_i32 (a : t_i32) (b : t_i32) : t_i32 :=\n  Build_t_i32 (Mul_f_mul (i32_0 a) (i32_0 b)).\n\nDefinition wrapping_mul_i64 (a : t_i64) (b : t_i64) : t_i64 :=\n  Build_t_i64 (Mul_f_mul (i64_0 a) (i64_0 b)).\n\nDefinition wrapping_mul_i8 (a : t_i8) (b : t_i8) : t_i8 :=\n  Build_t_i8 (Mul_f_mul (i8_0 a) (i8_0 b)).\n\nDefinition wrapping_mul_isize (a : t_isize) (b : t_isize) : t_isize :=\n  Build_t_isize (Mul_f_mul (isize_0 a) (isize_0 b)).\n\nDefinition wrapping_mul_u128 (a : t_u128) (b : t_u128) : t_u128 :=\n  Build_t_u128 (Mul_f_mul (u128_0 a) (u128_0 b)).\n\nDefinition wrapping_mul_u16 (a : t_u16) (b : t_u16) : t_u16 :=\n  Build_t_u16 (Mul_f_mul (u16_0 a) (u16_0 b)).\n\nDefinition wrapping_mul_u32 (a : t_u32) (b : t_u32) : t_u32 :=\n  Build_t_u32 (Mul_f_mul (u32_0 a) (u32_0 b)).\n\nDefinition wrapping_mul_u64 (a : t_u64) (b : t_u64) : t_u64 :=\n  Build_t_u64 (Mul_f_mul (u64_0 a) (u64_0 b)).\n\nDefinition wrapping_mul_u8 (a : t_u8) (b : t_u8) : t_u8 :=\n  Build_t_u8 (Mul_f_mul (u8_0 a) (u8_0 b)).\n\nDefinition wrapping_mul_usize (a : t_usize) (b : t_usize) : t_usize :=\n  Build_t_usize 
(Mul_f_mul (usize_0 a) (usize_0 b)).\n\nDefinition wrapping_add480603777 (self : t_u8) (rhs : t_u8) : t_u8 :=\n  wrapping_add_u8 (self) (rhs).\n\nDefinition wrapping_mul885216284 (self : t_u8) (rhs : t_u8) : t_u8 :=\n  wrapping_mul_u8 (self) (rhs).\n\nDefinition wrapping_add124432709 (self : t_u16) (rhs : t_u16) : t_u16 :=\n  wrapping_add_u16 (self) (rhs).\n\nDefinition wrapping_mul14465189 (self : t_u16) (rhs : t_u16) : t_u16 :=\n  wrapping_mul_u16 (self) (rhs).\n\nDefinition wrapping_add1049665857 (self : t_u32) (rhs : t_u32) : t_u32 :=\n  wrapping_add_u32 (self) (rhs).\n\nDefinition wrapping_mul203346768 (self : t_u32) (rhs : t_u32) : t_u32 :=\n  wrapping_mul_u32 (self) (rhs).\n\nDefinition wrapping_add865565639 (self : t_u64) (rhs : t_u64) : t_u64 :=\n  wrapping_add_u64 (self) (rhs).\n\nDefinition wrapping_mul742978873 (self : t_u64) (rhs : t_u64) : t_u64 :=\n  wrapping_mul_u64 (self) (rhs).\n\nDefinition wrapping_add40844100 (self : t_u128) (rhs : t_u128) : t_u128 :=\n  wrapping_add_u128 (self) (rhs).\n\nDefinition wrapping_mul294115024 (self : t_u128) (rhs : t_u128) : t_u128 :=\n  wrapping_mul_u128 (self) (rhs).\n\nDefinition wrapping_add427637036 (self : t_usize) (rhs : t_usize) : t_usize :=\n  wrapping_add_usize (self) (rhs).\n\nDefinition wrapping_mul680896953 (self : t_usize) (rhs : t_usize) : t_usize :=\n  wrapping_mul_usize (self) (rhs).\n\n#[global] Instance t_Add_695878175 : t_Add ((t_i8)) ((t_i8)) :=\n  {\n    Add_f_Output := t_i8;\n    Add_f_add := fun  (self : t_i8) (other : t_i8)=>\n      Build_t_i8 (Add_f_add (i8_0 self) (i8_0 other));\n  }.\n\n#[global] Instance t_Add_877139857 : t_Add ((t_i16)) ((t_i16)) :=\n  {\n    Add_f_Output := t_i16;\n    Add_f_add := fun  (self : t_i16) (other : t_i16)=>\n      Build_t_i16 (Add_f_add (i16_0 self) (i16_0 other));\n  }.\n\n#[global] Instance t_Add_426581780 : t_Add ((t_i32)) ((t_i32)) :=\n  {\n    Add_f_Output := t_i32;\n    Add_f_add := fun  (self : t_i32) (other : t_i32)=>\n      Build_t_i32 (Add_f_add 
(i32_0 self) (i32_0 other));\n  }.\n\n#[global] Instance t_Add_113633409 : t_Add ((t_i64)) ((t_i64)) :=\n  {\n    Add_f_Output := t_i64;\n    Add_f_add := fun  (self : t_i64) (other : t_i64)=>\n      Build_t_i64 (Add_f_add (i64_0 self) (i64_0 other));\n  }.\n\n#[global] Instance t_Add_788236527 : t_Add ((t_i128)) ((t_i128)) :=\n  {\n    Add_f_Output := t_i128;\n    Add_f_add := fun  (self : t_i128) (other : t_i128)=>\n      Build_t_i128 (Add_f_add (i128_0 self) (i128_0 other));\n  }.\n\n#[global] Instance t_Add_247333017 : t_Add ((t_isize)) ((t_isize)) :=\n  {\n    Add_f_Output := t_isize;\n    Add_f_add := fun  (self : t_isize) (other : t_isize)=>\n      Build_t_isize (Add_f_add (isize_0 self) (isize_0 other));\n  }.\n\n#[global] Instance t_Sub_756206062 : t_Sub ((t_i8)) ((t_i8)) :=\n  {\n    Sub_f_Output := t_i8;\n    Sub_f_sub := fun  (self : t_i8) (other : t_i8)=>\n      Build_t_i8 (Sub_f_sub (i8_0 self) (i8_0 other));\n  }.\n\n#[global] Instance t_Sub_618838212 : t_Sub ((t_i16)) ((t_i16)) :=\n  {\n    Sub_f_Output := t_i16;\n    Sub_f_sub := fun  (self : t_i16) (other : t_i16)=>\n      Build_t_i16 (Sub_f_sub (i16_0 self) (i16_0 other));\n  }.\n\n#[global] Instance t_Sub_44574118 : t_Sub ((t_i32)) ((t_i32)) :=\n  {\n    Sub_f_Output := t_i32;\n    Sub_f_sub := fun  (self : t_i32) (other : t_i32)=>\n      Build_t_i32 (Sub_f_sub (i32_0 self) (i32_0 other));\n  }.\n\n#[global] Instance t_Sub_287793174 : t_Sub ((t_i64)) ((t_i64)) :=\n  {\n    Sub_f_Output := t_i64;\n    Sub_f_sub := fun  (self : t_i64) (other : t_i64)=>\n      Build_t_i64 (Sub_f_sub (i64_0 self) (i64_0 other));\n  }.\n\n#[global] Instance t_Sub_837338145 : t_Sub ((t_i128)) ((t_i128)) :=\n  {\n    Sub_f_Output := t_i128;\n    Sub_f_sub := fun  (self : t_i128) (other : t_i128)=>\n      Build_t_i128 (Sub_f_sub (i128_0 self) (i128_0 other));\n  }.\n\n#[global] Instance t_Sub_22961567 : t_Sub ((t_isize)) ((t_isize)) :=\n  {\n    Sub_f_Output := t_isize;\n    Sub_f_sub := fun  (self : t_isize) (other : 
t_isize)=>\n      Build_t_isize (Sub_f_sub (isize_0 self) (isize_0 other));\n  }.\n\nDefinition wrapping_sub_u128 (a : t_u128) (b : t_u128) : t_u128 :=\n  Build_t_u128 (Sub_f_sub (u128_0 a) (u128_0 b)).\n\nDefinition wrapping_sub_u16 (a : t_u16) (b : t_u16) : t_u16 :=\n  Build_t_u16 (Sub_f_sub (u16_0 a) (u16_0 b)).\n\nDefinition wrapping_sub_u32 (a : t_u32) (b : t_u32) : t_u32 :=\n  Build_t_u32 (Sub_f_sub (u32_0 a) (u32_0 b)).\n\nDefinition wrapping_sub_u64 (a : t_u64) (b : t_u64) : t_u64 :=\n  Build_t_u64 (Sub_f_sub (u64_0 a) (u64_0 b)).\n\nDefinition wrapping_sub_u8 (a : t_u8) (b : t_u8) : t_u8 :=\n  Build_t_u8 (Sub_f_sub (u8_0 a) (u8_0 b)).\n\nDefinition wrapping_sub_usize (a : t_usize) (b : t_usize) : t_usize :=\n  Build_t_usize (Sub_f_sub (usize_0 a) (usize_0 b)).\n\nDefinition wrapping_sub403906422 (self : t_u8) (rhs : t_u8) : t_u8 :=\n  wrapping_sub_u8 (self) (rhs).\n\nDefinition wrapping_neg123212788 (self : t_u8) : t_u8 :=\n  wrapping_sub403906422 (Build_t_u8 (Constants_f_ZERO)) (self).\n\nDefinition wrapping_sub811251034 (self : t_u16) (rhs : t_u16) : t_u16 :=\n  wrapping_sub_u16 (self) (rhs).\n\nDefinition wrapping_neg128555595 (self : t_u16) : t_u16 :=\n  wrapping_sub811251034 (Build_t_u16 (Constants_f_ZERO)) (self).\n\nDefinition wrapping_sub708953500 (self : t_u32) (rhs : t_u32) : t_u32 :=\n  wrapping_sub_u32 (self) (rhs).\n\nDefinition wrapping_neg328220773 (self : t_u32) : t_u32 :=\n  wrapping_sub708953500 (Build_t_u32 (Constants_f_ZERO)) (self).\n\nDefinition wrapping_sub762520851 (self : t_u64) (rhs : t_u64) : t_u64 :=\n  wrapping_sub_u64 (self) (rhs).\n\nDefinition wrapping_neg617136337 (self : t_u64) : t_u64 :=\n  wrapping_sub762520851 (Build_t_u64 (Constants_f_ZERO)) (self).\n\nDefinition wrapping_sub409310259 (self : t_u128) (rhs : t_u128) : t_u128 :=\n  wrapping_sub_u128 (self) (rhs).\n\nDefinition wrapping_neg729451428 (self : t_u128) : t_u128 :=\n  wrapping_sub409310259 (Build_t_u128 (Constants_f_ZERO)) (self).\n\nDefinition 
wrapping_sub813101882 (self : t_usize) (rhs : t_usize) : t_usize :=\n  wrapping_sub_usize (self) (rhs).\n\nDefinition wrapping_neg342773446 (self : t_usize) : t_usize :=\n  wrapping_sub813101882 (Build_t_usize (Constants_f_ZERO)) (self).\n\n#[global] Instance t_Add_63222257 : t_Add ((t_u8)) ((t_u8)) :=\n  {\n    Add_f_Output := t_u8;\n    Add_f_add := fun  (self : t_u8) (other : t_u8)=>\n      Build_t_u8 (Add_f_add (u8_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Add_568595401 : t_Add ((t_u16)) ((t_u16)) :=\n  {\n    Add_f_Output := t_u16;\n    Add_f_add := fun  (self : t_u16) (other : t_u16)=>\n      Build_t_u16 (Add_f_add (u16_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Add_99427071 : t_Add ((t_u32)) ((t_u32)) :=\n  {\n    Add_f_Output := t_u32;\n    Add_f_add := fun  (self : t_u32) (other : t_u32)=>\n      Build_t_u32 (Add_f_add (u32_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Add_963057404 : t_Add ((t_u64)) ((t_u64)) :=\n  {\n    Add_f_Output := t_u64;\n    Add_f_add := fun  (self : t_u64) (other : t_u64)=>\n      Build_t_u64 (Add_f_add (u64_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Add_258013445 : t_Add ((t_u128)) ((t_u128)) :=\n  {\n    Add_f_Output := t_u128;\n    Add_f_add := fun  (self : t_u128) (other : t_u128)=>\n      Build_t_u128 (Add_f_add (u128_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Add_192585125 : t_Add ((t_usize)) ((t_usize)) :=\n  {\n    Add_f_Output := t_usize;\n    Add_f_add := fun  (self : t_usize) (other : t_usize)=>\n      Build_t_usize (Add_f_add (usize_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Mul_307943337 : t_Mul ((t_u8)) ((t_u8)) :=\n  {\n    Mul_f_Output := t_u8;\n    Mul_f_mul := fun  (self : t_u8) (other : t_u8)=>\n      Build_t_u8 (Mul_f_mul (u8_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Mul_579880302 : t_Mul ((t_u16)) ((t_u16)) :=\n  {\n    Mul_f_Output := t_u16;\n    Mul_f_mul := fun  (self : t_u16) (other : t_u16)=>\n      Build_t_u16 (Mul_f_mul (u16_0 
self) (u16_0 other));\n  }.\n\n#[global] Instance t_Mul_969448321 : t_Mul ((t_u32)) ((t_u32)) :=\n  {\n    Mul_f_Output := t_u32;\n    Mul_f_mul := fun  (self : t_u32) (other : t_u32)=>\n      Build_t_u32 (Mul_f_mul (u32_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Mul_572333733 : t_Mul ((t_u64)) ((t_u64)) :=\n  {\n    Mul_f_Output := t_u64;\n    Mul_f_mul := fun  (self : t_u64) (other : t_u64)=>\n      Build_t_u64 (Mul_f_mul (u64_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Mul_904691459 : t_Mul ((t_u128)) ((t_u128)) :=\n  {\n    Mul_f_Output := t_u128;\n    Mul_f_mul := fun  (self : t_u128) (other : t_u128)=>\n      Build_t_u128 (Mul_f_mul (u128_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Mul_490480124 : t_Mul ((t_usize)) ((t_usize)) :=\n  {\n    Mul_f_Output := t_usize;\n    Mul_f_mul := fun  (self : t_usize) (other : t_usize)=>\n      Build_t_usize (Mul_f_mul (usize_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Mul_542253756 : t_Mul ((t_i8)) ((t_i8)) :=\n  {\n    Mul_f_Output := t_i8;\n    Mul_f_mul := fun  (self : t_i8) (other : t_i8)=>\n      Build_t_i8 (Mul_f_mul (i8_0 self) (i8_0 other));\n  }.\n\n#[global] Instance t_Mul_586956420 : t_Mul ((t_i16)) ((t_i16)) :=\n  {\n    Mul_f_Output := t_i16;\n    Mul_f_mul := fun  (self : t_i16) (other : t_i16)=>\n      Build_t_i16 (Mul_f_mul (i16_0 self) (i16_0 other));\n  }.\n\n#[global] Instance t_Mul_622712365 : t_Mul ((t_i32)) ((t_i32)) :=\n  {\n    Mul_f_Output := t_i32;\n    Mul_f_mul := fun  (self : t_i32) (other : t_i32)=>\n      Build_t_i32 (Mul_f_mul (i32_0 self) (i32_0 other));\n  }.\n\n#[global] Instance t_Mul_167399285 : t_Mul ((t_i64)) ((t_i64)) :=\n  {\n    Mul_f_Output := t_i64;\n    Mul_f_mul := fun  (self : t_i64) (other : t_i64)=>\n      Build_t_i64 (Mul_f_mul (i64_0 self) (i64_0 other));\n  }.\n\n#[global] Instance t_Mul_264435207 : t_Mul ((t_i128)) ((t_i128)) :=\n  {\n    Mul_f_Output := t_i128;\n    Mul_f_mul := fun  (self : t_i128) (other : t_i128)=>\n      
Build_t_i128 (Mul_f_mul (i128_0 self) (i128_0 other));\n  }.\n\n#[global] Instance t_Mul_9915144 : t_Mul ((t_isize)) ((t_isize)) :=\n  {\n    Mul_f_Output := t_isize;\n    Mul_f_mul := fun  (self : t_isize) (other : t_isize)=>\n      Build_t_isize (Mul_f_mul (isize_0 self) (isize_0 other));\n  }.\n\n#[global] Instance t_Div_23426959 : t_Div ((t_u8)) ((t_u8)) :=\n  {\n    Div_f_Output := t_u8;\n    Div_f_div := fun  (self : t_u8) (other : t_u8)=>\n      Build_t_u8 (Div_f_div (u8_0 self) (u8_0 other));\n  }.\n\nDefinition wrapping_div660080892 (self : t_u8) (rhs : t_u8) : t_u8 :=\n  Div_f_div (self) (rhs).\n\nDefinition wrapping_div_euclid481233436 (self : t_u8) (rhs : t_u8) : t_u8 :=\n  Div_f_div (self) (rhs).\n\n#[global] Instance t_Div_469212879 : t_Div ((t_u16)) ((t_u16)) :=\n  {\n    Div_f_Output := t_u16;\n    Div_f_div := fun  (self : t_u16) (other : t_u16)=>\n      Build_t_u16 (Div_f_div (u16_0 self) (u16_0 other));\n  }.\n\nDefinition wrapping_div366977334 (self : t_u16) (rhs : t_u16) : t_u16 :=\n  Div_f_div (self) (rhs).\n\nDefinition wrapping_div_euclid22267888 (self : t_u16) (rhs : t_u16) : t_u16 :=\n  Div_f_div (self) (rhs).\n\n#[global] Instance t_Div_248596974 : t_Div ((t_u32)) ((t_u32)) :=\n  {\n    Div_f_Output := t_u32;\n    Div_f_div := fun  (self : t_u32) (other : t_u32)=>\n      Build_t_u32 (Div_f_div (u32_0 self) (u32_0 other));\n  }.\n\nDefinition wrapping_div931150450 (self : t_u32) (rhs : t_u32) : t_u32 :=\n  Div_f_div (self) (rhs).\n\nDefinition wrapping_div_euclid606291997 (self : t_u32) (rhs : t_u32) : t_u32 :=\n  Div_f_div (self) (rhs).\n\n#[global] Instance t_Div_901268642 : t_Div ((t_u64)) ((t_u64)) :=\n  {\n    Div_f_Output := t_u64;\n    Div_f_div := fun  (self : t_u64) (other : t_u64)=>\n      Build_t_u64 (Div_f_div (u64_0 self) (u64_0 other));\n  }.\n\nDefinition wrapping_div168427046 (self : t_u64) (rhs : t_u64) : t_u64 :=\n  Div_f_div (self) (rhs).\n\nDefinition wrapping_div_euclid321252086 (self : t_u64) (rhs : t_u64) : t_u64 
:=\n  Div_f_div (self) (rhs).\n\n#[global] Instance t_Div_868602092 : t_Div ((t_u128)) ((t_u128)) :=\n  {\n    Div_f_Output := t_u128;\n    Div_f_div := fun  (self : t_u128) (other : t_u128)=>\n      Build_t_u128 (Div_f_div (u128_0 self) (u128_0 other));\n  }.\n\nDefinition wrapping_div692427683 (self : t_u128) (rhs : t_u128) : t_u128 :=\n  Div_f_div (self) (rhs).\n\nDefinition wrapping_div_euclid926334515 (self : t_u128) (rhs : t_u128) : t_u128 :=\n  Div_f_div (self) (rhs).\n\n#[global] Instance t_Div_740920454 : t_Div ((t_usize)) ((t_usize)) :=\n  {\n    Div_f_Output := t_usize;\n    Div_f_div := fun  (self : t_usize) (other : t_usize)=>\n      Build_t_usize (Div_f_div (usize_0 self) (usize_0 other));\n  }.\n\nDefinition wrapping_div905768546 (self : t_usize) (rhs : t_usize) : t_usize :=\n  Div_f_div (self) (rhs).\n\nDefinition wrapping_div_euclid90317722 (self : t_usize) (rhs : t_usize) : t_usize :=\n  Div_f_div (self) (rhs).\n\n#[global] Instance t_Rem_485335443 : t_Rem ((t_u8)) ((t_u8)) :=\n  {\n    Rem_f_Output := t_u8;\n    Rem_f_rem := fun  (self : t_u8) (other : t_u8)=>\n      Build_t_u8 (Rem_f_rem (u8_0 self) (u8_0 other));\n  }.\n\nDefinition wrapping_rem984569721 (self : t_u8) (rhs : t_u8) : t_u8 :=\n  Rem_f_rem (self) (rhs).\n\nDefinition wrapping_rem_euclid946579345 (self : t_u8) (rhs : t_u8) : t_u8 :=\n  Rem_f_rem (self) (rhs).\n\n#[global] Instance t_Rem_780488465 : t_Rem ((t_u16)) ((t_u16)) :=\n  {\n    Rem_f_Output := t_u16;\n    Rem_f_rem := fun  (self : t_u16) (other : t_u16)=>\n      Build_t_u16 (Rem_f_rem (u16_0 self) (u16_0 other));\n  }.\n\nDefinition wrapping_rem378598035 (self : t_u16) (rhs : t_u16) : t_u16 :=\n  Rem_f_rem (self) (rhs).\n\nDefinition wrapping_rem_euclid602402638 (self : t_u16) (rhs : t_u16) : t_u16 :=\n  Rem_f_rem (self) (rhs).\n\n#[global] Instance t_Rem_734014529 : t_Rem ((t_u32)) ((t_u32)) :=\n  {\n    Rem_f_Output := t_u32;\n    Rem_f_rem := fun  (self : t_u32) (other : t_u32)=>\n      Build_t_u32 (Rem_f_rem (u32_0 
self) (u32_0 other));\n  }.\n\nDefinition wrapping_rem292009099 (self : t_u32) (rhs : t_u32) : t_u32 :=\n  Rem_f_rem (self) (rhs).\n\nDefinition wrapping_rem_euclid1020271291 (self : t_u32) (rhs : t_u32) : t_u32 :=\n  Rem_f_rem (self) (rhs).\n\n#[global] Instance t_Rem_455480749 : t_Rem ((t_u64)) ((t_u64)) :=\n  {\n    Rem_f_Output := t_u64;\n    Rem_f_rem := fun  (self : t_u64) (other : t_u64)=>\n      Build_t_u64 (Rem_f_rem (u64_0 self) (u64_0 other));\n  }.\n\nDefinition wrapping_rem390602260 (self : t_u64) (rhs : t_u64) : t_u64 :=\n  Rem_f_rem (self) (rhs).\n\nDefinition wrapping_rem_euclid839264546 (self : t_u64) (rhs : t_u64) : t_u64 :=\n  Rem_f_rem (self) (rhs).\n\n#[global] Instance t_Rem_412060686 : t_Rem ((t_u128)) ((t_u128)) :=\n  {\n    Rem_f_Output := t_u128;\n    Rem_f_rem := fun  (self : t_u128) (other : t_u128)=>\n      Build_t_u128 (Rem_f_rem (u128_0 self) (u128_0 other));\n  }.\n\nDefinition wrapping_rem332379920 (self : t_u128) (rhs : t_u128) : t_u128 :=\n  Rem_f_rem (self) (rhs).\n\nDefinition wrapping_rem_euclid646122423 (self : t_u128) (rhs : t_u128) : t_u128 :=\n  Rem_f_rem (self) (rhs).\n\n#[global] Instance t_Rem_796467486 : t_Rem ((t_usize)) ((t_usize)) :=\n  {\n    Rem_f_Output := t_usize;\n    Rem_f_rem := fun  (self : t_usize) (other : t_usize)=>\n      Build_t_usize (Rem_f_rem (usize_0 self) (usize_0 other));\n  }.\n\nDefinition wrapping_rem333089373 (self : t_usize) (rhs : t_usize) : t_usize :=\n  Rem_f_rem (self) (rhs).\n\nDefinition wrapping_rem_euclid769656504 (self : t_usize) (rhs : t_usize) : t_usize :=\n  Rem_f_rem (self) (rhs).\n\n#[global] Instance t_Shr_1061808511 : t_Shr ((t_u8)) ((t_u8)) :=\n  {\n    Shr_f_Output := t_u8;\n    Shr_f_shr := fun  (self : t_u8) (other : t_u8)=>\n      Build_t_u8 (Shr_f_shr (u8_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Shr_590944100 : t_Shr ((t_u8)) ((t_u16)) :=\n  {\n    Shr_f_Output := t_u8;\n    Shr_f_shr := fun  (self : t_u8) (other : t_u16)=>\n      Build_t_u8 (Shr_f_shr (u8_0 
self) (u16_0 other));\n  }.\n\n#[global] Instance t_Shr_267395304 : t_Shr ((t_u8)) ((t_u32)) :=\n  {\n    Shr_f_Output := t_u8;\n    Shr_f_shr := fun  (self : t_u8) (other : t_u32)=>\n      Build_t_u8 (Shr_f_shr (u8_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Shr_922719969 : t_Shr ((t_u8)) ((t_u64)) :=\n  {\n    Shr_f_Output := t_u8;\n    Shr_f_shr := fun  (self : t_u8) (other : t_u64)=>\n      Build_t_u8 (Shr_f_shr (u8_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Shr_138723873 : t_Shr ((t_u8)) ((t_u128)) :=\n  {\n    Shr_f_Output := t_u8;\n    Shr_f_shr := fun  (self : t_u8) (other : t_u128)=>\n      Build_t_u8 (Shr_f_shr (u8_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Shr_558887005 : t_Shr ((t_u8)) ((t_usize)) :=\n  {\n    Shr_f_Output := t_u8;\n    Shr_f_shr := fun  (self : t_u8) (other : t_usize)=>\n      Build_t_u8 (Shr_f_shr (u8_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Shr_170693446 : t_Shr ((t_u16)) ((t_u8)) :=\n  {\n    Shr_f_Output := t_u16;\n    Shr_f_shr := fun  (self : t_u16) (other : t_u8)=>\n      Build_t_u16 (Shr_f_shr (u16_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Shr_899863737 : t_Shr ((t_u16)) ((t_u16)) :=\n  {\n    Shr_f_Output := t_u16;\n    Shr_f_shr := fun  (self : t_u16) (other : t_u16)=>\n      Build_t_u16 (Shr_f_shr (u16_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Shr_290867596 : t_Shr ((t_u16)) ((t_u32)) :=\n  {\n    Shr_f_Output := t_u16;\n    Shr_f_shr := fun  (self : t_u16) (other : t_u32)=>\n      Build_t_u16 (Shr_f_shr (u16_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Shr_630800316 : t_Shr ((t_u16)) ((t_u64)) :=\n  {\n    Shr_f_Output := t_u16;\n    Shr_f_shr := fun  (self : t_u16) (other : t_u64)=>\n      Build_t_u16 (Shr_f_shr (u16_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Shr_51138976 : t_Shr ((t_u16)) ((t_u128)) :=\n  {\n    Shr_f_Output := t_u16;\n    Shr_f_shr := fun  (self : t_u16) (other : t_u128)=>\n      Build_t_u16 (Shr_f_shr (u16_0 self) 
(u128_0 other));\n  }.\n\n#[global] Instance t_Shr_82567397 : t_Shr ((t_u16)) ((t_usize)) :=\n  {\n    Shr_f_Output := t_u16;\n    Shr_f_shr := fun  (self : t_u16) (other : t_usize)=>\n      Build_t_u16 (Shr_f_shr (u16_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Shr_430948219 : t_Shr ((t_u32)) ((t_u8)) :=\n  {\n    Shr_f_Output := t_u32;\n    Shr_f_shr := fun  (self : t_u32) (other : t_u8)=>\n      Build_t_u32 (Shr_f_shr (u32_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Shr_157675832 : t_Shr ((t_u32)) ((t_u16)) :=\n  {\n    Shr_f_Output := t_u32;\n    Shr_f_shr := fun  (self : t_u32) (other : t_u16)=>\n      Build_t_u32 (Shr_f_shr (u32_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Shr_708845947 : t_Shr ((t_u32)) ((t_u32)) :=\n  {\n    Shr_f_Output := t_u32;\n    Shr_f_shr := fun  (self : t_u32) (other : t_u32)=>\n      Build_t_u32 (Shr_f_shr (u32_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Shr_1060262347 : t_Shr ((t_u32)) ((t_u64)) :=\n  {\n    Shr_f_Output := t_u32;\n    Shr_f_shr := fun  (self : t_u32) (other : t_u64)=>\n      Build_t_u32 (Shr_f_shr (u32_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Shr_372764217 : t_Shr ((t_u32)) ((t_u128)) :=\n  {\n    Shr_f_Output := t_u32;\n    Shr_f_shr := fun  (self : t_u32) (other : t_u128)=>\n      Build_t_u32 (Shr_f_shr (u32_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Shr_534962338 : t_Shr ((t_u32)) ((t_usize)) :=\n  {\n    Shr_f_Output := t_u32;\n    Shr_f_shr := fun  (self : t_u32) (other : t_usize)=>\n      Build_t_u32 (Shr_f_shr (u32_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Shr_45695168 : t_Shr ((t_u64)) ((t_u8)) :=\n  {\n    Shr_f_Output := t_u64;\n    Shr_f_shr := fun  (self : t_u64) (other : t_u8)=>\n      Build_t_u64 (Shr_f_shr (u64_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Shr_1027310629 : t_Shr ((t_u64)) ((t_u16)) :=\n  {\n    Shr_f_Output := t_u64;\n    Shr_f_shr := fun  (self : t_u64) (other : t_u16)=>\n      Build_t_u64 
(Shr_f_shr (u64_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Shr_357793917 : t_Shr ((t_u64)) ((t_u32)) :=\n  {\n    Shr_f_Output := t_u64;\n    Shr_f_shr := fun  (self : t_u64) (other : t_u32)=>\n      Build_t_u64 (Shr_f_shr (u64_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Shr_1038705817 : t_Shr ((t_u64)) ((t_u64)) :=\n  {\n    Shr_f_Output := t_u64;\n    Shr_f_shr := fun  (self : t_u64) (other : t_u64)=>\n      Build_t_u64 (Shr_f_shr (u64_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Shr_567649567 : t_Shr ((t_u64)) ((t_u128)) :=\n  {\n    Shr_f_Output := t_u64;\n    Shr_f_shr := fun  (self : t_u64) (other : t_u128)=>\n      Build_t_u64 (Shr_f_shr (u64_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Shr_380280894 : t_Shr ((t_u64)) ((t_usize)) :=\n  {\n    Shr_f_Output := t_u64;\n    Shr_f_shr := fun  (self : t_u64) (other : t_usize)=>\n      Build_t_u64 (Shr_f_shr (u64_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Shr_555027554 : t_Shr ((t_u128)) ((t_u8)) :=\n  {\n    Shr_f_Output := t_u128;\n    Shr_f_shr := fun  (self : t_u128) (other : t_u8)=>\n      Build_t_u128 (Shr_f_shr (u128_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Shr_225523666 : t_Shr ((t_u128)) ((t_u16)) :=\n  {\n    Shr_f_Output := t_u128;\n    Shr_f_shr := fun  (self : t_u128) (other : t_u16)=>\n      Build_t_u128 (Shr_f_shr (u128_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Shr_910916464 : t_Shr ((t_u128)) ((t_u32)) :=\n  {\n    Shr_f_Output := t_u128;\n    Shr_f_shr := fun  (self : t_u128) (other : t_u32)=>\n      Build_t_u128 (Shr_f_shr (u128_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Shr_137291592 : t_Shr ((t_u128)) ((t_u64)) :=\n  {\n    Shr_f_Output := t_u128;\n    Shr_f_shr := fun  (self : t_u128) (other : t_u64)=>\n      Build_t_u128 (Shr_f_shr (u128_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Shr_1070013296 : t_Shr ((t_u128)) ((t_u128)) :=\n  {\n    Shr_f_Output := t_u128;\n    Shr_f_shr := fun  (self : 
t_u128) (other : t_u128)=>\n      Build_t_u128 (Shr_f_shr (u128_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Shr_1009428374 : t_Shr ((t_u128)) ((t_usize)) :=\n  {\n    Shr_f_Output := t_u128;\n    Shr_f_shr := fun  (self : t_u128) (other : t_usize)=>\n      Build_t_u128 (Shr_f_shr (u128_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Shr_94723353 : t_Shr ((t_usize)) ((t_u8)) :=\n  {\n    Shr_f_Output := t_usize;\n    Shr_f_shr := fun  (self : t_usize) (other : t_u8)=>\n      Build_t_usize (Shr_f_shr (usize_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Shr_18219058 : t_Shr ((t_usize)) ((t_u16)) :=\n  {\n    Shr_f_Output := t_usize;\n    Shr_f_shr := fun  (self : t_usize) (other : t_u16)=>\n      Build_t_usize (Shr_f_shr (usize_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Shr_14441839 : t_Shr ((t_usize)) ((t_u32)) :=\n  {\n    Shr_f_Output := t_usize;\n    Shr_f_shr := fun  (self : t_usize) (other : t_u32)=>\n      Build_t_usize (Shr_f_shr (usize_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Shr_642676920 : t_Shr ((t_usize)) ((t_u64)) :=\n  {\n    Shr_f_Output := t_usize;\n    Shr_f_shr := fun  (self : t_usize) (other : t_u64)=>\n      Build_t_usize (Shr_f_shr (usize_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Shr_65876869 : t_Shr ((t_usize)) ((t_u128)) :=\n  {\n    Shr_f_Output := t_usize;\n    Shr_f_shr := fun  (self : t_usize) (other : t_u128)=>\n      Build_t_usize (Shr_f_shr (usize_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Shr_833436714 : t_Shr ((t_usize)) ((t_usize)) :=\n  {\n    Shr_f_Output := t_usize;\n    Shr_f_shr := fun  (self : t_usize) (other : t_usize)=>\n      Build_t_usize (Shr_f_shr (usize_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Shl_161455974 : t_Shl ((t_u8)) ((t_u8)) :=\n  {\n    Shl_f_Output := t_u8;\n    Shl_f_shl := fun  (self : t_u8) (other : t_u8)=>\n      Build_t_u8 (Shl_f_shl (u8_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Shl_861055562 : t_Shl ((t_u8)) 
((t_u16)) :=\n  {\n    Shl_f_Output := t_u8;\n    Shl_f_shl := fun  (self : t_u8) (other : t_u16)=>\n      Build_t_u8 (Shl_f_shl (u8_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Shl_479938796 : t_Shl ((t_u8)) ((t_u32)) :=\n  {\n    Shl_f_Output := t_u8;\n    Shl_f_shl := fun  (self : t_u8) (other : t_u32)=>\n      Build_t_u8 (Shl_f_shl (u8_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Shl_373462431 : t_Shl ((t_u8)) ((t_u64)) :=\n  {\n    Shl_f_Output := t_u8;\n    Shl_f_shl := fun  (self : t_u8) (other : t_u64)=>\n      Build_t_u8 (Shl_f_shl (u8_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Shl_356733585 : t_Shl ((t_u8)) ((t_u128)) :=\n  {\n    Shl_f_Output := t_u8;\n    Shl_f_shl := fun  (self : t_u8) (other : t_u128)=>\n      Build_t_u8 (Shl_f_shl (u8_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Shl_138823384 : t_Shl ((t_u8)) ((t_usize)) :=\n  {\n    Shl_f_Output := t_u8;\n    Shl_f_shl := fun  (self : t_u8) (other : t_usize)=>\n      Build_t_u8 (Shl_f_shl (u8_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Shl_492599436 : t_Shl ((t_u16)) ((t_u8)) :=\n  {\n    Shl_f_Output := t_u16;\n    Shl_f_shl := fun  (self : t_u16) (other : t_u8)=>\n      Build_t_u16 (Shl_f_shl (u16_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Shl_254997522 : t_Shl ((t_u16)) ((t_u16)) :=\n  {\n    Shl_f_Output := t_u16;\n    Shl_f_shl := fun  (self : t_u16) (other : t_u16)=>\n      Build_t_u16 (Shl_f_shl (u16_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Shl_840888059 : t_Shl ((t_u16)) ((t_u32)) :=\n  {\n    Shl_f_Output := t_u16;\n    Shl_f_shl := fun  (self : t_u16) (other : t_u32)=>\n      Build_t_u16 (Shl_f_shl (u16_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Shl_1017206779 : t_Shl ((t_u16)) ((t_u64)) :=\n  {\n    Shl_f_Output := t_u16;\n    Shl_f_shl := fun  (self : t_u16) (other : t_u64)=>\n      Build_t_u16 (Shl_f_shl (u16_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Shl_751151164 : t_Shl ((t_u16)) 
((t_u128)) :=\n  {\n    Shl_f_Output := t_u16;\n    Shl_f_shl := fun  (self : t_u16) (other : t_u128)=>\n      Build_t_u16 (Shl_f_shl (u16_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Shl_303578486 : t_Shl ((t_u16)) ((t_usize)) :=\n  {\n    Shl_f_Output := t_u16;\n    Shl_f_shl := fun  (self : t_u16) (other : t_usize)=>\n      Build_t_u16 (Shl_f_shl (u16_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Shl_186069032 : t_Shl ((t_u32)) ((t_u8)) :=\n  {\n    Shl_f_Output := t_u32;\n    Shl_f_shl := fun  (self : t_u32) (other : t_u8)=>\n      Build_t_u32 (Shl_f_shl (u32_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Shl_320616735 : t_Shl ((t_u32)) ((t_u16)) :=\n  {\n    Shl_f_Output := t_u32;\n    Shl_f_shl := fun  (self : t_u32) (other : t_u16)=>\n      Build_t_u32 (Shl_f_shl (u32_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Shl_325940784 : t_Shl ((t_u32)) ((t_u32)) :=\n  {\n    Shl_f_Output := t_u32;\n    Shl_f_shl := fun  (self : t_u32) (other : t_u32)=>\n      Build_t_u32 (Shl_f_shl (u32_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Shl_398883535 : t_Shl ((t_u32)) ((t_u64)) :=\n  {\n    Shl_f_Output := t_u32;\n    Shl_f_shl := fun  (self : t_u32) (other : t_u64)=>\n      Build_t_u32 (Shl_f_shl (u32_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Shl_700909976 : t_Shl ((t_u32)) ((t_u128)) :=\n  {\n    Shl_f_Output := t_u32;\n    Shl_f_shl := fun  (self : t_u32) (other : t_u128)=>\n      Build_t_u32 (Shl_f_shl (u32_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Shl_475027367 : t_Shl ((t_u32)) ((t_usize)) :=\n  {\n    Shl_f_Output := t_u32;\n    Shl_f_shl := fun  (self : t_u32) (other : t_usize)=>\n      Build_t_u32 (Shl_f_shl (u32_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Shl_620046856 : t_Shl ((t_u64)) ((t_u8)) :=\n  {\n    Shl_f_Output := t_u64;\n    Shl_f_shl := fun  (self : t_u64) (other : t_u8)=>\n      Build_t_u64 (Shl_f_shl (u64_0 self) (u8_0 other));\n  }.\n\n#[global] Instance 
t_Shl_158077515 : t_Shl ((t_u64)) ((t_u16)) :=\n  {\n    Shl_f_Output := t_u64;\n    Shl_f_shl := fun  (self : t_u64) (other : t_u16)=>\n      Build_t_u64 (Shl_f_shl (u64_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Shl_1071441050 : t_Shl ((t_u64)) ((t_u32)) :=\n  {\n    Shl_f_Output := t_u64;\n    Shl_f_shl := fun  (self : t_u64) (other : t_u32)=>\n      Build_t_u64 (Shl_f_shl (u64_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Shl_581241894 : t_Shl ((t_u64)) ((t_u64)) :=\n  {\n    Shl_f_Output := t_u64;\n    Shl_f_shl := fun  (self : t_u64) (other : t_u64)=>\n      Build_t_u64 (Shl_f_shl (u64_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Shl_916302310 : t_Shl ((t_u64)) ((t_u128)) :=\n  {\n    Shl_f_Output := t_u64;\n    Shl_f_shl := fun  (self : t_u64) (other : t_u128)=>\n      Build_t_u64 (Shl_f_shl (u64_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Shl_59609547 : t_Shl ((t_u64)) ((t_usize)) :=\n  {\n    Shl_f_Output := t_u64;\n    Shl_f_shl := fun  (self : t_u64) (other : t_usize)=>\n      Build_t_u64 (Shl_f_shl (u64_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Shl_308574333 : t_Shl ((t_u128)) ((t_u8)) :=\n  {\n    Shl_f_Output := t_u128;\n    Shl_f_shl := fun  (self : t_u128) (other : t_u8)=>\n      Build_t_u128 (Shl_f_shl (u128_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Shl_966677877 : t_Shl ((t_u128)) ((t_u16)) :=\n  {\n    Shl_f_Output := t_u128;\n    Shl_f_shl := fun  (self : t_u128) (other : t_u16)=>\n      Build_t_u128 (Shl_f_shl (u128_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Shl_38932717 : t_Shl ((t_u128)) ((t_u32)) :=\n  {\n    Shl_f_Output := t_u128;\n    Shl_f_shl := fun  (self : t_u128) (other : t_u32)=>\n      Build_t_u128 (Shl_f_shl (u128_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Shl_108085956 : t_Shl ((t_u128)) ((t_u64)) :=\n  {\n    Shl_f_Output := t_u128;\n    Shl_f_shl := fun  (self : t_u128) (other : t_u64)=>\n      Build_t_u128 (Shl_f_shl (u128_0 self) (u64_0 
other));\n  }.\n\n#[global] Instance t_Shl_489587677 : t_Shl ((t_u128)) ((t_u128)) :=\n  {\n    Shl_f_Output := t_u128;\n    Shl_f_shl := fun  (self : t_u128) (other : t_u128)=>\n      Build_t_u128 (Shl_f_shl (u128_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Shl_837150634 : t_Shl ((t_u128)) ((t_usize)) :=\n  {\n    Shl_f_Output := t_u128;\n    Shl_f_shl := fun  (self : t_u128) (other : t_usize)=>\n      Build_t_u128 (Shl_f_shl (u128_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Shl_736165651 : t_Shl ((t_usize)) ((t_u8)) :=\n  {\n    Shl_f_Output := t_usize;\n    Shl_f_shl := fun  (self : t_usize) (other : t_u8)=>\n      Build_t_usize (Shl_f_shl (usize_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Shl_740886741 : t_Shl ((t_usize)) ((t_u16)) :=\n  {\n    Shl_f_Output := t_usize;\n    Shl_f_shl := fun  (self : t_usize) (other : t_u16)=>\n      Build_t_usize (Shl_f_shl (usize_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Shl_683246358 : t_Shl ((t_usize)) ((t_u32)) :=\n  {\n    Shl_f_Output := t_usize;\n    Shl_f_shl := fun  (self : t_usize) (other : t_u32)=>\n      Build_t_usize (Shl_f_shl (usize_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_Shl_436746920 : t_Shl ((t_usize)) ((t_u64)) :=\n  {\n    Shl_f_Output := t_usize;\n    Shl_f_shl := fun  (self : t_usize) (other : t_u64)=>\n      Build_t_usize (Shl_f_shl (usize_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Shl_527409353 : t_Shl ((t_usize)) ((t_u128)) :=\n  {\n    Shl_f_Output := t_usize;\n    Shl_f_shl := fun  (self : t_usize) (other : t_u128)=>\n      Build_t_usize (Shl_f_shl (usize_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Shl_982380013 : t_Shl ((t_usize)) ((t_usize)) :=\n  {\n    Shl_f_Output := t_usize;\n    Shl_f_shl := fun  (self : t_usize) (other : t_usize)=>\n      Build_t_usize (Shl_f_shl (usize_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_BitOr_669654947 : t_BitOr ((t_u8)) ((t_u8)) :=\n  {\n    BitOr_f_Output := t_u8;\n    
BitOr_f_bitor := fun  (self : t_u8) (other : t_u8)=>\n      Build_t_u8 (BitOr_f_bitor (u8_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_BitOr_892941557 : t_BitOr ((t_u16)) ((t_u16)) :=\n  {\n    BitOr_f_Output := t_u16;\n    BitOr_f_bitor := fun  (self : t_u16) (other : t_u16)=>\n      Build_t_u16 (BitOr_f_bitor (u16_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_BitOr_991330847 : t_BitOr ((t_u32)) ((t_u32)) :=\n  {\n    BitOr_f_Output := t_u32;\n    BitOr_f_bitor := fun  (self : t_u32) (other : t_u32)=>\n      Build_t_u32 (BitOr_f_bitor (u32_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_BitOr_692971983 : t_BitOr ((t_u64)) ((t_u64)) :=\n  {\n    BitOr_f_Output := t_u64;\n    BitOr_f_bitor := fun  (self : t_u64) (other : t_u64)=>\n      Build_t_u64 (BitOr_f_bitor (u64_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_BitOr_227319538 : t_BitOr ((t_u128)) ((t_u128)) :=\n  {\n    BitOr_f_Output := t_u128;\n    BitOr_f_bitor := fun  (self : t_u128) (other : t_u128)=>\n      Build_t_u128 (BitOr_f_bitor (u128_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_BitOr_669787696 : t_BitOr ((t_usize)) ((t_usize)) :=\n  {\n    BitOr_f_Output := t_usize;\n    BitOr_f_bitor := fun  (self : t_usize) (other : t_usize)=>\n      Build_t_usize (BitOr_f_bitor (usize_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_BitXor_327788827 : t_BitXor ((t_u8)) ((t_u8)) :=\n  {\n    BitXor_f_Output := t_u8;\n    BitXor_f_bitxor := fun  (self : t_u8) (other : t_u8)=>\n      Build_t_u8 (BitXor_f_bitxor (u8_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_BitXor_661040931 : t_BitXor ((t_u16)) ((t_u16)) :=\n  {\n    BitXor_f_Output := t_u16;\n    BitXor_f_bitxor := fun  (self : t_u16) (other : t_u16)=>\n      Build_t_u16 (BitXor_f_bitxor (u16_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_BitXor_222957020 : t_BitXor ((t_u32)) ((t_u32)) :=\n  {\n    BitXor_f_Output := t_u32;\n    BitXor_f_bitxor := fun  (self : t_u32) (other : t_u32)=>\n      Build_t_u32 
(BitXor_f_bitxor (u32_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_BitXor_530545977 : t_BitXor ((t_u64)) ((t_u64)) :=\n  {\n    BitXor_f_Output := t_u64;\n    BitXor_f_bitxor := fun  (self : t_u64) (other : t_u64)=>\n      Build_t_u64 (BitXor_f_bitxor (u64_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_BitXor_112780081 : t_BitXor ((t_u128)) ((t_u128)) :=\n  {\n    BitXor_f_Output := t_u128;\n    BitXor_f_bitxor := fun  (self : t_u128) (other : t_u128)=>\n      Build_t_u128 (BitXor_f_bitxor (u128_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_BitXor_969810999 : t_BitXor ((t_usize)) ((t_usize)) :=\n  {\n    BitXor_f_Output := t_usize;\n    BitXor_f_bitxor := fun  (self : t_usize) (other : t_usize)=>\n      Build_t_usize (BitXor_f_bitxor (usize_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_BitAnd_126469303 : t_BitAnd ((t_u8)) ((t_u8)) :=\n  {\n    BitAnd_f_Output := t_u8;\n    BitAnd_f_bitand := fun  (self : t_u8) (other : t_u8)=>\n      Build_t_u8 (BitAnd_f_bitand (u8_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_BitAnd_531525101 : t_BitAnd ((t_u16)) ((t_u16)) :=\n  {\n    BitAnd_f_Output := t_u16;\n    BitAnd_f_bitand := fun  (self : t_u16) (other : t_u16)=>\n      Build_t_u16 (BitAnd_f_bitand (u16_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_BitAnd_24728760 : t_BitAnd ((t_u32)) ((t_u32)) :=\n  {\n    BitAnd_f_Output := t_u32;\n    BitAnd_f_bitand := fun  (self : t_u32) (other : t_u32)=>\n      Build_t_u32 (BitAnd_f_bitand (u32_0 self) (u32_0 other));\n  }.\n\n#[global] Instance t_BitAnd_35845574 : t_BitAnd ((t_u64)) ((t_u64)) :=\n  {\n    BitAnd_f_Output := t_u64;\n    BitAnd_f_bitand := fun  (self : t_u64) (other : t_u64)=>\n      Build_t_u64 (BitAnd_f_bitand (u64_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_BitAnd_396424214 : t_BitAnd ((t_u128)) ((t_u128)) :=\n  {\n    BitAnd_f_Output := t_u128;\n    BitAnd_f_bitand := fun  (self : t_u128) (other : t_u128)=>\n      Build_t_u128 (BitAnd_f_bitand (u128_0 
self) (u128_0 other));\n  }.\n\n#[global] Instance t_BitAnd_652458180 : t_BitAnd ((t_usize)) ((t_usize)) :=\n  {\n    BitAnd_f_Output := t_usize;\n    BitAnd_f_bitand := fun  (self : t_usize) (other : t_usize)=>\n      Build_t_usize (BitAnd_f_bitand (usize_0 self) (usize_0 other));\n  }.\n\n#[global] Instance t_Sub_81344668 : t_Sub ((t_u8)) ((t_u8)) :=\n  {\n    Sub_f_Output := t_u8;\n    Sub_f_sub := fun  (self : t_u8) (other : t_u8)=>\n      Build_t_u8 (Sub_f_sub (u8_0 self) (u8_0 other));\n  }.\n\n#[global] Instance t_Sub_1011801854 : t_Sub ((t_u16)) ((t_u16)) :=\n  {\n    Sub_f_Output := t_u16;\n    Sub_f_sub := fun  (self : t_u16) (other : t_u16)=>\n      Build_t_u16 (Sub_f_sub (u16_0 self) (u16_0 other));\n  }.\n\n#[global] Instance t_Sub_1070652436 : t_Sub ((t_u32)) ((t_u32)) :=\n  {\n    Sub_f_Output := t_u32;\n    Sub_f_sub := fun  (self : t_u32) (other : t_u32)=>\n      Build_t_u32 (Sub_f_sub (u32_0 self) (u32_0 other));\n  }.\n\nDefinition rotate_left_u128 (x : t_u128) (shift : t_u32) : t_u128 :=\n  let shift : t_u32 := Rem_f_rem (shift) (v_BITS136999051) in\n  let left : t_u128 := Shl_f_shl (Clone_f_clone (x)) (Clone_f_clone (shift)) in\n  let right : t_u128 := Shr_f_shr (t_Shr :=  _ : t_Shr _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS136999051) (Clone_f_clone (shift))) in\n  BitOr_f_bitor (left) (right).\n\nDefinition rotate_left_u16 (x : t_u16) (shift : t_u32) : t_u16 :=\n  let shift : t_u32 := Rem_f_rem (shift) (v_BITS277333551) in\n  let left : t_u16 := Shl_f_shl (Clone_f_clone (x)) (Clone_f_clone (shift)) in\n  let right : t_u16 := Shr_f_shr (t_Shr :=  _ : t_Shr _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS277333551) (Clone_f_clone (shift))) in\n  BitOr_f_bitor (left) (right).\n\nDefinition rotate_left_u32 (x : t_u32) (shift : t_u32) : t_u32 :=\n  let shift : t_u32 := Rem_f_rem (shift) (v_BITS473478051) in\n  let left : t_u32 := Shl_f_shl (Clone_f_clone (x)) (Clone_f_clone (shift)) in\n  let right : t_u32 := Shr_f_shr (t_Shr :=  _ : t_Shr _ 
t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS473478051) (Clone_f_clone (shift))) in\n  BitOr_f_bitor (left) (right).\n\nDefinition rotate_left_u64 (x : t_u64) (shift : t_u32) : t_u64 :=\n  let shift : t_u32 := Rem_f_rem (shift) (v_BITS177666292) in\n  let left : t_u64 := Shl_f_shl (Clone_f_clone (x)) (Clone_f_clone (shift)) in\n  let right : t_u64 := Shr_f_shr (t_Shr :=  _ : t_Shr _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS177666292) (Clone_f_clone (shift))) in\n  BitOr_f_bitor (left) (right).\n\nDefinition rotate_left_u8 (x : t_u8) (shift : t_u32) : t_u8 :=\n  let shift : t_u32 := Rem_f_rem (shift) (v_BITS690311813) in\n  let left : t_u8 := Shl_f_shl (Clone_f_clone (x)) (Clone_f_clone (shift)) in\n  let right : t_u8 := Shr_f_shr (t_Shr :=  _ : t_Shr _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS690311813) (Clone_f_clone (shift))) in\n  BitOr_f_bitor (left) (right).\n\nDefinition rotate_left_usize (x : t_usize) (shift : t_u32) : t_usize :=\n  let shift : t_u32 := Rem_f_rem (shift) (v_BITS229952196) in\n  let left : t_usize := Shl_f_shl (Clone_f_clone (x)) (Clone_f_clone (shift)) in\n  let right : t_usize := Shr_f_shr (t_Shr :=  _ : t_Shr _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS229952196) (Clone_f_clone (shift))) in\n  BitOr_f_bitor (left) (right).\n\nDefinition rotate_right_u128 (x : t_u128) (shift : t_u32) : t_u128 :=\n  let shift : t_u32 := Rem_f_rem (shift) (v_BITS136999051) in\n  let left : t_u128 := Shr_f_shr (Clone_f_clone (x)) (Clone_f_clone (shift)) in\n  let right : t_u128 := Shl_f_shl (t_Shl :=  _ : t_Shl _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS136999051) (Clone_f_clone (shift))) in\n  BitOr_f_bitor (left) (right).\n\nDefinition rotate_right_u16 (x : t_u16) (shift : t_u32) : t_u16 :=\n  let shift : t_u32 := Rem_f_rem (shift) (v_BITS277333551) in\n  let left : t_u16 := Shr_f_shr (Clone_f_clone (x)) (Clone_f_clone (shift)) in\n  let right : t_u16 := Shl_f_shl (t_Shl :=  _ : t_Shl _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS277333551) 
(Clone_f_clone (shift))) in\n  BitOr_f_bitor (left) (right).\n\nDefinition rotate_right_u32 (x : t_u32) (shift : t_u32) : t_u32 :=\n  let shift : t_u32 := Rem_f_rem (shift) (v_BITS473478051) in\n  let left : t_u32 := Shr_f_shr (Clone_f_clone (x)) (Clone_f_clone (shift)) in\n  let right : t_u32 := Shl_f_shl (t_Shl :=  _ : t_Shl _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS473478051) (Clone_f_clone (shift))) in\n  BitOr_f_bitor (left) (right).\n\nDefinition rotate_right_u64 (x : t_u64) (shift : t_u32) : t_u64 :=\n  let shift : t_u32 := Rem_f_rem (shift) (v_BITS177666292) in\n  let left : t_u64 := Shr_f_shr (Clone_f_clone (x)) (Clone_f_clone (shift)) in\n  let right : t_u64 := Shl_f_shl (t_Shl :=  _ : t_Shl _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS177666292) (Clone_f_clone (shift))) in\n  BitOr_f_bitor (left) (right).\n\nDefinition rotate_right_u8 (x : t_u8) (shift : t_u32) : t_u8 :=\n  let shift : t_u32 := Rem_f_rem (shift) (v_BITS690311813) in\n  let left : t_u8 := Shr_f_shr (Clone_f_clone (x)) (Clone_f_clone (shift)) in\n  let right : t_u8 := Shl_f_shl (t_Shl :=  _ : t_Shl _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS690311813) (Clone_f_clone (shift))) in\n  BitOr_f_bitor (left) (right).\n\nDefinition rotate_right_usize (x : t_usize) (shift : t_u32) : t_usize :=\n  let shift : t_u32 := Rem_f_rem (shift) (v_BITS229952196) in\n  let left : t_usize := Shr_f_shr (Clone_f_clone (x)) (Clone_f_clone (shift)) in\n  let right : t_usize := Shl_f_shl (t_Shl :=  _ : t_Shl _ t_u32) (Clone_f_clone (x)) (Sub_f_sub (v_BITS229952196) (Clone_f_clone (shift))) in\n  BitOr_f_bitor (left) (right).\n\nProgram Definition rotate_left792925914 (self : t_u8) (n : t_u32) : t_u8 :=\n  run (letb hoist1 := ControlFlow_Break (rotate_left_u8 (self) (n)) in\n  ControlFlow_Continue (never_to_any (_ (* hoist1 *)))).\nFail Next Obligation.\n\nProgram Definition rotate_right166090082 (self : t_u8) (n : t_u32) : t_u8 :=\n  run (letb hoist2 := ControlFlow_Break (rotate_right_u8 (self) (n)) 
in\n  ControlFlow_Continue (never_to_any (_ (* hoist2 *)))).\nFail Next Obligation.\n\nProgram Definition rotate_left297034175 (self : t_u16) (n : t_u32) : t_u16 :=\n  run (letb hoist3 := ControlFlow_Break (rotate_left_u16 (self) (n)) in\n  ControlFlow_Continue (never_to_any (_ (* hoist3 *)))).\nFail Next Obligation.\n\nProgram Definition rotate_right138522246 (self : t_u16) (n : t_u32) : t_u16 :=\n  run (letb hoist4 := ControlFlow_Break (rotate_right_u16 (self) (n)) in\n  ControlFlow_Continue (never_to_any (_ (* hoist4 *)))).\nFail Next Obligation.\n\nProgram Definition rotate_left823573251 (self : t_u32) (n : t_u32) : t_u32 :=\n  run (letb hoist5 := ControlFlow_Break (rotate_left_u32 (self) (n)) in\n  ControlFlow_Continue (never_to_any (_ (* hoist5 *)))).\nFail Next Obligation.\n\nProgram Definition rotate_right869195717 (self : t_u32) (n : t_u32) : t_u32 :=\n  run (letb hoist6 := ControlFlow_Break (rotate_right_u32 (self) (n)) in\n  ControlFlow_Continue (never_to_any (_ (* hoist6 *)))).\nFail Next Obligation.\n\nProgram Definition rotate_left618936072 (self : t_u64) (n : t_u32) : t_u64 :=\n  run (letb hoist7 := ControlFlow_Break (rotate_left_u64 (self) (n)) in\n  ControlFlow_Continue (never_to_any (_ (* hoist7 *)))).\nFail Next Obligation.\n\nProgram Definition rotate_right1041614027 (self : t_u64) (n : t_u32) : t_u64 :=\n  run (letb hoist8 := ControlFlow_Break (rotate_right_u64 (self) (n)) in\n  ControlFlow_Continue (never_to_any (_ (* hoist8 *)))).\nFail Next Obligation.\n\nProgram Definition rotate_left1065866885 (self : t_u128) (n : t_u32) : t_u128 :=\n  run (letb hoist9 := ControlFlow_Break (rotate_left_u128 (self) (n)) in\n  ControlFlow_Continue (never_to_any (_ (* hoist9 *)))).\nFail Next Obligation.\n\nProgram Definition rotate_right591112338 (self : t_u128) (n : t_u32) : t_u128 :=\n  run (letb hoist10 := ControlFlow_Break (rotate_right_u128 (self) (n)) in\n  ControlFlow_Continue (never_to_any (_ (* hoist10 *)))).\nFail Next Obligation.\n\nProgram 
Definition rotate_left996672710 (self : t_usize) (n : t_u32) : t_usize :=\n  run (letb hoist11 := ControlFlow_Break (rotate_left_usize (self) (n)) in\n  ControlFlow_Continue (never_to_any (_ (* hoist11 *)))).\nFail Next Obligation.\n\nProgram Definition rotate_right442734174 (self : t_usize) (n : t_u32) : t_usize :=\n  run (letb hoist12 := ControlFlow_Break (rotate_right_usize (self) (n)) in\n  ControlFlow_Continue (never_to_any (_ (* hoist12 *)))).\nFail Next Obligation.\n\n#[global] Instance t_Sub_788323603 : t_Sub ((t_u64)) ((t_u64)) :=\n  {\n    Sub_f_Output := t_u64;\n    Sub_f_sub := fun  (self : t_u64) (other : t_u64)=>\n      Build_t_u64 (Sub_f_sub (u64_0 self) (u64_0 other));\n  }.\n\n#[global] Instance t_Sub_1046324685 : t_Sub ((t_u128)) ((t_u128)) :=\n  {\n    Sub_f_Output := t_u128;\n    Sub_f_sub := fun  (self : t_u128) (other : t_u128)=>\n      Build_t_u128 (Sub_f_sub (u128_0 self) (u128_0 other));\n  }.\n\n#[global] Instance t_Sub_1064369889 : t_Sub ((t_usize)) ((t_usize)) :=\n  {\n    Sub_f_Output := t_usize;\n    Sub_f_sub := fun  (self : t_usize) (other : t_usize)=>\n      Build_t_usize (Sub_f_sub (usize_0 self) (usize_0 other));\n  }.\n\n(* Program Definition bswap_u128 (x : t_u128) : t_u128 := *)\n(*   let count : t_u128 := Into_f_into (0%N) in *)\n(*   let count := fold_range (Build_t_usize (Build_t_U64 0%N)) (Into_f_into (v_BITS136999051)) (fun count _ => *)\n(*     true) (count) (fun (count : t_u128) (i : t_usize) => *)\n(*     let low_bit : t_u128 := (* Into_f_into *) (BitAnd_f_bitand (t_BitAnd := _ : t_BitAnd t_u128 t_u128) (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1%N) : t_u128)) in *)\n(*     let count : t_u128 := Add_f_add (t_Add := t_Add_258013445 : t_Add t_u128 t_u128) (Shl_f_shl (t_Shl := t_Shl_38932717 : t_Shl t_u128 t_u32)  (count) (Into_f_into (1%N) : t_u32)) (low_bit) in *)\n(*     count) in *)\n(*   count. 
*)\n\n(* Program Definition bswap_u16 (x : t_u16) : t_u16 := *)\n(*   let count : t_u16 := Into_f_into (0) in *)\n(*   let count := fold_range (Build_t_usize (Build_t_U64 0%N)) (Into_f_into (v_BITS277333551)) (fun count _ => *)\n(*     true) (count) (fun (count : t_u16) (i : t_usize) => *)\n(*     let low_bit : t_u16 := (* Into_f_into *) (BitAnd_f_bitand (t_BitAnd := _ : t_BitAnd t_u16 t_u16) (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1%N) : t_u16)) in *)\n(*     let count := Add_f_add (t_Add := _ : t_Add _ _) (Shl_f_shl (count) (Into_f_into (1))) (low_bit) in *)\n(*     count) in *)\n(*   count. *)\n\n(* Definition bswap_u32 (x : t_u32) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let count := fold_range (0) (Into_f_into (v_BITS473478051)) (fun count _ => *)\n(*     true) (count) (fun count i => *)\n(*     let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *)\n(*     let count := Add_f_add (Shl_f_shl (count) (Into_f_into (1))) (low_bit) in *)\n(*     count) in *)\n(*   count. *)\n\n(* Definition bswap_u64 (x : t_u64) : t_u64 := *)\n(*   let count : t_u64 := Into_f_into (0) in *)\n(*   let count := fold_range (0) (Into_f_into (v_BITS177666292)) (fun count _ => *)\n(*     true) (count) (fun count i => *)\n(*     let low_bit : t_u64 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *)\n(*     let count := Add_f_add (Shl_f_shl (count) (Into_f_into (1))) (low_bit) in *)\n(*     count) in *)\n(*   count. 
*)\n\n(* Definition bswap_u8 (x : t_u8) : t_u8 := *)\n(*   let count : t_u8 := Into_f_into (0) in *)\n(*   let count := fold_range (0) (Into_f_into (v_BITS690311813)) (fun count _ => *)\n(*     true) (count) (fun count i => *)\n(*     let low_bit : t_u8 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *)\n(*     let count := Add_f_add (Shl_f_shl (count) (Into_f_into (1))) (low_bit) in *)\n(*     count) in *)\n(*   count. *)\n\n(* Definition bswap_usize (x : t_usize) : t_usize := *)\n(*   let count : t_usize := Into_f_into (0) in *)\n(*   let count := fold_range (0) (Into_f_into (v_BITS229952196)) (fun count _ => *)\n(*     true) (count) (fun count i => *)\n(*     let low_bit : t_usize := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *)\n(*     let count := Add_f_add (Shl_f_shl (count) (Into_f_into (1))) (low_bit) in *)\n(*     count) in *)\n(*   count. *)\n\n(* Definition ctlz_u128 (x : t_u128) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let done := false in *)\n(*   let (count,done) := fold_range (0) (Into_f_into (v_BITS136999051)) (fun (count,done) _ => *)\n(*     true) ((count,done)) (fun (count,done) i => *)\n(*     let high_bit : t_u32 := Into_f_into (Shr_f_shr (Shl_f_shl (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (Sub_f_sub (v_BITS136999051) (Into_f_into (1))))) in *)\n(*     if *)\n(*       orb (PartialEq_f_eq (high_bit) (Into_f_into (1))) (done) *)\n(*     then *)\n(*       let done := true in *)\n(*       (count,done) *)\n(*     else *)\n(*       let count := Add_f_add (count) (Into_f_into (1)) in *)\n(*       (count,done)) in *)\n(*   count. 
*)\n\n(* Definition ctlz_u16 (x : t_u16) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let done := false in *)\n(*   let (count,done) := fold_range (0) (Into_f_into (v_BITS277333551)) (fun (count,done) _ => *)\n(*     true) ((count,done)) (fun (count,done) i => *)\n(*     let high_bit : t_u32 := Into_f_into (Shr_f_shr (Shl_f_shl (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (Sub_f_sub (v_BITS277333551) (Into_f_into (1))))) in *)\n(*     if *)\n(*       orb (PartialEq_f_eq (high_bit) (Into_f_into (1))) (done) *)\n(*     then *)\n(*       let done := true in *)\n(*       (count,done) *)\n(*     else *)\n(*       let count := Add_f_add (count) (Into_f_into (1)) in *)\n(*       (count,done)) in *)\n(*   count. *)\n\n(* Definition ctlz_u32 (x : t_u32) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let done := false in *)\n(*   let (count,done) := fold_range (0) (Into_f_into (v_BITS473478051)) (fun (count,done) _ => *)\n(*     true) ((count,done)) (fun (count,done) i => *)\n(*     let high_bit : t_u32 := Into_f_into (Shr_f_shr (Shl_f_shl (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (Sub_f_sub (v_BITS473478051) (Into_f_into (1))))) in *)\n(*     if *)\n(*       orb (PartialEq_f_eq (high_bit) (Into_f_into (1))) (done) *)\n(*     then *)\n(*       let done := true in *)\n(*       (count,done) *)\n(*     else *)\n(*       let count := Add_f_add (count) (Into_f_into (1)) in *)\n(*       (count,done)) in *)\n(*   count. 
*)\n\n(* Definition ctlz_u64 (x : t_u64) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let done := false in *)\n(*   let (count,done) := fold_range (0) (Into_f_into (v_BITS177666292)) (fun (count,done) _ => *)\n(*     true) ((count,done)) (fun (count,done) i => *)\n(*     let high_bit : t_u32 := Into_f_into (Shr_f_shr (Shl_f_shl (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (Sub_f_sub (v_BITS177666292) (Into_f_into (1))))) in *)\n(*     if *)\n(*       orb (PartialEq_f_eq (high_bit) (Into_f_into (1))) (done) *)\n(*     then *)\n(*       let done := true in *)\n(*       (count,done) *)\n(*     else *)\n(*       let count := Add_f_add (count) (Into_f_into (1)) in *)\n(*       (count,done)) in *)\n(*   count. *)\n\n(* Definition ctlz_u8 (x : t_u8) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let done := false in *)\n(*   let (count,done) := fold_range (0) (Into_f_into (v_BITS690311813)) (fun (count,done) _ => *)\n(*     true) ((count,done)) (fun (count,done) i => *)\n(*     let high_bit : t_u32 := Into_f_into (Shr_f_shr (Shl_f_shl (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (Sub_f_sub (v_BITS690311813) (Into_f_into (1))))) in *)\n(*     if *)\n(*       orb (PartialEq_f_eq (high_bit) (Into_f_into (1))) (done) *)\n(*     then *)\n(*       let done := true in *)\n(*       (count,done) *)\n(*     else *)\n(*       let count := Add_f_add (count) (Into_f_into (1)) in *)\n(*       (count,done)) in *)\n(*   count. 
*)\n\n(* Definition ctlz_usize (x : t_usize) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let done := false in *)\n(*   let (count,done) := fold_range (0) (Into_f_into (v_BITS229952196)) (fun (count,done) _ => *)\n(*     true) ((count,done)) (fun (count,done) i => *)\n(*     let high_bit : t_u32 := Into_f_into (Shr_f_shr (Shl_f_shl (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (Sub_f_sub (v_BITS229952196) (Into_f_into (1))))) in *)\n(*     if *)\n(*       orb (PartialEq_f_eq (high_bit) (Into_f_into (1))) (done) *)\n(*     then *)\n(*       let done := true in *)\n(*       (count,done) *)\n(*     else *)\n(*       let count := Add_f_add (count) (Into_f_into (1)) in *)\n(*       (count,done)) in *)\n(*   count. *)\n\n(* Definition ctpop_u128 (x : t_u128) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let count := fold_range (0) (Into_f_into (v_BITS136999051)) (fun count _ => *)\n(*     true) (count) (fun count i => *)\n(*     Add_f_add (count) (Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))))) in *)\n(*   count. *)\n\n(* Definition ctpop_u16 (x : t_u16) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let count := fold_range (0) (Into_f_into (v_BITS277333551)) (fun count _ => *)\n(*     true) (count) (fun count i => *)\n(*     Add_f_add (count) (Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))))) in *)\n(*   count. *)\n\n(* Definition ctpop_u32 (x : t_u32) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let count := fold_range (0) (Into_f_into (v_BITS473478051)) (fun count _ => *)\n(*     true) (count) (fun count i => *)\n(*     Add_f_add (count) (Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))))) in *)\n(*   count. 
*)\n\n(* Definition ctpop_u64 (x : t_u64) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let count := fold_range (0) (Into_f_into (v_BITS177666292)) (fun count _ => *)\n(*     true) (count) (fun count i => *)\n(*     Add_f_add (count) (Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))))) in *)\n(*   count. *)\n\n(* Definition ctpop_u8 (x : t_u8) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let count := fold_range (0) (Into_f_into (v_BITS690311813)) (fun count _ => *)\n(*     true) (count) (fun count i => *)\n(*     Add_f_add (count) (Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))))) in *)\n(*   count. *)\n\n(* Definition ctpop_usize (x : t_usize) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let count := fold_range (0) (Into_f_into (v_BITS229952196)) (fun count _ => *)\n(*     true) (count) (fun count i => *)\n(*     Add_f_add (count) (Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))))) in *)\n(*   count. *)\n\n(* Definition cttz_u128 (x : t_u128) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let done := false in *)\n(*   let (count,done) := fold_range (0) (Into_f_into (v_BITS136999051)) (fun (count,done) _ => *)\n(*     true) ((count,done)) (fun (count,done) i => *)\n(*     let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *)\n(*     if *)\n(*       orb (PartialEq_f_eq (low_bit) (Into_f_into (1))) (done) *)\n(*     then *)\n(*       let done := true in *)\n(*       (count,done) *)\n(*     else *)\n(*       let count := Add_f_add (count) (Into_f_into (1)) in *)\n(*       (count,done)) in *)\n(*   count. 
*)\n\n(* Definition cttz_u16 (x : t_u16) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let done := false in *)\n(*   let (count,done) := fold_range (0) (Into_f_into (v_BITS277333551)) (fun (count,done) _ => *)\n(*     true) ((count,done)) (fun (count,done) i => *)\n(*     let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *)\n(*     if *)\n(*       orb (PartialEq_f_eq (low_bit) (Into_f_into (1))) (done) *)\n(*     then *)\n(*       let done := true in *)\n(*       (count,done) *)\n(*     else *)\n(*       let count := Add_f_add (count) (Into_f_into (1)) in *)\n(*       (count,done)) in *)\n(*   count. *)\n\n(* Definition cttz_u32 (x : t_u32) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let done := false in *)\n(*   let (count,done) := fold_range (0) (Into_f_into (v_BITS473478051)) (fun (count,done) _ => *)\n(*     true) ((count,done)) (fun (count,done) i => *)\n(*     let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *)\n(*     if *)\n(*       orb (PartialEq_f_eq (low_bit) (Into_f_into (1))) (done) *)\n(*     then *)\n(*       let done := true in *)\n(*       (count,done) *)\n(*     else *)\n(*       let count := Add_f_add (count) (Into_f_into (1)) in *)\n(*       (count,done)) in *)\n(*   count. 
*)\n\n(* Definition cttz_u64 (x : t_u64) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let done := false in *)\n(*   let (count,done) := fold_range (0) (Into_f_into (v_BITS177666292)) (fun (count,done) _ => *)\n(*     true) ((count,done)) (fun (count,done) i => *)\n(*     let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *)\n(*     if *)\n(*       orb (PartialEq_f_eq (low_bit) (Into_f_into (1))) (done) *)\n(*     then *)\n(*       let done := true in *)\n(*       (count,done) *)\n(*     else *)\n(*       let count := Add_f_add (count) (Into_f_into (1)) in *)\n(*       (count,done)) in *)\n(*   count. *)\n\n(* Definition cttz_u8 (x : t_u8) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let done := false in *)\n(*   let (count,done) := fold_range (0) (Into_f_into (v_BITS690311813)) (fun (count,done) _ => *)\n(*     true) ((count,done)) (fun (count,done) i => *)\n(*     let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *)\n(*     if *)\n(*       orb (PartialEq_f_eq (low_bit) (Into_f_into (1))) (done) *)\n(*     then *)\n(*       let done := true in *)\n(*       (count,done) *)\n(*     else *)\n(*       let count := Add_f_add (count) (Into_f_into (1)) in *)\n(*       (count,done)) in *)\n(*   count. 
*)\n\n(* Definition cttz_usize (x : t_usize) : t_u32 := *)\n(*   let count : t_u32 := Into_f_into (0) in *)\n(*   let done := false in *)\n(*   let (count,done) := fold_range (0) (Into_f_into (v_BITS229952196)) (fun (count,done) _ => *)\n(*     true) ((count,done)) (fun (count,done) i => *)\n(*     let low_bit : t_u32 := Into_f_into (BitAnd_f_bitand (Shr_f_shr (Clone_f_clone (x)) (Into_f_into (i))) (Into_f_into (1))) in *)\n(*     if *)\n(*       orb (PartialEq_f_eq (low_bit) (Into_f_into (1))) (done) *)\n(*     then *)\n(*       let done := true in *)\n(*       (count,done) *)\n(*     else *)\n(*       let count := Add_f_add (count) (Into_f_into (1)) in *)\n(*       (count,done)) in *)\n(*   count. *)\n\n(* Definition count_ones202509899 (self : t_u8) : t_u32 := *)\n(*   run (let hoist13 := ControlFlow_Break (ctpop_u8 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist13))). *)\n\n(* Definition leading_zeros75047366 (self : t_u8) : t_u32 := *)\n(*   run (let hoist14 := ControlFlow_Break (ctlz_u8 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist14))). *)\n\n(* Definition swap_bytes657156997 (self : t_u8) : t_u8 := *)\n(*   Into_f_into (bswap_u8 (self)). *)\n\n(* Definition from_be746282521 (x : t_u8) : t_u8 := *)\n(*   swap_bytes657156997 (x). *)\n\n(* Definition to_be972448780 (self : t_u8) : t_u8 := *)\n(*   swap_bytes657156997 (self). *)\n\n(* Definition trailing_zeros572929871 (self : t_u8) : t_u32 := *)\n(*   run (let hoist15 := ControlFlow_Break (cttz_u8 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist15))). *)\n\n(* Definition count_ones91875752 (self : t_u16) : t_u32 := *)\n(*   run (let hoist16 := ControlFlow_Break (ctpop_u16 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist16))). *)\n\n(* Definition leading_zeros462412478 (self : t_u16) : t_u32 := *)\n(*   run (let hoist17 := ControlFlow_Break (ctlz_u16 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist17))). 
*)\n\n(* Definition swap_bytes926722059 (self : t_u16) : t_u16 := *)\n(*   Into_f_into (bswap_u16 (self)). *)\n\n(* Definition from_be510959665 (x : t_u16) : t_u16 := *)\n(*   swap_bytes926722059 (x). *)\n\n(* Definition to_be551590602 (self : t_u16) : t_u16 := *)\n(*   swap_bytes926722059 (self). *)\n\n(* Definition trailing_zeros421474733 (self : t_u16) : t_u32 := *)\n(*   run (let hoist18 := ControlFlow_Break (cttz_u16 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist18))). *)\n\n(* Definition count_ones776185738 (self : t_u32) : t_u32 := *)\n(*   run (let hoist19 := ControlFlow_Break (ctpop_u32 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist19))). *)\n\n(* Definition leading_zeros698221972 (self : t_u32) : t_u32 := *)\n(*   run (let hoist20 := ControlFlow_Break (ctlz_u32 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist20))). *)\n\n(* Definition swap_bytes320480126 (self : t_u32) : t_u32 := *)\n(*   Into_f_into (bswap_u32 (self)). *)\n\n(* Definition from_be664756649 (x : t_u32) : t_u32 := *)\n(*   swap_bytes320480126 (x). *)\n\n(* Definition to_be82825962 (self : t_u32) : t_u32 := *)\n(*   swap_bytes320480126 (self). *)\n\n(* Definition trailing_zeros1061560720 (self : t_u32) : t_u32 := *)\n(*   run (let hoist21 := ControlFlow_Break (cttz_u32 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist21))). *)\n\n(* Definition count_ones235885653 (self : t_u64) : t_u32 := *)\n(*   run (let hoist22 := ControlFlow_Break (ctpop_u64 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist22))). *)\n\n(* Definition leading_zeros338302110 (self : t_u64) : t_u32 := *)\n(*   run (let hoist23 := ControlFlow_Break (ctlz_u64 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist23))). *)\n\n(* Definition swap_bytes722254271 (self : t_u64) : t_u64 := *)\n(*   Into_f_into (bswap_u64 (self)). *)\n\n(* Definition from_be16013635 (x : t_u64) : t_u64 := *)\n(*   swap_bytes722254271 (x). 
*)\n\n(* Definition to_be376714729 (self : t_u64) : t_u64 := *)\n(*   swap_bytes722254271 (self). *)\n\n(* Definition trailing_zeros188346231 (self : t_u64) : t_u32 := *)\n(*   run (let hoist24 := ControlFlow_Break (cttz_u64 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist24))). *)\n\n(* Definition count_ones926736261 (self : t_u128) : t_u32 := *)\n(*   run (let hoist25 := ControlFlow_Break (ctpop_u128 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist25))). *)\n\n(* Definition leading_zeros19644612 (self : t_u128) : t_u32 := *)\n(*   run (let hoist26 := ControlFlow_Break (ctlz_u128 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist26))). *)\n\n(* Definition swap_bytes420879368 (self : t_u128) : t_u128 := *)\n(*   Into_f_into (bswap_u128 (self)). *)\n\n(* Definition from_be191085771 (x : t_u128) : t_u128 := *)\n(*   swap_bytes420879368 (x). *)\n\n(* Definition to_be555075987 (self : t_u128) : t_u128 := *)\n(*   swap_bytes420879368 (self). *)\n\n(* Definition trailing_zeros821715250 (self : t_u128) : t_u32 := *)\n(*   run (let hoist27 := ControlFlow_Break (cttz_u128 (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist27))). *)\n\n(* Definition count_ones441645762 (self : t_usize) : t_u32 := *)\n(*   run (let hoist28 := ControlFlow_Break (ctpop_usize (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist28))). *)\n\n(* Definition leading_zeros905233489 (self : t_usize) : t_u32 := *)\n(*   run (let hoist29 := ControlFlow_Break (ctlz_usize (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist29))). *)\n\n(* Definition swap_bytes268673424 (self : t_usize) : t_usize := *)\n(*   Into_f_into (bswap_usize (self)). *)\n\n(* Definition from_be607978059 (x : t_usize) : t_usize := *)\n(*   swap_bytes268673424 (x). *)\n\n(* Definition to_be561847134 (self : t_usize) : t_usize := *)\n(*   swap_bytes268673424 (self). 
*)\n\n(* Definition trailing_zeros42066260 (self : t_usize) : t_u32 := *)\n(*   run (let hoist30 := ControlFlow_Break (cttz_usize (self)) in *)\n(*   ControlFlow_Continue (never_to_any (hoist30))). *)\n\n#[global] Instance t_Div_345870802 : t_Div ((t_i8)) ((t_i8)) :=\n  {\n    Div_f_Output := t_i8;\n    Div_f_div := fun  (self : t_i8) (other : t_i8)=>\n      Build_t_i8 (Div_f_div (i8_0 self) (i8_0 other));\n  }.\n\n#[global] Instance t_Div_69196905 : t_Div ((t_i16)) ((t_i16)) :=\n  {\n    Div_f_Output := t_i16;\n    Div_f_div := fun  (self : t_i16) (other : t_i16)=>\n      Build_t_i16 (Div_f_div (i16_0 self) (i16_0 other));\n  }.\n\n#[global] Instance t_Div_222178666 : t_Div ((t_i32)) ((t_i32)) :=\n  {\n    Div_f_Output := t_i32;\n    Div_f_div := fun  (self : t_i32) (other : t_i32)=>\n      Build_t_i32 (Div_f_div (i32_0 self) (i32_0 other));\n  }.\n\n#[global] Instance t_Div_551701934 : t_Div ((t_i64)) ((t_i64)) :=\n  {\n    Div_f_Output := t_i64;\n    Div_f_div := fun  (self : t_i64) (other : t_i64)=>\n      Build_t_i64 (Div_f_div (i64_0 self) (i64_0 other));\n  }.\n\n#[global] Instance t_Div_650346214 : t_Div ((t_i128)) ((t_i128)) :=\n  {\n    Div_f_Output := t_i128;\n    Div_f_div := fun  (self : t_i128) (other : t_i128)=>\n      Build_t_i128 (Div_f_div (i128_0 self) (i128_0 other));\n  }.\n\n#[global] Instance t_Div_911978922 : t_Div ((t_isize)) ((t_isize)) :=\n  {\n    Div_f_Output := t_isize;\n    Div_f_div := fun  (self : t_isize) (other : t_isize)=>\n      Build_t_isize (Div_f_div (isize_0 self) (isize_0 other));\n  }.\n\n#[global] Instance t_Rem_580678374 : t_Rem ((t_i8)) ((t_i8)) :=\n  {\n    Rem_f_Output := t_i8;\n    Rem_f_rem := fun  (self : t_i8) (other : t_i8)=>\n      Build_t_i8 (Rem_f_rem (i8_0 self) (i8_0 other));\n  }.\n\nDefinition rem_euclid622298453 (self : t_i8) (rhs : t_i8) : t_i8 :=\n  let r := Rem_f_rem (self) (Clone_f_clone (rhs)) in\n  if\n    PartialOrd_f_lt (r) (Into_f_into (0))\n  then\n    wrapping_add634491935 (r) 
(wrapping_abs400396545 (rhs))\n  else\n    r.\n\n#[global] Instance t_Rem_532407972 : t_Rem ((t_i16)) ((t_i16)) :=\n  {\n    Rem_f_Output := t_i16;\n    Rem_f_rem := fun  (self : t_i16) (other : t_i16)=>\n      Build_t_i16 (Rem_f_rem (i16_0 self) (i16_0 other));\n  }.\n\nDefinition rem_euclid158017644 (self : t_i16) (rhs : t_i16) : t_i16 :=\n  let r := Rem_f_rem (self) (Clone_f_clone (rhs)) in\n  if\n    PartialOrd_f_lt (r) (Into_f_into (0))\n  then\n    wrapping_add868559108 (r) (wrapping_abs229076826 (rhs))\n  else\n    r.\n\n#[global] Instance t_Rem_406274620 : t_Rem ((t_i32)) ((t_i32)) :=\n  {\n    Rem_f_Output := t_i32;\n    Rem_f_rem := fun  (self : t_i32) (other : t_i32)=>\n      Build_t_i32 (Rem_f_rem (i32_0 self) (i32_0 other));\n  }.\n\nDefinition rem_euclid881249982 (self : t_i32) (rhs : t_i32) : t_i32 :=\n  let r := Rem_f_rem (self) (Clone_f_clone (rhs)) in\n  if\n    PartialOrd_f_lt (r) (Into_f_into (0))\n  then\n    wrapping_add475006616 (r) (wrapping_abs729536875 (rhs))\n  else\n    r.\n\n#[global] Instance t_Rem_296096507 : t_Rem ((t_i64)) ((t_i64)) :=\n  {\n    Rem_f_Output := t_i64;\n    Rem_f_rem := fun  (self : t_i64) (other : t_i64)=>\n      Build_t_i64 (Rem_f_rem (i64_0 self) (i64_0 other));\n  }.\n\nDefinition rem_euclid1057082210 (self : t_i64) (rhs : t_i64) : t_i64 :=\n  let r := Rem_f_rem (self) (Clone_f_clone (rhs)) in\n  if\n    PartialOrd_f_lt (r) (Into_f_into (0))\n  then\n    wrapping_add590074241 (r) (wrapping_abs285829312 (rhs))\n  else\n    r.\n\n#[global] Instance t_Rem_773614977 : t_Rem ((t_i128)) ((t_i128)) :=\n  {\n    Rem_f_Output := t_i128;\n    Rem_f_rem := fun  (self : t_i128) (other : t_i128)=>\n      Build_t_i128 (Rem_f_rem (i128_0 self) (i128_0 other));\n  }.\n\nDefinition rem_euclid254910751 (self : t_i128) (rhs : t_i128) : t_i128 :=\n  let r := Rem_f_rem (self) (Clone_f_clone (rhs)) in\n  if\n    PartialOrd_f_lt (r) (Into_f_into (0))\n  then\n    wrapping_add251385439 (r) (wrapping_abs281925696 (rhs))\n  else\n    
r.\n\n#[global] Instance t_Rem_136872616 : t_Rem ((t_isize)) ((t_isize)) :=\n  {\n    Rem_f_Output := t_isize;\n    Rem_f_rem := fun  (self : t_isize) (other : t_isize)=>\n      Build_t_isize (Rem_f_rem (isize_0 self) (isize_0 other));\n  }.\n\nDefinition rem_euclid828379367 (self : t_isize) (rhs : t_isize) : t_isize :=\n  let r := Rem_f_rem (self) (Clone_f_clone (rhs)) in\n  if\n    PartialOrd_f_lt (r) (Into_f_into (0))\n  then\n    wrapping_add226040243 (r) (wrapping_abs347300819 (rhs))\n  else\n    r.\n\n#[global] Instance t_Not_500984294 : t_Not ((t_u8)) :=\n  {\n    Not_f_Output := t_u8;\n    Not_f_not := fun  (self : t_u8)=>\n      Build_t_u8 (Not_f_not (u8_0 self));\n  }.\n\n(* Definition count_zeros558337492 (self : t_u8) : t_u32 := *)\n(*   count_ones202509899 (Not_f_not (self)). *)\n\n(* Definition leading_ones55148479 (self : t_u8) : t_u32 := *)\n(*   leading_zeros75047366 (Not_f_not (self)). *)\n\n(* Definition trailing_ones359778731 (self : t_u8) : t_u32 := *)\n(*   trailing_zeros572929871 (Not_f_not (self)). *)\n\n#[global] Instance t_Not_560691647 : t_Not ((t_u16)) :=\n  {\n    Not_f_Output := t_u16;\n    Not_f_not := fun  (self : t_u16)=>\n      Build_t_u16 (Not_f_not (u16_0 self));\n  }.\n\n(* Definition count_zeros199825317 (self : t_u16) : t_u32 := *)\n(*   count_ones91875752 (Not_f_not (self)). *)\n\n(* Definition leading_ones164277656 (self : t_u16) : t_u32 := *)\n(*   leading_zeros462412478 (Not_f_not (self)). *)\n\n(* Definition trailing_ones903944727 (self : t_u16) : t_u32 := *)\n(*   trailing_zeros421474733 (Not_f_not (self)). *)\n\n#[global] Instance t_Not_220208504 : t_Not ((t_u32)) :=\n  {\n    Not_f_Output := t_u32;\n    Not_f_not := fun  (self : t_u32)=>\n      Build_t_u32 (Not_f_not (u32_0 self));\n  }.\n\n(* Definition count_zeros942566041 (self : t_u32) : t_u32 := *)\n(*   count_ones776185738 (Not_f_not (self)). *)\n\n(* Definition leading_ones766486760 (self : t_u32) : t_u32 := *)\n(*   leading_zeros698221972 (Not_f_not (self)). 
*)\n\n(* Definition trailing_ones223371510 (self : t_u32) : t_u32 := *)\n(*   trailing_zeros1061560720 (Not_f_not (self)). *)\n\n#[global] Instance t_Not_655044209 : t_Not ((t_u64)) :=\n  {\n    Not_f_Output := t_u64;\n    Not_f_not := fun  (self : t_u64)=>\n      Build_t_u64 (Not_f_not (u64_0 self));\n  }.\n\n(* Definition count_zeros60346158 (self : t_u64) : t_u32 := *)\n(*   count_ones235885653 (Not_f_not (self)). *)\n\n(* Definition leading_ones404666910 (self : t_u64) : t_u32 := *)\n(*   leading_zeros338302110 (Not_f_not (self)). *)\n\n(* Definition trailing_ones601201120 (self : t_u64) : t_u32 := *)\n(*   trailing_zeros188346231 (Not_f_not (self)). *)\n\n#[global] Instance t_Not_851738617 : t_Not ((t_u128)) :=\n  {\n    Not_f_Output := t_u128;\n    Not_f_not := fun  (self : t_u128)=>\n      Build_t_u128 (Not_f_not (u128_0 self));\n  }.\n\n(* Definition count_zeros824862815 (self : t_u128) : t_u32 := *)\n(*   count_ones926736261 (Not_f_not (self)). *)\n\n(* Definition leading_ones475503572 (self : t_u128) : t_u32 := *)\n(*   leading_zeros19644612 (Not_f_not (self)). *)\n\n(* Definition trailing_ones705845381 (self : t_u128) : t_u32 := *)\n(*   trailing_zeros821715250 (Not_f_not (self)). *)\n\n#[global] Instance t_Not_677551814 : t_Not ((t_usize)) :=\n  {\n    Not_f_Output := t_usize;\n    Not_f_not := fun  (self : t_usize)=>\n      Build_t_usize (Not_f_not (usize_0 self));\n  }.\n\n(* Definition count_zeros73479642 (self : t_usize) : t_u32 := *)\n(*   count_ones441645762 (Not_f_not (self)). *)\n\n(* Definition leading_ones667660708 (self : t_usize) : t_u32 := *)\n(*   leading_zeros905233489 (Not_f_not (self)). *)\n\n(* Definition trailing_ones979548463 (self : t_usize) : t_u32 := *)\n(*   trailing_zeros42066260 (Not_f_not (self)). 
*)\n\nRecord t_TryFromSliceError : Type :=\n  {\n    TryFromSliceError_0 : unit;\n  }.\nArguments Build_t_TryFromSliceError.\nArguments TryFromSliceError_0.\n#[export] Instance settable_t_TryFromSliceError : Settable _ :=\n  settable! (Build_t_TryFromSliceError) <TryFromSliceError_0>.\nNotation \"'TryFromSliceError'\" := Build_t_TryFromSliceError.\n\nDefinition t_Seq (v_T : Type) `{t_Sized (v_T)} : Type := list v_T.\n\n#[global] Instance t_Clone_640571940 `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_Clone ((t_Seq ((v_T)))) :=\n  {\n    Clone_f_clone := fun  (self : t_Seq ((v_T)))=>\n      self;\n  }.\n\nDefinition t_LIST (v_T : Type) `{t_Sized (v_T)} : Type := list v_T.\nNotation \"'LIST_NIL'\" := nil.\nNotation \"'LIST_CONS'\" := (fun a b => cons b a).\n\nDefinition nil `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} '(_ : unit) : t_Seq ((v_T)) := nil.\n\nRecord t_Slice (v_T : Type) `{t_Sized (v_T)} : Type :=\n  {\n    Slice_f_v : t_Seq ((v_T));\n  }.\nArguments Build_t_Slice (_) {_}.\nArguments Slice_f_v {_} {_}.\n#[export] Instance settable_t_Slice `{v_T : Type} `{t_Sized (v_T)} : Settable _ :=\n  settable! (Build_t_Slice v_T) <Slice_f_v>.\n\n(* Instance t_From_692299963 `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_From ((t_Slice ((v_T)))) ((t_Slice v_T)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_Slice v_T)=> *)\n(*       t_Slice (t_Seq (impl__to_vec (x))); *)\n(*   }. *)\n\nRecord t_Array (v_T : Type) (v_N : t_usize) `{t_Sized (v_T)} : Type :=\n  {\n    Array_f_v : t_Slice ((v_T));\n  }.\nArguments Build_t_Array {_} {_} {_}.\nArguments Array_f_v {_} {_} {_}.\n#[export] Instance settable_t_Array `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} : Settable _ :=\n  settable! 
(@Build_t_Array v_T v_N _) <Array_f_v>.\n\n#[global] Instance t_Clone_962303223 `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_Clone ((t_Array ((v_T)) (v_N))) :=\n  {\n    Clone_f_clone := fun  (self : t_Array ((v_T)) (v_N))=>\n      Build_t_Array (Clone_f_clone (Array_f_v self));\n  }.\n\nDefinition cast `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} `{t_Clone (v_T)} (self : t_Array ((v_T)) (v_N)) : t_Slice ((v_T)) :=\n  Array_f_v self.\n\nFrom Core Require Import Core_Ops_Index.\n\nInstance t_Index_927562605 `{v_T : Type} `{v_I : Type} `{v_N : t_usize} `{t_Sized (v_T)} `{t_Sized (v_I)} `{t_Clone (v_T)} `{t_Index (t_Slice ((v_T))) (v_I)} : t_Index ((t_Array ((v_T)) (v_N))) ((v_I)) :=\n  {\n    Index_f_Output := Index_f_Output;\n    Index_f_index := fun  (self : t_Array ((v_T)) (v_N)) (index : v_I)=>\n      Index_f_index (cast (self)) (index);\n  }.\n\n(* Instance t_From_684363179 `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_From ((t_Array (v_T) (v_N))) ((t_Array ((v_T)) (v_N))) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_Array ((v_T)) (v_N))=> *)\n(*       match TryInto_f_try_into (Seq_f_v Slice_f_v Array_f_v x) with *)\n(*       | Result_Ok (x) => *)\n(*         x *)\n(*       | _ => *)\n(*         never_to_any (panic_fmt (impl_2__new_const ([\"some error?\"%string]))) *)\n(*       end; *)\n(*   }. 
*)\n\n#[global] Instance t_Index_324031838 `{v_T : Type} `{v_I : Type} `{t_Sized (v_T)} `{t_Sized (v_I)} `{v_SliceIndex (v_I) (t_Slice ((v_T)))} : t_Index ((t_Slice ((v_T)))) ((v_I)) :=\n  {\n    Index_f_Output := SliceIndex_f_Output;\n    Index_f_index := fun  (self : t_Slice ((v_T))) (index : v_I)=>\n      SliceIndex_f_index (index) (self);\n  }.\n\nDefinition cons `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) (t : v_T) : t_Seq ((v_T)) :=\n  cons s t.\n\n(* Instance t_From_1005673342 `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_From ((t_Array ((v_T)) (v_N))) ((t_Array (v_T) (v_N))) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_Array (v_T) (v_N))=> *)\n(*       t_Array (t_Slice (t_Seq (impl__to_vec (Index_f_index (x) (Build_t_RangeFull))))); *)\n(*   }. *)\n\n(* Instance v_SliceIndex_1030023794 `{v_T : Type} `{t_Sized (v_T)} : v_SliceIndex ((t_RangeFull)) ((t_Slice ((v_T)))) := *)\n(*   { *)\n(*     SliceIndex_f_Output := t_Slice ((v_T)); *)\n(*     SliceIndex_f_index := fun  (self : t_RangeFull) (slice : t_Slice ((v_T)))=> *)\n(*       slice; *)\n(*   }. *)\n\n(* Instance t_AsRef_175264108 `{v_T : Type} `{v_N : t_usize} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_AsRef ((t_Array ((v_T)) (v_N))) ((t_Slice ((v_T)))) := *)\n(*   { *)\n(*     AsRef_f_as_ref := fun  (self : t_Array ((v_T)) (v_N))=> *)\n(*       Index_f_index (self) (Build_t_RangeFull); *)\n(*   }. *)\n\nDefinition match_list `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) : t_LIST ((v_T)) := s.\n\n(* Fixpoint from_u128_binary (x : t_u128) `{PartialEq_f_ne (x) (0) = true} : t_Positive := *)\n(*   if *)\n(*     PartialEq_f_eq (x) (1) *)\n(*   then *)\n(*     xH *)\n(*   else *)\n(*     if *)\n(*       PartialEq_f_eq (Rem_f_rem (x) (2)) (0) *)\n(*     then *)\n(*       xO (from_u128_binary (Div_f_div (x) (Build_t_u128 (Build_t_U128 2)))) *)\n(*     else *)\n(*       xI (from_u128_binary (Div_f_div (x) (2))). 
*)\n\n(* Instance t_From_383682059 : t_From ((t_HaxInt)) ((t_u128)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_u128)=> *)\n(*       if *)\n(*         PartialEq_f_eq (x) (0) *)\n(*       then *)\n(*         v_HaxInt_ZERO *)\n(*       else *)\n(*         positive_to_int (from_u128_binary (x)); *)\n(*   }. *)\n\n(* Instance t_From_394907254 : t_From ((t_Z)) ((t_i128)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_i128)=> *)\n(*       match Ord_f_cmp (x) (0) with *)\n(*       | Ordering_Equal => *)\n(*         Z_ZERO *)\n(*       | Ordering_Less => *)\n(*         Z_NEG (from_u128_binary (impl__i128__unsigned_abs (x))) *)\n(*       | Ordering_Greater => *)\n(*         Z_POS (from_u128_binary (impl__i128__unsigned_abs (x))) *)\n(*       end; *)\n(*   }. *)\n\n(* Fixpoint from_u16_binary (x : t_u16) `{ne (x) (0) = true} : t_Positive := *)\n(*   if *)\n(*     t_PartialEq_f_eq (x) (1) *)\n(*   then *)\n(*     xH *)\n(*   else *)\n(*     if *)\n(*       t_PartialEq_f_eq (t_Rem_f_rem (x) (2)) (0) *)\n(*     then *)\n(*       xO (from_u16_binary (t_Div_f_div (x) (2))) *)\n(*     else *)\n(*       xI (from_u16_binary (t_Div_f_div (x) (2))). *)\n\n(* Instance t_From_283547720 : t_From ((t_HaxInt)) ((t_u16)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_u16)=> *)\n(*       if *)\n(*         t_PartialEq_f_eq (x) (0) *)\n(*       then *)\n(*         v_HaxInt_ZERO *)\n(*       else *)\n(*         positive_to_int (from_u16_binary (x)); *)\n(*   }. *)\n\n(* Instance t_From_960274744 : t_From ((t_Z)) ((t_i16)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_i16)=> *)\n(*       match Ord_f_cmp (x) (0) with *)\n(*       | Ordering_Equal => *)\n(*         Z_ZERO *)\n(*       | Ordering_Less => *)\n(*         Z_NEG (from_u16_binary (impl__i16__unsigned_abs (x))) *)\n(*       | Ordering_Greater => *)\n(*         Z_POS (from_u16_binary (impl__i16__unsigned_abs (x))) *)\n(*       end; *)\n(*   }. 
*)\n\n(* Fixpoint from_u32_binary (x : t_u32) `{ne (x) (0) = true} : t_Positive := *)\n(*   if *)\n(*     t_PartialEq_f_eq (x) (1) *)\n(*   then *)\n(*     xH *)\n(*   else *)\n(*     if *)\n(*       t_PartialEq_f_eq (t_Rem_f_rem (x) (2)) (0) *)\n(*     then *)\n(*       xO (from_u32_binary (t_Div_f_div (x) (2))) *)\n(*     else *)\n(*       xI (from_u32_binary (t_Div_f_div (x) (2))). *)\n\n(* Instance t_From_247317262 : t_From ((t_HaxInt)) ((t_u32)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_u32)=> *)\n(*       if *)\n(*         t_PartialEq_f_eq (x) (0) *)\n(*       then *)\n(*         v_HaxInt_ZERO *)\n(*       else *)\n(*         positive_to_int (from_u32_binary (x)); *)\n(*   }. *)\n\n(* Instance t_From_1033810922 : t_From ((t_Z)) ((t_i32)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_i32)=> *)\n(*       match Ord_f_cmp (x) (0) with *)\n(*       | Ordering_Equal => *)\n(*         Z_ZERO *)\n(*       | Ordering_Less => *)\n(*         Z_NEG (from_u32_binary (impl__i32__unsigned_abs (x))) *)\n(*       | Ordering_Greater => *)\n(*         Z_POS (from_u32_binary (impl__i32__unsigned_abs (x))) *)\n(*       end; *)\n(*   }. *)\n\n(* Fixpoint from_u64_binary (x : t_u64) `{ne (x) (0) = true} : t_Positive := *)\n(*   if *)\n(*     t_PartialEq_f_eq (x) (1) *)\n(*   then *)\n(*     xH *)\n(*   else *)\n(*     if *)\n(*       t_PartialEq_f_eq (t_Rem_f_rem (x) (2)) (0) *)\n(*     then *)\n(*       xO (from_u64_binary (t_Div_f_div (x) (2))) *)\n(*     else *)\n(*       xI (from_u64_binary (t_Div_f_div (x) (2))). *)\n\n(* Instance t_From_703205527 : t_From ((t_HaxInt)) ((t_u64)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_u64)=> *)\n(*       if *)\n(*         t_PartialEq_f_eq (x) (0) *)\n(*       then *)\n(*         v_HaxInt_ZERO *)\n(*       else *)\n(*         positive_to_int (from_u64_binary (x)); *)\n(*   }. 
*)\n\n(* Instance t_From_494553464 : t_From ((t_Z)) ((t_i64)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_i64)=> *)\n(*       match Ord_f_cmp (x) (0) with *)\n(*       | Ordering_Equal => *)\n(*         Z_ZERO *)\n(*       | Ordering_Less => *)\n(*         Z_NEG (from_u64_binary (impl__i64__unsigned_abs (x))) *)\n(*       | Ordering_Greater => *)\n(*         Z_POS (from_u64_binary (impl__i64__unsigned_abs (x))) *)\n(*       end; *)\n(*   }. *)\n\n(* Fixpoint from_u8_binary (x : t_u8) `{ne (x) (0) = true} : t_Positive := *)\n(*   if *)\n(*     t_PartialEq_f_eq (x) (1) *)\n(*   then *)\n(*     xH *)\n(*   else *)\n(*     if *)\n(*       t_PartialEq_f_eq (t_Rem_f_rem (x) (2)) (0) *)\n(*     then *)\n(*       xO (from_u8_binary (t_Div_f_div (x) (2))) *)\n(*     else *)\n(*       xI (from_u8_binary (t_Div_f_div (x) (2))). *)\n\n(* Instance t_From_421078324 : t_From ((t_HaxInt)) ((t_u8)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_u8)=> *)\n(*       if *)\n(*         t_PartialEq_f_eq (x) (0) *)\n(*       then *)\n(*         v_HaxInt_ZERO *)\n(*       else *)\n(*         positive_to_int (from_u8_binary (x)); *)\n(*   }. *)\n\n(* Instance t_From_976104611 : t_From ((t_Z)) ((t_i8)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_i8)=> *)\n(*       match Ord_f_cmp (x) (0) with *)\n(*       | Ordering_Equal => *)\n(*         Z_ZERO *)\n(*       | Ordering_Less => *)\n(*         Z_NEG (from_u8_binary (impl__unsigned_abs (x))) *)\n(*       | Ordering_Greater => *)\n(*         Z_POS (from_u8_binary (impl__unsigned_abs (x))) *)\n(*       end; *)\n(*   }. *)\n\n(* Fixpoint from_usize_binary (x : t_usize) `{ne (x) (0) = true} : t_Positive := *)\n(*   if *)\n(*     t_PartialEq_f_eq (x) (1) *)\n(*   then *)\n(*     xH *)\n(*   else *)\n(*     if *)\n(*       t_PartialEq_f_eq (t_Rem_f_rem (x) (2)) (0) *)\n(*     then *)\n(*       xO (from_usize_binary (t_Div_f_div (x) (2))) *)\n(*     else *)\n(*       xI (from_usize_binary (t_Div_f_div (x) (2))). 
*)\n\n(* Instance t_From_226738852 : t_From ((t_HaxInt)) ((t_usize)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_usize)=> *)\n(*       if *)\n(*         t_PartialEq_f_eq (x) (0) *)\n(*       then *)\n(*         v_HaxInt_ZERO *)\n(*       else *)\n(*         positive_to_int (from_usize_binary (x)); *)\n(*   }. *)\n\n(* Instance t_From_235021044 : t_From ((t_Z)) ((t_isize)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_isize)=> *)\n(*       match Ord_f_cmp (x) (0) with *)\n(*       | Ordering_Equal => *)\n(*         Z_ZERO *)\n(*       | Ordering_Less => *)\n(*         Z_NEG (from_usize_binary (impl__isize__unsigned_abs (x))) *)\n(*       | Ordering_Greater => *)\n(*         Z_POS (from_usize_binary (impl__isize__unsigned_abs (x))) *)\n(*       end; *)\n(*   }. *)\n\n(* Fixpoint to_u128_binary (self : t_Positive) : t_u128 := *)\n(*   match match_positive (self) with *)\n(*   | POSITIVE_XH => *)\n(*     1 *)\n(*   | POSITIVE_XO (p) => *)\n(*     t_Mul_f_mul (to_u128_binary (p)) (2) *)\n(*   | POSITIVE_XI (p) => *)\n(*     t_Add_f_add (t_Mul_f_mul (to_u128_binary (p)) (2)) (1) *)\n(*   end. *)\n\n(* Instance t_From_312029210 : t_From ((t_u128)) ((t_HaxInt)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_HaxInt)=> *)\n(*       match match_pos (x) with *)\n(*       | POS_ZERO => *)\n(*         0 *)\n(*       | POS_POS (p) => *)\n(*         to_u128_binary (p) *)\n(*       end; *)\n(*   }. *)\n\n(* Instance t_From_166626519 : t_From ((t_i128)) ((t_Z)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_Z)=> *)\n(*       match x with *)\n(*       | Z_NEG (x) => *)\n(*         sub (neg (cast (sub (to_u128_binary (x)) (1)))) (1) *)\n(*       | Z_ZERO => *)\n(*         0 *)\n(*       | Z_POS (x) => *)\n(*         cast (to_u128_binary (x)) *)\n(*       end; *)\n(*   }. 
*)\n\n(* Fixpoint to_u16_binary (self : t_Positive) : t_u16 := *)\n(*   match match_positive (self) with *)\n(*   | POSITIVE_XH => *)\n(*     1 *)\n(*   | POSITIVE_XO (p) => *)\n(*     t_Mul_f_mul (to_u16_binary (p)) (2) *)\n(*   | POSITIVE_XI (p) => *)\n(*     t_Add_f_add (t_Mul_f_mul (to_u16_binary (p)) (2)) (1) *)\n(*   end. *)\n\n(* Instance t_From_863803022 : t_From ((t_u16)) ((t_HaxInt)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_HaxInt)=> *)\n(*       match match_pos (x) with *)\n(*       | POS_ZERO => *)\n(*         0 *)\n(*       | POS_POS (p) => *)\n(*         to_u16_binary (p) *)\n(*       end; *)\n(*   }. *)\n\n(* Instance t_From_217241508 : t_From ((t_i16)) ((t_Z)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_Z)=> *)\n(*       match x with *)\n(*       | Z_NEG (x) => *)\n(*         sub (neg (cast (sub (to_u16_binary (x)) (1)))) (1) *)\n(*       | Z_ZERO => *)\n(*         0 *)\n(*       | Z_POS (x) => *)\n(*         cast (to_u16_binary (x)) *)\n(*       end; *)\n(*   }. *)\n\n(* Fixpoint to_u32_binary (self : t_Positive) : t_u32 := *)\n(*   match match_positive (self) with *)\n(*   | POSITIVE_XH => *)\n(*     1 *)\n(*   | POSITIVE_XO (p) => *)\n(*     t_Mul_f_mul (to_u32_binary (p)) (2) *)\n(*   | POSITIVE_XI (p) => *)\n(*     t_Add_f_add (t_Mul_f_mul (to_u32_binary (p)) (2)) (1) *)\n(*   end. *)\n\n(* Instance t_From_38549956 : t_From ((t_u32)) ((t_HaxInt)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_HaxInt)=> *)\n(*       match match_pos (x) with *)\n(*       | POS_ZERO => *)\n(*         0 *)\n(*       | POS_POS (p) => *)\n(*         to_u32_binary (p) *)\n(*       end; *)\n(*   }. 
*)\n\n(* Instance t_From_567539816 : t_From ((t_i32)) ((t_Z)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_Z)=> *)\n(*       match x with *)\n(*       | Z_NEG (x) => *)\n(*         sub (neg (cast (sub (to_u32_binary (x)) (1)))) (1) *)\n(*       | Z_ZERO => *)\n(*         0 *)\n(*       | Z_POS (x) => *)\n(*         cast (to_u32_binary (x)) *)\n(*       end; *)\n(*   }. *)\n\n(* Fixpoint to_u64_binary (self : t_Positive) : t_u64 := *)\n(*   match match_positive (self) with *)\n(*   | POSITIVE_XH => *)\n(*     1 *)\n(*   | POSITIVE_XO (p) => *)\n(*     t_Mul_f_mul (to_u64_binary (p)) (2) *)\n(*   | POSITIVE_XI (p) => *)\n(*     t_Add_f_add (t_Mul_f_mul (to_u64_binary (p)) (2)) (1) *)\n(*   end. *)\n\n(* Instance t_From_100316698 : t_From ((t_u64)) ((t_HaxInt)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_HaxInt)=> *)\n(*       match match_pos (x) with *)\n(*       | POS_ZERO => *)\n(*         0 *)\n(*       | POS_POS (p) => *)\n(*         to_u64_binary (p) *)\n(*       end; *)\n(*   }. *)\n\n(* Instance t_From_99611562 : t_From ((t_i64)) ((t_Z)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_Z)=> *)\n(*       match x with *)\n(*       | Z_NEG (x) => *)\n(*         sub (neg (cast (sub (to_u64_binary (x)) (1)))) (1) *)\n(*       | Z_ZERO => *)\n(*         0 *)\n(*       | Z_POS (x) => *)\n(*         cast (to_u64_binary (x)) *)\n(*       end; *)\n(*   }. *)\n\n(* Fixpoint to_u8_binary (self : t_Positive) : t_u8 := *)\n(*   match match_positive (self) with *)\n(*   | POSITIVE_XH => *)\n(*     1 *)\n(*   | POSITIVE_XO (p) => *)\n(*     t_Mul_f_mul (to_u8_binary (p)) (2) *)\n(*   | POSITIVE_XI (p) => *)\n(*     t_Add_f_add (t_Mul_f_mul (to_u8_binary (p)) (2)) (1) *)\n(*   end. 
*)\n\n(* Instance t_From_360336196 : t_From ((t_u8)) ((t_HaxInt)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_HaxInt)=> *)\n(*       match match_pos (x) with *)\n(*       | POS_ZERO => *)\n(*         0 *)\n(*       | POS_POS (p) => *)\n(*         to_u8_binary (p) *)\n(*       end; *)\n(*   }. *)\n\n(* Instance t_From_168893964 : t_From ((t_i8)) ((t_Z)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_Z)=> *)\n(*       match x with *)\n(*       | Z_NEG (x) => *)\n(*         sub (neg (cast (sub (to_u8_binary (x)) (1)))) (1) *)\n(*       | Z_ZERO => *)\n(*         0 *)\n(*       | Z_POS (x) => *)\n(*         cast (to_u8_binary (x)) *)\n(*       end; *)\n(*   }. *)\n\n(* Fixpoint to_usize_binary (self : t_Positive) : t_usize := *)\n(*   match match_positive (self) with *)\n(*   | POSITIVE_XH => *)\n(*     1 *)\n(*   | POSITIVE_XO (p) => *)\n(*     t_Mul_f_mul (to_usize_binary (p)) (2) *)\n(*   | POSITIVE_XI (p) => *)\n(*     t_Add_f_add (t_Mul_f_mul (to_usize_binary (p)) (2)) (1) *)\n(*   end. *)\n\n(* Instance t_From_545039540 : t_From ((t_usize)) ((t_HaxInt)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_HaxInt)=> *)\n(*       match match_pos (x) with *)\n(*       | POS_ZERO => *)\n(*         0 *)\n(*       | POS_POS (p) => *)\n(*         to_usize_binary (p) *)\n(*       end; *)\n(*   }. *)\n\n(* Instance t_From_931346405 : t_From ((t_isize)) ((t_Z)) := *)\n(*   { *)\n(*     From_f_from := fun  (x : t_Z)=> *)\n(*       match x with *)\n(*       | Z_NEG (x) => *)\n(*         sub (neg (cast (sub (to_usize_binary (x)) (1)))) (1) *)\n(*       | Z_ZERO => *)\n(*         0 *)\n(*       | Z_POS (x) => *)\n(*         cast (to_usize_binary (x)) *)\n(*       end; *)\n(*   }. 
*)\n\n\n(* Instance v_SliceIndex_622480125 `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} : v_SliceIndex ((t_usize)) ((t_Slice ((v_T)))) := *)\n(*   { *)\n(*     SliceIndex_f_Output := v_T; *)\n(*     SliceIndex_f_index := fun  (self : t_usize) (slice : t_Slice ((v_T)))=> *)\n(*       let x : t_usize := Into_f_into (U64_f_v (usize_0 self)) in *)\n(*       Index_f_index (t_Index := _) (slice) (x); *)\n(*   }. *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec.\nExport Core_Base_Spec.\n\nFrom Core Require Import Core_Base_Binary.\nExport Core_Base_Binary.\n\n\n\nFrom Core Require Import Core_Base_Pos.\nExport Core_Base_Pos.\n\nFrom Core Require Import Core_Base_Z.\nExport Core_Base_Z.\n\n(* From Core Require Import Core_Base_Number_conversion. *)\n(* Export Core_Base_Number_conversion. *)\n\nFrom Core Require Import Core_Base_Seq.\nExport Core_Base_Seq.\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_Binary.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec.\nExport Core_Base_Spec.\n\nFrom Core Require Import Core_Cmp.\nExport Core_Cmp.\n\nFrom Core Require Import Core_Option.\nExport Core_Option.\n\nFrom Core Require Import Core_Clone.\nExport Core_Clone.\n\nFixpoint positive_cmp__cmp_binary_cont (x : t_Positive) (y : t_Positive) (r : t_Ordering) : t_Ordering :=\n  match match_positive (x) with\n  | POSITIVE_XH =>\n    match match_positive (y) with\n    | POSITIVE_XH =>\n      r\n    | POSITIVE_XO (q)\n    | POSITIVE_XI (q) =>\n      Ordering_Less\n    end\n  | POSITIVE_XO (p) =>\n    match match_positive (y) with\n    | POSITIVE_XH =>\n      Ordering_Greater\n    | POSITIVE_XO (q) =>\n      positive_cmp__cmp_binary_cont (p) (q) (r)\n    | POSITIVE_XI (q) =>\n      positive_cmp__cmp_binary_cont (p) (q) (Ordering_Less)\n    end\n  | POSITIVE_XI (p) =>\n    match match_positive (y) with\n    | POSITIVE_XH =>\n      Ordering_Greater\n    | POSITIVE_XO (q) =>\n      positive_cmp__cmp_binary_cont (p) (q) (Ordering_Greater)\n    | POSITIVE_XI (q) =>\n      positive_cmp__cmp_binary_cont (p) (q) (r)\n    end\n  end.\n\nDefinition positive_cmp (lhs : t_Positive) (rhs : t_Positive) : t_Ordering :=\n  positive_cmp__cmp_binary_cont (lhs) (rhs) (Ordering_Equal).\n\nDefinition positive_le (lhs : t_Positive) (rhs : t_Positive) : bool :=\n  match Option_Some (positive_cmp (lhs) (rhs)) with\n  | Option_Some (Ordering_Less\n  | Ordering_Equal) =>\n    true\n  | _ =>\n    false\n  end.\n\nFixpoint positive_pred_double (s : t_Positive) : t_Positive :=\n  match match_positive (s) with\n  | POSITIVE_XH =>\n    xH\n  | 
POSITIVE_XO (p) =>\n    xI (positive_pred_double (p))\n  | POSITIVE_XI (p) =>\n    xI (xO (p))\n  end.\n\nFixpoint positive_succ (s : t_Positive) : t_Positive :=\n  match match_positive (s) with\n  | POSITIVE_XH =>\n    xO (xH)\n  | POSITIVE_XO (q) =>\n    xI (q)\n  | POSITIVE_XI (q) =>\n    xO (positive_succ (q))\n  end.\n\nFixpoint positive_add__add (lhs : t_Positive) (rhs : t_Positive) : t_Positive :=\n  match match_positive (lhs) with\n  | POSITIVE_XH =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      xO (xH)\n    | POSITIVE_XO (q) =>\n      xI (q)\n    | POSITIVE_XI (q) =>\n      xO (positive_succ (q))\n    end\n  | POSITIVE_XO (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      xI (p)\n    | POSITIVE_XO (q) =>\n      xO (positive_add__add (p) (q))\n    | POSITIVE_XI (q) =>\n      xI (positive_add__add (p) (q))\n    end\n  | POSITIVE_XI (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      xO (positive_succ (p))\n    | POSITIVE_XO (q) =>\n      xI (positive_add__add (p) (q))\n    | POSITIVE_XI (q) =>\n      xO (positive_add__add_carry (p) (q))\n    end\n  end\n\nwith positive_add__add_carry (lhs : t_Positive) (rhs : t_Positive) : t_Positive :=\n  match match_positive (lhs) with\n  | POSITIVE_XH =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      xI (xH)\n    | POSITIVE_XO (q) =>\n      xO (positive_succ (q))\n    | POSITIVE_XI (q) =>\n      xI (positive_succ (q))\n    end\n  | POSITIVE_XO (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      xO (positive_succ (p))\n    | POSITIVE_XO (q) =>\n      xI (positive_add__add (p) (q))\n    | POSITIVE_XI (q) =>\n      xO (positive_add__add_carry (p) (q))\n    end\n  | POSITIVE_XI (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      xI (positive_succ (p))\n    | POSITIVE_XO (q) =>\n      xO (positive_add__add_carry (p) (q))\n    | POSITIVE_XI (q) =>\n      xI (positive_add__add_carry (p) (q))\n    end\n  
end.\n\nDefinition positive_add (lhs : t_Positive) (rhs : t_Positive) : t_Positive :=\n  positive_add__add (lhs) (rhs).\n\nFixpoint positive_mul (lhs : t_Positive) (rhs : t_Positive) : t_Positive :=\n  match match_positive (lhs) with\n  | POSITIVE_XH =>\n    rhs\n  | POSITIVE_XO (p) =>\n    xO (positive_mul (p) (rhs))\n  | POSITIVE_XI (p) =>\n    positive_add (Clone_f_clone (rhs)) (xO (positive_mul (p) (rhs)))\n  end.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_Number_conversion.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec.\nExport Core_Base_Spec.\n\nFrom Core Require Import Core_Base.\nExport Core_Base.\n\nFrom Core Require Import Core (t_primitive).\nExport Core (t_primitive).\n\nFrom Core Require Import Core (t_cmp).\nExport Core (t_cmp).\n\nFrom Core Require Import Core (t_convert).\nExport Core (t_convert).\n\n(* NotImplementedYet *)\n\nNotation \"'impl_24__from_u128_binary'\" := (from_u128_binary).\n\nNotation \"'impl_8'\" := (impl_8).\n\nNotation \"'impl_20'\" := (impl_20).\n\nNotation \"'impl_24__from_u16_binary'\" := (from_u16_binary).\n\nNotation \"'impl_2'\" := (impl_2).\n\nNotation \"'impl_14'\" := (impl_14).\n\nNotation \"'impl_24__from_u32_binary'\" := (from_u32_binary).\n\nNotation \"'impl_4'\" := (impl_4).\n\nNotation \"'impl_16'\" := (impl_16).\n\nNotation \"'impl_24__from_u64_binary'\" := (from_u64_binary).\n\nNotation \"'impl_6'\" := (impl_6).\n\nNotation \"'impl_18'\" := (impl_18).\n\nNotation \"'impl_24__from_u8_binary'\" := (from_u8_binary).\n\nNotation \"'impl'\" := (impl).\n\nNotation \"'impl_12'\" := (impl_12).\n\nNotation \"'impl_24__from_usize_binary'\" := (from_usize_binary).\n\nNotation \"'impl_10'\" := (impl_10).\n\nNotation \"'impl_22'\" := (impl_22).\n\nNotation \"'impl_24__to_u128_binary'\" := (to_u128_binary).\n\nNotation \"'impl_9'\" := (impl_9).\n\nNotation \"'impl_21'\" := (impl_21).\n\nNotation \"'impl_24__to_u16_binary'\" := (to_u16_binary).\n\nNotation \"'impl_3'\" := (impl_3).\n\nNotation \"'impl_15'\" := (impl_15).\n\nNotation \"'impl_24__to_u32_binary'\" := (to_u32_binary).\n\nNotation \"'impl_5'\" := 
(impl_5).\n\nNotation \"'impl_17'\" := (impl_17).\n\nNotation \"'impl_24__to_u64_binary'\" := (to_u64_binary).\n\nNotation \"'impl_7'\" := (impl_7).\n\nNotation \"'impl_19'\" := (impl_19).\n\nNotation \"'impl_24__to_u8_binary'\" := (to_u8_binary).\n\nNotation \"'impl_1'\" := (impl_1).\n\nNotation \"'impl_13'\" := (impl_13).\n\nNotation \"'impl_24__to_usize_binary'\" := (to_usize_binary).\n\nNotation \"'impl_11'\" := (impl_11).\n\nNotation \"'impl_23'\" := (impl_23).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_Pos.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec.\nExport Core_Base_Spec.\n\nFrom Core Require Import Core_Base_Binary.\nExport Core_Base_Binary.\n\nFrom Core Require Import Core_Cmp (t_Ordering).\nExport Core_Cmp (t_Ordering).\n\nDefinition haxint_double (s : t_HaxInt) : t_HaxInt :=\n  match match_pos (s) with\n  | POS_ZERO =>\n    v_HaxInt_ZERO\n  | POS_POS (p) =>\n    positive_to_int (xO (p))\n  end.\n\nDefinition haxint_shr__half (s : t_HaxInt) : t_HaxInt :=\n  match match_pos (s) with\n  | POS_ZERO =>\n    v_HaxInt_ZERO\n  | POS_POS (n) =>\n    match match_positive (n) with\n    | POSITIVE_XH =>\n      v_HaxInt_ZERO\n    | POSITIVE_XO (p) =>\n      positive_to_int (p)\n    | POSITIVE_XI (p) =>\n      positive_to_int (p)\n    end\n  end.\n\nDefinition haxint_sub__double_mask (lhs : t_HaxInt) : t_HaxInt :=\n  match match_pos (lhs) with\n  | POS_ZERO =>\n    v_HaxInt_ZERO\n  | POS_POS (p) =>\n    positive_to_int (xO (p))\n  end.\n\nDefinition haxint_sub__succ_double_mask (lhs : t_HaxInt) : t_HaxInt :=\n  match match_pos (lhs) with\n  | POS_ZERO =>\n    positive_to_int (xH)\n  | POS_POS (p) =>\n    positive_to_int (xI (p))\n  end.\n\nDefinition haxint_succ_double (s : t_HaxInt) : t_Positive :=\n  match match_pos (s) with\n  | POS_ZERO =>\n    xH\n  | POS_POS (p) =>\n    xI (p)\n  end.\n\nFixpoint bitand_binary (lhs : t_Positive) (rhs : t_Positive) : t_HaxInt :=\n  match match_positive (lhs) with\n  | POSITIVE_XH =>\n    match match_positive (rhs) with\n    | POSITIVE_XO (q) =>\n      v_HaxInt_ZERO\n    | POSITIVE_XI (_)\n    | POSITIVE_XH =>\n      v_HaxInt_ONE\n    end\n  | POSITIVE_XO (p) 
=>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      v_HaxInt_ZERO\n    | POSITIVE_XO (q)\n    | POSITIVE_XI (q) =>\n      haxint_double (bitand_binary (p) (q))\n    end\n  | POSITIVE_XI (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      v_HaxInt_ONE\n    | POSITIVE_XO (q) =>\n      haxint_double (bitand_binary (p) (q))\n    | POSITIVE_XI (q) =>\n      positive_to_int (haxint_succ_double (bitand_binary (p) (q)))\n    end\n  end.\n\nFixpoint bitor_binary (lhs : t_Positive) (rhs : t_Positive) : t_Positive :=\n  match match_positive (lhs) with\n  | POSITIVE_XH =>\n    match match_positive (rhs) with\n    | POSITIVE_XO (q) =>\n      xI (q)\n    | POSITIVE_XH =>\n      xH\n    | POSITIVE_XI (q) =>\n      xI (q)\n    end\n  | POSITIVE_XO (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      xI (p)\n    | POSITIVE_XO (q) =>\n      xO (bitor_binary (p) (q))\n    | POSITIVE_XI (q) =>\n      xI (bitor_binary (p) (q))\n    end\n  | POSITIVE_XI (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      xI (p)\n    | POSITIVE_XO (q)\n    | POSITIVE_XI (q) =>\n      xI (bitor_binary (p) (q))\n    end\n  end.\n\nDefinition haxint_bitand (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=\n  match match_pos (lhs) with\n  | POS_ZERO =>\n    v_HaxInt_ZERO\n  | POS_POS (p) =>\n    match match_pos (rhs) with\n    | POS_ZERO =>\n      v_HaxInt_ZERO\n    | POS_POS (q) =>\n      bitand_binary (p) (q)\n    end\n  end.\n\nDefinition haxint_bitor (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=\n  match match_pos (lhs) with\n  | POS_ZERO =>\n    rhs\n  | POS_POS (p) =>\n    match match_pos (rhs) with\n    | POS_ZERO =>\n      positive_to_int (p)\n    | POS_POS (q) =>\n      positive_to_int (bitor_binary (p) (q))\n    end\n  end.\n\nFixpoint haxint_bitxor__bitxor_binary (lhs : t_Positive) (rhs : t_Positive) : t_HaxInt :=\n  match match_positive (lhs) with\n  | POSITIVE_XH =>\n    match match_positive (rhs) with\n    | 
POSITIVE_XH =>\n      v_HaxInt_ZERO\n    | POSITIVE_XO (q) =>\n      positive_to_int (xI (q))\n    | POSITIVE_XI (q) =>\n      positive_to_int (xO (q))\n    end\n  | POSITIVE_XO (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      positive_to_int (xI (p))\n    | POSITIVE_XO (q) =>\n      haxint_double (haxint_bitxor__bitxor_binary (p) (q))\n    | POSITIVE_XI (q) =>\n      positive_to_int (haxint_succ_double (haxint_bitxor__bitxor_binary (p) (q)))\n    end\n  | POSITIVE_XI (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      positive_to_int (xO (p))\n    | POSITIVE_XO (q) =>\n      positive_to_int (haxint_succ_double (haxint_bitxor__bitxor_binary (p) (q)))\n    | POSITIVE_XI (q) =>\n      haxint_double (haxint_bitxor__bitxor_binary (p) (q))\n    end\n  end.\n\nDefinition haxint_bitxor (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=\n  match match_pos (lhs) with\n  | POS_ZERO =>\n    rhs\n  | POS_POS (p) =>\n    match match_pos (rhs) with\n    | POS_ZERO =>\n      positive_to_int (p)\n    | POS_POS (q) =>\n      haxint_bitxor__bitxor_binary (p) (q)\n    end\n  end.\n\nDefinition haxint_cmp (lhs : t_HaxInt) (rhs : t_HaxInt) : t_Ordering :=\n  match match_pos (lhs) with\n  | POS_ZERO =>\n    match match_pos (rhs) with\n    | POS_ZERO =>\n      Ordering_Equal\n    | POS_POS (q) =>\n      Ordering_Less\n    end\n  | POS_POS (p) =>\n    match match_pos (rhs) with\n    | POS_ZERO =>\n      Ordering_Greater\n    | POS_POS (q) =>\n      positive_cmp (p) (q)\n    end\n  end.\n\nDefinition haxint_le (lhs : t_HaxInt) (rhs : t_HaxInt) : bool :=\n  match Option_Some (haxint_cmp (lhs) (rhs)) with\n  | Option_Some (Ordering_Less\n  | Ordering_Equal) =>\n    true\n  | _ =>\n    false\n  end.\n\nDefinition haxint_lt (lhs : t_HaxInt) (rhs : t_HaxInt) : bool :=\n  match Option_Some (haxint_cmp (lhs) (rhs)) with\n  | Option_Some (Ordering_Less) =>\n    true\n  | _ =>\n    false\n  end.\n\nFixpoint haxint_shl__shl_helper (rhs : t_Unary) (lhs : 
t_HaxInt) : t_HaxInt :=\n  if\n    is_zero (Clone_f_clone (lhs))\n  then\n    lhs\n  else\n    match match_unary (rhs) with\n    | UNARY_ZERO =>\n      lhs\n    | UNARY_SUCC (n) =>\n      haxint_shl__shl_helper (n) (haxint_double (lhs))\n    end.\n\nDefinition haxint_shl (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=\n  haxint_shl__shl_helper (unary_from_int (rhs)) (lhs).\n\nFixpoint haxint_shr__shr_helper (rhs : t_Unary) (lhs : t_HaxInt) : t_HaxInt :=\n  if\n    is_zero (Clone_f_clone (lhs))\n  then\n    lhs\n  else\n    match match_unary (rhs) with\n    | UNARY_ZERO =>\n      lhs\n    | UNARY_SUCC (n) =>\n      haxint_shr__shr_helper (n) (haxint_shr__half (lhs))\n    end.\n\nDefinition haxint_shr (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=\n  haxint_shr__shr_helper (unary_from_int (rhs)) (lhs).\n\nDefinition haxint_sub__double_pred_mask (lhs : t_Positive) : t_HaxInt :=\n  match match_positive (lhs) with\n  | POSITIVE_XH =>\n    v_HaxInt_ZERO\n  | POSITIVE_XO (p) =>\n    positive_to_int (xO (positive_pred_double (p)))\n  | POSITIVE_XI (p) =>\n    positive_to_int (xO (xO (p)))\n  end.\n\nFixpoint power_of_two (s : t_Unary) : t_Positive :=\n  match match_unary (s) with\n  | UNARY_ZERO =>\n    xH\n  | UNARY_SUCC (x) =>\n    xO (power_of_two (x))\n  end.\n\nDefinition haxint_add (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=\n  match match_pos (lhs) with\n  | POS_ZERO =>\n    rhs\n  | POS_POS (p) =>\n    match match_pos (rhs) with\n    | POS_ZERO =>\n      positive_to_int (p)\n    | POS_POS (q) =>\n      positive_to_int (positive_add (p) (q))\n    end\n  end.\n\nFixpoint haxint_sub__sub_binary (lhs : t_Positive) (rhs : t_Positive) : t_HaxInt :=\n  match match_positive (lhs) with\n  | POSITIVE_XH =>\n    v_HaxInt_ZERO\n  | POSITIVE_XO (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      positive_to_int (positive_pred_double (p))\n    | POSITIVE_XO (q) =>\n      haxint_sub__double_mask (haxint_sub__sub_binary (p) (q))\n    | POSITIVE_XI (q) 
=>\n      haxint_sub__succ_double_mask (haxint_sub__sub_carry (p) (q))\n    end\n  | POSITIVE_XI (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      positive_to_int (xO (p))\n    | POSITIVE_XO (q) =>\n      haxint_sub__succ_double_mask (haxint_sub__sub_binary (p) (q))\n    | POSITIVE_XI (q) =>\n      haxint_sub__double_mask (haxint_sub__sub_binary (p) (q))\n    end\n  end\n\nwith haxint_sub__sub_carry (lhs : t_Positive) (rhs : t_Positive) : t_HaxInt :=\n  match match_positive (lhs) with\n  | POSITIVE_XH =>\n    v_HaxInt_ZERO\n  | POSITIVE_XO (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      haxint_sub__double_pred_mask (p)\n    | POSITIVE_XO (q) =>\n      haxint_sub__succ_double_mask (haxint_sub__sub_carry (p) (q))\n    | POSITIVE_XI (q) =>\n      haxint_sub__double_mask (haxint_sub__sub_carry (p) (q))\n    end\n  | POSITIVE_XI (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      positive_to_int (positive_pred_double (p))\n    | POSITIVE_XO (q) =>\n      haxint_sub__double_mask (haxint_sub__sub_binary (p) (q))\n    | POSITIVE_XI (q) =>\n      haxint_sub__succ_double_mask (haxint_sub__sub_carry (p) (q))\n    end\n  end.\n\nDefinition haxint_sub (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=\n  match match_pos (lhs) with\n  | POS_ZERO =>\n    v_HaxInt_ZERO\n  | POS_POS (p) =>\n    match match_pos (rhs) with\n    | POS_ZERO =>\n      positive_to_int (p)\n    | POS_POS (q) =>\n      haxint_sub__sub_binary (p) (q)\n    end\n  end.\n\nFixpoint haxint_divmod__divmod_binary (a : t_Positive) (b : t_Positive) : (t_HaxInt*t_HaxInt) :=\n  match match_positive (a) with\n  | POSITIVE_XH =>\n    match match_positive (b) with\n    | POSITIVE_XH =>\n      (v_HaxInt_ONE,v_HaxInt_ZERO)\n    | POSITIVE_XO (q)\n    | POSITIVE_XI (q) =>\n      (v_HaxInt_ZERO,v_HaxInt_ONE)\n    end\n  | POSITIVE_XO (a___) =>\n    let (q,r) := haxint_divmod__divmod_binary (a___) (Clone_f_clone (b)) in\n    let r___ := haxint_double (r) in\n  
  if\n      haxint_le (positive_to_int (Clone_f_clone (b))) (Clone_f_clone (r___))\n    then\n      (positive_to_int (haxint_succ_double (q)),haxint_sub (r___) (positive_to_int (b)))\n    else\n      (haxint_double (q),r___)\n  | POSITIVE_XI (a___) =>\n    let (q,r) := haxint_divmod__divmod_binary (a___) (Clone_f_clone (b)) in\n    let r___ := positive_to_int (haxint_succ_double (r)) in\n    if\n      haxint_le (positive_to_int (Clone_f_clone (b))) (Clone_f_clone (r___))\n    then\n      (positive_to_int (haxint_succ_double (q)),haxint_sub (r___) (positive_to_int (b)))\n    else\n      (haxint_double (q),r___)\n  end.\n\nDefinition haxint_divmod (a : t_HaxInt) (b : t_HaxInt) : (t_HaxInt*t_HaxInt) :=\n  match match_pos (a) with\n  | POS_ZERO =>\n    (v_HaxInt_ZERO,v_HaxInt_ZERO)\n  | POS_POS (p) =>\n    match match_pos (b) with\n    | POS_ZERO =>\n      (v_HaxInt_ZERO,positive_to_int (p))\n    | POS_POS (q) =>\n      haxint_divmod__divmod_binary (p) (q)\n    end\n  end.\n\nDefinition haxint_div (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=\n  let (q,_) := haxint_divmod (lhs) (rhs) in\n  q.\n\nDefinition haxint_mul (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=\n  match match_pos (lhs) with\n  | POS_ZERO =>\n    v_HaxInt_ZERO\n  | POS_POS (p) =>\n    match match_pos (rhs) with\n    | POS_ZERO =>\n      v_HaxInt_ZERO\n    | POS_POS (q) =>\n      positive_to_int (positive_mul (p) (q))\n    end\n  end.\n\nDefinition haxint_rem (lhs : t_HaxInt) (rhs : t_HaxInt) : t_HaxInt :=\n  let (_,r) := haxint_divmod (lhs) (rhs) in\n  r.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_Seq.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec.\nExport Core_Base_Spec.\n\nFrom Core Require Import Core_Base_Pos.\nExport Core_Base_Pos.\n\nFrom Core Require Import Core_Clone (t_Clone).\nExport Core_Clone (t_Clone).\n\nFrom Core Require Import Core_Cmp.\nExport Core_Cmp.\n\nFrom Core Require Import Core_Marker (t_Sized).\nExport Core_Marker (t_Sized).\n\nFrom Core Require Import Core_Panicking.\nExport Core_Panicking.\n\nDefinition hd__panic_cold_explicit '(_ : unit) `{HFalse : t_Never} : t_Never :=\n  panic_explicit (tt) HFalse.\n\nDefinition set_index__set_index_unary__panic_cold_explicit '(_ : unit) `{HFalse : t_Never} : t_Never :=\n  panic_explicit (tt) HFalse.\n\nDefinition is_empty `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) : bool :=\n  match match_list (s) with\n  | LIST_NIL =>\n    true\n  | LIST_CONS (_) (_) =>\n    false\n  end.\n\nDefinition hd `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) `{Hpre : negb (is_empty (s)) = true} : v_T :=\n  match match_list (s) as s return negb (is_empty (s)) = true -> _ with\n  | LIST_NIL =>\n    fun HFalse => never_to_any (hd__panic_cold_explicit (tt) (False_rect _ (Bool.diff_false_true HFalse)))\n  | LIST_CONS (hd) (_) =>\n    fun _ => hd\n  end Hpre.\n\nDefinition tl `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) `{Hpre : negb (is_empty (s)) = true} : t_Seq ((v_T)) :=\n  match match_list (s) with\n  | LIST_NIL =>\n    nil (* (tt) *)\n  | LIST_CONS (_) (tl) =>\n    tl\n  end.\n\nFixpoint eq_inner `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} `{t_PartialEq (v_T) (v_T)} (s : 
t_Seq ((v_T))) (other : t_Seq ((v_T))) : bool :=\n  match match_list (Clone_f_clone (s)) with\n  | LIST_NIL =>\n    is_empty (Clone_f_clone (other))\n  | LIST_CONS (x) (xs) =>\n    match match_list (Clone_f_clone (other)) with\n    | LIST_NIL =>\n      false\n    | LIST_CONS (y) (ys) =>\n      andb (PartialEq_f_eq (x) (y)) (eq_inner (xs) (ys))\n    end\n  end.\n\nInstance t_PartialEq_126322860 `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} `{t_PartialEq (v_T) (v_T)} : t_PartialEq ((t_Seq ((v_T)))) ((t_Seq ((v_T)))) :=\n  {\n    PartialEq_f_eq := fun  (self : t_Seq ((v_T))) (other : t_Seq ((v_T)))=>\n      eq_inner (Clone_f_clone (self)) (Clone_f_clone (other));\n    PartialEq_f_ne := fun  (self : t_Seq ((v_T))) (other : t_Seq ((v_T)))=>\n      negb (eq_inner (Clone_f_clone (self)) (Clone_f_clone (other)));\n  }.\n\nFixpoint len__len_unary `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) : t_Unary :=\n  match match_list (s) with\n  | LIST_NIL =>\n    unary_from_int(v_HaxInt_ZERO)\n  | LIST_CONS (_) (tl) =>\n    succ (len__len_unary (tl))\n  end.\n\nDefinition len `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) : t_HaxInt :=\n  unary_to_int(len__len_unary(s)).\n\nLemma positive_cmp_is_spec :\n  forall p q, match positive_cmp p q with | Ordering_Less => Lt | Ordering_Equal => Eq | Ordering_Greater => Gt end = (p ?= q)%positive.\n  {\n    clear.\n    intros.\n\n    unfold positive_cmp.\n    unfold \"?=\"%positive.\n\n    set (Ordering_Equal).\n    pose (match Eq with | Lt => Ordering_Less | Gt => Ordering_Greater | Eq => Ordering_Equal end).\n    replace t with t0 by reflexivity.\n    clear t.\n\n    assert (forall c p q, c <> Eq -> Pos.compare_cont c p q <> Eq).\n    {\n      clear ; intros.\n      generalize dependent c.\n      generalize dependent q.\n      induction p ; intros ; destruct q, c ; (easy || now apply IHp).\n    }\n\n    assert (forall c p q, c <> Ordering_Equal -> positive_cmp__cmp_binary_cont p q c <> 
Ordering_Equal).\n    {\n      clear ; intros.\n      generalize dependent c.\n      generalize dependent q.\n      induction p ; intros ; destruct q, c ; (easy || now apply IHp).\n    }\n\n    subst t0.\n    set Eq.\n    generalize dependent c.\n    generalize dependent q.\n    induction p ; intros.\n    - destruct q.\n      + apply IHp.\n      + simpl.\n        rewrite <- IHp.\n        destruct positive_cmp__cmp_binary_cont eqn:ov.\n        * reflexivity.\n        * exfalso. refine (H0 _ p q _ ov). easy.\n        * reflexivity.\n      + reflexivity.\n    - destruct q.\n      + simpl.\n        rewrite <- IHp.\n        destruct positive_cmp__cmp_binary_cont eqn:ov.\n        * reflexivity.\n        * exfalso. refine (H0 _ p q _ ov). easy.\n        * reflexivity.\n      + apply IHp.\n      + reflexivity.\n    - now destruct q, c.\n  }\nQed.\n\nLemma haxint_lt_is_spec : forall x y, haxint_lt x y = N.ltb x y.\n  {\n    intros.\n    destruct x as [ | p], y as [ | q].\n    - easy.\n    - easy.\n    - easy.\n    - unfold haxint_lt.\n      unfold haxint_cmp.\n      simpl.\n\n      unfold N.ltb.\n      simpl.\n\n      rewrite <- positive_cmp_is_spec.\n\n      now destruct (positive_cmp).\n  }\nQed.\n\nProgram Fixpoint get_index__get_index_unary `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (l : t_Seq ((v_T))) (i : t_Unary) `{Hpre : haxint_lt(unary_to_int i) (len l) = true} : v_T :=\n  match match_unary (i) with\n  | UNARY_ZERO =>\n    hd (Hpre := Hpre) (l)\n  | UNARY_SUCC (n) =>\n    get_index__get_index_unary (tl (Hpre := _) (l)) (n)\n  end.\nNext Obligation.\n  unfold match_unary in Heq_anonymous.\n  subst.\n  now destruct l.\nQed.\nNext Obligation.\n  unfold match_unary in Heq_anonymous.\n  subst.\n  now destruct l.\nQed.\nNext Obligation.\n  unfold match_unary in Heq_anonymous.\n  subst.\n\n  destruct l.\n  - easy.\n  - simpl.\n\n    rewrite haxint_lt_is_spec.\n    epose Hpre.\n    rewrite haxint_lt_is_spec in e.\n\n    apply N.ltb_lt.\n    apply N.ltb_lt in e.\n    
apply N.succ_lt_mono.\n    unfold len ; rewrite <- !Nnat.Nat2N.inj_succ.\n    apply e.\nQed.\nFail Next Obligation.\n\nDefinition get_index `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) (i : t_HaxInt) {Hpre : haxint_lt (i) (len s) = true} : v_T :=\n  get_index__get_index_unary (Hpre := ltac:(now rewrite Nnat.N2Nat.id)) (s) (unary_from_int (i)).\n\nFixpoint repeat__repeat_unary `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (n : t_Unary) (v : v_T) : t_Seq ((v_T)) :=\n  match match_unary (n) with\n  | UNARY_ZERO =>\n    nil (* (tt) *)\n  | UNARY_SUCC (m) =>\n    cons (repeat__repeat_unary (m) (Clone_f_clone (v))) v\n  end.\n\nDefinition repeat `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (n : t_HaxInt) (v : v_T) : t_Seq ((v_T)) :=\n  repeat__repeat_unary (unary_from_int (n)) (v).\n\nFixpoint rev__rev_accum `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) (accum : t_Seq ((v_T))) : t_Seq ((v_T)) :=\n  match match_list (s) with\n  | LIST_NIL =>\n    accum\n  | LIST_CONS (hd) (tl) =>\n    rev__rev_accum (tl) (cons (accum) (hd))\n  end.\n\nDefinition rev `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) : t_Seq ((v_T)) :=\n  rev__rev_accum (s) (nil (* (tt) *)).\n\nProgram Fixpoint set_index__set_index_unary `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (x : t_Seq ((v_T))) (i : t_Unary) (v : v_T) `{Hpre : haxint_lt(unary_to_int i) (len x) = true} : t_Seq ((v_T)) :=\n  match match_list (x) with\n  | LIST_NIL =>\n    never_to_any (set_index__set_index_unary__panic_cold_explicit (tt) _)\n  | LIST_CONS (hd) (tl) =>\n    match match_unary (i) with\n    | UNARY_ZERO =>\n      cons (tl) (v)\n    | UNARY_SUCC (n) =>\n      cons (set_index__set_index_unary (tl) (n) (v)) (hd)\n    end\n  end.\nNext Obligation.\n  unfold match_list in Heq_anonymous.\n  subst.\n  now destruct i.\nQed.\nNext Obligation.\n  unfold match_unary in Heq_anonymous.\n  subst.\n  unfold match_list in Heq_anonymous0.\n  subst.\n\n\n  rewrite 
haxint_lt_is_spec.\n  rewrite haxint_lt_is_spec in Hpre.\n\n  apply N.ltb_lt.\n  apply N.ltb_lt in Hpre.\n  apply N.succ_lt_mono.\n  unfold len ; rewrite <- !Nnat.Nat2N.inj_succ.\n  apply Hpre.\nQed.\nFail Next Obligation.\n\nDefinition set_index `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (s : t_Seq ((v_T))) (i : t_HaxInt) (v : v_T) `{haxint_lt (i) (len (s)) = true} : t_Seq ((v_T)) :=\n  set_index__set_index_unary (s)  (Hpre := ltac:(now rewrite Nnat.N2Nat.id)) (unary_from_int (i)) (v).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_Z.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_Spec.\nExport Core_Base_Spec.\n\nFrom Core Require Import Core_Base_Binary.\nExport Core_Base_Binary.\n\nFrom Core Require Import Core_Cmp (t_Ordering).\nExport Core_Cmp (t_Ordering).\n\nFrom Core Require Import Core_Base_Pos.\nExport Core_Base_Pos.\n\nDefinition z_neg (x : t_Z) : t_Z :=\n  match x with\n  | Z_NEG (p) =>\n    Z_POS (p)\n  | Z_ZERO =>\n    Z_ZERO\n  | Z_POS (p) =>\n    Z_NEG (p)\n  end.\n\nDefinition z_bitor__n_succ (x : t_POS) : t_Positive :=\n  match x with\n  | POS_ZERO =>\n    xH\n  | POS_POS (p) =>\n    positive_from_int (Hpos := ltac:(easy)) (unary_to_int (succ (unary_from_int (positive_to_int (p)))))\n  end.\n\nDefinition z_add__z_double (s : t_Z) : t_Z :=\n  match s with\n  | Z_ZERO =>\n    Z_ZERO\n  | Z_POS (p) =>\n    Z_POS (xO (p))\n  | Z_NEG (p) =>\n    Z_NEG (xO (p))\n  end.\n\nDefinition z_bitor__haxint_ldiff__n_double (x : t_POS) : t_POS :=\n  match x with\n  | POS_ZERO =>\n    POS_ZERO\n  | POS_POS (p) =>\n    POS_POS (xO (p))\n  end.\n\nDefinition z_bitor__haxint_ldiff__n_succ_double (x : t_POS) : t_POS :=\n  match x with\n  | POS_ZERO =>\n    POS_POS (xH)\n  | POS_POS (p) =>\n    POS_POS (xI (p))\n  end.\n\nDefinition z_add__z_pred_double (s : t_Z) : t_Z :=\n  match s with\n  | Z_ZERO =>\n    Z_NEG (xH)\n  | Z_POS (p) =>\n    Z_POS (positive_pred_double (p))\n  | Z_NEG (p) =>\n    Z_NEG (xI (p))\n  end.\n\nDefinition z_add__z_succ_double (s : t_Z) : t_Z :=\n  match s with\n  | Z_ZERO =>\n    Z_POS (xH)\n  | Z_POS (p) =>\n    Z_POS (xI (p))\n  | Z_NEG (p) =>\n    Z_NEG (positive_pred_double (p))\n  end.\n\nFixpoint 
z_bitor__haxint_ldiff__positive_ldiff (lhs : t_Positive) (rhs : t_Positive) : t_POS :=\n  match match_positive (lhs) with\n  | POSITIVE_XH =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      POS_ZERO\n    | POSITIVE_XO (_) =>\n      POS_POS (xH)\n    | POSITIVE_XI (_) =>\n      POS_ZERO\n    end\n  | POSITIVE_XO (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      POS_POS (xO (p))\n    | POSITIVE_XO (q) =>\n      z_bitor__haxint_ldiff__n_double (z_bitor__haxint_ldiff__positive_ldiff (p) (q))\n    | POSITIVE_XI (q) =>\n      z_bitor__haxint_ldiff__n_double (z_bitor__haxint_ldiff__positive_ldiff (p) (q))\n    end\n  | POSITIVE_XI (p) =>\n    match match_positive (rhs) with\n    | POSITIVE_XH =>\n      POS_POS (xO (p))\n    | POSITIVE_XO (q) =>\n      z_bitor__haxint_ldiff__n_succ_double (z_bitor__haxint_ldiff__positive_ldiff (p) (q))\n    | POSITIVE_XI (q) =>\n      z_bitor__haxint_ldiff__n_double (z_bitor__haxint_ldiff__positive_ldiff (p) (q))\n    end\n  end.\n\nDefinition z_bitor__haxint_ldiff (lhs : t_POS) (rhs : t_POS) : t_POS :=\n  match lhs with\n  | POS_ZERO =>\n    POS_ZERO\n  | POS_POS (p) =>\n    match rhs with\n    | POS_ZERO =>\n      POS_POS (p)\n    | POS_POS (q) =>\n      z_bitor__haxint_ldiff__positive_ldiff (p) (q)\n    end\n  end.\n\nDefinition z_bitor__n_and (lhs : t_POS) (rhs : t_POS) : t_POS :=\n  match lhs with\n  | POS_ZERO =>\n    POS_ZERO\n  | POS_POS (p) =>\n    match rhs with\n    | POS_ZERO =>\n      POS_ZERO\n    | POS_POS (q) =>\n      match_pos (bitand_binary (p) (q))\n    end\n  end.\n\nDefinition z_bitor__positive_pred_N (x : t_Positive) : t_POS :=\n  match match_positive (x) with\n  | POSITIVE_XH =>\n    POS_ZERO\n  | POSITIVE_XI (p) =>\n    POS_POS (xO (p))\n  | POSITIVE_XO (p) =>\n    POS_POS (positive_pred_double (p))\n  end.\n\nDefinition z_bitor (lhs : t_Z) (rhs : t_Z) : t_Z :=\n  match lhs with\n  | Z_ZERO =>\n    rhs\n  | Z_POS (x) =>\n    match rhs with\n    | Z_ZERO =>\n      Z_POS (x)\n 
   | Z_POS (y) =>\n      Z_POS (bitor_binary (x) (y))\n    | Z_NEG (y) =>\n      Z_NEG (z_bitor__n_succ (z_bitor__haxint_ldiff (z_bitor__positive_pred_N (y)) (POS_POS (x))))\n    end\n  | Z_NEG (x) =>\n    match rhs with\n    | Z_ZERO =>\n      Z_NEG (x)\n    | Z_POS (y) =>\n      Z_NEG (z_bitor__n_succ (z_bitor__haxint_ldiff (z_bitor__positive_pred_N (x)) (POS_POS (y))))\n    | Z_NEG (y) =>\n      Z_NEG (z_bitor__n_succ (z_bitor__n_and (z_bitor__positive_pred_N (x)) (z_bitor__positive_pred_N (y))))\n    end\n  end.\n\nDefinition z_cmp (lhs : t_Z) (rhs : t_Z) : t_Ordering :=\n  match lhs with\n  | Z_NEG (p) =>\n    match rhs with\n    | Z_NEG (q) =>\n      match positive_cmp (p) (q) with\n      | Ordering_Equal =>\n        Ordering_Equal\n      | Ordering_Less =>\n        Ordering_Greater\n      | Ordering_Greater =>\n        Ordering_Less\n      end\n    | _ =>\n      Ordering_Less\n    end\n  | Z_ZERO =>\n    match rhs with\n    | Z_ZERO =>\n      Ordering_Equal\n    | Z_POS (_) =>\n      Ordering_Less\n    | Z_NEG (_) =>\n      Ordering_Greater\n    end\n  | Z_POS (p) =>\n    match rhs with\n    | Z_POS (q) =>\n      positive_cmp (p) (q)\n    | _ =>\n      Ordering_Greater\n    end\n  end.\n\nDefinition z_le (lhs : t_Z) (rhs : t_Z) : bool :=\n  match Option_Some (z_cmp (lhs) (rhs)) with\n  | Option_Some (Ordering_Less\n  | Ordering_Equal) =>\n    true\n  | _ =>\n    false\n  end.\n\nDefinition z_lt (lhs : t_Z) (rhs : t_Z) : bool :=\n  match Option_Some (z_cmp (lhs) (rhs)) with\n  | Option_Some (Ordering_Less) =>\n    true\n  | _ =>\n    false\n  end.\n\nFixpoint z_add__pos_z_sub (x : t_Positive) (y : t_Positive) : t_Z :=\n  match match_positive (x) with\n  | POSITIVE_XH =>\n    match match_positive (y) with\n    | POSITIVE_XH =>\n      Z_ZERO\n    | POSITIVE_XO (q) =>\n      Z_NEG (positive_pred_double (q))\n    | POSITIVE_XI (q) =>\n      Z_NEG (xO (q))\n    end\n  | POSITIVE_XO (p) =>\n    match match_positive (y) with\n    | POSITIVE_XH =>\n      Z_POS 
(positive_pred_double (p))\n    | POSITIVE_XO (q) =>\n      z_add__z_double (z_add__pos_z_sub (p) (q))\n    | POSITIVE_XI (q) =>\n      z_add__z_pred_double (z_add__pos_z_sub (p) (q))\n    end\n  | POSITIVE_XI (p) =>\n    match match_positive (y) with\n    | POSITIVE_XH =>\n      Z_POS (xO (p))\n    | POSITIVE_XO (q) =>\n      z_add__z_succ_double (z_add__pos_z_sub (p) (q))\n    | POSITIVE_XI (q) =>\n      z_add__z_double (z_add__pos_z_sub (p) (q))\n    end\n  end.\n\nDefinition z_add (lhs : t_Z) (rhs : t_Z) : t_Z :=\n  match lhs with\n  | Z_NEG (p) =>\n    match rhs with\n    | Z_NEG (q) =>\n      Z_NEG (positive_add (p) (q))\n    | Z_ZERO =>\n      Z_NEG (p)\n    | Z_POS (q) =>\n      z_add__pos_z_sub (q) (p)\n    end\n  | Z_ZERO =>\n    rhs\n  | Z_POS (p) =>\n    match rhs with\n    | Z_NEG (q) =>\n      z_add__pos_z_sub (p) (q)\n    | Z_ZERO =>\n      Z_POS (p)\n    | Z_POS (q) =>\n      Z_POS (positive_add (p) (q))\n    end\n  end.\n\nDefinition z_sub (lhs : t_Z) (rhs : t_Z) : t_Z :=\n  z_add (lhs) (z_neg (rhs)).\n\nDefinition z_mul (lhs : t_Z) (rhs : t_Z) : t_Z :=\n  match lhs with\n  | Z_NEG (p) =>\n    match rhs with\n    | Z_NEG (q) =>\n      Z_POS (positive_mul (p) (q))\n    | Z_ZERO =>\n      Z_ZERO\n    | Z_POS (q) =>\n      Z_NEG (positive_mul (p) (q))\n    end\n  | Z_ZERO =>\n    Z_ZERO\n  | Z_POS (p) =>\n    match rhs with\n    | Z_NEG (q) =>\n      Z_NEG (positive_mul (p) (q))\n    | Z_ZERO =>\n      Z_ZERO\n    | Z_POS (q) =>\n      Z_POS (positive_mul (p) (q))\n    end\n  end.\n\nFixpoint pos_div_eucl (a : t_Positive) (b : t_Z) : (t_Z*t_Z) :=\n  match match_positive (a) with\n  | POSITIVE_XH =>\n    if\n      z_le (v_Z_TWO) (Clone_f_clone (b))\n    then\n      (Z_ZERO,v_Z_ONE)\n    else\n      (v_Z_ONE,Z_ZERO)\n  | POSITIVE_XO (p) =>\n    let (q,r) := pos_div_eucl (p) (Clone_f_clone (b)) in\n    let r___ := z_mul (v_Z_TWO) (r) in\n    if\n      z_lt (Clone_f_clone (r___)) (Clone_f_clone (b))\n    then\n      (z_mul (v_Z_TWO) (q),r___)\n    else\n  
    (z_add (z_mul (v_Z_TWO) (q)) (v_Z_ONE),z_sub (r___) (b))\n  | POSITIVE_XI (p) =>\n    let (q,r) := pos_div_eucl (p) (Clone_f_clone (b)) in\n    let r___ := z_add (z_mul (v_Z_TWO) (r)) (v_Z_ONE) in\n    if\n      z_lt (Clone_f_clone (r___)) (Clone_f_clone (b))\n    then\n      (z_mul (v_Z_TWO) (q),r___)\n    else\n      (z_add (z_mul (v_Z_TWO) (q)) (v_Z_ONE),z_sub (r___) (b))\n  end.\n\nDefinition z_divmod (a : t_Z) (b : t_Z) : (t_Z*t_Z) :=\n  match a with\n  | Z_ZERO =>\n    (Z_ZERO,Z_ZERO)\n  | Z_POS (a___) =>\n    match Clone_f_clone (b) with\n    | Z_ZERO =>\n      (Z_ZERO,Z_POS (a___))\n    | Z_POS (b___) =>\n      pos_div_eucl (a___) (b)\n    | Z_NEG (b___) =>\n      let (q,r) := pos_div_eucl (a___) (Z_POS (b___)) in\n      (z_neg (q),r)\n    end\n  | Z_NEG (a___) =>\n    match Clone_f_clone (b) with\n    | Z_ZERO =>\n      (Z_ZERO,Z_NEG (a___))\n    | Z_POS (_) =>\n      let (q,r) := pos_div_eucl (a___) (Clone_f_clone (b)) in\n      (z_neg (q),z_neg (r))\n    | Z_NEG (b___) =>\n      let (q,r) := pos_div_eucl (a___) (Z_POS (b___)) in\n      (q,z_neg (r))\n    end\n  end.\n\nDefinition z_div (lhs : t_Z) (rhs : t_Z) : t_Z :=\n  let (q,_) := z_divmod (lhs) (rhs) in\n  q.\n\nDefinition z_rem (lhs : t_Z) (rhs : t_Z) : t_Z :=\n  let (_,r) := z_divmod (lhs) (rhs) in\n  r.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_interface_Int.\nExport Core_Base_interface_Int.\n\nFrom Core Require Import Core_Base_interface_Coerce.\nExport Core_Base_interface_Coerce.\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Coerce.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Marker.\nExport Core_Marker.\n\nClass t_Concretization (v_Self : Type) (v_T : Type) `{t_Sized (v_T)} : Type :=\n  {\n    Concretization_f_concretize : v_Self -> v_T;\n  }.\nArguments t_Concretization (_) (_) {_}.\n\nClass t_Abstraction (v_Self : Type) : Type :=\n  {\n    Abstraction_f_AbstractType : Type;\n    _ :: `{t_Sized (Abstraction_f_AbstractType)};\n    Abstraction_f_lift : v_Self -> Abstraction_f_AbstractType;\n  }.\nArguments t_Abstraction (_).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int.v",
    "content": "\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Cmp.\nExport Core_Cmp.\n\nFrom Core Require Import Core_Ops.\nExport Core_Ops.\n\nFrom Core Require Import Core_Base.\nExport Core_Base.\n\n\n\nFrom Core Require Import Core_Base_interface_Coerce.\nExport Core_Base_interface_Coerce.\n\n\nFrom Core Require Import Core_Option.\nExport Core_Option.\n\nFrom Core Require Import Core_Clone (t_Clone).\nExport Core_Clone (t_Clone).\n\nFrom Core Require Import Core_Convert (t_From).\nExport Core_Convert (t_From).\n\nClass t_Constants (v_Self : Type) : Type :=\n  {\n    Constants_f_ZERO : v_Self;\n    Constants_f_ONE : v_Self;\n    Constants_f_MIN : v_Self;\n    Constants_f_MAX : v_Self;\n  }.\nArguments t_Constants (_).\n\nRecord t_I128 : Type :=\n  {\n    I128_f_v : t_Z;\n  }.\nArguments Build_t_I128.\nArguments I128_f_v.\n#[export] Instance settable_t_I128 : Settable _ :=\n  settable! (Build_t_I128) <I128_f_v>.\n\n(* NotImplementedYet *)\n\nRecord t_I16 : Type :=\n  {\n    I16_f_v : t_Z;\n  }.\nArguments Build_t_I16.\nArguments I16_f_v.\n#[export] Instance settable_t_I16 : Settable _ :=\n  settable! (Build_t_I16) <I16_f_v>.\n\n(* NotImplementedYet *)\n\nRecord t_I32 : Type :=\n  {\n    I32_f_v : t_Z;\n  }.\nArguments Build_t_I32.\nArguments I32_f_v.\n#[export] Instance settable_t_I32 : Settable _ :=\n  settable! (Build_t_I32) <I32_f_v>.\n\n(* NotImplementedYet *)\n\nRecord t_I64 : Type :=\n  {\n    I64_f_v : t_Z;\n  }.\nArguments Build_t_I64.\nArguments I64_f_v.\n#[export] Instance settable_t_I64 : Settable _ :=\n  settable! 
(Build_t_I64) <I64_f_v>.\n\n(* NotImplementedYet *)\n\nRecord t_I8 : Type :=\n  {\n    I8_f_v : t_Z;\n  }.\nArguments Build_t_I8.\nArguments I8_f_v.\n#[export] Instance settable_t_I8 : Settable _ :=\n  settable! (Build_t_I8) <I8_f_v>.\n\n(* NotImplementedYet *)\n\nRecord t_U128 : Type :=\n  {\n    U128_f_v : t_HaxInt;\n  }.\nArguments Build_t_U128.\nArguments U128_f_v.\n#[export] Instance settable_t_U128 : Settable _ :=\n  settable! (Build_t_U128) <U128_f_v>.\n\n(* NotImplementedYet *)\n\nRecord t_U16 : Type :=\n  {\n    U16_f_v : t_HaxInt;\n  }.\nArguments Build_t_U16.\nArguments U16_f_v.\n#[export] Instance settable_t_U16 : Settable _ :=\n  settable! (Build_t_U16) <U16_f_v>.\n\n(* NotImplementedYet *)\n\nRecord t_U32 : Type :=\n  {\n    U32_f_v : t_HaxInt;\n  }.\nArguments Build_t_U32.\nArguments U32_f_v.\n#[export] Instance settable_t_U32 : Settable _ :=\n  settable! (Build_t_U32) <U32_f_v>.\n\n(* NotImplementedYet *)\n\nRecord t_U64 : Type :=\n  {\n    U64_f_v : t_HaxInt;\n  }.\nArguments Build_t_U64.\nArguments U64_f_v.\n#[export] Instance settable_t_U64 : Settable _ :=\n  settable! (Build_t_U64) <U64_f_v>.\n\n(* NotImplementedYet *)\n\nRecord t_U8 : Type :=\n  {\n    U8_f_v : t_HaxInt;\n  }.\nArguments Build_t_U8.\nArguments U8_f_v.\n#[export] Instance settable_t_U8 : Settable _ :=\n  settable! 
(Build_t_U8) <U8_f_v>.\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n#[global] Instance t_Concretization_407178874 : t_Concretization ((t_Z)) ((t_I128)) :=\n  {\n    Concretization_f_concretize := fun  (self : t_Z)=>\n      Build_t_I128 (self);\n  }.\n\n#[global] Instance t_Clone_960918039 : t_Clone ((t_I128)) :=\n  {\n    Clone_f_clone := fun  (self : t_I128)=>\n      Build_t_I128 (Clone_f_clone (I128_f_v self));\n  }.\n\n#[global] Instance t_Concretization_1068646878 : t_Concretization ((t_Z)) ((t_I64)) :=\n  {\n    Concretization_f_concretize := fun  (self : t_Z)=>\n      Build_t_I64 (self);\n  }.\n\n#[global] Instance t_Clone_305340151 : t_Clone ((t_I64)) :=\n  {\n    Clone_f_clone := fun  (self : t_I64)=>\n      Build_t_I64 (Clone_f_clone (I64_f_v self));\n  }.\n\n#[global] Instance t_Concretization_499270091 : t_Concretization ((t_Z)) ((t_I32)) :=\n  {\n    Concretization_f_concretize := fun  (self : t_Z)=>\n      Build_t_I32 (self);\n  }.\n\n#[global] Instance t_Clone_774571516 : t_Clone ((t_I32)) :=\n  {\n    Clone_f_clone := fun  (self : t_I32)=>\n      Build_t_I32 (Clone_f_clone (I32_f_v self));\n  }.\n\n#[global] Instance t_Concretization_432063162 : t_Concretization ((t_Z)) ((t_I16)) :=\n  {\n    Concretization_f_concretize := fun  (self : t_Z)=>\n      Build_t_I16 (self);\n  }.\n\n#[global] Instance t_Clone_611206751 : t_Clone ((t_I16)) :=\n  {\n    Clone_f_clone := fun  (self : t_I16)=>\n      Build_t_I16 (Clone_f_clone (I16_f_v self));\n  }.\n\n#[global] Instance t_Concretization_232722110 
: t_Concretization ((t_Z)) ((t_I8)) :=\n  {\n    Concretization_f_concretize := fun  (self : t_Z)=>\n      Build_t_I8 (self);\n  }.\n\n#[global] Instance t_Clone_122768833 : t_Clone ((t_I8)) :=\n  {\n    Clone_f_clone := fun  (self : t_I8)=>\n      Build_t_I8 (Clone_f_clone (I8_f_v self));\n  }.\n\n#[global] Instance t_Constants_572255769 : t_Constants ((t_I128)) :=\n  {\n    Constants_f_ZERO := Build_t_I128 (Z_ZERO);\n    Constants_f_ONE := Build_t_I128 (Z_POS (xH));\n    Constants_f_MIN := Build_t_I128 (Z_NEG (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_64_)));\n    Constants_f_MAX := Build_t_I128 (Z_POS (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_64_SUB_1_)));\n  }.\n\nDefinition impl_41__BITS : t_U32 :=\n  Build_t_U32 (v_BITS_128_).\n\nDefinition impl_41__WORDSIZE : t_HaxInt :=\n  v_WORDSIZE_128_.\n\n#[global] Instance t_Constants_908090553 : t_Constants ((t_I64)) :=\n  {\n    Constants_f_ZERO := Build_t_I64 (Z_ZERO);\n    Constants_f_ONE := Build_t_I64 (Z_POS (xH));\n    Constants_f_MIN := Build_t_I64 (Z_NEG (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_32_)));\n    Constants_f_MAX := Build_t_I64 (Z_POS (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_32_SUB_1_)));\n  }.\n\nDefinition impl_55__BITS : t_U32 :=\n  Build_t_U32 (v_BITS_64_).\n\nDefinition impl_55__WORDSIZE : t_HaxInt :=\n  v_WORDSIZE_64_.\n\n#[global] Instance t_Constants_99970330 : t_Constants ((t_I32)) :=\n  {\n    Constants_f_ZERO := Build_t_I32 (Z_ZERO);\n    Constants_f_ONE := Build_t_I32 (Z_POS (xH));\n    Constants_f_MIN := Build_t_I32 (Z_NEG (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_16_)));\n    Constants_f_MAX := Build_t_I32 (Z_POS (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_16_SUB_1_)));\n  }.\n\nDefinition impl_69__BITS : t_U32 :=\n  Build_t_U32 (v_BITS_32_).\n\nDefinition impl_69__WORDSIZE : t_HaxInt :=\n  v_WORDSIZE_32_.\n\n#[global] Instance t_Constants_687261461 : t_Constants ((t_I16)) :=\n  {\n    Constants_f_ZERO := Build_t_I16 
(Z_ZERO);\n    Constants_f_ONE := Build_t_I16 (Z_POS (xH));\n    Constants_f_MIN := Build_t_I16 (Z_NEG (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_8_)));\n    Constants_f_MAX := Build_t_I16 (Z_POS (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_8_SUB_1_)));\n  }.\n\nDefinition impl_83__BITS : t_U32 :=\n  Build_t_U32 (v_BITS_16_).\n\nDefinition impl_83__WORDSIZE : t_HaxInt :=\n  v_WORDSIZE_16_.\n\n#[global] Instance t_Constants_636847136 : t_Constants ((t_I8)) :=\n  {\n    Constants_f_ZERO := Build_t_I8 (Z_ZERO);\n    Constants_f_ONE := Build_t_I8 (Z_POS (xH));\n    Constants_f_MIN := Build_t_I8 (Z_NEG (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_4_)));\n    Constants_f_MAX := Build_t_I8 (Z_POS (positive_from_int (Hpos := ltac:(easy)) (v_WORDSIZE_4_SUB_1_)));\n  }.\n\nDefinition impl_97__BITS : t_U32 :=\n  Build_t_U32 (v_BITS_8_).\n\nDefinition impl_97__WORDSIZE : t_HaxInt :=\n  v_WORDSIZE_8_.\n\n#[global] Instance t_Constants_119702187 : t_Constants ((t_U128)) :=\n  {\n    Constants_f_ZERO := Build_t_U128 (v_HaxInt_ZERO);\n    Constants_f_ONE := Build_t_U128 (v_HaxInt_ONE);\n    Constants_f_MIN := Build_t_U128 (v_HaxInt_ZERO);\n    Constants_f_MAX := Build_t_U128 (v_WORDSIZE_128_SUB_1_);\n  }.\n\nDefinition impl_111__BITS : t_U32 :=\n  Build_t_U32 (v_BITS_128_).\n\nDefinition impl_111__WORDSIZE : t_HaxInt :=\n  v_WORDSIZE_128_.\n\n#[global] Instance t_Constants_579677195 : t_Constants ((t_U64)) :=\n  {\n    Constants_f_ZERO := Build_t_U64 (v_HaxInt_ZERO);\n    Constants_f_ONE := Build_t_U64 (v_HaxInt_ONE);\n    Constants_f_MIN := Build_t_U64 (v_HaxInt_ZERO);\n    Constants_f_MAX := Build_t_U64 (v_WORDSIZE_64_SUB_1_);\n  }.\n\nDefinition impl_138__BITS : t_U32 :=\n  Build_t_U32 (v_BITS_64_).\n\nDefinition impl_138__WORDSIZE : t_HaxInt :=\n  v_WORDSIZE_64_.\n\n#[global] Instance t_Constants_63564700 : t_Constants ((t_U32)) :=\n  {\n    Constants_f_ZERO := Build_t_U32 (v_HaxInt_ZERO);\n    Constants_f_ONE := Build_t_U32 (v_HaxInt_ONE);\n    
Constants_f_MIN := Build_t_U32 (v_HaxInt_ZERO);\n    Constants_f_MAX := Build_t_U32 (v_WORDSIZE_32_SUB_1_);\n  }.\n\nDefinition impl_165__BITS : t_U32 :=\n  Build_t_U32 (v_BITS_32_).\n\nDefinition impl_165__WORDSIZE : t_HaxInt :=\n  v_WORDSIZE_32_.\n\n#[global] Instance t_Constants_221027212 : t_Constants ((t_U16)) :=\n  {\n    Constants_f_ZERO := Build_t_U16 (v_HaxInt_ZERO);\n    Constants_f_ONE := Build_t_U16 (v_HaxInt_ONE);\n    Constants_f_MIN := Build_t_U16 (v_HaxInt_ZERO);\n    Constants_f_MAX := Build_t_U16 (v_WORDSIZE_16_SUB_1_);\n  }.\n\nDefinition impl_192__BITS : t_U32 :=\n  Build_t_U32 (v_BITS_16_).\n\nDefinition impl_192__WORDSIZE : t_HaxInt :=\n  v_WORDSIZE_16_.\n\n#[global] Instance t_Constants_932070468 : t_Constants ((t_U8)) :=\n  {\n    Constants_f_ZERO := Build_t_U8 (v_HaxInt_ZERO);\n    Constants_f_ONE := Build_t_U8 (v_HaxInt_ONE);\n    Constants_f_MIN := Build_t_U8 (v_HaxInt_ZERO);\n    Constants_f_MAX := Build_t_U8 (v_WORDSIZE_8_SUB_1_);\n  }.\n\nDefinition impl_219__BITS : t_U32 :=\n  Build_t_U32 (v_BITS_8_).\n\nDefinition impl_219__WORDSIZE : t_HaxInt :=\n  v_WORDSIZE_8_.\n\n#[global] Instance t_Clone_138729312 : t_Clone ((t_U128)) :=\n  {\n    Clone_f_clone := fun  (self : t_U128)=>\n      Build_t_U128 (Clone_f_clone (U128_f_v self));\n  }.\n\n#[global] Instance t_Clone_461763462 : t_Clone ((t_U64)) :=\n  {\n    Clone_f_clone := fun  (self : t_U64)=>\n      Build_t_U64 (Clone_f_clone (U64_f_v self));\n  }.\n\n#[global] Instance t_Clone_412151272 : t_Clone ((t_U32)) :=\n  {\n    Clone_f_clone := fun  (self : t_U32)=>\n      Build_t_U32 (Clone_f_clone (U32_f_v self));\n  }.\n\n#[global] Instance t_Clone_387504240 : t_Clone ((t_U16)) :=\n  {\n    Clone_f_clone := fun  (self : t_U16)=>\n      Build_t_U16 (Clone_f_clone (U16_f_v self));\n  }.\n\n#[global] Instance t_Clone_917943387 : t_Clone ((t_U8)) :=\n  {\n    Clone_f_clone := fun  (self : t_U8)=>\n      Build_t_U8 (Clone_f_clone (U8_f_v self));\n  }.\n\n#[global] Instance 
t_Abstraction_970113908 : t_Abstraction ((t_I128)) :=\n  {\n    Abstraction_f_AbstractType := t_Z;\n    Abstraction_f_lift := fun  (self : t_I128)=>\n      I128_f_v self;\n  }.\n\n#[global] Instance t_From_330503528 : t_From ((t_I8)) ((t_I128)) :=\n  {\n    From_f_from := fun  (x : t_I128)=>\n      Concretization_f_concretize (Abstraction_f_lift (x) : t_Z);\n  }.\n\n#[global] Instance t_From_185067369 : t_From ((t_I16)) ((t_I128)) :=\n  {\n    From_f_from := fun  (x : t_I128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (Abstraction_f_lift (x) : t_Z);\n  }.\n\n#[global] Instance t_From_106548803 : t_From ((t_I32)) ((t_I128)) :=\n  {\n    From_f_from := fun  (x : t_I128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (Abstraction_f_lift (x) : t_Z);\n  }.\n\n#[global] Instance t_From_237552649 : t_From ((t_I64)) ((t_I128)) :=\n  {\n    From_f_from := fun  (x : t_I128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (Abstraction_f_lift (x) : t_Z);\n  }.\n\n#[global] Instance t_PartialEq_488790252 : t_PartialEq ((t_I128)) ((t_I128)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_I128) (rhs : t_I128)=>\n      PartialEq_f_eq (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n    PartialEq_f_ne := fun  (self : t_I128) (rhs : t_I128)=>\n      PartialEq_f_ne (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n  }.\n\n#[global] Instance t_PartialOrd_387128921 : t_PartialOrd ((t_I128)) ((t_I128)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_I128) (rhs : t_I128)=>\n      Option_Some (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))));\n    PartialOrd_f_lt := fun  (self : t_I128) (rhs : t_I128)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) 
(Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_I128) (rhs : t_I128)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_I128) (rhs : t_I128)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_I128) (rhs : t_I128)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_Abstraction_692501606 : t_Abstraction ((t_I64)) :=\n  {\n    Abstraction_f_AbstractType := t_Z;\n    Abstraction_f_lift := fun  (self : t_I64)=>\n      I64_f_v self;\n  }.\n\n#[global] Instance t_From_318313768 : t_From ((t_I8)) ((t_I64)) :=\n  {\n    From_f_from := fun  (x : t_I64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I8) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_215423074 : t_From ((t_I16)) ((t_I64)) :=\n  {\n    From_f_from := fun  (x : t_I64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_221659723 : t_From ((t_I32)) ((t_I64)) :=\n  {\n    From_f_from := fun  (x : t_I64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_927453474 : t_From ((t_I128)) ((t_I64)) :=\n  {\n    From_f_from := fun  (x : t_I64)=>\n      
Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_PartialEq_474861724 : t_PartialEq ((t_I64)) ((t_I64)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_I64) (rhs : t_I64)=>\n      PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n    PartialEq_f_ne := fun  (self : t_I64) (rhs : t_I64)=>\n      PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n  }.\n\n#[global] Instance t_PartialOrd_552634265 : t_PartialOrd ((t_I64)) ((t_I64)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_I64) (rhs : t_I64)=>\n      Option_Some (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))));\n    PartialOrd_f_lt := fun  (self : t_I64) (rhs : t_I64)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_I64) (rhs : t_I64)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_I64) (rhs : t_I64)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_I64) (rhs : t_I64)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater\n      | Ordering_Equal =>\n        true\n  
    | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_Abstraction_493183574 : t_Abstraction ((t_I32)) :=\n  {\n    Abstraction_f_AbstractType := t_Z;\n    Abstraction_f_lift := fun  (self : t_I32)=>\n      I32_f_v self;\n  }.\n\n#[global] Instance t_From_573287156 : t_From ((t_I8)) ((t_I32)) :=\n  {\n    From_f_from := fun  (x : t_I32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I8) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_278670998 : t_From ((t_I16)) ((t_I32)) :=\n  {\n    From_f_from := fun  (x : t_I32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_697572388 : t_From ((t_I64)) ((t_I32)) :=\n  {\n    From_f_from := fun  (x : t_I32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_30146175 : t_From ((t_I128)) ((t_I32)) :=\n  {\n    From_f_from := fun  (x : t_I32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_PartialEq_795859780 : t_PartialEq ((t_I32)) ((t_I32)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_I32) (rhs : t_I32)=>\n      PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n    PartialEq_f_ne := fun  (self : t_I32) (rhs : t_I32)=>\n      PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n  }.\n\n#[global] Instance t_PartialOrd_126468614 : t_PartialOrd ((t_I32)) ((t_I32)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_I32) (rhs : t_I32)=>\n      Option_Some (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) 
(Abstraction_f_lift (Clone_f_clone (rhs))));\n    PartialOrd_f_lt := fun  (self : t_I32) (rhs : t_I32)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_I32) (rhs : t_I32)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_I32) (rhs : t_I32)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_I32) (rhs : t_I32)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_Abstraction_8671741 : t_Abstraction ((t_I16)) :=\n  {\n    Abstraction_f_AbstractType := t_Z;\n    Abstraction_f_lift := fun  (self : t_I16)=>\n      I16_f_v self;\n  }.\n\n#[global] Instance t_From_767089390 : t_From ((t_I8)) ((t_I16)) :=\n  {\n    From_f_from := fun  (x : t_I16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I8) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_339600325 : t_From ((t_I32)) ((t_I16)) :=\n  {\n    From_f_from := fun  (x : t_I16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_929749154 : t_From ((t_I64)) ((t_I16)) :=\n  {\n    From_f_from := fun  (x : t_I16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (Abstraction_f_lift 
(x));\n  }.\n\n#[global] Instance t_From_366897745 : t_From ((t_I128)) ((t_I16)) :=\n  {\n    From_f_from := fun  (x : t_I16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_PartialEq_359538097 : t_PartialEq ((t_I16)) ((t_I16)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_I16) (rhs : t_I16)=>\n      PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n    PartialEq_f_ne := fun  (self : t_I16) (rhs : t_I16)=>\n      PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n  }.\n\n#[global] Instance t_PartialOrd_524872806 : t_PartialOrd ((t_I16)) ((t_I16)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_I16) (rhs : t_I16)=>\n      Option_Some (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))));\n    PartialOrd_f_lt := fun  (self : t_I16) (rhs : t_I16)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_I16) (rhs : t_I16)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_I16) (rhs : t_I16)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_I16) (rhs : t_I16)=>\n      match z_cmp (Abstraction_f_lift 
(Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_Abstraction_78490685 : t_Abstraction ((t_I8)) :=\n  {\n    Abstraction_f_AbstractType := t_Z;\n    Abstraction_f_lift := fun  (self : t_I8)=>\n      I8_f_v self;\n  }.\n\n#[global] Instance t_From_995744130 : t_From ((t_I16)) ((t_I8)) :=\n  {\n    From_f_from := fun  (x : t_I8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_513826093 : t_From ((t_I32)) ((t_I8)) :=\n  {\n    From_f_from := fun  (x : t_I8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_843443999 : t_From ((t_I64)) ((t_I8)) :=\n  {\n    From_f_from := fun  (x : t_I8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_532428771 : t_From ((t_I128)) ((t_I8)) :=\n  {\n    From_f_from := fun  (x : t_I8)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_Z t_I128) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_PartialEq_594648758 : t_PartialEq ((t_I8)) ((t_I8)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_I8) (rhs : t_I8)=>\n      PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n    PartialEq_f_ne := fun  (self : t_I8) (rhs : t_I8)=>\n      PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n  }.\n\n#[global] Instance t_PartialOrd_221919414 : t_PartialOrd ((t_I8)) ((t_I8)) :=\n  {\n    
PartialOrd_f_partial_cmp := fun  (self : t_I8) (rhs : t_I8)=>\n      Option_Some (z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))));\n    PartialOrd_f_lt := fun  (self : t_I8) (rhs : t_I8)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_I8) (rhs : t_I8)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_I8) (rhs : t_I8)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_I8) (rhs : t_I8)=>\n      match z_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_Abstraction_133243863 : t_Abstraction ((t_U128)) :=\n  {\n    Abstraction_f_AbstractType := t_HaxInt;\n    Abstraction_f_lift := fun  (self : t_U128)=>\n      U128_f_v self;\n  }.\n\n#[global] Instance t_PartialEq_792968920 : t_PartialEq ((t_U128)) ((t_U128)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_U128) (rhs : t_U128)=>\n      PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n    PartialEq_f_ne := fun  (self : t_U128) (rhs : t_U128)=>\n      PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift 
(Clone_f_clone (rhs)))) (Ordering_Equal);\n  }.\n\n#[global] Instance t_PartialOrd_168269581 : t_PartialOrd ((t_U128)) ((t_U128)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_U128) (rhs : t_U128)=>\n      Option_Some (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))));\n    PartialOrd_f_lt := fun  (self : t_U128) (rhs : t_U128)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_U128) (rhs : t_U128)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_U128) (rhs : t_U128)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_U128) (rhs : t_U128)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_Abstraction_219241396 : t_Abstraction ((t_U64)) :=\n  {\n    Abstraction_f_AbstractType := t_HaxInt;\n    Abstraction_f_lift := fun  (self : t_U64)=>\n      U64_f_v self;\n  }.\n\n#[global] Instance t_PartialEq_162514109 : t_PartialEq ((t_U64)) ((t_U64)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_U64) (rhs : t_U64)=>\n      PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n    PartialEq_f_ne := fun  (self 
: t_U64) (rhs : t_U64)=>\n      PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n  }.\n\n#[global] Instance t_PartialOrd_210240032 : t_PartialOrd ((t_U64)) ((t_U64)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_U64) (rhs : t_U64)=>\n      Option_Some (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))));\n    PartialOrd_f_lt := fun  (self : t_U64) (rhs : t_U64)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_U64) (rhs : t_U64)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_U64) (rhs : t_U64)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_U64) (rhs : t_U64)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_Abstraction_517050128 : t_Abstraction ((t_U32)) :=\n  {\n    Abstraction_f_AbstractType := t_HaxInt;\n    Abstraction_f_lift := fun  (self : t_U32)=>\n      U32_f_v self;\n  }.\n\n#[global] Instance t_PartialEq_894496962 : t_PartialEq ((t_U32)) ((t_U32)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_U32) (rhs : t_U32)=>\n      PartialEq_f_eq (t_PartialEq := _ : t_PartialEq 
t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n    PartialEq_f_ne := fun  (self : t_U32) (rhs : t_U32)=>\n      PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n  }.\n\n#[global] Instance t_PartialOrd_534404445 : t_PartialOrd ((t_U32)) ((t_U32)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_U32) (rhs : t_U32)=>\n      Option_Some (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))));\n    PartialOrd_f_lt := fun  (self : t_U32) (rhs : t_U32)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_U32) (rhs : t_U32)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_U32) (rhs : t_U32)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_U32) (rhs : t_U32)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_Abstraction_994821584 : t_Abstraction ((t_U16)) :=\n  {\n    Abstraction_f_AbstractType := t_HaxInt;\n    Abstraction_f_lift := fun  (self : t_U16)=>\n      U16_f_v self;\n  }.\n\n#[global] Instance 
t_PartialEq_603208302 : t_PartialEq ((t_U16)) ((t_U16)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_U16) (rhs : t_U16)=>\n      PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n    PartialEq_f_ne := fun  (self : t_U16) (rhs : t_U16)=>\n      PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n  }.\n\n#[global] Instance t_PartialOrd_595325431 : t_PartialOrd ((t_U16)) ((t_U16)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_U16) (rhs : t_U16)=>\n      Option_Some (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))));\n    PartialOrd_f_lt := fun  (self : t_U16) (rhs : t_U16)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_U16) (rhs : t_U16)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_U16) (rhs : t_U16)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_U16) (rhs : t_U16)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_Abstraction_789996186 : 
t_Abstraction ((t_U8)) :=\n  {\n    Abstraction_f_AbstractType := t_HaxInt;\n    Abstraction_f_lift := fun  (self : t_U8)=>\n      U8_f_v self;\n  }.\n\n#[global] Instance t_PartialEq_774173636 : t_PartialEq ((t_U8)) ((t_U8)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_U8) (rhs : t_U8)=>\n      PartialEq_f_eq (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n    PartialEq_f_ne := fun  (self : t_U8) (rhs : t_U8)=>\n      PartialEq_f_ne (t_PartialEq := _ : t_PartialEq t_Ordering t_Ordering) (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs)))) (Ordering_Equal);\n  }.\n\n#[global] Instance t_PartialOrd_577399304 : t_PartialOrd ((t_U8)) ((t_U8)) :=\n  {\n    PartialOrd_f_partial_cmp := fun  (self : t_U8) (rhs : t_U8)=>\n      Option_Some (haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))));\n    PartialOrd_f_lt := fun  (self : t_U8) (rhs : t_U8)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_le := fun  (self : t_U8) (rhs : t_U8)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Less\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_gt := fun  (self : t_U8) (rhs : t_U8)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | Ordering_Greater =>\n        true\n      | _ =>\n        false\n      end;\n    PartialOrd_f_ge := fun  (self : t_U8) (rhs : t_U8)=>\n      match haxint_cmp (Abstraction_f_lift (Clone_f_clone (self))) (Abstraction_f_lift (Clone_f_clone (rhs))) with\n      | 
Ordering_Greater\n      | Ordering_Equal =>\n        true\n      | _ =>\n        false\n      end;\n  }.\n\n#[global] Instance t_Neg_375517228 : t_Neg ((t_I128)) :=\n  {\n    Neg_f_Output := t_I128;\n    Neg_f_neg := fun  (self : t_I128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_neg (Abstraction_f_lift (self)));\n  }.\n\n#[global] Instance t_BitOr_938342430 : t_BitOr ((t_I128)) ((t_I128)) :=\n  {\n    BitOr_f_Output := t_I128;\n    BitOr_f_bitor := fun  (self : t_I128) (rhs : t_I128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Neg_210530286 : t_Neg ((t_I64)) :=\n  {\n    Neg_f_Output := t_I64;\n    Neg_f_neg := fun  (self : t_I64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_neg (Abstraction_f_lift (self)));\n  }.\n\n#[global] Instance t_BitOr_329754853 : t_BitOr ((t_I64)) ((t_I64)) :=\n  {\n    BitOr_f_Output := t_I64;\n    BitOr_f_bitor := fun  (self : t_I64) (rhs : t_I64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Neg_104016941 : t_Neg ((t_I32)) :=\n  {\n    Neg_f_Output := t_I32;\n    Neg_f_neg := fun  (self : t_I32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_neg (Abstraction_f_lift (self)));\n  }.\n\n#[global] Instance t_BitOr_840483685 : t_BitOr ((t_I32)) ((t_I32)) :=\n  {\n    BitOr_f_Output := t_I32;\n    BitOr_f_bitor := fun  (self : t_I32) (rhs : t_I32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Neg_1063990797 : t_Neg ((t_I16)) :=\n  {\n    Neg_f_Output := t_I16;\n    Neg_f_neg := fun  
(self : t_I16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_neg (Abstraction_f_lift (self)));\n  }.\n\n#[global] Instance t_BitOr_450806124 : t_BitOr ((t_I16)) ((t_I16)) :=\n  {\n    BitOr_f_Output := t_I16;\n    BitOr_f_bitor := fun  (self : t_I16) (rhs : t_I16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Neg_979719905 : t_Neg ((t_I8)) :=\n  {\n    Neg_f_Output := t_I8;\n    Neg_f_neg := fun  (self : t_I8)=>\n      Concretization_f_concretize (z_neg (Abstraction_f_lift (self)));\n  }.\n\n#[global] Instance t_BitOr_828862178 : t_BitOr ((t_I8)) ((t_I8)) :=\n  {\n    BitOr_f_Output := t_I8;\n    BitOr_f_bitor := fun  (self : t_I8) (rhs : t_I8)=>\n      Concretization_f_concretize (z_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Add_749575336 : t_Add ((t_I128)) ((t_I128)) :=\n  {\n    Add_f_Output := t_I128;\n    Add_f_add := fun  (self : t_I128) (rhs : t_I128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Sub_800692471 : t_Sub ((t_I128)) ((t_I128)) :=\n  {\n    Sub_f_Output := t_I128;\n    Sub_f_sub := fun  (self : t_I128) (rhs : t_I128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_sub (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Add_87367909 : t_Add ((t_I64)) ((t_I64)) :=\n  {\n    Add_f_Output := t_I64;\n    Add_f_add := fun  (self : t_I64) (rhs : t_I64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Sub_741383133 : t_Sub ((t_I64)) ((t_I64)) :=\n  {\n    Sub_f_Output := 
t_I64;\n    Sub_f_sub := fun  (self : t_I64) (rhs : t_I64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_sub (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Add_574043038 : t_Add ((t_I32)) ((t_I32)) :=\n  {\n    Add_f_Output := t_I32;\n    Add_f_add := fun  (self : t_I32) (rhs : t_I32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Sub_699874712 : t_Sub ((t_I32)) ((t_I32)) :=\n  {\n    Sub_f_Output := t_I32;\n    Sub_f_sub := fun  (self : t_I32) (rhs : t_I32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_sub (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Add_413164706 : t_Add ((t_I16)) ((t_I16)) :=\n  {\n    Add_f_Output := t_I16;\n    Add_f_add := fun  (self : t_I16) (rhs : t_I16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Sub_544358249 : t_Sub ((t_I16)) ((t_I16)) :=\n  {\n    Sub_f_Output := t_I16;\n    Sub_f_sub := fun  (self : t_I16) (rhs : t_I16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_sub (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Add_335735231 : t_Add ((t_I8)) ((t_I8)) :=\n  {\n    Add_f_Output := t_I8;\n    Add_f_add := fun  (self : t_I8) (rhs : t_I8)=>\n      Concretization_f_concretize (z_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Sub_257575332 : t_Sub ((t_I8)) ((t_I8)) :=\n  {\n    Sub_f_Output := t_I8;\n    Sub_f_sub := fun  (self : t_I8) (rhs : t_I8)=>\n      Concretization_f_concretize (z_sub (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] 
Instance t_Concretization_943450188 : t_Concretization ((t_HaxInt)) ((t_U128)) :=\n  {\n    Concretization_f_concretize := fun  (self : t_HaxInt)=>\n      Build_t_U128 (haxint_rem (self) (v_WORDSIZE_128_));\n  }.\n\n#[global] Instance t_From_355161674 : t_From ((t_U128)) ((t_U8)) :=\n  {\n    From_f_from := fun  (x : t_U8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_739905379 : t_From ((t_U128)) ((t_U16)) :=\n  {\n    From_f_from := fun  (x : t_U16)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U128) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_487010006 : t_From ((t_U128)) ((t_U32)) :=\n  {\n    From_f_from := fun  (x : t_U32)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U128) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_665417617 : t_From ((t_U128)) ((t_U64)) :=\n  {\n    From_f_from := fun  (x : t_U64)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U128) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_Concretization_10977439 : t_Concretization ((t_HaxInt)) ((t_U64)) :=\n  {\n    Concretization_f_concretize := fun  (self : t_HaxInt)=>\n      Build_t_U64 (haxint_rem (self) (v_WORDSIZE_64_));\n  }.\n\n#[global] Instance t_From_746191059 : t_From ((t_U64)) ((t_U8)) :=\n  {\n    From_f_from := fun  (x : t_U8)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U64) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_598353876 : t_From ((t_U64)) ((t_U16)) :=\n  {\n    From_f_from := fun  (x : t_U16)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U64) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_293255347 : t_From ((t_U64)) ((t_U32)) :=\n  {\n    From_f_from := fun  (x : t_U32)=>\n      
Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U64) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_478031507 : t_From ((t_U64)) ((t_U128)) :=\n  {\n    From_f_from := fun  (x : t_U128)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U64) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_Concretization_264065114 : t_Concretization ((t_HaxInt)) ((t_U32)) :=\n  {\n    Concretization_f_concretize := fun  (self : t_HaxInt)=>\n      Build_t_U32 (haxint_rem (self) (v_WORDSIZE_32_));\n  }.\n\n#[global] Instance t_From_675834555 : t_From ((t_U32)) ((t_U8)) :=\n  {\n    From_f_from := fun  (x : t_U8)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U32) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_410569540 : t_From ((t_U32)) ((t_U16)) :=\n  {\n    From_f_from := fun  (x : t_U16)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U32) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_616913228 : t_From ((t_U32)) ((t_U64)) :=\n  {\n    From_f_from := fun  (x : t_U64)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U32) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_376625380 : t_From ((t_U32)) ((t_U128)) :=\n  {\n    From_f_from := fun  (x : t_U128)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U32) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_Concretization_656994795 : t_Concretization ((t_HaxInt)) ((t_U16)) :=\n  {\n    Concretization_f_concretize := fun  (self : t_HaxInt)=>\n      Build_t_U16 (haxint_rem (self) (v_WORDSIZE_16_));\n  }.\n\n#[global] Instance t_From_352276566 : t_From ((t_U16)) ((t_U8)) :=\n  {\n    From_f_from := fun  (x : t_U8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (Abstraction_f_lift (x));\n  
}.\n\n#[global] Instance t_From_699842532 : t_From ((t_U16)) ((t_U32)) :=\n  {\n    From_f_from := fun  (x : t_U32)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U16) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_326646767 : t_From ((t_U16)) ((t_U64)) :=\n  {\n    From_f_from := fun  (x : t_U64)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U16) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_604186294 : t_From ((t_U16)) ((t_U128)) :=\n  {\n    From_f_from := fun  (x : t_U128)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U16) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_Concretization_492312374 : t_Concretization ((t_HaxInt)) ((t_U8)) :=\n  {\n    Concretization_f_concretize := fun  (self : t_HaxInt)=>\n      Build_t_U8 (haxint_rem (self) (v_WORDSIZE_8_));\n  }.\n\n#[global] Instance t_From_374313775 : t_From ((t_U8)) ((t_U16)) :=\n  {\n    From_f_from := fun  (x : t_U16)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U8) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_42776580 : t_From ((t_U8)) ((t_U32)) :=\n  {\n    From_f_from := fun  (x : t_U32)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U8) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_480314375 : t_From ((t_U8)) ((t_U64)) :=\n  {\n    From_f_from := fun  (x : t_U64)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U8) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_From_135782329 : t_From ((t_U8)) ((t_U128)) :=\n  {\n    From_f_from := fun  (x : t_U128)=>\n      Concretization_f_concretize(t_Concretization := _ : t_Concretization t_HaxInt t_U8) (Abstraction_f_lift (x));\n  }.\n\n#[global] Instance t_Mul_180009375 : t_Mul ((t_I128)) ((t_I128)) :=\n  {\n    Mul_f_Output := t_I128;\n   
 Mul_f_mul := fun  (self : t_I128) (rhs : t_I128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Mul_1051209688 : t_Mul ((t_I64)) ((t_I64)) :=\n  {\n    Mul_f_Output := t_I64;\n    Mul_f_mul := fun  (self : t_I64) (rhs : t_I64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Mul_481497752 : t_Mul ((t_I32)) ((t_I32)) :=\n  {\n    Mul_f_Output := t_I32;\n    Mul_f_mul := fun  (self : t_I32) (rhs : t_I32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Mul_768005208 : t_Mul ((t_I16)) ((t_I16)) :=\n  {\n    Mul_f_Output := t_I16;\n    Mul_f_mul := fun  (self : t_I16) (rhs : t_I16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Mul_1057691929 : t_Mul ((t_I8)) ((t_I8)) :=\n  {\n    Mul_f_Output := t_I8;\n    Mul_f_mul := fun  (self : t_I8) (rhs : t_I8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I8) (z_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Neg_200638412 : t_Neg ((t_U128)) :=\n  {\n    Neg_f_Output := t_U128;\n    Neg_f_neg := fun  (self : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_sub (v_WORDSIZE_128_) (haxint_rem (Abstraction_f_lift (self)) (v_WORDSIZE_128_)));\n  }.\n\n#[global] Instance t_Mul_508073751 : t_Mul ((t_U128)) ((t_U128)) :=\n  {\n    Mul_f_Output := t_U128;\n    Mul_f_mul := fun  (self : t_U128) (rhs : t_U128)=>\n      Concretization_f_concretize 
(t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Rem_184769952 : t_Rem ((t_U128)) ((t_U128)) :=\n  {\n    Rem_f_Output := t_U128;\n    Rem_f_rem := fun  (self : t_U128) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Add_74062568 : t_Add ((t_U128)) ((t_U128)) :=\n  {\n    Add_f_Output := t_U128;\n    Add_f_add := fun  (self : t_U128) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Div_697142148 : t_Div ((t_U128)) ((t_U128)) :=\n  {\n    Div_f_Output := t_U128;\n    Div_f_div := fun  (self : t_U128) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_912131656 : t_Shl ((t_U128)) ((t_U8)) :=\n  {\n    Shl_f_Output := t_U128;\n    Shl_f_shl := fun  (self : t_U128) (rhs : t_U8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_188720840 : t_Shl ((t_U128)) ((t_U16)) :=\n  {\n    Shl_f_Output := t_U128;\n    Shl_f_shl := fun  (self : t_U128) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_674581806 : t_Shl ((t_U128)) ((t_U32)) :=\n  {\n    Shl_f_Output := t_U128;\n    Shl_f_shl := fun  (self : t_U128) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : 
t_Concretization t_HaxInt t_U128) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_230523808 : t_Shl ((t_U128)) ((t_U64)) :=\n  {\n    Shl_f_Output := t_U128;\n    Shl_f_shl := fun  (self : t_U128) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_304350501 : t_Shl ((t_U128)) ((t_U128)) :=\n  {\n    Shl_f_Output := t_U128;\n    Shl_f_shl := fun  (self : t_U128) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_584068908 : t_Shr ((t_U128)) ((t_U8)) :=\n  {\n    Shr_f_Output := t_U128;\n    Shr_f_shr := fun  (self : t_U128) (rhs : t_U8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_73833277 : t_Shr ((t_U128)) ((t_U16)) :=\n  {\n    Shr_f_Output := t_U128;\n    Shr_f_shr := fun  (self : t_U128) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_50912121 : t_Shr ((t_U128)) ((t_U32)) :=\n  {\n    Shr_f_Output := t_U128;\n    Shr_f_shr := fun  (self : t_U128) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_282345299 : t_Shr ((t_U128)) ((t_U64)) :=\n  {\n    Shr_f_Output := t_U128;\n    Shr_f_shr := fun  (self : t_U128) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) 
(haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_354892033 : t_Shr ((t_U128)) ((t_U128)) :=\n  {\n    Shr_f_Output := t_U128;\n    Shr_f_shr := fun  (self : t_U128) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitXor_457452962 : t_BitXor ((t_U128)) ((t_U128)) :=\n  {\n    BitXor_f_Output := t_U128;\n    BitXor_f_bitxor := fun  (self : t_U128) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_bitxor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitAnd_499214249 : t_BitAnd ((t_U128)) ((t_U128)) :=\n  {\n    BitAnd_f_Output := t_U128;\n    BitAnd_f_bitand := fun  (self : t_U128) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_bitand (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitOr_579754702 : t_BitOr ((t_U128)) ((t_U128)) :=\n  {\n    BitOr_f_Output := t_U128;\n    BitOr_f_bitor := fun  (self : t_U128) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U128) (haxint_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Neg_338880159 : t_Neg ((t_U64)) :=\n  {\n    Neg_f_Output := t_U64;\n    Neg_f_neg := fun  (self : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_sub (v_WORDSIZE_64_) (haxint_rem (Abstraction_f_lift (self)) (v_WORDSIZE_64_)));\n  }.\n\n#[global] Instance t_Mul_785129859 : t_Mul ((t_U64)) ((t_U64)) :=\n  {\n    Mul_f_Output := t_U64;\n    Mul_f_mul := fun  (self : t_U64) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization 
t_HaxInt t_U64) (haxint_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Rem_450198244 : t_Rem ((t_U64)) ((t_U64)) :=\n  {\n    Rem_f_Output := t_U64;\n    Rem_f_rem := fun  (self : t_U64) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Add_880469818 : t_Add ((t_U64)) ((t_U64)) :=\n  {\n    Add_f_Output := t_U64;\n    Add_f_add := fun  (self : t_U64) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Div_1065913959 : t_Div ((t_U64)) ((t_U64)) :=\n  {\n    Div_f_Output := t_U64;\n    Div_f_div := fun  (self : t_U64) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_307107617 : t_Shl ((t_U64)) ((t_U8)) :=\n  {\n    Shl_f_Output := t_U64;\n    Shl_f_shl := fun  (self : t_U64) (rhs : t_U8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64 )(haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_521831749 : t_Shl ((t_U64)) ((t_U16)) :=\n  {\n    Shl_f_Output := t_U64;\n    Shl_f_shl := fun  (self : t_U64) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_243646433 : t_Shl ((t_U64)) ((t_U32)) :=\n  {\n    Shl_f_Output := t_U64;\n    Shl_f_shl := fun  (self : t_U64) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shl (Abstraction_f_lift (self)) 
(Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_732371970 : t_Shl ((t_U64)) ((t_U64)) :=\n  {\n    Shl_f_Output := t_U64;\n    Shl_f_shl := fun  (self : t_U64) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_362455113 : t_Shl ((t_U64)) ((t_U128)) :=\n  {\n    Shl_f_Output := t_U64;\n    Shl_f_shl := fun  (self : t_U64) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_675607391 : t_Shr ((t_U64)) ((t_U8)) :=\n  {\n    Shr_f_Output := t_U64;\n    Shr_f_shr := fun  (self : t_U64) (rhs : t_U8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_163042579 : t_Shr ((t_U64)) ((t_U16)) :=\n  {\n    Shr_f_Output := t_U64;\n    Shr_f_shr := fun  (self : t_U64) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_329072619 : t_Shr ((t_U64)) ((t_U32)) :=\n  {\n    Shr_f_Output := t_U64;\n    Shr_f_shr := fun  (self : t_U64) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_1046321056 : t_Shr ((t_U64)) ((t_U64)) :=\n  {\n    Shr_f_Output := t_U64;\n    Shr_f_shr := fun  (self : t_U64) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] 
Instance t_Shr_1027159812 : t_Shr ((t_U64)) ((t_U128)) :=\n  {\n    Shr_f_Output := t_U64;\n    Shr_f_shr := fun  (self : t_U64) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitXor_771705591 : t_BitXor ((t_U64)) ((t_U64)) :=\n  {\n    BitXor_f_Output := t_U64;\n    BitXor_f_bitxor := fun  (self : t_U64) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_bitxor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitAnd_61309855 : t_BitAnd ((t_U64)) ((t_U64)) :=\n  {\n    BitAnd_f_Output := t_U64;\n    BitAnd_f_bitand := fun  (self : t_U64) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_bitand (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitOr_584478327 : t_BitOr ((t_U64)) ((t_U64)) :=\n  {\n    BitOr_f_Output := t_U64;\n    BitOr_f_bitor := fun  (self : t_U64) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U64) (haxint_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Neg_660092460 : t_Neg ((t_U32)) :=\n  {\n    Neg_f_Output := t_U32;\n    Neg_f_neg := fun  (self : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_sub (v_WORDSIZE_32_) (haxint_rem (Abstraction_f_lift (self)) (v_WORDSIZE_32_)));\n  }.\n\n#[global] Instance t_Mul_907086750 : t_Mul ((t_U32)) ((t_U32)) :=\n  {\n    Mul_f_Output := t_U32;\n    Mul_f_mul := fun  (self : t_U32) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance 
t_Rem_754047547 : t_Rem ((t_U32)) ((t_U32)) :=\n  {\n    Rem_f_Output := t_U32;\n    Rem_f_rem := fun  (self : t_U32) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Add_62760194 : t_Add ((t_U32)) ((t_U32)) :=\n  {\n    Add_f_Output := t_U32;\n    Add_f_add := fun  (self : t_U32) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Div_1036065219 : t_Div ((t_U32)) ((t_U32)) :=\n  {\n    Div_f_Output := t_U32;\n    Div_f_div := fun  (self : t_U32) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_940272829 : t_Shl ((t_U32)) ((t_U8)) :=\n  {\n    Shl_f_Output := t_U32;\n    Shl_f_shl := fun  (self : t_U32) (rhs : t_U8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_184065944 : t_Shl ((t_U32)) ((t_U16)) :=\n  {\n    Shl_f_Output := t_U32;\n    Shl_f_shl := fun  (self : t_U32) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_512141775 : t_Shl ((t_U32)) ((t_U32)) :=\n  {\n    Shl_f_Output := t_U32;\n    Shl_f_shl := fun  (self : t_U32) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_760382167 : t_Shl ((t_U32)) ((t_U64)) :=\n  {\n    
Shl_f_Output := t_U32;\n    Shl_f_shl := fun  (self : t_U32) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_938844716 : t_Shl ((t_U32)) ((t_U128)) :=\n  {\n    Shl_f_Output := t_U32;\n    Shl_f_shl := fun  (self : t_U32) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_376401556 : t_Shr ((t_U32)) ((t_U8)) :=\n  {\n    Shr_f_Output := t_U32;\n    Shr_f_shr := fun  (self : t_U32) (rhs : t_U8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_816225657 : t_Shr ((t_U32)) ((t_U16)) :=\n  {\n    Shr_f_Output := t_U32;\n    Shr_f_shr := fun  (self : t_U32) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_131570199 : t_Shr ((t_U32)) ((t_U32)) :=\n  {\n    Shr_f_Output := t_U32;\n    Shr_f_shr := fun  (self : t_U32) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_643141508 : t_Shr ((t_U32)) ((t_U64)) :=\n  {\n    Shr_f_Output := t_U32;\n    Shr_f_shr := fun  (self : t_U32) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_472576920 : t_Shr ((t_U32)) ((t_U128)) :=\n  {\n    Shr_f_Output := t_U32;\n    Shr_f_shr := fun  (self : 
t_U32) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitXor_568575701 : t_BitXor ((t_U32)) ((t_U32)) :=\n  {\n    BitXor_f_Output := t_U32;\n    BitXor_f_bitxor := fun  (self : t_U32) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_bitxor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitAnd_188629984 : t_BitAnd ((t_U32)) ((t_U32)) :=\n  {\n    BitAnd_f_Output := t_U32;\n    BitAnd_f_bitand := fun  (self : t_U32) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_bitand (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitOr_727300711 : t_BitOr ((t_U32)) ((t_U32)) :=\n  {\n    BitOr_f_Output := t_U32;\n    BitOr_f_bitor := fun  (self : t_U32) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U32) (haxint_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Neg_524209972 : t_Neg ((t_U16)) :=\n  {\n    Neg_f_Output := t_U16;\n    Neg_f_neg := fun  (self : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_sub (v_WORDSIZE_16_) (haxint_rem (Abstraction_f_lift (self)) (v_WORDSIZE_16_)));\n  }.\n\n#[global] Instance t_Mul_813798593 : t_Mul ((t_U16)) ((t_U16)) :=\n  {\n    Mul_f_Output := t_U16;\n    Mul_f_mul := fun  (self : t_U16) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Rem_1023129312 : t_Rem ((t_U16)) ((t_U16)) :=\n  {\n    Rem_f_Output := t_U16;\n    Rem_f_rem := fun  (self : t_U16) 
(rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Add_669194837 : t_Add ((t_U16)) ((t_U16)) :=\n  {\n    Add_f_Output := t_U16;\n    Add_f_add := fun  (self : t_U16) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Div_599727096 : t_Div ((t_U16)) ((t_U16)) :=\n  {\n    Div_f_Output := t_U16;\n    Div_f_div := fun  (self : t_U16) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_254354835 : t_Shl ((t_U16)) ((t_U8)) :=\n  {\n    Shl_f_Output := t_U16;\n    Shl_f_shl := fun  (self : t_U16) (rhs : t_U8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_786190756 : t_Shl ((t_U16)) ((t_U16)) :=\n  {\n    Shl_f_Output := t_U16;\n    Shl_f_shl := fun  (self : t_U16) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_961613024 : t_Shl ((t_U16)) ((t_U32)) :=\n  {\n    Shl_f_Output := t_U16;\n    Shl_f_shl := fun  (self : t_U16) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_699049796 : t_Shl ((t_U16)) ((t_U64)) :=\n  {\n    Shl_f_Output := t_U16;\n    Shl_f_shl := fun  (self : t_U16) (rhs : t_U64)=>\n      Concretization_f_concretize 
(t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_98667823 : t_Shl ((t_U16)) ((t_U128)) :=\n  {\n    Shl_f_Output := t_U16;\n    Shl_f_shl := fun  (self : t_U16) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_116990915 : t_Shr ((t_U16)) ((t_U8)) :=\n  {\n    Shr_f_Output := t_U16;\n    Shr_f_shr := fun  (self : t_U16) (rhs : t_U8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_53270962 : t_Shr ((t_U16)) ((t_U16)) :=\n  {\n    Shr_f_Output := t_U16;\n    Shr_f_shr := fun  (self : t_U16) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_622272332 : t_Shr ((t_U16)) ((t_U32)) :=\n  {\n    Shr_f_Output := t_U16;\n    Shr_f_shr := fun  (self : t_U16) (rhs : t_U32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_1061476863 : t_Shr ((t_U16)) ((t_U64)) :=\n  {\n    Shr_f_Output := t_U16;\n    Shr_f_shr := fun  (self : t_U16) (rhs : t_U64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_148349277 : t_Shr ((t_U16)) ((t_U128)) :=\n  {\n    Shr_f_Output := t_U16;\n    Shr_f_shr := fun  (self : t_U16) (rhs : t_U128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt 
t_U16) (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitXor_39308972 : t_BitXor ((t_U16)) ((t_U16)) :=\n  {\n    BitXor_f_Output := t_U16;\n    BitXor_f_bitxor := fun  (self : t_U16) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_bitxor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitAnd_100986953 : t_BitAnd ((t_U16)) ((t_U16)) :=\n  {\n    BitAnd_f_Output := t_U16;\n    BitAnd_f_bitand := fun  (self : t_U16) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_bitand (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitOr_321212552 : t_BitOr ((t_U16)) ((t_U16)) :=\n  {\n    BitOr_f_Output := t_U16;\n    BitOr_f_bitor := fun  (self : t_U16) (rhs : t_U16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_HaxInt t_U16) (haxint_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Neg_410091205 : t_Neg ((t_U8)) :=\n  {\n    Neg_f_Output := t_U8;\n    Neg_f_neg := fun  (self : t_U8)=>\n      Concretization_f_concretize (haxint_sub (v_WORDSIZE_8_) (haxint_rem (Abstraction_f_lift (self)) (v_WORDSIZE_8_)));\n  }.\n\n#[global] Instance t_Mul_116494850 : t_Mul ((t_U8)) ((t_U8)) :=\n  {\n    Mul_f_Output := t_U8;\n    Mul_f_mul := fun  (self : t_U8) (rhs : t_U8)=>\n      Concretization_f_concretize (haxint_mul (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Rem_674469245 : t_Rem ((t_U8)) ((t_U8)) :=\n  {\n    Rem_f_Output := t_U8;\n    Rem_f_rem := fun  (self : t_U8) (rhs : t_U8)=>\n      Concretization_f_concretize (haxint_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Add_886374338 : t_Add ((t_U8)) ((t_U8)) :=\n  {\n    Add_f_Output := t_U8;\n    Add_f_add := 
fun  (self : t_U8) (rhs : t_U8)=>\n      Concretization_f_concretize (haxint_add (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Div_7559770 : t_Div ((t_U8)) ((t_U8)) :=\n  {\n    Div_f_Output := t_U8;\n    Div_f_div := fun  (self : t_U8) (rhs : t_U8)=>\n      Concretization_f_concretize (haxint_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_889664521 : t_Shl ((t_U8)) ((t_U8)) :=\n  {\n    Shl_f_Output := t_U8;\n    Shl_f_shl := fun  (self : t_U8) (rhs : t_U8)=>\n      Concretization_f_concretize (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_268581730 : t_Shl ((t_U8)) ((t_U16)) :=\n  {\n    Shl_f_Output := t_U8;\n    Shl_f_shl := fun  (self : t_U8) (rhs : t_U16)=>\n      Concretization_f_concretize (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_833473770 : t_Shl ((t_U8)) ((t_U32)) :=\n  {\n    Shl_f_Output := t_U8;\n    Shl_f_shl := fun  (self : t_U8) (rhs : t_U32)=>\n      Concretization_f_concretize (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_896563459 : t_Shl ((t_U8)) ((t_U64)) :=\n  {\n    Shl_f_Output := t_U8;\n    Shl_f_shl := fun  (self : t_U8) (rhs : t_U64)=>\n      Concretization_f_concretize (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shl_595294021 : t_Shl ((t_U8)) ((t_U128)) :=\n  {\n    Shl_f_Output := t_U8;\n    Shl_f_shl := fun  (self : t_U8) (rhs : t_U128)=>\n      Concretization_f_concretize (haxint_shl (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_467626732 : t_Shr ((t_U8)) ((t_U8)) :=\n  {\n    Shr_f_Output := t_U8;\n    Shr_f_shr := fun  (self : t_U8) (rhs : t_U8)=>\n      Concretization_f_concretize (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] 
Instance t_Shr_985367369 : t_Shr ((t_U8)) ((t_U16)) :=\n  {\n    Shr_f_Output := t_U8;\n    Shr_f_shr := fun  (self : t_U8) (rhs : t_U16)=>\n      Concretization_f_concretize (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_868101800 : t_Shr ((t_U8)) ((t_U32)) :=\n  {\n    Shr_f_Output := t_U8;\n    Shr_f_shr := fun  (self : t_U8) (rhs : t_U32)=>\n      Concretization_f_concretize (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_300023283 : t_Shr ((t_U8)) ((t_U64)) :=\n  {\n    Shr_f_Output := t_U8;\n    Shr_f_shr := fun  (self : t_U8) (rhs : t_U64)=>\n      Concretization_f_concretize (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Shr_794091640 : t_Shr ((t_U8)) ((t_U128)) :=\n  {\n    Shr_f_Output := t_U8;\n    Shr_f_shr := fun  (self : t_U8) (rhs : t_U128)=>\n      Concretization_f_concretize (haxint_shr (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitXor_24738444 : t_BitXor ((t_U8)) ((t_U8)) :=\n  {\n    BitXor_f_Output := t_U8;\n    BitXor_f_bitxor := fun  (self : t_U8) (rhs : t_U8)=>\n      Concretization_f_concretize (haxint_bitxor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitAnd_358790390 : t_BitAnd ((t_U8)) ((t_U8)) :=\n  {\n    BitAnd_f_Output := t_U8;\n    BitAnd_f_bitand := fun  (self : t_U8) (rhs : t_U8)=>\n      Concretization_f_concretize (haxint_bitand (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_BitOr_349401480 : t_BitOr ((t_U8)) ((t_U8)) :=\n  {\n    BitOr_f_Output := t_U8;\n    BitOr_f_bitor := fun  (self : t_U8) (rhs : t_U8)=>\n      Concretization_f_concretize (haxint_bitor (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Rem_998027599 : t_Rem ((t_I128)) ((t_I128)) :=\n  {\n    Rem_f_Output := t_I128;\n    Rem_f_rem := fun  
(self : t_I128) (rhs : t_I128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Div_865866956 : t_Div ((t_I128)) ((t_I128)) :=\n  {\n    Div_f_Output := t_I128;\n    Div_f_div := fun  (self : t_I128) (rhs : t_I128)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I128) (z_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Rem_957489424 : t_Rem ((t_I64)) ((t_I64)) :=\n  {\n    Rem_f_Output := t_I64;\n    Rem_f_rem := fun  (self : t_I64) (rhs : t_I64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Div_611785525 : t_Div ((t_I64)) ((t_I64)) :=\n  {\n    Div_f_Output := t_I64;\n    Div_f_div := fun  (self : t_I64) (rhs : t_I64)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I64) (z_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Rem_219303214 : t_Rem ((t_I32)) ((t_I32)) :=\n  {\n    Rem_f_Output := t_I32;\n    Rem_f_rem := fun  (self : t_I32) (rhs : t_I32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Div_1002924104 : t_Div ((t_I32)) ((t_I32)) :=\n  {\n    Div_f_Output := t_I32;\n    Div_f_div := fun  (self : t_I32) (rhs : t_I32)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I32) (z_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Rem_948867246 : t_Rem ((t_I16)) ((t_I16)) :=\n  {\n    Rem_f_Output := t_I16;\n    Rem_f_rem := fun  (self : t_I16) (rhs : t_I16)=>\n      Concretization_f_concretize (t_Concretization := _ : 
t_Concretization t_Z t_I16) (z_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Div_357493436 : t_Div ((t_I16)) ((t_I16)) :=\n  {\n    Div_f_Output := t_I16;\n    Div_f_div := fun  (self : t_I16) (rhs : t_I16)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I16) (z_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Rem_228000167 : t_Rem ((t_I8)) ((t_I8)) :=\n  {\n    Rem_f_Output := t_I8;\n    Rem_f_rem := fun  (self : t_I8) (rhs : t_I8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I8) (z_rem (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Div_470010025 : t_Div ((t_I8)) ((t_I8)) :=\n  {\n    Div_f_Output := t_I8;\n    Div_f_div := fun  (self : t_I8) (rhs : t_I8)=>\n      Concretization_f_concretize (t_Concretization := _ : t_Concretization t_Z t_I8) (z_div (Abstraction_f_lift (self)) (Abstraction_f_lift (rhs)));\n  }.\n\n#[global] Instance t_Sub_1018502693 : t_Sub ((t_U128)) ((t_U128)) :=\n  {\n    Sub_f_Output := t_U128;\n    Sub_f_sub := fun  (self : t_U128) (rhs : t_U128)=>\n      Add_f_add (t_Add := _ : t_Add t_U128 t_U128) (self) (Neg_f_neg (rhs));\n  }.\n\n#[global] Instance t_Not_758360759 : t_Not ((t_U128)) :=\n  {\n    Not_f_Output := t_U128;\n    Not_f_not := fun  (self : t_U128)=>\n      BitXor_f_bitxor (self) (Constants_f_MAX);\n  }.\n\n#[global] Instance t_Sub_919216830 : t_Sub ((t_U64)) ((t_U64)) :=\n  {\n    Sub_f_Output := t_U64;\n    Sub_f_sub := fun  (self : t_U64) (rhs : t_U64)=>\n      Add_f_add (t_Add := _ : t_Add _ t_U64) (self) (Neg_f_neg (rhs));\n  }.\n\n#[global] Instance t_Not_693249901 : t_Not ((t_U64)) :=\n  {\n    Not_f_Output := t_U64;\n    Not_f_not := fun  (self : t_U64)=>\n      BitXor_f_bitxor (self) (Constants_f_MAX);\n  }.\n\n#[global] Instance t_Sub_22623594 : t_Sub ((t_U32)) ((t_U32)) :=\n  {\n    Sub_f_Output := t_U32;\n    
Sub_f_sub := fun  (self : t_U32) (rhs : t_U32)=>\n      Add_f_add (t_Add := _ : t_Add _ t_U32) (self) (Neg_f_neg (rhs));\n  }.\n\n#[global] Instance t_Not_183316157 : t_Not ((t_U32)) :=\n  {\n    Not_f_Output := t_U32;\n    Not_f_not := fun  (self : t_U32)=>\n      BitXor_f_bitxor (self) (Constants_f_MAX);\n  }.\n\n#[global] Instance t_Sub_502320750 : t_Sub ((t_U16)) ((t_U16)) :=\n  {\n    Sub_f_Output := t_U16;\n    Sub_f_sub := fun  (self : t_U16) (rhs : t_U16)=>\n      Add_f_add (t_Add := _ : t_Add _ t_U16) (self) (Neg_f_neg (rhs));\n  }.\n\n#[global] Instance t_Not_669226601 : t_Not ((t_U16)) :=\n  {\n    Not_f_Output := t_U16;\n    Not_f_not := fun  (self : t_U16)=>\n      BitXor_f_bitxor (self) (Constants_f_MAX);\n  }.\n\n#[global] Instance t_Sub_299023787 : t_Sub ((t_U8)) ((t_U8)) :=\n  {\n    Sub_f_Output := t_U8;\n    Sub_f_sub := fun  (self : t_U8) (rhs : t_U8)=>\n      Add_f_add (t_Add := _ : t_Add _ t_U8) (self) (Neg_f_neg (rhs));\n  }.\n\n#[global] Instance t_Not_761019181 : t_Not ((t_U8)) :=\n  {\n    Not_f_Output := t_U8;\n    Not_f_not := fun  (self : t_U8)=>\n      BitXor_f_bitxor (self) (Constants_f_MAX);\n  }.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_I128_proofs.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Base_interface_Int.\nExport Core_Base_interface_Int.\n\nLemma abstract_concretize_cancel (x : t_I128) :\n  PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true.\nProof. Admitted.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_I16_proofs.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Base_interface_Int.\nExport Core_Base_interface_Int.\n\nLemma abstract_concretize_cancel (x : t_I16) :\n  PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true.\nProof. Admitted.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_I32_proofs.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Base_interface_Int.\nExport Core_Base_interface_Int.\n\nLemma abstract_concretize_cancel (x : t_I32) :\n  PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true.\nProof. Admitted.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_I64_proofs.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Base_interface_Int.\nExport Core_Base_interface_Int.\n\nLemma abstract_concretize_cancel (x : t_I64) :\n   ->\n  PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true.\nProof. Admitted.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_I8_proofs.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Base_interface_Int.\nExport Core_Base_interface_Int.\n\nLemma abstract_concretize_cancel (x : t_I8) :\n   ->\n  PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true.\nProof. Admitted.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_U128_proofs.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Base_interface_Int.\nExport Core_Base_interface_Int.\n\nLemma abstract_concretize_cancel (x : t_U128) :\n   ->\n  PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true.\nProof. Admitted.\n\nLemma mod_add (x : t_U128) (y : t_U128) (z : t_U128) :\n   ->\n  orb (haxint_le (v_WORDSIZE_128_) (haxint_add (Abstraction_f_lift (Clone_f_clone (x))) (Abstraction_f_lift (Clone_f_clone (y))))) (PartialEq_f_eq (Rem_f_rem (Add_f_add (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Add_f_add (Rem_f_rem (x) (Clone_f_clone (z))) (Rem_f_rem (y) (Clone_f_clone (z)))) (z))) = true.\nProof. 
Admitted.\n\nLemma mod_mul (x : t_U128) (y : t_U128) (z : t_U128) :\n   ->\n  orb (haxint_lt (v_WORDSIZE_128_) (haxint_mul (Abstraction_f_lift (Clone_f_clone (x))) (Abstraction_f_lift (Clone_f_clone (y))))) (PartialEq_f_eq (Rem_f_rem (Mul_f_mul (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Rem_f_rem (Mul_f_mul (Rem_f_rem (x) (Clone_f_clone (z))) (y)) (Clone_f_clone (z))) (z))) = true.\nProof. Admitted.\n\nLemma mod_one (x : t_U128) :\n   ->\n  PartialEq_f_eq (Rem_f_rem (x) (Constants_f_ONE)) (Constants_f_ZERO) = true.\nProof. Admitted.\n\nLemma mod_sub (x : t_U128) (y : t_U128) (z : t_U128) :\n   ->\n  orb (orb (PartialOrd_f_lt (Clone_f_clone (x)) (Clone_f_clone (y))) (PartialOrd_f_le (Clone_f_clone (z)) (Clone_f_clone (x)))) (PartialEq_f_eq (Rem_f_rem (Sub_f_sub (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Sub_f_sub (Rem_f_rem (x) (Clone_f_clone (z))) (Rem_f_rem (y) (Clone_f_clone (z)))) (z))) = true.\nProof. Admitted.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_U16_proofs.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Base_interface_Int.\nExport Core_Base_interface_Int.\n\nLemma abstract_concretize_cancel (x : t_U16) :\n   ->\n  PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true.\nProof. Admitted.\n\nLemma mod_add (x : t_U16) (y : t_U16) (z : t_U16) :\n   ->\n  orb (haxint_le (v_WORDSIZE_16_) (haxint_add (Abstraction_f_lift (Clone_f_clone (x))) (Abstraction_f_lift (Clone_f_clone (y))))) (PartialEq_f_eq (Rem_f_rem (Add_f_add (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Add_f_add (Rem_f_rem (x) (Clone_f_clone (z))) (Rem_f_rem (y) (Clone_f_clone (z)))) (z))) = true.\nProof. 
Admitted.\n\nLemma mod_mul (x : t_U16) (y : t_U16) (z : t_U16) :\n   ->\n  orb (haxint_lt (v_WORDSIZE_16_) (haxint_mul (Abstraction_f_lift (Clone_f_clone (x))) (Abstraction_f_lift (Clone_f_clone (y))))) (PartialEq_f_eq (Rem_f_rem (Mul_f_mul (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Rem_f_rem (Mul_f_mul (Rem_f_rem (x) (Clone_f_clone (z))) (y)) (Clone_f_clone (z))) (z))) = true.\nProof. Admitted.\n\nLemma mod_one (x : t_U16) :\n   ->\n  PartialEq_f_eq (Rem_f_rem (x) (Constants_f_ONE)) (Constants_f_ZERO) = true.\nProof. Admitted.\n\nLemma mod_sub (x : t_U16) (y : t_U16) (z : t_U16) :\n   ->\n  orb (orb (PartialOrd_f_lt (Clone_f_clone (x)) (Clone_f_clone (y))) (PartialOrd_f_le (Clone_f_clone (z)) (Clone_f_clone (x)))) (PartialEq_f_eq (Rem_f_rem (Sub_f_sub (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Sub_f_sub (Rem_f_rem (x) (Clone_f_clone (z))) (Rem_f_rem (y) (Clone_f_clone (z)))) (z))) = true.\nProof. Admitted.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_U32_proofs.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Base_interface_Int.\nExport Core_Base_interface_Int.\n\nLemma abstract_concretize_cancel (x : t_U32) :\n   ->\n  PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true.\nProof. Admitted.\n\nLemma mod_add (x : t_U32) (y : t_U32) (z : t_U32) :\n   ->\n  orb (haxint_le (v_WORDSIZE_32_) (haxint_add (Abstraction_f_lift (Clone_f_clone (x))) (Abstraction_f_lift (Clone_f_clone (y))))) (PartialEq_f_eq (Rem_f_rem (Add_f_add (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Add_f_add (Rem_f_rem (x) (Clone_f_clone (z))) (Rem_f_rem (y) (Clone_f_clone (z)))) (z))) = true.\nProof. 
Admitted.\n\nLemma mod_mul (x : t_U32) (y : t_U32) (z : t_U32) :\n   ->\n  orb (haxint_lt (v_WORDSIZE_32_) (haxint_mul (Abstraction_f_lift (Clone_f_clone (x))) (Abstraction_f_lift (Clone_f_clone (y))))) (PartialEq_f_eq (Rem_f_rem (Mul_f_mul (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Rem_f_rem (Mul_f_mul (Rem_f_rem (x) (Clone_f_clone (z))) (y)) (Clone_f_clone (z))) (z))) = true.\nProof. Admitted.\n\nLemma mod_one (x : t_U32) :\n   ->\n  PartialEq_f_eq (Rem_f_rem (x) (Constants_f_ONE)) (Constants_f_ZERO) = true.\nProof. Admitted.\n\nLemma mod_sub (x : t_U32) (y : t_U32) (z : t_U32) :\n   ->\n  orb (orb (PartialOrd_f_lt (Clone_f_clone (x)) (Clone_f_clone (y))) (PartialOrd_f_le (Clone_f_clone (z)) (Clone_f_clone (x)))) (PartialEq_f_eq (Rem_f_rem (Sub_f_sub (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Sub_f_sub (Rem_f_rem (x) (Clone_f_clone (z))) (Rem_f_rem (y) (Clone_f_clone (z)))) (z))) = true.\nProof. Admitted.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_U64_proofs.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Base_interface_Int.\nExport Core_Base_interface_Int.\n\nLemma abstract_concretize_cancel (x : t_U64) :\n   ->\n  PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true.\nProof. Admitted.\n\nLemma mod_add (x : t_U64) (y : t_U64) (z : t_U64) :\n   ->\n  orb (haxint_le (v_WORDSIZE_64_) (haxint_add (Abstraction_f_lift (Clone_f_clone (x))) (Abstraction_f_lift (Clone_f_clone (y))))) (PartialEq_f_eq (Rem_f_rem (Add_f_add (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Add_f_add (Rem_f_rem (x) (Clone_f_clone (z))) (Rem_f_rem (y) (Clone_f_clone (z)))) (z))) = true.\nProof. 
Admitted.\n\nLemma mod_mul (x : t_U64) (y : t_U64) (z : t_U64) :\n   ->\n  orb (haxint_lt (v_WORDSIZE_64_) (haxint_mul (Abstraction_f_lift (Clone_f_clone (x))) (Abstraction_f_lift (Clone_f_clone (y))))) (PartialEq_f_eq (Rem_f_rem (Mul_f_mul (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Rem_f_rem (Mul_f_mul (Rem_f_rem (x) (Clone_f_clone (z))) (y)) (Clone_f_clone (z))) (z))) = true.\nProof. Admitted.\n\nLemma mod_one (x : t_U64) :\n   ->\n  PartialEq_f_eq (Rem_f_rem (x) (Constants_f_ONE)) (Constants_f_ZERO) = true.\nProof. Admitted.\n\nLemma mod_sub (x : t_U64) (y : t_U64) (z : t_U64) :\n   ->\n  orb (orb (PartialOrd_f_lt (Clone_f_clone (x)) (Clone_f_clone (y))) (PartialOrd_f_le (Clone_f_clone (z)) (Clone_f_clone (x)))) (PartialEq_f_eq (Rem_f_rem (Sub_f_sub (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Sub_f_sub (Rem_f_rem (x) (Clone_f_clone (z))) (Rem_f_rem (y) (Clone_f_clone (z)))) (z))) = true.\nProof. Admitted.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Base_interface_Int_U8_proofs.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Base_interface_Int.\nExport Core_Base_interface_Int.\n\nLemma abstract_concretize_cancel (x : t_U8) :\n   ->\n  PartialEq_f_eq (Concretization_f_concretize (Abstraction_f_lift (Clone_f_clone (x)))) (x) = true.\nProof. Admitted.\n\nLemma mod_add (x : t_U8) (y : t_U8) (z : t_U8) :\n   ->\n  orb (haxint_le (v_WORDSIZE_8_) (haxint_add (Abstraction_f_lift (Clone_f_clone (x))) (Abstraction_f_lift (Clone_f_clone (y))))) (PartialEq_f_eq (Rem_f_rem (Add_f_add (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Add_f_add (Rem_f_rem (x) (Clone_f_clone (z))) (Rem_f_rem (y) (Clone_f_clone (z)))) (z))) = true.\nProof. 
Admitted.\n\nLemma mod_mul (x : t_U8) (y : t_U8) (z : t_U8) :\n   ->\n  orb (haxint_lt (v_WORDSIZE_8_) (haxint_mul (Abstraction_f_lift (Clone_f_clone (x))) (Abstraction_f_lift (Clone_f_clone (y))))) (PartialEq_f_eq (Rem_f_rem (Mul_f_mul (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Rem_f_rem (Mul_f_mul (Rem_f_rem (x) (Clone_f_clone (z))) (y)) (Clone_f_clone (z))) (z))) = true.\nProof. Admitted.\n\nLemma mod_one (x : t_U8) :\n   ->\n  PartialEq_f_eq (Rem_f_rem (x) (Constants_f_ONE)) (Constants_f_ZERO) = true.\nProof. Admitted.\n\nLemma mod_sub (x : t_U8) (y : t_U8) (z : t_U8) :\n   ->\n  orb (orb (PartialOrd_f_lt (Clone_f_clone (x)) (Clone_f_clone (y))) (PartialOrd_f_le (Clone_f_clone (z)) (Clone_f_clone (x)))) (PartialEq_f_eq (Rem_f_rem (Sub_f_sub (Clone_f_clone (x)) (Clone_f_clone (y))) (Clone_f_clone (z))) (Rem_f_rem (Sub_f_sub (Rem_f_rem (x) (Clone_f_clone (z))) (Rem_f_rem (y) (Clone_f_clone (z)))) (z))) = true.\nProof. Admitted.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Clone.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nClass t_Clone (v_Self : Type) : Type :=\n  {\n    Clone_f_clone : v_Self -> v_Self;\n  }.\nArguments t_Clone (_).\n\n#[global] Instance t_Clone_any T : t_Clone T := { Clone_f_clone := id }.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Cmp.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Option (t_Option).\nExport Core_Option (t_Option).\n\nDefinition discriminant_Ordering_Equal :=\n  0.\n\nDefinition discriminant_Ordering_Greater :=\n  1.\n\nInductive t_Ordering : Type :=\n| Ordering_Less\n| Ordering_Equal\n| Ordering_Greater.\nArguments Ordering_Less.\nArguments Ordering_Equal.\nArguments Ordering_Greater.\n\nDefinition impl__Ordering__is_eq (self : t_Ordering) : bool :=\n  match self with\n  | Ordering_Equal =>\n    true\n  | _ =>\n    false\n  end.\n\nDefinition impl__Ordering__is_gt (self : t_Ordering) : bool :=\n  match self with\n  | Ordering_Greater =>\n    true\n  | _ =>\n    false\n  end.\n\nDefinition impl__Ordering__is_lt (self : t_Ordering) : bool :=\n  match self with\n  | Ordering_Less =>\n    true\n  | _ =>\n    false\n  end.\n\nDefinition impl__Ordering__reverse (self : t_Ordering) : t_Ordering :=\n  match self with\n  | Ordering_Less =>\n    Ordering_Greater\n  | Ordering_Equal =>\n    Ordering_Equal\n  | Ordering_Greater =>\n    Ordering_Less\n  end.\n\nDefinition discriminant_Ordering_Less :=\n  -1.\n\nDefinition t_Ordering_cast_to_repr (x : t_Ordering) :=\n  match x with\n  | Ordering_Less =>\n    discriminant_Ordering_Less\n  | Ordering_Equal =>\n    discriminant_Ordering_Equal\n  | Ordering_Greater =>\n    discriminant_Ordering_Greater\n  end.\n\nClass t_PartialEq (v_Self : Type) (v_Rhs : Type) : Type :=\n  {\n    PartialEq_f_eq : v_Self -> v_Rhs -> bool;\n    PartialEq_f_ne : v_Self -> v_Rhs -> bool;\n  }.\nArguments t_PartialEq (_) (_).\n\nDefinition impl__Ordering__is_ge (self : t_Ordering) : bool :=\n  negb 
(match self with\n  | Ordering_Less =>\n    true\n  | _ =>\n    false\n  end).\n\nDefinition impl__Ordering__is_le (self : t_Ordering) : bool :=\n  negb (match self with\n  | Ordering_Greater =>\n    true\n  | _ =>\n    false\n  end).\n\nDefinition impl__Ordering__is_ne (self : t_Ordering) : bool :=\n  negb (match self with\n  | Ordering_Equal =>\n    true\n  | _ =>\n    false\n  end).\n\n#[global] Instance t_PartialEq_603824491 : t_PartialEq ((t_Ordering)) ((t_Ordering)) :=\n  {\n    PartialEq_f_eq := fun  (self : t_Ordering) (other : t_Ordering)=>\n      match self with\n      | Ordering_Less =>\n        match other with\n        | Ordering_Less =>\n          true\n        | _ =>\n          false\n        end\n      | Ordering_Equal =>\n        match other with\n        | Ordering_Equal =>\n          true\n        | _ =>\n          false\n        end\n      | Ordering_Greater =>\n        match other with\n        | Ordering_Greater =>\n          true\n        | _ =>\n          false\n        end\n      end;\n    PartialEq_f_ne := fun  (self : t_Ordering) (other : t_Ordering)=>\n      negb (match self with\n      | Ordering_Less =>\n        match other with\n        | Ordering_Less =>\n          true\n        | _ =>\n          false\n        end\n      | Ordering_Equal =>\n        match other with\n        | Ordering_Equal =>\n          true\n        | _ =>\n          false\n        end\n      | Ordering_Greater =>\n        match other with\n        | Ordering_Greater =>\n          true\n        | _ =>\n          false\n        end\n      end);\n  }.\n\nClass t_PartialOrd (v_Self : Type) (v_Rhs : Type) `{t_PartialEq (v_Self) (v_Rhs)} : Type :=\n  {\n    PartialOrd_f_partial_cmp : v_Self -> v_Rhs -> t_Option ((t_Ordering));\n    PartialOrd_f_lt : v_Self -> v_Rhs -> bool;\n    PartialOrd_f_le : v_Self -> v_Rhs -> bool;\n    PartialOrd_f_gt : v_Self -> v_Rhs -> bool;\n    PartialOrd_f_ge : v_Self -> v_Rhs -> bool;\n  }.\nArguments t_PartialOrd (_) (_) {_}.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Convert.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Marker.\nExport Core_Marker.\n\nClass t_From (v_Self : Type) (v_T : Type) (* `{t_Sized (v_Self)} `{t_Sized (v_T)} *) : Type :=\n  {\n    From_f_from : v_T -> v_Self;\n  }.\nArguments t_From (_) (_) (* {_} {_} *).\n\n#[global] Instance t_From_46353410 `{v_T : Type} (* `{t_Sized (v_T)} *) : t_From ((v_T)) ((v_T)) :=\n  {\n    From_f_from := fun  (t : v_T)=>\n      t;\n  }.\n\nClass t_Into (v_Self : Type) (v_T : Type) (* `{t_Sized (v_Self)} `{t_Sized (v_T)} *) : Type :=\n  {\n    Into_f_into : v_Self -> v_T;\n  }.\nArguments t_Into (_) (_) (* {_} {_} *).\n\n#[global] Instance t_Into_730689925 `{v_T : Type} `{v_U : Type} (* `{t_Sized (v_T)} `{t_Sized (v_U)} *) `{t_From (v_U) (v_T)} : t_Into ((v_T)) ((v_U)) :=\n  {\n    Into_f_into := fun  (self : v_T)=>\n      From_f_from (self);\n  }.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Fmt.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Intrinsics.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Primitive.\nExport Core_Primitive.\n\n\n\nFrom Core Require Import Core_Base_interface.\nExport Core_Base_interface.\n\nFrom Core Require Import Core_Base_interface_Coerce.\nExport Core_Base_interface_Coerce.\n\nFrom Core Require Import Core_Base.\nExport Core_Base.\n\n\n\nFrom Core Require Import Core_Ops.\nExport Core_Ops.\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\nNotation \"'add_with_overflow_i128'\" := (add_with_overflow_i128).\n\nNotation \"'add_with_overflow_i16'\" := (add_with_overflow_i16).\n\nNotation \"'add_with_overflow_i32'\" := (add_with_overflow_i32).\n\nNotation \"'add_with_overflow_i64'\" := (add_with_overflow_i64).\n\nNotation \"'add_with_overflow_i8'\" := (add_with_overflow_i8).\n\nNotation \"'add_with_overflow_isize'\" := (add_with_overflow_isize).\n\nNotation \"'unchecked_add_i128'\" := (unchecked_add_i128).\n\nNotation \"'unchecked_add_i16'\" := (unchecked_add_i16).\n\nNotation \"'unchecked_add_i32'\" := (unchecked_add_i32).\n\nNotation \"'unchecked_add_i64'\" := (unchecked_add_i64).\n\nNotation \"'unchecked_add_i8'\" := (unchecked_add_i8).\n\nNotation \"'unchecked_add_isize'\" := (unchecked_add_isize).\n\nNotation \"'unchecked_add_u128'\" := (unchecked_add_u128).\n\nNotation \"'unchecked_add_u16'\" := (unchecked_add_u16).\n\nNotation \"'unchecked_add_u32'\" := (unchecked_add_u32).\n\nNotation \"'unchecked_add_u64'\" := (unchecked_add_u64).\n\nNotation \"'unchecked_add_u8'\" := (unchecked_add_u8).\n\nNotation \"'unchecked_add_usize'\" := (unchecked_add_usize).\n\nNotation \"'add_with_overflow_u128'\" := 
(add_with_overflow_u128).\n\nNotation \"'add_with_overflow_u16'\" := (add_with_overflow_u16).\n\nNotation \"'add_with_overflow_u32'\" := (add_with_overflow_u32).\n\nNotation \"'add_with_overflow_u64'\" := (add_with_overflow_u64).\n\nNotation \"'add_with_overflow_u8'\" := (add_with_overflow_u8).\n\nNotation \"'add_with_overflow_usize'\" := (add_with_overflow_usize).\n\nNotation \"'unchecked_div_u128'\" := (unchecked_div_u128).\n\nNotation \"'unchecked_div_u16'\" := (unchecked_div_u16).\n\nNotation \"'unchecked_div_u32'\" := (unchecked_div_u32).\n\nNotation \"'unchecked_div_u64'\" := (unchecked_div_u64).\n\nNotation \"'unchecked_div_u8'\" := (unchecked_div_u8).\n\nNotation \"'unchecked_div_usize'\" := (unchecked_div_usize).\n\nNotation \"'wrapping_add_i128'\" := (wrapping_add_i128).\n\nNotation \"'wrapping_add_i16'\" := (wrapping_add_i16).\n\nNotation \"'wrapping_add_i32'\" := (wrapping_add_i32).\n\nNotation \"'wrapping_add_i64'\" := (wrapping_add_i64).\n\nNotation \"'wrapping_add_i8'\" := (wrapping_add_i8).\n\nNotation \"'wrapping_add_isize'\" := (wrapping_add_isize).\n\nNotation \"'wrapping_sub_i128'\" := (wrapping_sub_i128).\n\nNotation \"'wrapping_sub_i16'\" := (wrapping_sub_i16).\n\nNotation \"'wrapping_sub_i32'\" := (wrapping_sub_i32).\n\nNotation \"'wrapping_sub_i64'\" := (wrapping_sub_i64).\n\nNotation \"'wrapping_sub_i8'\" := (wrapping_sub_i8).\n\nNotation \"'wrapping_sub_isize'\" := (wrapping_sub_isize).\n\nNotation \"'unchecked_div_i128'\" := (unchecked_div_i128).\n\nNotation \"'unchecked_div_i16'\" := (unchecked_div_i16).\n\nNotation \"'unchecked_div_i32'\" := (unchecked_div_i32).\n\nNotation \"'unchecked_div_i64'\" := (unchecked_div_i64).\n\nNotation \"'unchecked_div_i8'\" := (unchecked_div_i8).\n\nNotation \"'unchecked_div_isize'\" := (unchecked_div_isize).\n\nNotation \"'wrapping_add_u128'\" := (wrapping_add_u128).\n\nNotation \"'wrapping_add_u16'\" := (wrapping_add_u16).\n\nNotation \"'wrapping_add_u32'\" := (wrapping_add_u32).\n\nNotation 
\"'wrapping_add_u64'\" := (wrapping_add_u64).\n\nNotation \"'wrapping_add_u8'\" := (wrapping_add_u8).\n\nNotation \"'wrapping_add_usize'\" := (wrapping_add_usize).\n\nNotation \"'wrapping_mul_i128'\" := (wrapping_mul_i128).\n\nNotation \"'wrapping_mul_i16'\" := (wrapping_mul_i16).\n\nNotation \"'wrapping_mul_i32'\" := (wrapping_mul_i32).\n\nNotation \"'wrapping_mul_i64'\" := (wrapping_mul_i64).\n\nNotation \"'wrapping_mul_i8'\" := (wrapping_mul_i8).\n\nNotation \"'wrapping_mul_isize'\" := (wrapping_mul_isize).\n\nNotation \"'wrapping_mul_u128'\" := (wrapping_mul_u128).\n\nNotation \"'wrapping_mul_u16'\" := (wrapping_mul_u16).\n\nNotation \"'wrapping_mul_u32'\" := (wrapping_mul_u32).\n\nNotation \"'wrapping_mul_u64'\" := (wrapping_mul_u64).\n\nNotation \"'wrapping_mul_u8'\" := (wrapping_mul_u8).\n\nNotation \"'wrapping_mul_usize'\" := (wrapping_mul_usize).\n\nNotation \"'wrapping_sub_u128'\" := (wrapping_sub_u128).\n\nNotation \"'wrapping_sub_u16'\" := (wrapping_sub_u16).\n\nNotation \"'wrapping_sub_u32'\" := (wrapping_sub_u32).\n\nNotation \"'wrapping_sub_u64'\" := (wrapping_sub_u64).\n\nNotation \"'wrapping_sub_u8'\" := (wrapping_sub_u8).\n\nNotation \"'wrapping_sub_usize'\" := (wrapping_sub_usize).\n\nNotation \"'rotate_left_u128'\" := (rotate_left_u128).\n\nNotation \"'rotate_left_u16'\" := (rotate_left_u16).\n\nNotation \"'rotate_left_u32'\" := (rotate_left_u32).\n\nNotation \"'rotate_left_u64'\" := (rotate_left_u64).\n\nNotation \"'rotate_left_u8'\" := (rotate_left_u8).\n\nNotation \"'rotate_left_usize'\" := (rotate_left_usize).\n\nNotation \"'rotate_right_u128'\" := (rotate_right_u128).\n\nNotation \"'rotate_right_u16'\" := (rotate_right_u16).\n\nNotation \"'rotate_right_u32'\" := (rotate_right_u32).\n\nNotation \"'rotate_right_u64'\" := (rotate_right_u64).\n\nNotation \"'rotate_right_u8'\" := (rotate_right_u8).\n\nNotation \"'rotate_right_usize'\" := (rotate_right_usize).\n\n(* Notation \"'bswap_u128'\" := (bswap_u128). 
*)\n\n(* Notation \"'bswap_u16'\" := (bswap_u16). *)\n\n(* Notation \"'bswap_u32'\" := (bswap_u32). *)\n\n(* Notation \"'bswap_u64'\" := (bswap_u64). *)\n\n(* Notation \"'bswap_u8'\" := (bswap_u8). *)\n\n(* Notation \"'bswap_usize'\" := (bswap_usize). *)\n\n(* Notation \"'ctlz_u128'\" := (ctlz_u128). *)\n\n(* Notation \"'ctlz_u16'\" := (ctlz_u16). *)\n\n(* Notation \"'ctlz_u32'\" := (ctlz_u32). *)\n\n(* Notation \"'ctlz_u64'\" := (ctlz_u64). *)\n\n(* Notation \"'ctlz_u8'\" := (ctlz_u8). *)\n\n(* Notation \"'ctlz_usize'\" := (ctlz_usize). *)\n\n(* Notation \"'ctpop_u128'\" := (ctpop_u128). *)\n\n(* Notation \"'ctpop_u16'\" := (ctpop_u16). *)\n\n(* Notation \"'ctpop_u32'\" := (ctpop_u32). *)\n\n(* Notation \"'ctpop_u64'\" := (ctpop_u64). *)\n\n(* Notation \"'ctpop_u8'\" := (ctpop_u8). *)\n\n(* Notation \"'ctpop_usize'\" := (ctpop_usize). *)\n\n(* Notation \"'cttz_u128'\" := (cttz_u128). *)\n\n(* Notation \"'cttz_u16'\" := (cttz_u16). *)\n\n(* Notation \"'cttz_u32'\" := (cttz_u32). *)\n\n(* Notation \"'cttz_u64'\" := (cttz_u64). *)\n\n(* Notation \"'cttz_u8'\" := (cttz_u8). *)\n\n(* Notation \"'cttz_usize'\" := (cttz_usize). *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Self_Traits (t_Iterator).\nExport Self_Traits (t_Iterator).\n\nFrom Core Require Import Self_Range (t_Step).\nExport Self_Range (t_Step).\n\nFrom Core Require Import Self_Traits (t_TrustedStep).\nExport Self_Traits (t_TrustedStep).\n\n\n\nFrom Core Require Import Self_Traits (t_IntoIterator).\nExport Self_Traits (t_IntoIterator).\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter_Range.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core (t_num).\nExport Core (t_num).\n\n\n\nFrom Core Require Import Core_Primitive (t_u8).\nExport Core_Primitive (t_u8).\n\nFrom Core Require Import Core_Primitive (t_u16).\nExport Core_Primitive (t_u16).\n\nFrom Core Require Import Core_Primitive (t_u32).\nExport Core_Primitive (t_u32).\n\nFrom Core Require Import Core_Primitive (t_u64).\nExport Core_Primitive (t_u64).\n\nFrom Core Require Import Core_Primitive (t_u128).\nExport Core_Primitive (t_u128).\n\nFrom Core Require Import Core_Primitive (t_usize).\nExport Core_Primitive (t_usize).\n\n(* NotImplementedYet *)\n\nClass t_Step (v_Self : Type) `{t_Sized (v_Self)} `{t_Clone (v_Self)} `{t_PartialOrd (v_Self) (v_Self)} : Type :=\n  {\n    Step_f_steps_between : v_Self -> v_Self -> 
t_Option ((t_usize));\n    Step_f_forward_checked : v_Self -> t_usize -> t_Option ((v_Self));\n  }.\nArguments t_Step (_) {_} {_} {_}.\n\nClass t_RangeIteratorImpl (v_Self : Type) : Type :=\n  {\n    RangeIteratorImpl_f_Item : Type;\n    _ :: `{t_Sized (RangeIteratorImpl_f_Item)};\n    RangeIteratorImpl_f_spec_next : v_Self -> (v_Self*t_Option ((RangeIteratorImpl_f_Item)));\n  }.\nArguments t_RangeIteratorImpl (_).\n\nInstance t_RangeIteratorImpl_158276838 `{v_A : Type} `{t_Sized (v_A)} `{t_Step (v_A)} : t_RangeIteratorImpl ((t_Range ((v_A)))) :=\n  {\n    RangeIteratorImpl_impl_f_Item := v_A;\n    RangeIteratorImpl_impl_f_spec_next := fun  (self : t_Range ((v_A)))=>\n      let hax_temp_output := never_to_any (panic_fmt (impl_1__new_v1 ([\"not yet implemented: specification needed\"%string]) (impl_1__none (tt)))) in\n      (self,hax_temp_output);\n  }.\n\nInstance t_Iterator_416192239 `{v_A : Type} `{t_Sized (v_A)} `{t_Step (v_A)} : t_Iterator ((t_Range ((v_A)))) :=\n  {\n    Iterator_impl_1_f_Item := v_A;\n    Iterator_impl_1_f_next := fun  (self : t_Range ((v_A)))=>\n      let hax_temp_output := never_to_any (panic_fmt (impl_1__new_v1 ([\"not yet implemented: specification needed\"%string]) (impl_1__none (tt)))) in\n      (self,hax_temp_output);\n    Iterator_impl_1_f_size_hint := fun  (self : t_Range ((v_A)))=>\n      if\n        PartialOrd_f_lt (Range_f_start self) (Range_f_end self)\n      then\n        let hint := Step_f_steps_between (Range_f_start self) (Range_f_end self) in\n        (0,Option_Some (0))\n      else\n        (0,Option_Some (0));\n  }.\n\nInstance t_Step_890486371 : t_Step ((t_u8)) :=\n  {\n    Step_impl_2_f_steps_between := fun  (start : t_u8) (v_end : t_u8)=>\n      if\n        PartialOrd_f_le (start) (v_end)\n      then\n        Option_Some (Into_f_into (Sub_f_sub (Clone_f_clone (v_end)) (Clone_f_clone (start))))\n      else\n        Option_None;\n    Step_impl_2_f_forward_checked := fun  (start : t_u8) (n : t_usize)=>\n      match 
TryFrom_f_try_from (n) with\n      | Result_Ok (n) =>\n        impl_6__checked_add (start) (n)\n      | Result_Err (_) =>\n        Option_None\n      end;\n  }.\n\nInstance t_Step_800843805 : t_Step ((t_u16)) :=\n  {\n    Step_impl_3_f_steps_between := fun  (start : t_u16) (v_end : t_u16)=>\n      if\n        PartialOrd_f_le (start) (v_end)\n      then\n        Option_Some (Into_f_into (Sub_f_sub (Clone_f_clone (v_end)) (Clone_f_clone (start))))\n      else\n        Option_None;\n    Step_impl_3_f_forward_checked := fun  (start : t_u16) (n : t_usize)=>\n      match TryFrom_f_try_from (n) with\n      | Result_Ok (n) =>\n        impl_7__checked_add (start) (n)\n      | Result_Err (_) =>\n        Option_None\n      end;\n  }.\n\nInstance t_Step_230073379 : t_Step ((t_u32)) :=\n  {\n    Step_impl_4_f_steps_between := fun  (start : t_u32) (v_end : t_u32)=>\n      if\n        PartialOrd_f_le (start) (v_end)\n      then\n        Option_Some (Into_f_into (Sub_f_sub (Clone_f_clone (v_end)) (Clone_f_clone (start))))\n      else\n        Option_None;\n    Step_impl_4_f_forward_checked := fun  (start : t_u32) (n : t_usize)=>\n      match TryFrom_f_try_from (n) with\n      | Result_Ok (n) =>\n        impl_8__checked_add (start) (n)\n      | Result_Err (_) =>\n        Option_None\n      end;\n  }.\n\nInstance t_Step_851062726 : t_Step ((t_u64)) :=\n  {\n    Step_impl_5_f_steps_between := fun  (start : t_u64) (v_end : t_u64)=>\n      if\n        PartialOrd_f_le (start) (v_end)\n      then\n        Option_Some (Into_f_into (Sub_f_sub (Clone_f_clone (v_end)) (Clone_f_clone (start))))\n      else\n        Option_None;\n    Step_impl_5_f_forward_checked := fun  (start : t_u64) (n : t_usize)=>\n      match TryFrom_f_try_from (n) with\n      | Result_Ok (n) =>\n        impl_9__checked_add (start) (n)\n      | Result_Err (_) =>\n        Option_None\n      end;\n  }.\n\nInstance t_Step_679763039 : t_Step ((t_u128)) :=\n  {\n    Step_impl_7_f_steps_between := fun  (start : t_u128) (v_end 
: t_u128)=>\n      if\n        PartialOrd_f_le (start) (v_end)\n      then\n        impl__ok (TryFrom_f_try_from (Sub_f_sub (Clone_f_clone (v_end)) (Clone_f_clone (start))))\n      else\n        Option_None;\n    Step_impl_7_f_forward_checked := fun  (start : t_u128) (n : t_usize)=>\n      Option_None;\n  }.\n\nInstance t_Step_999413546 : t_Step ((t_usize)) :=\n  {\n    Step_impl_6_f_steps_between := fun  (start : t_usize) (v_end : t_usize)=>\n      if\n        PartialOrd_f_le (start) (v_end)\n      then\n        Option_Some (Into_f_into (Sub_f_sub (Clone_f_clone (v_end)) (Clone_f_clone (start))))\n      else\n        Option_None;\n    Step_impl_6_f_forward_checked := fun  (start : t_usize) (n : t_usize)=>\n      match TryFrom_f_try_from (n) with\n      | Result_Ok (n) =>\n        impl_11__checked_add (start) (n)\n      | Result_Err (_) =>\n        Option_None\n      end;\n  }.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter_Traits.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\n\n\n\n\nFrom Core Require Import Self_Collect (t_IntoIterator).\nExport Self_Collect (t_IntoIterator).\n\nFrom Core Require Import Self_Exact_size (t_ExactSizeIterator).\nExport Self_Exact_size (t_ExactSizeIterator).\n\nFrom Core Require Import Self_Iterator (t_Iterator).\nExport Self_Iterator (t_Iterator).\n\n\n\nFrom Core Require Import Self_Marker (t_FusedIterator).\nExport Self_Marker (t_FusedIterator).\n\nFrom Core Require Import Self_Marker (t_TrustedLen).\nExport Self_Marker (t_TrustedLen).\n\nFrom Core Require Import Self_Marker (t_TrustedStep).\nExport Self_Marker (t_TrustedStep).\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter_Traits_Collect.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Iter_Traits (t_Iterator).\nExport Core_Iter_Traits (t_Iterator).\n\nClass t_IntoIterator (v_Self : Type) : Type :=\n  {\n    IntoIterator_f_Item : Type;\n    _ :: `{t_Sized (IntoIterator_f_Item)};\n    IntoIterator_f_IntoIter `{_.(Iterator_f_Item) = IntoIterator_f_Item} : Type;\n    _ :: `{t_Iterator (IntoIterator_f_IntoIter)};\n    _ :: `{t_Sized (IntoIterator_f_IntoIter)};\n    IntoIterator_f_into_iter : v_Self -> IntoIterator_f_IntoIter;\n  }.\nArguments t_IntoIterator (_).\n\nClass t_FromIterator (v_Self : Type) (v_A : Type) `{t_Sized (v_Self)} `{t_Sized (v_A)} : Type :=\n  {\n    FromIterator_f_from_iter v_T : Type `{t_Sized (v_T)} `{t_IntoIterator (v_T)} `{_.(IntoIterator_f_Item) = v_A} : v_T -> v_Self;\n  }.\nArguments 
t_FromIterator (_) (_) {_} {_}.\n\nInstance t_IntoIterator_346955793 `{v_I : Type} `{t_Sized (v_I)} `{t_Iterator (v_I)} : t_IntoIterator ((v_I)) :=\n  {\n    IntoIterator_impl_f_Item := Iterator_f_Item;\n    IntoIterator_impl_f_IntoIter := v_I;\n    IntoIterator_impl_f_into_iter := fun  (self : v_I)=>\n      self;\n  }.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter_Traits_Exact_size.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Iter (t_Iterator).\nExport Core_Iter (t_Iterator).\n\nClass t_ExactSizeIterator (v_Self : Type) `{t_Iterator (v_Self)} : Type :=\n  {\n    ExactSizeIterator_f_len : v_Self -> t_usize;\n    ExactSizeIterator_f_is_empty : v_Self -> bool;\n  }.\nArguments t_ExactSizeIterator (_) {_}.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter_Traits_Iterator.v",
    "content": "\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Marker.\nExport Core_Marker.\n\nFrom Core Require Import Core_Option.\nExport Core_Option.\n\nFrom Core Require Import Core_Primitive.\nExport Core_Primitive.\n\nFrom Core Require Import Core_Ops_Function.\nExport Core_Ops_Function.\n\nClass t_Iterator (v_Self : Type) : Type :=\n  {\n    Iterator_f_Item : Type;\n    _H_Sized :: `{t_Sized (Iterator_f_Item)};\n    Iterator_f_next : v_Self -> (v_Self*t_Option ((Iterator_f_Item)));\n    Iterator_f_size_hint : v_Self -> (t_usize*t_Option ((t_usize)));\n    Iterator_f_fold (v_B : Type) (v_F : Type) `{t_Sized (v_B)} `{t_Sized (v_F)} `{t_Sized (v_Self)} `{t_FnMut (v_F) ((v_B*Iterator_f_Item))} `{_.(FnOnce_f_Output) = v_B} : v_Self -> v_B -> v_F -> v_B;\n  }.\nArguments t_Iterator (_).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Iter_Traits_Marker.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core_Iter (t_Step).\nExport Core_Iter (t_Step).\n\nClass t_TrustedFused (v_Self : Type) : Type :=\n  {\n  }.\nArguments t_TrustedFused (_).\n\nClass t_TrustedStep (v_Self : Type) `{t_Step (v_Self)} `{t_Copy (v_Self)} : Type :=\n  {\n  }.\nArguments t_TrustedStep (_) {_} {_}.\n\nClass t_FusedIterator (v_Self : Type) `{t_Iterator (v_Self)} : Type :=\n  {\n  }.\nArguments t_FusedIterator (_) {_}.\n\nClass t_TrustedLen (v_Self : Type) `{t_Iterator (v_Self)} : Type :=\n  {\n  }.\nArguments t_TrustedLen (_) {_}.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Marker.v",
    "content": "\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Clone (t_Clone).\nExport Core_Clone (t_Clone).\n\nClass t_Copy (v_Self : Type) `{t_Clone (v_Self)} : Type :=\n  {\n  }.\nArguments t_Copy (_) {_}.\n\nClass t_Destruct (v_Self : Type) : Type :=\n  {\n  }.\nArguments t_Destruct (_).\n\nClass t_Sized (v_Self : Type) : Type :=\n  {\n  }.\nArguments t_Sized (_).\n\nRecord t_PhantomData (v_T : Type) `{t_Sized (v_T)} : Type :=\n  {\n  }.\nArguments Build_t_PhantomData {_} {_}.\n#[export]\nNotation \"'PhantomData'\" := Build_t_PhantomData.\n\nClass t_Tuple (v_Self : Type) : Type :=\n  {\n  }.\nArguments t_Tuple (_).\n\n#[global] Instance t_Sized_any T : t_Sized T := {}.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Num.v",
    "content": "\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Base_interface.\nExport Core_Base_interface.\n\nFrom Core Require Import Core_Primitive.\nExport Core_Primitive.\n\nFrom Core Require Import Core_Intrinsics.\nExport Core_Intrinsics.\n\nFrom Core Require Import Core_Ops_Index.\nExport Core_Ops_Index.\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\nNotation \"'impl_10__from_le'\" := (from_le715594649).\n\nNotation \"'impl_10__to_le'\" := (to_le902648378).\n\nNotation \"'impl_7__from_le'\" := (from_le793045973).\n\nNotation \"'impl_7__to_le'\" := (to_le1012469456).\n\nNotation \"'impl_8__from_le'\" := (from_le706338679).\n\nNotation \"'impl_8__to_le'\" := (to_le724624277).\n\nNotation \"'impl_9__from_le'\" := (from_le435089922).\n\nNotation \"'impl_9__to_le'\" := (to_le2703875).\n\nNotation \"'impl_6__from_le'\" := (from_le529489651).\n\nNotation \"'impl_6__to_le'\" := (to_le523556665).\n\nNotation \"'impl_11__from_le'\" := (from_le418743864).\n\nNotation \"'impl_11__to_le'\" := (to_le946822077).\n\nNotation \"'impl__BITS'\" := (v_BITS80497669).\n\nNotation \"'impl__MAX'\" := (v_MAX626626007).\n\nNotation \"'impl__MIN'\" := (v_MIN19747349).\n\nNotation \"'impl__i16__BITS'\" := (v_BITS421056295).\n\nNotation \"'impl__i16__MAX'\" := (v_MAX474501300).\n\nNotation \"'impl__i16__MIN'\" := (v_MIN776391606).\n\nNotation \"'impl__i32__BITS'\" := (v_BITS465526498).\n\nNotation \"'impl__i32__MAX'\" := (v_MAX106630818).\n\nNotation \"'impl__i32__MIN'\" := (v_MIN682967538).\n\nNotation \"'impl__i64__BITS'\" := (v_BITS419886578).\n\nNotation \"'impl__i64__MAX'\" := (v_MAX527043787).\n\nNotation 
\"'impl__i64__MIN'\" := (v_MIN654206259).\n\nNotation \"'impl__i128__BITS'\" := (v_BITS992667165).\n\nNotation \"'impl__i128__MAX'\" := (v_MAX375377319).\n\nNotation \"'impl__i128__MIN'\" := (v_MIN79612531).\n\nNotation \"'impl__isize__BITS'\" := (v_BITS211584016).\n\nNotation \"'impl__isize__MAX'\" := (v_MAX937003029).\n\nNotation \"'impl__isize__MIN'\" := (v_MIN1017039533).\n\nNotation \"'impl_6__BITS'\" := (v_BITS690311813).\n\nNotation \"'impl_6__MAX'\" := (v_MAX310118176).\n\nNotation \"'impl_6__MIN'\" := (v_MIN41851434).\n\nNotation \"'impl_7__BITS'\" := (v_BITS277333551).\n\nNotation \"'impl_7__MAX'\" := (v_MAX487295910).\n\nNotation \"'impl_7__MIN'\" := (v_MIN592300287).\n\nNotation \"'impl_8__BITS'\" := (v_BITS473478051).\n\nNotation \"'impl_8__MAX'\" := (v_MAX826434525).\n\nNotation \"'impl_8__MIN'\" := (v_MIN932777089).\n\nNotation \"'impl_9__BITS'\" := (v_BITS177666292).\n\nNotation \"'impl_9__MAX'\" := (v_MAX815180633).\n\nNotation \"'impl_9__MIN'\" := (v_MIN631333594).\n\nNotation \"'impl_10__BITS'\" := (v_BITS136999051).\n\nNotation \"'impl_10__MAX'\" := (v_MAX404543799).\n\nNotation \"'impl_10__MIN'\" := (v_MIN668621698).\n\nNotation \"'impl_11__BITS'\" := (v_BITS229952196).\n\nNotation \"'impl_11__MAX'\" := (v_MAX750570916).\n\nNotation \"'impl_11__MIN'\" := (v_MIN861571008).\n\nNotation \"'impl__is_negative'\" := (is_negative350273175).\n\nNotation \"'impl__is_positive'\" := (is_positive286955196).\n\nNotation \"'impl__signum'\" := (signum721334203).\n\nNotation \"'impl__i16__is_negative'\" := (is_negative477067241).\n\nNotation \"'impl__i16__is_positive'\" := (is_positive821581438).\n\nNotation \"'impl__i16__signum'\" := (signum243706004).\n\nNotation \"'impl__i32__is_negative'\" := (is_negative1035644813).\n\nNotation \"'impl__i32__is_positive'\" := (is_positive401652342).\n\nNotation \"'impl__i32__signum'\" := (signum323641039).\n\nNotation \"'impl__i64__is_negative'\" := (is_negative1066124578).\n\nNotation \"'impl__i64__is_positive'\" := 
(is_positive16569358).\n\nNotation \"'impl__i64__signum'\" := (signum582963664).\n\nNotation \"'impl__i128__is_negative'\" := (is_negative221698470).\n\nNotation \"'impl__i128__is_positive'\" := (is_positive883218309).\n\nNotation \"'impl__i128__signum'\" := (signum408800799).\n\nNotation \"'impl__isize__is_negative'\" := (is_negative693446369).\n\nNotation \"'impl__isize__is_positive'\" := (is_positive169998680).\n\nNotation \"'impl__isize__signum'\" := (signum91486536).\n\nNotation \"'impl_6__checked_add'\" := (checked_add268751055).\n\nNotation \"'impl_7__checked_add'\" := (checked_add132377399).\n\nNotation \"'impl_8__checked_add'\" := (checked_add985437730).\n\nNotation \"'impl_9__checked_add'\" := (checked_add586246465).\n\nNotation \"'impl_10__checked_add'\" := (checked_add218978451).\n\nNotation \"'impl_11__checked_add'\" := (checked_add984013567).\n\nNotation \"'impl__wrapping_add'\" := (wrapping_add634491935).\n\nNotation \"'impl__wrapping_sub'\" := (wrapping_sub973428293).\n\nNotation \"'impl__wrapping_neg'\" := (wrapping_neg400701205).\n\nNotation \"'impl__wrapping_abs'\" := (wrapping_abs400396545).\n\nNotation \"'impl__i16__wrapping_add'\" := (wrapping_add868559108).\n\nNotation \"'impl__i16__wrapping_sub'\" := (wrapping_sub189469152).\n\nNotation \"'impl__i16__wrapping_neg'\" := (wrapping_neg860505723).\n\nNotation \"'impl__i16__wrapping_abs'\" := (wrapping_abs229076826).\n\nNotation \"'impl__i32__wrapping_add'\" := (wrapping_add475006616).\n\nNotation \"'impl__i32__wrapping_sub'\" := (wrapping_sub298337071).\n\nNotation \"'impl__i32__wrapping_neg'\" := (wrapping_neg636433078).\n\nNotation \"'impl__i32__wrapping_abs'\" := (wrapping_abs729536875).\n\nNotation \"'impl__i64__wrapping_add'\" := (wrapping_add590074241).\n\nNotation \"'impl__i64__wrapping_sub'\" := (wrapping_sub334584751).\n\nNotation \"'impl__i64__wrapping_neg'\" := (wrapping_neg868282938).\n\nNotation \"'impl__i64__wrapping_abs'\" := (wrapping_abs285829312).\n\nNotation 
\"'impl__i128__wrapping_add'\" := (wrapping_add251385439).\n\nNotation \"'impl__i128__wrapping_sub'\" := (wrapping_sub681598071).\n\nNotation \"'impl__i128__wrapping_neg'\" := (wrapping_neg446546984).\n\nNotation \"'impl__i128__wrapping_abs'\" := (wrapping_abs281925696).\n\nNotation \"'impl__isize__wrapping_add'\" := (wrapping_add226040243).\n\nNotation \"'impl__isize__wrapping_sub'\" := (wrapping_sub698035192).\n\nNotation \"'impl__isize__wrapping_neg'\" := (wrapping_neg912291768).\n\nNotation \"'impl__isize__wrapping_abs'\" := (wrapping_abs347300819).\n\nNotation \"'impl_6__checked_div'\" := (checked_div508301931).\n\nNotation \"'impl_6__overflowing_add'\" := (overflowing_add708890057).\n\nNotation \"'impl_7__checked_div'\" := (checked_div614920780).\n\nNotation \"'impl_7__overflowing_add'\" := (overflowing_add1023344178).\n\nNotation \"'impl_8__checked_div'\" := (checked_div979383477).\n\nNotation \"'impl_8__overflowing_add'\" := (overflowing_add905744292).\n\nNotation \"'impl_9__checked_div'\" := (checked_div988689127).\n\nNotation \"'impl_9__overflowing_add'\" := (overflowing_add581983607).\n\nNotation \"'impl_10__checked_div'\" := (checked_div344106746).\n\nNotation \"'impl_10__overflowing_add'\" := (overflowing_add458293681).\n\nNotation \"'impl_11__checked_div'\" := (checked_div80223906).\n\nNotation \"'impl_11__overflowing_add'\" := (overflowing_add682280407).\n\nNotation \"'impl__abs'\" := (abs945505614).\n\nNotation \"'impl__i16__abs'\" := (abs581170970).\n\nNotation \"'impl__i32__abs'\" := (abs590464694).\n\nNotation \"'impl__i64__abs'\" := (abs654781043).\n\nNotation \"'impl__i128__abs'\" := (abs204417539).\n\nNotation \"'impl__isize__abs'\" := (abs220926056).\n\nNotation \"'impl_6__wrapping_add'\" := (wrapping_add480603777).\n\nNotation \"'impl_6__wrapping_mul'\" := (wrapping_mul885216284).\n\nNotation \"'impl_7__wrapping_add'\" := (wrapping_add124432709).\n\nNotation \"'impl_7__wrapping_mul'\" := (wrapping_mul14465189).\n\nNotation 
\"'impl_8__wrapping_add'\" := (wrapping_add1049665857).\n\nNotation \"'impl_8__wrapping_mul'\" := (wrapping_mul203346768).\n\nNotation \"'impl_9__wrapping_add'\" := (wrapping_add865565639).\n\nNotation \"'impl_9__wrapping_mul'\" := (wrapping_mul742978873).\n\nNotation \"'impl_10__wrapping_add'\" := (wrapping_add40844100).\n\nNotation \"'impl_10__wrapping_mul'\" := (wrapping_mul294115024).\n\nNotation \"'impl_11__wrapping_add'\" := (wrapping_add427637036).\n\nNotation \"'impl_11__wrapping_mul'\" := (wrapping_mul680896953).\n\nNotation \"'impl_6__wrapping_sub'\" := (wrapping_sub403906422).\n\nNotation \"'impl_6__wrapping_neg'\" := (wrapping_neg123212788).\n\nNotation \"'impl_7__wrapping_sub'\" := (wrapping_sub811251034).\n\nNotation \"'impl_7__wrapping_neg'\" := (wrapping_neg128555595).\n\nNotation \"'impl_8__wrapping_sub'\" := (wrapping_sub708953500).\n\nNotation \"'impl_8__wrapping_neg'\" := (wrapping_neg328220773).\n\nNotation \"'impl_9__wrapping_sub'\" := (wrapping_sub762520851).\n\nNotation \"'impl_9__wrapping_neg'\" := (wrapping_neg617136337).\n\nNotation \"'impl_10__wrapping_sub'\" := (wrapping_sub409310259).\n\nNotation \"'impl_10__wrapping_neg'\" := (wrapping_neg729451428).\n\nNotation \"'impl_11__wrapping_sub'\" := (wrapping_sub813101882).\n\nNotation \"'impl_11__wrapping_neg'\" := (wrapping_neg342773446).\n\nNotation \"'impl_6__wrapping_div'\" := (wrapping_div660080892).\n\nNotation \"'impl_6__wrapping_div_euclid'\" := (wrapping_div_euclid481233436).\n\nNotation \"'impl_7__wrapping_div'\" := (wrapping_div366977334).\n\nNotation \"'impl_7__wrapping_div_euclid'\" := (wrapping_div_euclid22267888).\n\nNotation \"'impl_8__wrapping_div'\" := (wrapping_div931150450).\n\nNotation \"'impl_8__wrapping_div_euclid'\" := (wrapping_div_euclid606291997).\n\nNotation \"'impl_9__wrapping_div'\" := (wrapping_div168427046).\n\nNotation \"'impl_9__wrapping_div_euclid'\" := (wrapping_div_euclid321252086).\n\nNotation \"'impl_10__wrapping_div'\" := 
(wrapping_div692427683).\n\nNotation \"'impl_10__wrapping_div_euclid'\" := (wrapping_div_euclid926334515).\n\nNotation \"'impl_11__wrapping_div'\" := (wrapping_div905768546).\n\nNotation \"'impl_11__wrapping_div_euclid'\" := (wrapping_div_euclid90317722).\n\nNotation \"'impl_6__wrapping_rem'\" := (wrapping_rem984569721).\n\nNotation \"'impl_6__wrapping_rem_euclid'\" := (wrapping_rem_euclid946579345).\n\nNotation \"'impl_7__wrapping_rem'\" := (wrapping_rem378598035).\n\nNotation \"'impl_7__wrapping_rem_euclid'\" := (wrapping_rem_euclid602402638).\n\nNotation \"'impl_8__wrapping_rem'\" := (wrapping_rem292009099).\n\nNotation \"'impl_8__wrapping_rem_euclid'\" := (wrapping_rem_euclid1020271291).\n\nNotation \"'impl_9__wrapping_rem'\" := (wrapping_rem390602260).\n\nNotation \"'impl_9__wrapping_rem_euclid'\" := (wrapping_rem_euclid839264546).\n\nNotation \"'impl_10__wrapping_rem'\" := (wrapping_rem332379920).\n\nNotation \"'impl_10__wrapping_rem_euclid'\" := (wrapping_rem_euclid646122423).\n\nNotation \"'impl_11__wrapping_rem'\" := (wrapping_rem333089373).\n\nNotation \"'impl_11__wrapping_rem_euclid'\" := (wrapping_rem_euclid769656504).\n\nNotation \"'impl_6__rotate_left'\" := (rotate_left792925914).\n\nNotation \"'impl_6__rotate_right'\" := (rotate_right166090082).\n\nNotation \"'impl_7__rotate_left'\" := (rotate_left297034175).\n\nNotation \"'impl_7__rotate_right'\" := (rotate_right138522246).\n\nNotation \"'impl_8__rotate_left'\" := (rotate_left823573251).\n\nNotation \"'impl_8__rotate_right'\" := (rotate_right869195717).\n\nNotation \"'impl_9__rotate_left'\" := (rotate_left618936072).\n\nNotation \"'impl_9__rotate_right'\" := (rotate_right1041614027).\n\nNotation \"'impl_10__rotate_left'\" := (rotate_left1065866885).\n\nNotation \"'impl_10__rotate_right'\" := (rotate_right591112338).\n\nNotation \"'impl_11__rotate_left'\" := (rotate_left996672710).\n\nNotation \"'impl_11__rotate_right'\" := (rotate_right442734174).\n\n(* Notation \"'impl_6__count_ones'\" := 
(count_ones202509899). *)\n\n(* Notation \"'impl_6__leading_zeros'\" := (leading_zeros75047366). *)\n\n(* Notation \"'impl_6__swap_bytes'\" := (swap_bytes657156997). *)\n\n(* Notation \"'impl_6__from_be'\" := (from_be746282521). *)\n\n(* Notation \"'impl_6__to_be'\" := (to_be972448780). *)\n\n(* Notation \"'impl_6__trailing_zeros'\" := (trailing_zeros572929871). *)\n\n(* Notation \"'impl_7__count_ones'\" := (count_ones91875752). *)\n\n(* Notation \"'impl_7__leading_zeros'\" := (leading_zeros462412478). *)\n\n(* Notation \"'impl_7__swap_bytes'\" := (swap_bytes926722059). *)\n\n(* Notation \"'impl_7__from_be'\" := (from_be510959665). *)\n\n(* Notation \"'impl_7__to_be'\" := (to_be551590602). *)\n\n(* Notation \"'impl_7__trailing_zeros'\" := (trailing_zeros421474733). *)\n\n(* Notation \"'impl_8__count_ones'\" := (count_ones776185738). *)\n\n(* Notation \"'impl_8__leading_zeros'\" := (leading_zeros698221972). *)\n\n(* Notation \"'impl_8__swap_bytes'\" := (swap_bytes320480126). *)\n\n(* Notation \"'impl_8__from_be'\" := (from_be664756649). *)\n\n(* Notation \"'impl_8__to_be'\" := (to_be82825962). *)\n\n(* Notation \"'impl_8__trailing_zeros'\" := (trailing_zeros1061560720). *)\n\n(* Notation \"'impl_9__count_ones'\" := (count_ones235885653). *)\n\n(* Notation \"'impl_9__leading_zeros'\" := (leading_zeros338302110). *)\n\n(* Notation \"'impl_9__swap_bytes'\" := (swap_bytes722254271). *)\n\n(* Notation \"'impl_9__from_be'\" := (from_be16013635). *)\n\n(* Notation \"'impl_9__to_be'\" := (to_be376714729). *)\n\n(* Notation \"'impl_9__trailing_zeros'\" := (trailing_zeros188346231). *)\n\n(* Notation \"'impl_10__count_ones'\" := (count_ones926736261). *)\n\n(* Notation \"'impl_10__leading_zeros'\" := (leading_zeros19644612). *)\n\n(* Notation \"'impl_10__swap_bytes'\" := (swap_bytes420879368). *)\n\n(* Notation \"'impl_10__from_be'\" := (from_be191085771). *)\n\n(* Notation \"'impl_10__to_be'\" := (to_be555075987). 
*)\n\n(* Notation \"'impl_10__trailing_zeros'\" := (trailing_zeros821715250). *)\n\n(* Notation \"'impl_11__count_ones'\" := (count_ones441645762). *)\n\n(* Notation \"'impl_11__leading_zeros'\" := (leading_zeros905233489). *)\n\n(* Notation \"'impl_11__swap_bytes'\" := (swap_bytes268673424). *)\n\n(* Notation \"'impl_11__from_be'\" := (from_be607978059). *)\n\n(* Notation \"'impl_11__to_be'\" := (to_be561847134). *)\n\n(* Notation \"'impl_11__trailing_zeros'\" := (trailing_zeros42066260). *)\n\nNotation \"'impl__rem_euclid'\" := (rem_euclid622298453).\n\nNotation \"'impl__i16__rem_euclid'\" := (rem_euclid158017644).\n\nNotation \"'impl__i32__rem_euclid'\" := (rem_euclid881249982).\n\nNotation \"'impl__i64__rem_euclid'\" := (rem_euclid1057082210).\n\nNotation \"'impl__i128__rem_euclid'\" := (rem_euclid254910751).\n\nNotation \"'impl__isize__rem_euclid'\" := (rem_euclid828379367).\n\n(* Notation \"'impl_6__count_zeros'\" := (count_zeros558337492). *)\n\n(* Notation \"'impl_6__leading_ones'\" := (leading_ones55148479). *)\n\n(* Notation \"'impl_6__trailing_ones'\" := (trailing_ones359778731). *)\n\n(* Notation \"'impl_7__count_zeros'\" := (count_zeros199825317). *)\n\n(* Notation \"'impl_7__leading_ones'\" := (leading_ones164277656). *)\n\n(* Notation \"'impl_7__trailing_ones'\" := (trailing_ones903944727). *)\n\n(* Notation \"'impl_8__count_zeros'\" := (count_zeros942566041). *)\n\n(* Notation \"'impl_8__leading_ones'\" := (leading_ones766486760). *)\n\n(* Notation \"'impl_8__trailing_ones'\" := (trailing_ones223371510). *)\n\n(* Notation \"'impl_9__count_zeros'\" := (count_zeros60346158). *)\n\n(* Notation \"'impl_9__leading_ones'\" := (leading_ones404666910). *)\n\n(* Notation \"'impl_9__trailing_ones'\" := (trailing_ones601201120). *)\n\n(* Notation \"'impl_10__count_zeros'\" := (count_zeros824862815). *)\n\n(* Notation \"'impl_10__leading_ones'\" := (leading_ones475503572). *)\n\n(* Notation \"'impl_10__trailing_ones'\" := (trailing_ones705845381). 
*)\n\n(* Notation \"'impl_11__count_zeros'\" := (count_zeros73479642). *)\n\n(* Notation \"'impl_11__leading_ones'\" := (leading_ones667660708). *)\n\n(* Notation \"'impl_11__trailing_ones'\" := (trailing_ones979548463). *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Num_Int_macros.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Num_Uint_macros.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* From Core Require Import Core_Ops_Arith (t_Add). *)\n(* Export Core_Ops_Arith (t_Add). *)\n\n(* From Core Require Import Core_Ops_Arith (t_Div). *)\n(* Export Core_Ops_Arith (t_Div). *)\n\n(* From Core Require Import Core_Ops_Arith (t_Mul). *)\n(* Export Core_Ops_Arith (t_Mul). *)\n\n(* From Core Require Import Core_Ops_Arith (t_Neg). *)\n(* Export Core_Ops_Arith (t_Neg). *)\n\n(* From Core Require Import Core_Ops_Arith (t_Rem). *)\n(* Export Core_Ops_Arith (t_Rem). *)\n\n(* From Core Require Import Core_Ops_Arith (t_Sub). *)\n(* Export Core_Ops_Arith (t_Sub). *)\n\nFrom Core Require Import Core_Ops_Arith.\nExport Core_Ops_Arith.\n\n\n\n(* From Core Require Import Core_Ops_Bit (t_BitAnd). *)\n(* Export Core_Ops_Bit (t_BitAnd). *)\n\n(* From Core Require Import Core_Ops_Bit (t_BitOr). *)\n(* Export Core_Ops_Bit (t_BitOr). *)\n\n(* From Core Require Import Core_Ops_Bit (t_BitXor). *)\n(* Export Core_Ops_Bit (t_BitXor). *)\n\n(* From Core Require Import Core_Ops_Bit (t_Not). *)\n(* Export Core_Ops_Bit (t_Not). *)\n\n(* From Core Require Import Core_Ops_Bit (t_Shl). *)\n(* Export Core_Ops_Bit (t_Shl). *)\n\n(* From Core Require Import Core_Ops_Bit (t_Shr). *)\n(* Export Core_Ops_Bit (t_Shr). *)\n\nFrom Core Require Import Core_Ops_Bit.\nExport Core_Ops_Bit.\n\n\n\nFrom Core Require Import Core_Ops_Index.\nExport Core_Ops_Index.\n\n\n\nFrom Core Require Import Core_Ops_Range.\nExport Core_Ops_Range.\n\n(* From Core Require Import Core_Ops_Index_range (t_IndexRange). *)\n(* Export Core_Ops_Index_range (t_IndexRange). 
*)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Arith.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Marker (t_Sized).\nExport Core_Marker (t_Sized).\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\nClass t_Add (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type :=\n  {\n    Add_f_Output : Type;\n    _ :: `{t_Sized (Add_f_Output)};\n    Add_f_add : v_Self -> v_Rhs -> Add_f_Output;\n  }.\nArguments t_Add (_) (_) {_}.\n\nClass t_Div (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type :=\n  {\n    Div_f_Output : Type;\n    _ :: `{t_Sized (Div_f_Output)};\n    Div_f_div : v_Self -> v_Rhs -> Div_f_Output;\n  }.\nArguments t_Div (_) (_) {_}.\n\nClass t_Mul (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type :=\n  {\n    Mul_f_Output : Type;\n    _ :: `{t_Sized (Mul_f_Output)};\n    Mul_f_mul : v_Self -> v_Rhs -> Mul_f_Output;\n  }.\nArguments t_Mul (_) (_) {_}.\n\nClass t_Neg (v_Self : Type) : Type :=\n  {\n    Neg_f_Output : Type;\n    _ :: `{t_Sized (Neg_f_Output)};\n    Neg_f_neg : v_Self -> Neg_f_Output;\n  }.\nArguments t_Neg (_).\n\nClass t_Rem (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type :=\n  {\n    Rem_f_Output : Type;\n    _ :: `{t_Sized (Rem_f_Output)};\n    Rem_f_rem : v_Self -> v_Rhs -> Rem_f_Output;\n  }.\nArguments t_Rem (_) (_) {_}.\n\nClass t_Sub (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type :=\n  {\n    Sub_f_Output : Type;\n    _ :: `{t_Sized (Sub_f_Output)};\n    Sub_f_sub : v_Self -> v_Rhs -> Sub_f_Output;\n  }.\nArguments t_Sub (_) (_) {_}.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Arith_Impls_for_prims.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\n\n\nFrom Core Require Import Core_Primitive (t_u8).\nExport Core_Primitive (t_u8).\n\nFrom Core Require Import Core_Primitive (t_u16).\nExport Core_Primitive (t_u16).\n\nFrom Core Require Import Core_Primitive (t_u32).\nExport Core_Primitive (t_u32).\n\nFrom Core Require Import Core_Primitive (t_u64).\nExport Core_Primitive (t_u64).\n\nFrom Core Require Import Core_Primitive (t_u128).\nExport Core_Primitive (t_u128).\n\nFrom Core Require Import Core_Primitive (t_usize).\nExport Core_Primitive (t_usize).\n\n\n\nFrom Core Require Import Core_Primitive (t_i8).\nExport Core_Primitive (t_i8).\n\nFrom Core Require Import Core_Primitive (t_i16).\nExport Core_Primitive (t_i16).\n\nFrom Core Require Import Core_Primitive (t_i32).\nExport Core_Primitive 
(t_i32).\n\nFrom Core Require Import Core_Primitive (t_i64).\nExport Core_Primitive (t_i64).\n\nFrom Core Require Import Core_Primitive (t_i128).\nExport Core_Primitive (t_i128).\n\nFrom Core Require Import Core_Primitive (t_isize).\nExport Core_Primitive (t_isize).\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\nNotation \"'impl'\" := (impl).\n\nNotation \"'impl_1'\" := (impl_1).\n\nNotation \"'impl_2'\" := (impl_2).\n\nNotation \"'impl_3'\" := (impl_3).\n\nNotation \"'impl_4'\" := (impl_4).\n\nNotation \"'impl_5'\" := (impl_5).\n\nNotation \"'impl_12'\" := (impl_12).\n\nNotation \"'impl_13'\" := (impl_13).\n\nNotation \"'impl_14'\" := (impl_14).\n\nNotation \"'impl_15'\" := (impl_15).\n\nNotation \"'impl_16'\" := (impl_16).\n\nNotation \"'impl_17'\" := (impl_17).\n\nNotation \"'impl_24'\" := (impl_24).\n\nNotation \"'impl_25'\" := (impl_25).\n\nNotation \"'impl_26'\" := (impl_26).\n\nNotation \"'impl_27'\" := (impl_27).\n\nNotation \"'impl_28'\" := (impl_28).\n\nNotation \"'impl_29'\" := (impl_29).\n\nNotation \"'impl_6'\" := (impl_6).\n\nNotation \"'impl_7'\" := (impl_7).\n\nNotation \"'impl_8'\" := (impl_8).\n\nNotation \"'impl_9'\" := (impl_9).\n\nNotation \"'impl_10'\" := (impl_10).\n\nNotation \"'impl_11'\" := (impl_11).\n\nNotation \"'impl_30'\" := (impl_30).\n\nNotation \"'impl_31'\" := (impl_31).\n\nNotation \"'impl_32'\" := (impl_32).\n\nNotation \"'impl_33'\" := (impl_33).\n\nNotation \"'impl_34'\" := (impl_34).\n\nNotation \"'impl_35'\" := (impl_35).\n\nNotation \"'impl_36'\" := (impl_36).\n\nNotation \"'impl_37'\" := (impl_37).\n\nNotation \"'impl_38'\" := (impl_38).\n\nNotation \"'impl_39'\" := (impl_39).\n\nNotation \"'impl_40'\" := (impl_40).\n\nNotation \"'impl_41'\" := (impl_41).\n\nNotation \"'impl_42'\" := (impl_42).\n\nNotation \"'impl_43'\" := (impl_43).\n\nNotation \"'impl_44'\" := (impl_44).\n\nNotation \"'impl_45'\" := 
(impl_45).\n\nNotation \"'impl_46'\" := (impl_46).\n\nNotation \"'impl_47'\" := (impl_47).\n\nNotation \"'impl_54'\" := (impl_54).\n\nNotation \"'impl_55'\" := (impl_55).\n\nNotation \"'impl_56'\" := (impl_56).\n\nNotation \"'impl_57'\" := (impl_57).\n\nNotation \"'impl_58'\" := (impl_58).\n\nNotation \"'impl_59'\" := (impl_59).\n\nNotation \"'impl_18'\" := (impl_18).\n\nNotation \"'impl_19'\" := (impl_19).\n\nNotation \"'impl_20'\" := (impl_20).\n\nNotation \"'impl_21'\" := (impl_21).\n\nNotation \"'impl_22'\" := (impl_22).\n\nNotation \"'impl_23'\" := (impl_23).\n\nNotation \"'impl_48'\" := (impl_48).\n\nNotation \"'impl_49'\" := (impl_49).\n\nNotation \"'impl_50'\" := (impl_50).\n\nNotation \"'impl_51'\" := (impl_51).\n\nNotation \"'impl_52'\" := (impl_52).\n\nNotation \"'impl_53'\" := (impl_53).\n\nNotation \"'impl_60'\" := (impl_60).\n\nNotation \"'impl_61'\" := (impl_61).\n\nNotation \"'impl_62'\" := (impl_62).\n\nNotation \"'impl_63'\" := (impl_63).\n\nNotation \"'impl_64'\" := (impl_64).\n\nNotation \"'impl_65'\" := (impl_65).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Bit.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Marker (t_Sized).\nExport Core_Marker (t_Sized).\n\n(* NotImplementedYet *)\n\nClass t_BitAnd (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type :=\n  {\n    BitAnd_f_Output : Type;\n    _ :: `{t_Sized (BitAnd_f_Output)};\n    BitAnd_f_bitand : v_Self -> v_Rhs -> BitAnd_f_Output;\n  }.\nArguments t_BitAnd (_) (_) {_}.\n\nClass t_BitOr (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type :=\n  {\n    BitOr_f_Output : Type;\n    _ :: `{t_Sized (BitOr_f_Output)};\n    BitOr_f_bitor : v_Self -> v_Rhs -> BitOr_f_Output;\n  }.\nArguments t_BitOr (_) (_) {_}.\n\nClass t_BitXor (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type :=\n  {\n    BitXor_f_Output : Type;\n    _ :: `{t_Sized (BitXor_f_Output)};\n    BitXor_f_bitxor : v_Self -> v_Rhs -> BitXor_f_Output;\n  }.\nArguments t_BitXor (_) (_) {_}.\n\nClass t_Not (v_Self : Type) : Type :=\n  {\n    Not_f_Output : Type;\n    _ :: `{t_Sized (Not_f_Output)};\n    Not_f_not : v_Self -> Not_f_Output;\n  }.\nArguments t_Not (_).\n\nClass t_Shl (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type :=\n  {\n    Shl_f_Output : Type;\n    _ :: `{t_Sized (Shl_f_Output)};\n    Shl_f_shl : v_Self -> v_Rhs -> Shl_f_Output;\n  }.\nArguments t_Shl (_) (_) {_}.\n\nClass t_Shr (v_Self : Type) (v_Rhs : Type) `{t_Sized (v_Rhs)} : Type :=\n  {\n    Shr_f_Output : Type;\n    _ :: `{t_Sized (Shr_f_Output)};\n    Shr_f_shr : v_Self -> v_Rhs -> Shr_f_Output;\n  }.\nArguments t_Shr (_) (_) {_}.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Bit_Impls_for_prims.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\n\n\nFrom Core Require Import Core_Primitive (t_u8).\nExport Core_Primitive (t_u8).\n\nFrom Core Require Import Core_Primitive (t_u16).\nExport Core_Primitive (t_u16).\n\nFrom Core Require Import Core_Primitive (t_u32).\nExport Core_Primitive (t_u32).\n\nFrom Core Require Import Core_Primitive (t_u64).\nExport Core_Primitive (t_u64).\n\nFrom Core Require Import Core_Primitive (t_u128).\nExport Core_Primitive (t_u128).\n\nFrom Core Require Import Core_Primitive (t_usize).\nExport Core_Primitive (t_usize).\n\n\n\nFrom Core Require Import Core_Primitive (t_i8).\nExport Core_Primitive (t_i8).\n\nFrom Core Require Import Core_Primitive (t_i16).\nExport Core_Primitive (t_i16).\n\nFrom Core Require Import Core_Primitive (t_i32).\nExport Core_Primitive 
(t_i32).\n\nFrom Core Require Import Core_Primitive (t_i64).\nExport Core_Primitive (t_i64).\n\nFrom Core Require Import Core_Primitive (t_i128).\nExport Core_Primitive (t_i128).\n\nFrom Core Require Import Core_Primitive (t_isize).\nExport Core_Primitive (t_isize).\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\nNotation \"'impl_84'\" := (impl_84).\n\nNotation \"'impl_85'\" := (impl_85).\n\nNotation \"'impl_86'\" := (impl_86).\n\nNotation \"'impl_87'\" := (impl_87).\n\nNotation \"'impl_88'\" := (impl_88).\n\nNotation \"'impl_89'\" := (impl_89).\n\nNotation \"'impl_6'\" := (impl_6).\n\nNotation \"'impl_7'\" := (impl_7).\n\nNotation \"'impl_8'\" := (impl_8).\n\nNotation \"'impl_9'\" := (impl_9).\n\nNotation \"'impl_10'\" := (impl_10).\n\nNotation \"'impl_11'\" := (impl_11).\n\nNotation \"'impl_12'\" := (impl_12).\n\nNotation \"'impl_13'\" := (impl_13).\n\nNotation \"'impl_14'\" := (impl_14).\n\nNotation \"'impl_15'\" := (impl_15).\n\nNotation \"'impl_16'\" := (impl_16).\n\nNotation \"'impl_17'\" := (impl_17).\n\nNotation \"'impl_18'\" := (impl_18).\n\nNotation \"'impl_19'\" := (impl_19).\n\nNotation \"'impl_20'\" := (impl_20).\n\nNotation \"'impl_21'\" := (impl_21).\n\nNotation \"'impl_22'\" := (impl_22).\n\nNotation \"'impl_23'\" := (impl_23).\n\nNotation \"'impl_24'\" := (impl_24).\n\nNotation \"'impl_25'\" := (impl_25).\n\nNotation \"'impl_26'\" := (impl_26).\n\nNotation \"'impl_27'\" := (impl_27).\n\nNotation \"'impl_28'\" := (impl_28).\n\nNotation \"'impl_29'\" := (impl_29).\n\nNotation \"'impl_30'\" := (impl_30).\n\nNotation \"'impl_31'\" := (impl_31).\n\nNotation \"'impl_32'\" := (impl_32).\n\nNotation \"'impl_33'\" := (impl_33).\n\nNotation \"'impl_34'\" := (impl_34).\n\nNotation \"'impl_35'\" := (impl_35).\n\nNotation \"'impl_36'\" := (impl_36).\n\nNotation \"'impl_37'\" := 
(impl_37).\n\nNotation \"'impl_38'\" := (impl_38).\n\nNotation \"'impl_39'\" := (impl_39).\n\nNotation \"'impl_40'\" := (impl_40).\n\nNotation \"'impl_41'\" := (impl_41).\n\nNotation \"'impl_42'\" := (impl_42).\n\nNotation \"'impl_43'\" := (impl_43).\n\nNotation \"'impl_44'\" := (impl_44).\n\nNotation \"'impl_45'\" := (impl_45).\n\nNotation \"'impl_46'\" := (impl_46).\n\nNotation \"'impl_47'\" := (impl_47).\n\nNotation \"'impl_48'\" := (impl_48).\n\nNotation \"'impl_49'\" := (impl_49).\n\nNotation \"'impl_50'\" := (impl_50).\n\nNotation \"'impl_51'\" := (impl_51).\n\nNotation \"'impl_52'\" := (impl_52).\n\nNotation \"'impl_53'\" := (impl_53).\n\nNotation \"'impl_54'\" := (impl_54).\n\nNotation \"'impl_55'\" := (impl_55).\n\nNotation \"'impl_56'\" := (impl_56).\n\nNotation \"'impl_57'\" := (impl_57).\n\nNotation \"'impl_58'\" := (impl_58).\n\nNotation \"'impl_59'\" := (impl_59).\n\nNotation \"'impl_60'\" := (impl_60).\n\nNotation \"'impl_61'\" := (impl_61).\n\nNotation \"'impl_62'\" := (impl_62).\n\nNotation \"'impl_63'\" := (impl_63).\n\nNotation \"'impl_64'\" := (impl_64).\n\nNotation \"'impl_65'\" := (impl_65).\n\nNotation \"'impl_66'\" := (impl_66).\n\nNotation \"'impl_67'\" := (impl_67).\n\nNotation \"'impl_68'\" := (impl_68).\n\nNotation \"'impl_69'\" := (impl_69).\n\nNotation \"'impl_70'\" := (impl_70).\n\nNotation \"'impl_71'\" := (impl_71).\n\nNotation \"'impl_72'\" := (impl_72).\n\nNotation \"'impl_73'\" := (impl_73).\n\nNotation \"'impl_74'\" := (impl_74).\n\nNotation \"'impl_75'\" := (impl_75).\n\nNotation \"'impl_76'\" := (impl_76).\n\nNotation \"'impl_77'\" := (impl_77).\n\nNotation \"'impl_78'\" := (impl_78).\n\nNotation \"'impl_79'\" := (impl_79).\n\nNotation \"'impl_80'\" := (impl_80).\n\nNotation \"'impl_81'\" := (impl_81).\n\nNotation \"'impl_82'\" := (impl_82).\n\nNotation \"'impl_83'\" := (impl_83).\n\nNotation \"'impl_90'\" := (impl_90).\n\nNotation \"'impl_91'\" := (impl_91).\n\nNotation \"'impl_92'\" := (impl_92).\n\nNotation \"'impl_93'\" := 
(impl_93).\n\nNotation \"'impl_94'\" := (impl_94).\n\nNotation \"'impl_95'\" := (impl_95).\n\nNotation \"'impl_96'\" := (impl_96).\n\nNotation \"'impl_97'\" := (impl_97).\n\nNotation \"'impl_98'\" := (impl_98).\n\nNotation \"'impl_99'\" := (impl_99).\n\nNotation \"'impl_100'\" := (impl_100).\n\nNotation \"'impl_101'\" := (impl_101).\n\nNotation \"'impl'\" := (impl).\n\nNotation \"'impl_1'\" := (impl_1).\n\nNotation \"'impl_2'\" := (impl_2).\n\nNotation \"'impl_3'\" := (impl_3).\n\nNotation \"'impl_4'\" := (impl_4).\n\nNotation \"'impl_5'\" := (impl_5).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Function.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Marker.\nExport Core_Marker.\n\n(* NotImplementedYet *)\n\nClass t_FnOnce (v_Self : Type) (v_Args : Type) (* `{t_Sized (v_Args)} `{t_Tuple (v_Args)} *) : Type :=\n  {\n    FnOnce_f_Output : Type;\n    _ :: `{t_Sized (FnOnce_f_Output)};\n    FnOnce_f_call_once : v_Self -> v_Args -> FnOnce_f_Output;\n  }.\nArguments t_FnOnce (_) (_) (* {_} {_} *).\n\nClass t_FnMut (v_Self : Type) (v_Args : Type) `{t_FnOnce (v_Self) (v_Args)} (* `{t_Sized (v_Args)} `{t_Tuple (v_Args)} *) : Type :=\n  {\n    FnMut_f_call_mut : v_Self -> v_Args -> (v_Self*FnOnce_f_Output);\n  }.\nArguments t_FnMut (_) (_) {_} (* {_} {_} *).\n\nClass t_Fn (v_Self : Type) (v_Args : Type) `{t_FnMut (v_Self) (v_Args)} (* `{t_Sized (v_Args)} `{t_Tuple (v_Args)} *) : Type :=\n  {\n    Fn_f_call : v_Self -> v_Args -> FnOnce_f_Output;\n  }.\nArguments t_Fn (_) (_) {_} (* {_} {_} *).\n\n\n\n#[global] Instance t_FnOnceAny {A B} : t_FnOnce (A -> B) A.\nProof.\n  econstructor.\n  easy.\n  refine (fun f x => f x).\nDefined.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Index.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nClass t_Index (v_Self : Type) (v_Idx : Type) : Type :=\n  {\n    Index_f_Output : Type;\n    Index_f_index : v_Self -> v_Idx -> Index_f_Output;\n  }.\nArguments t_Index (_) (_).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Index_range.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Primitive.\nExport Core_Primitive.\n\nFrom Core Require Import Core_Iter_Traits_Iterator.\nExport Core_Iter_Traits_Iterator.\n\nRecord t_IndexRange : Type :=\n  {\n    IndexRange_f_start : t_usize;\n    IndexRange_f_end : t_usize;\n  }.\nArguments Build_t_IndexRange.\nArguments IndexRange_f_start.\nArguments IndexRange_f_end.\n#[export] Instance settable_t_IndexRange : Settable _ :=\n  settable! (Build_t_IndexRange) <IndexRange_f_start; IndexRange_f_end>.\n\nDefinition impl__IndexRange__zero_to (v_end : t_usize) : t_IndexRange :=\n  Build_t_IndexRange (Build_t_usize (Build_t_U64 0%N)) (v_end).\n\nDefinition impl__IndexRange__next_unchecked (self : t_IndexRange) : (t_IndexRange*t_usize) :=\n  let value := IndexRange_f_start self in\n  let self := self <|IndexRange_f_start := Add_f_add (value) (Build_t_usize (Build_t_U64 1%N) : t_usize) |> in\n  let hax_temp_output := value in\n  (self,hax_temp_output).\n\nDefinition impl__IndexRange__len (self : t_IndexRange) : t_usize :=\n  Sub_f_sub (IndexRange_f_end self) (IndexRange_f_start self).\n\nProgram Instance t_Iterator_538767852 : t_Iterator ((t_IndexRange)) :=\n  {\n    Iterator_f_Item := t_usize;\n    Iterator_f_next := fun  (self : t_IndexRange)=>\n      (* let hax_temp_output := never_to_any (panic (\"not yet implemented: specification needed\"%string)) in *)\n      (self,Option_Some (self.(IndexRange_f_start)));\n    Iterator_f_size_hint := fun  (self : t_IndexRange)=>\n      let len := impl__IndexRange__len (self) in\n      (len,Option_Some (len));\n    Iterator_f_fold := fun {v_B : Type} {v_F : Type} 
`{t_Sized v_B} `{t_Sized v_F} `{t_Sized t_IndexRange} (_ : t_FnOnce v_F (v_B * t_usize)) (_ : t_FnMut v_F (v_B * t_usize)) `{_ : FnOnce_f_Output = v_B} (self : t_IndexRange) (init : v_B) (f : v_F)=>\n      never_to_any (panic \"not yet implemented: specification needed\"%string);\n  }.\nNext Obligation.\nAdmitted.\n\n(* Instance t_ExactSizeIterator_661616782 : t_ExactSizeIterator ((t_IndexRange)) := *)\n(*   { *)\n(*     ExactSizeIterator_impl_2_f_len := fun  (self : t_IndexRange)=> *)\n(*       impl__IndexRange__len (self); *)\n(*   }. *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Ops_Range.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Marker.\nExport Core_Marker.\n\nRecord t_Range (v_Idx : Type) `{t_Sized (v_Idx)} : Type :=\n  {\n    Range_f_start : v_Idx;\n    Range_f_end : v_Idx;\n  }.\nArguments Build_t_Range (_) {_}.\nArguments Range_f_start {_} {_}.\nArguments Range_f_end {_} {_}.\n#[export] Instance settable_t_Range `{v_Idx : Type} `{t_Sized (v_Idx)} : Settable _ :=\n  settable! (Build_t_Range v_Idx) <Range_f_start; Range_f_end>.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Option.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Clone.\nExport Core_Clone.\n\nFrom Core Require Import Core_Marker (t_Sized).\nExport Core_Marker (t_Sized).\n\nFrom Core Require Import Core_Panicking (panic).\nExport Core_Panicking (panic).\n\nFrom Core Require Import Core_Ops_Function.\nExport Core_Ops_Function.\n\nInductive t_Option (v_T : Type) `{t_Sized (v_T)} : Type :=\n| Option_None\n| Option_Some : v_T -> _.\nArguments Option_None {_} {_}.\nArguments Option_Some {_} {_}.\n\nInstance t_Clone_390068633 `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_Clone ((t_Option ((v_T)))) :=\n  {\n    Clone_f_clone := fun  (self : t_Option ((v_T)))=>\n      match self with\n      | Option_Some (x) =>\n        Option_Some (Clone_f_clone (x))\n      | Option_None =>\n        Option_None\n      end;\n  }.\n\nDefinition impl_1__is_some `{v_T : Type} `{t_Sized (v_T)} (self : t_Option ((v_T))) : bool :=\n  match self with\n  | Option_Some (_) =>\n    true\n  | _ =>\n    false\n  end.\n\nProgram Definition impl__map `{v_T : Type} `{v_U : Type} `{v_F : Type} `{t_Sized (v_T)} `{t_Sized (v_U)} `{t_Sized (v_F)} `{t_FnOnce (v_F) ((v_T))} `{_.(FnOnce_f_Output) = v_U} (self : t_Option ((v_T))) (f : v_F) : t_Option ((v_U)) :=\n  match self with\n  | Option_Some (x) =>\n    Option_Some _ (* (FnOnce_f_call_once (f) ((x))) *)\n  | Option_None =>\n    Option_None\n  end.\nNext Obligation.\n  refine (FnOnce_f_call_once (f) ((x))).\nDefined.\nFail Next Obligation.\n\n(* Definition unwrap_failed '(_ : unit) : t_Never := *)\n(*   panic (\"called `Option::unwrap()` on a `None` value\"%string). 
*)\n\n(* Definition impl_1__unwrap `{v_T : Type} `{t_Sized (v_T)} (self : t_Option ((v_T))) `{impl_1__is_some (self___) = true} : v_T := *)\n(*   match self with *)\n(*   | Option_Some (val) => *)\n(*     val *)\n(*   | Option_None => *)\n(*     never_to_any (unwrap_failed (tt)) *)\n(*   end. *)\n\n(* Definition expect_failed (msg : string) : t_Never := *)\n(*   panic (msg). *)\n\n(* Definition impl_1__expect `{v_T : Type} `{t_Sized (v_T)} (self : t_Option ((v_T))) (msg : string) : v_T := *)\n(*   match self with *)\n(*   | Option_Some (val) => *)\n(*     val *)\n(*   | Option_None => *)\n(*     never_to_any (expect_failed (msg)) *)\n(*   end. *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Panicking.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nInductive t_Never : Type :=\n.\n\nDefinition t_Never_cast_to_repr (x : t_Never) : t_Never :=\n  match x with\n  end.\n\nDefinition never_to_any `{v_T : Type} (x : t_Never) : v_T :=\n  (match x with\n  end).\n\nDefinition panic (expr : string) {HFalse : t_Never} : t_Never :=\n  never_to_any HFalse.\n\nDefinition panic_explicit '(_ : unit) `{HFalse : t_Never} : t_Never :=\n  never_to_any HFalse.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Primitive.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Ops.\nExport Core_Ops.\n\nFrom Core Require Import Core_Cmp.\nExport Core_Cmp.\n\nFrom Core Require Import Core_Base.\nExport Core_Base.\n\n(* From Core Require Import Core_Base_Number_conversion. *)\n(* Export Core_Base_Number_conversion. *)\n\nFrom Core Require Import Core_Base_interface_Int.\nExport Core_Base_interface_Int.\n\nFrom Core Require Import Core_Array_Rec_bundle_579704328.\nExport Core_Array_Rec_bundle_579704328.\n\nNotation \"'t_Slice'\" := (t_Slice).\n\nNotation \"'Slice_f_v'\" := (Slice_f_v).\n\n(* Notation \"'impl_2'\" := (impl_2). *)\n\nNotation \"'t_Array'\" := (t_Array).\n\nNotation \"'Array_f_v'\" := (Array_f_v).\n\nNotation \"'impl_3__cast'\" := (cast).\n\nNotation \"'t_i128'\" := (t_i128).\n\nNotation \"'i128_0'\" := (i128_0).\n\n(* Notation \"'impl_25'\" := (impl_25). *)\n\nNotation \"'t_i16'\" := (t_i16).\n\nNotation \"'i16_0'\" := (i16_0).\n\n(* Notation \"'impl_19'\" := (impl_19). *)\n\nNotation \"'t_i32'\" := (t_i32).\n\nNotation \"'i32_0'\" := (i32_0).\n\n(* Notation \"'impl_21'\" := (impl_21). *)\n\nNotation \"'t_i64'\" := (t_i64).\n\nNotation \"'i64_0'\" := (i64_0).\n\n(* Notation \"'impl_23'\" := (impl_23). *)\n\nNotation \"'t_i8'\" := (t_i8).\n\nNotation \"'i8_0'\" := (i8_0).\n\n(* Notation \"'impl_17'\" := (impl_17). *)\n\nNotation \"'t_isize'\" := (t_isize).\n\nNotation \"'isize_0'\" := (isize_0).\n\n(* Notation \"'impl_27'\" := (impl_27). 
*)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\nNotation \"'t_u128'\" := (t_u128).\n\nNotation \"'u128_0'\" := (u128_0).\n\nNotation \"'t_u16'\" := (t_u16).\n\nNotation \"'u16_0'\" := (u16_0).\n\nNotation \"'t_u32'\" := (t_u32).\n\nNotation \"'u32_0'\" := (u32_0).\n\nNotation \"'t_u64'\" := (t_u64).\n\nNotation \"'u64_0'\" := (u64_0).\n\nNotation \"'t_u8'\" := (t_u8).\n\nNotation \"'u8_0'\" := (u8_0).\n\nNotation \"'t_usize'\" := (t_usize).\n\nNotation \"'usize_0'\" := (usize_0).\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* Notation \"'impl_1'\" := (impl_1). *)\n\n(* Notation \"'impl_5'\" := (impl_5). *)\n\n(* Notation \"'impl_7'\" := (impl_7). *)\n\n(* Notation \"'impl_9'\" := (impl_9). *)\n\n(* Notation \"'impl_11'\" := (impl_11). *)\n\n(* Notation \"'impl_13'\" := (impl_13). *)\n\n(* Notation \"'impl_15'\" := (impl_15). *)\n\n(* Notation \"'impl'\" := (impl). *)\n\n(* Notation \"'impl_29'\" := (impl_29). *)\n\n(* Notation \"'impl_30'\" := (impl_30). *)\n\n(* Notation \"'impl_31'\" := (impl_31). *)\n\n(* Notation \"'impl_32'\" := (impl_32). *)\n\n(* Notation \"'impl_33'\" := (impl_33). *)\n\n(* Notation \"'impl_34'\" := (impl_34). *)\n\n(* Notation \"'impl_35'\" := (impl_35). *)\n\n(* Notation \"'impl_36'\" := (impl_36). *)\n\n(* Notation \"'impl_37'\" := (impl_37). *)\n\n(* Notation \"'impl_38'\" := (impl_38). *)\n\n(* Notation \"'impl_39'\" := (impl_39). *)\n\n(* Notation \"'impl_40'\" := (impl_40). *)\n\n(* Notation \"'impl_41'\" := (impl_41). *)\n\n(* Notation \"'impl_42'\" := (impl_42). *)\n\n(* Notation \"'impl_43'\" := (impl_43). *)\n\n(* Notation \"'impl_44'\" := (impl_44). *)\n\n(* Notation \"'impl_45'\" := (impl_45). *)\n\n(* Notation \"'impl_46'\" := (impl_46). *)\n\n(* Notation \"'impl_47'\" := (impl_47). *)\n\n(* Notation \"'impl_48'\" := (impl_48). *)\n\n(* Notation \"'impl_49'\" := (impl_49). *)\n\n(* Notation \"'impl_50'\" := (impl_50). 
*)\n\n(* Notation \"'impl_51'\" := (impl_51). *)\n\n(* Notation \"'impl_52'\" := (impl_52). *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Primitive_Number_conversion.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Primitive.\nExport Core_Primitive.\n\nFrom Core Require Import Core_Cmp.\nExport Core_Cmp.\n\nFrom Core Require Import Core_Convert.\nExport Core_Convert.\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* Notation \"'impl_31'\" := (impl_31). *)\n\n(* Notation \"'impl_40'\" := (impl_40). *)\n\n(* Notation \"'impl'\" := (impl). *)\n\n(* Notation \"'impl_1'\" := (impl_1). *)\n\n(* Notation \"'impl_2'\" := (impl_2). *)\n\n(* Notation \"'impl_3'\" := (impl_3). *)\n\n(* Notation \"'impl_4'\" := (impl_4). *)\n\n(* Notation \"'impl_5'\" := (impl_5). *)\n\n(* Notation \"'impl_6'\" := (impl_6). *)\n\n(* Notation \"'impl_7'\" := (impl_7). *)\n\n(* Notation \"'impl_8'\" := (impl_8). *)\n\n(* Notation \"'impl_9'\" := (impl_9). *)\n\n(* Notation \"'impl_10'\" := (impl_10). *)\n\n(* Notation \"'impl_11'\" := (impl_11). *)\n\n(* Notation \"'impl_12'\" := (impl_12). *)\n\n(* Notation \"'impl_13'\" := (impl_13). *)\n\n(* Notation \"'impl_14'\" := (impl_14). *)\n\n(* Notation \"'impl_15'\" := (impl_15). *)\n\n(* Notation \"'impl_16'\" := (impl_16). *)\n\n(* Notation \"'impl_17'\" := (impl_17). *)\n\n(* Notation \"'impl_18'\" := (impl_18). *)\n\n(* Notation \"'impl_19'\" := (impl_19). *)\n\n(* Notation \"'impl_20'\" := (impl_20). *)\n\n(* Notation \"'impl_21'\" := (impl_21). *)\n\n(* Notation \"'impl_22'\" := (impl_22). *)\n\n(* Notation \"'impl_23'\" := (impl_23). *)\n\n(* Notation \"'impl_24'\" := (impl_24). *)\n\n(* Notation \"'impl_25'\" := (impl_25). *)\n\n(* Notation \"'impl_26'\" := (impl_26). *)\n\n(* Notation \"'impl_27'\" := (impl_27). 
*)\n\n(* Notation \"'impl_28'\" := (impl_28). *)\n\n(* Notation \"'impl_29'\" := (impl_29). *)\n\n(* Notation \"'impl_30'\" := (impl_30). *)\n\n(* Notation \"'impl_32'\" := (impl_32). *)\n\n(* Notation \"'impl_33'\" := (impl_33). *)\n\n(* Notation \"'impl_34'\" := (impl_34). *)\n\n(* Notation \"'impl_35'\" := (impl_35). *)\n\n(* Notation \"'impl_36'\" := (impl_36). *)\n\n(* Notation \"'impl_37'\" := (impl_37). *)\n\n(* Notation \"'impl_38'\" := (impl_38). *)\n\n(* Notation \"'impl_39'\" := (impl_39). *)\n\n(* Notation \"'impl_41'\" := (impl_41). *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Primitive_Number_conversion_i.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* Notation \"'impl_31'\" := (impl_31). *)\n\n(* Notation \"'impl_40'\" := (impl_40). *)\n\nFrom Core Require Import Core_Primitive.\nExport Core_Primitive.\n\nFrom Core Require Import Core_Cmp.\nExport Core_Cmp.\n\nFrom Core Require Import Core_Convert.\nExport Core_Convert.\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* Notation \"'impl'\" := (impl). *)\n\n(* Notation \"'impl_1'\" := (impl_1). *)\n\n(* Notation \"'impl_2'\" := (impl_2). *)\n\n(* Notation \"'impl_3'\" := (impl_3). *)\n\n(* Notation \"'impl_4'\" := (impl_4). *)\n\n(* Notation \"'impl_5'\" := (impl_5). *)\n\n(* Notation \"'impl_6'\" := (impl_6). *)\n\n(* Notation \"'impl_7'\" := (impl_7). *)\n\n(* Notation \"'impl_8'\" := (impl_8). *)\n\n(* Notation \"'impl_9'\" := (impl_9). *)\n\n(* Notation \"'impl_10'\" := (impl_10). *)\n\n(* Notation \"'impl_11'\" := (impl_11). *)\n\n(* Notation \"'impl_12'\" := (impl_12). *)\n\n(* Notation \"'impl_13'\" := (impl_13). *)\n\n(* Notation \"'impl_14'\" := (impl_14). *)\n\n(* Notation \"'impl_15'\" := (impl_15). *)\n\n(* Notation \"'impl_16'\" := (impl_16). *)\n\n(* Notation \"'impl_17'\" := (impl_17). *)\n\n(* Notation \"'impl_18'\" := (impl_18). *)\n\n(* Notation \"'impl_19'\" := (impl_19). *)\n\n(* Notation \"'impl_20'\" := (impl_20). *)\n\n(* Notation \"'impl_21'\" := (impl_21). *)\n\n(* Notation \"'impl_22'\" := (impl_22). *)\n\n(* Notation \"'impl_23'\" := (impl_23). *)\n\n(* Notation \"'impl_24'\" := (impl_24). *)\n\n(* Notation \"'impl_25'\" := (impl_25). *)\n\n(* Notation \"'impl_26'\" := (impl_26). *)\n\n(* Notation \"'impl_27'\" := (impl_27). 
*)\n\n(* Notation \"'impl_28'\" := (impl_28). *)\n\n(* Notation \"'impl_29'\" := (impl_29). *)\n\n(* Notation \"'impl_30'\" := (impl_30). *)\n\n(* Notation \"'impl_32'\" := (impl_32). *)\n\n(* Notation \"'impl_33'\" := (impl_33). *)\n\n(* Notation \"'impl_34'\" := (impl_34). *)\n\n(* Notation \"'impl_35'\" := (impl_35). *)\n\n(* Notation \"'impl_36'\" := (impl_36). *)\n\n(* Notation \"'impl_37'\" := (impl_37). *)\n\n(* Notation \"'impl_38'\" := (impl_38). *)\n\n(* Notation \"'impl_39'\" := (impl_39). *)\n\n(* Notation \"'impl_41'\" := (impl_41). *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Result.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Option.\nExport Core_Option.\n\nInductive t_Result (v_T : Type) (v_E : Type) `{t_Sized (v_T)} `{t_Sized (v_E)} : Type :=\n| Result_Ok : v_T -> _\n| Result_Err : v_E -> _.\nArguments Result_Ok {_} {_} {_} {_}.\nArguments Result_Err {_} {_} {_} {_}.\n\nDefinition impl__ok `{v_T : Type} `{v_E : Type} `{t_Sized (v_T)} `{t_Sized (v_E)} (self : t_Result ((v_T)) ((v_E))) : t_Option ((v_T)) :=\n  match self with\n  | Result_Ok (x) =>\n    Option_Some (x)\n  | Result_Err (_) =>\n    Option_None\n  end.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Slice.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Primitive.\nExport Core_Primitive.\n\n\n\nFrom Core Require Import Core_Slice_Iter.\nExport Core_Slice_Iter.\n\nFrom Core Require Import Core_Convert.\nExport Core_Convert.\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\nDefinition impl__iter `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (self : t_Slice ((v_T))) : t_Iter ((v_T)) :=\n  impl__new (self).\n\nDefinition impl__len `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (self : t_Slice ((v_T))) : t_usize :=\n  From_f_from (len (Clone_f_clone (Slice_f_v self))).\n\nDefinition impl__is_empty `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (self : t_Slice ((v_T))) : bool :=\n  PartialEq_f_eq (impl__len (self)) (Build_t_usize (Build_t_U64 0%N)).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Slice_Index.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\nFrom Core Require Import Core (t_ops).\nExport Core (t_ops).\n\nFrom Core Require Import Core_Primitive (t_Slice).\nExport Core_Primitive (t_Slice).\n\n(* NotImplementedYet *)\n\nNotation \"'v_SliceIndex'\" := (v_SliceIndex).\n\nNotation \"'impl'\" := (impl).\n\nNotation \"'impl_2'\" := (impl_2).\n\nNotation \"'impl_1'\" := (impl_1).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Slice_Index_Private_slice_index.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\n\n\nFrom Core Require Import Core_Slice_Index_Ops.\nExport Core_Slice_Index_Ops.\n\nNotation \"'v_Sealed'\" := (v_Sealed).\n\nNotation \"'impl'\" := (impl).\n\nNotation \"'impl_1'\" := (impl_1).\n\nNotation \"'impl_2'\" := (impl_2).\n\nNotation \"'impl_3'\" := (impl_3).\n\nNotation \"'impl_4'\" := (impl_4).\n\nNotation \"'impl_5'\" := (impl_5).\n\nNotation \"'impl_6'\" := (impl_6).\n\nNotation \"'impl_7'\" := (impl_7).\n\nNotation \"'impl_8'\" := (impl_8).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Slice_Iter.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\nFrom Core Require Import Core_Marker.\nExport Core_Marker.\n\nFrom Core Require Import Core_Primitive.\nExport Core_Primitive.\n\nRecord t_Iter (v_T : Type) `{t_Sized (v_T)} : Type :=\n  {\n    Iter_f_data : t_Slice ((v_T));\n    Iter_f__marker : t_PhantomData ((v_T));\n  }.\nArguments Build_t_Iter {_} {_}.\nArguments Iter_f_data {_} {_}.\nArguments Iter_f__marker {_} {_}.\n#[export] Instance settable_t_Iter `{v_T : Type} `{t_Sized (v_T)} : Settable _ :=\n  settable! (Build_t_Iter) <Iter_f_data; Iter_f__marker>.\n\nDefinition impl__new `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} (slice : t_Slice ((v_T))) : t_Iter ((v_T)) :=\n  Build_t_Iter (Clone_f_clone (slice)) (Build_t_PhantomData).\n\nInstance t_Clone_313886898 `{v_T : Type} `{t_Sized (v_T)} `{t_Clone (v_T)} : t_Clone ((t_Iter ((v_T)))) :=\n  {\n    Clone_f_clone := fun  (self : t_Iter ((v_T)))=>\n      Build_t_Iter (Clone_f_clone (Iter_f_data self)) (Iter_f__marker self);\n  }.\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/Core_Slice_Iter_Macros.v",
    "content": "(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\n\n(* From Core Require Import Core. *)\n\n(* TODO: Replace this dummy lib with core lib *)\nClass t_Sized (T : Type) := { }.\nDefinition t_u8 := Z.\nDefinition t_u16 := Z.\nDefinition t_u32 := Z.\nDefinition t_u64 := Z.\nDefinition t_u128 := Z.\nDefinition t_usize := Z.\nDefinition t_i8 := Z.\nDefinition t_i16 := Z.\nDefinition t_i32 := Z.\nDefinition t_i64 := Z.\nDefinition t_i128 := Z.\nDefinition t_isize := Z.\nDefinition t_Array T (x : t_usize) := list T.\nDefinition t_String := string.\nDefinition ToString_f_to_string (x : string) := x.\nInstance Sized_any : forall {t_A}, t_Sized t_A := {}.\nClass t_Clone (T : Type) := { Clone_f_clone : T -> T }.\nInstance Clone_any : forall {t_A}, t_Clone t_A := {Clone_f_clone := fun x => x}.\nDefinition t_Slice (T : Type) := list T.\nDefinition unsize {T : Type} : list T -> t_Slice T := id.\nDefinition t_PartialEq_f_eq x y := x =? 
y.\nDefinition t_Rem_f_rem (x y : Z) := x mod y.\nDefinition assert (b : bool) (* `{H_assert : b = true} *) : unit := tt.\nInductive globality := | t_Global.\nDefinition t_Vec T (_ : globality) : Type := list T.\nDefinition impl_1__append {T} l1 l2 : list T * list T := (app l1 l2, l2).\nDefinition impl_1__len {A} (l : list A) := Z.of_nat (List.length l).\nDefinition impl__new {A} (_ : Datatypes.unit) : list A := nil.\nDefinition impl__with_capacity {A} (_ : Z)  : list A := nil.\nDefinition impl_1__push {A} l (x : A) := cons x l.\nClass t_From (A B : Type) := { From_f_from : B -> A }.\nDefinition impl__to_vec {T} (x : t_Slice T) : t_Vec T t_Global := x.\nClass t_Into (A B : Type) := { Into_f_into : A -> B }.\nInstance t_Into_from_t_From {A B : Type} `{H : t_From B A} : t_Into A B := { Into_f_into x := @From_f_from B A H x }.\nDefinition from_elem {A} (x : A) (l : Z) := repeat x (Z.to_nat l).\nDefinition t_Option := option.\nDefinition impl__map {A B} (x : t_Option A) (f : A -> B) : t_Option B := match x with | Some x => Some (f x) | None => None end.\nDefinition t_Add_f_add x y := x + y.\nClass Cast A B := { cast : A -> B }.\nInstance cast_t_u8_t_u32 : Cast t_u8 t_u32 := {| cast x := x |}.\n(* / dummy lib *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n\n(* NotImplementedYet *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/coq/generated-core/src/_CoqProject",
    "content": "-R ./ TODO\n-arg -w\n-arg all\n\nCore_Slice_Iter_Macros.v\nCore_Slice_Iter.v\nCore_Slice_Index_Private_slice_index.v\nCore_Slice_Index.v\nCore_Slice.v\nCore_Result.v\nCore_Primitive_Number_conversion_i.v\nCore_Primitive_Number_conversion.v\nCore_Primitive.v\nCore_Panicking.v\nCore_Option.v\nCore_Ops_Range.v\nCore_Ops_Index_range.v\nCore_Ops_Index.v\nCore_Ops_Function.v\nCore_Ops_Bit_Impls_for_prims.v\nCore_Ops_Bit.v\nCore_Ops_Arith_Impls_for_prims.v\nCore_Ops_Arith.v\nCore_Ops.v\nCore_Num_Uint_macros.v\nCore_Num_Int_macros.v\nCore_Num.v\nCore_Marker.v\nCore_Iter_Traits_Marker.v\nCore_Iter_Traits_Iterator.v\nCore_Iter_Traits_Exact_size.v\nCore_Iter_Traits_Collect.v\nCore_Iter_Traits.v\nCore_Iter_Range.v\nCore_Iter.v\nCore_Intrinsics.v\nCore_Fmt.v\nCore_Convert.v\nCore_Cmp.v\nCore_Clone.v\nCore_Base_interface_Int_U8_proofs.v\nCore_Base_interface_Int_U64_proofs.v\nCore_Base_interface_Int_U32_proofs.v\nCore_Base_interface_Int_U16_proofs.v\nCore_Base_interface_Int_U128_proofs.v\nCore_Base_interface_Int_I8_proofs.v\nCore_Base_interface_Int_I64_proofs.v\nCore_Base_interface_Int_I32_proofs.v\nCore_Base_interface_Int_I16_proofs.v\nCore_Base_interface_Int_I128_proofs.v\nCore_Base_interface_Int.v\nCore_Base_interface_Coerce.v\nCore_Base_interface.v\nCore_Base_Z.v\nCore_Base_Spec_Z.v\nCore_Base_Spec_Unary.v\nCore_Base_Spec_Seq.v\nCore_Base_Spec_Haxint.v\nCore_Base_Spec_Constants.v\nCore_Base_Spec_Binary_Positive.v\nCore_Base_Spec_Binary_Pos.v\nCore_Base_Spec_Binary.v\nCore_Base_Spec.v\nCore_Base_Seq.v\nCore_Base_Pos.v\nCore_Base_Number_conversion.v\nCore_Base_Binary.v\nCore_Base.v\nCore_Array_Rec_bundle_579704328.v\nCore_Array_Iter.v\nCore_Array.v\nCore.v"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/.gitignore",
    "content": "*.vo*\n*.aux\n*.glob\n*.cache\n.Makefile.d\nMakefile\nMakefile.conf\nsrc/_temp/\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/README.md",
    "content": "## Dependencies\n\nThe coq libraries uses `ssprove/jasmin` for machine signed and unsigned integer modulo arithmetic, and `coqword` for finite field arithmetic on prime modulus (to support hacspec's `nat_mod p` type).\nThis requires the following repository:\n\n```\nopam repo add coq-released https://coq.inria.fr/opam/released --all-switches\n```\n\nThen one can install the dependencies through `opam` (assuming you have coq installed through opam)\n\n```\nopam update\nopam install conf-ppl.1 -y\nopam install coq-mathcomp-word.2.0 -y\nopam pin jasmin https://github.com/SSProve/ssprove.git#3d40bc89 -y\nopam pin ssprove https://github.com/SSProve/ssprove.git#bead4e76acbb69b3ecf077cece56cd3fbde501e3 -y\nopam upgrade -y\n```\nthe development uses the Jasmin branch of SSProve, meaning one might need to install these from source.\n\n## Docker\n\nThere is a docker container with the dependencies installed (Coq / Rust) at `ghcr.io/cmester0/hacspec_ssprove:8.15.2`.\n\n## Compiling the coq files\n\nIn folder `/coq_ssprove`, type `make`. This compiles the coq libraries and the compiled examples, as defined in `_CoqProject`.\n\nIf you want to add a new example to `_CoqProject`, such that it is compiled through `make`, you should run `coq_makefile -f _CoqProject -o Makefile` in `/coq` to update the makefile.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/_CoqProject",
    "content": "-R src/ Hacspec\n-arg -w\n-arg all\n\nsrc/Hacspec_Lib_Comparable.v\nsrc/LocationUtility.v\nsrc/ChoiceEquality.v\nsrc/Hacspec_Lib_Pre.v\n\nsrc/Hacspec_Lib_Integers.v\nsrc/Hacspec_Lib_Loops.v\nsrc/Hacspec_Lib_Seq.v\nsrc/Hacspec_Lib_Natmod.v\nsrc/Hacspec_Lib_Coercions.v\nsrc/Hacspec_Lib_Eq.v\nsrc/Hacspec_Lib_Monad.v\nsrc/Hacspec_Lib_Ltac.v\nsrc/Hacspec_Lib_Controlflow.v\nsrc/Hacspec_Lib_Notation.v\nsrc/Hacspec_Lib_TODO.v\n\nsrc/ConCertLib.v\n\nsrc/Hacspec_Lib.v\n\n# src/Hacspec_Aes_Jazz.v\n# src/Hacspec_Xor.v\n\n# src/Hacspec_Aes.v\n# src/Hacspec_Bls12_381.v\n# src/Hacspec_Poly1305.v\n# src/Hacspec_Curve25519.v\n# src/Hacspec_Gf128.v\n# src/Hacspec_P256.v\n# src/Hacspec_Sha256.v\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/coq-hacspec-ssprove.opam.template",
    "content": "pin-depends: [\n  [\"jasmin.dev\" \"git+https://github.com/proux01/jasmin.git#mathcomp2\"]\n  [\"ssprove.dev\" \"git+https://github.com/ssprove/ssprove.git#jasmin-coq.8.18.0\"]\n  [\"coq-concert.dev\" \"git+https://github.com/AU-COBRA/ConCert.git#master\"]\n  [\"coq-rust-extraction.dev\" \"git+https://github.com/AU-COBRA/coq-rust-extraction.git#0053733e56008c917bf43d12e8bf0616d3b9a856\"]\n  [\"coq-elm-extraction.dev\" \"git+https://github.com/AU-COBRA/coq-elm-extraction.git#903320120e3f36d7857161e5680fabeb6e743c6b\"]\n  [\"coq-quickchick.dev\" \"git+https://github.com/4ever2/QuickChick.git#bc61d58045feeb754264df9494965c280e266e1c\"]\n]\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/docker_build/Dockerfile",
    "content": "FROM coqorg/coq:8.15.2-ocaml-4.14.0-flambda\nRUN curl https://sh.rustup.rs -sSf | sh -s -- -y\nENV PATH $HOME/.cargo/bin:$PATH\nRUN rustup update\nRUN rustup toolchain install nightly-2022-07-04\nRUN rustup component add --toolchain nightly-2022-07-04 rustc-dev llvm-tools-preview rust-analysis rust-src\nRUN rustc --version\nRUN cargo --version\nRUN sudo apt-get update\nRUN sudo apt-get install libppl-dev -y\nRUN sudo apt-get install libmpfr-dev -y\nRUN opam update\nRUN opam switch create 4.12.0\nRUN eval $(opam env --switch=4.12.0)\nRUN opam config list; opam repo list; opam list\nRUN opam repo add coq-released https://coq.inria.fr/opam/released --all-switches\nRUN opam update\nRUN opam pin coq 8.15.2 -y\nRUN eval $(opam env)\nRUN git clone https://github.com/jasmin-lang/jasmin.git\nRUN git clone https://github.com/SSProve/ssprove.git\nRUN cd jasmin && git checkout 3d40bc89 && cd ..\nRUN opam install -y --verbose ./jasmin/. --working-dir\nRUN eval $(opam env)\nRUN cd ssprove && git checkout jasmin && cd ..\nRUN opam upgrade -y\nRUN (opam install -y --verbose ./ssprove/ssprove.opam --working-dir) || echo \"failed\"\nRUN cd ssprove && make -j7 && opam install -y --verbose ./ssprove.opam --working-dir --assume-built\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/ChoiceEquality.v",
    "content": "From Coq Require Import ZArith List.\nFrom Crypt Require Import choice_type Package.\nImport PackageNotation.\nFrom Crypt Require Import pkg_interpreter.\nFrom extructures Require Import ord fset fmap.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\n\nFrom Hacspec Require Import LocationUtility.\nRequire Import Coq.Logic.FunctionalExtensionality.\n\nImport RulesStateProb.\nImport RulesStateProb.RSemanticNotation.\nOpen Scope rsemantic_scope.\n\nFrom Crypt Require Import choice_type Package Prelude.\nFrom Crypt Require Import Axioms. (* proof_irrelevance *)\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nImport choice.Choice.Exports.\n\nImport List.ListNotations.\n\nFrom mathcomp Require Import ssrbool.\n\n(*** Ltac *)\n\nLtac normalize_fset :=\n  hnf ;\n  autounfold with * ;\n  try rewrite !fset_cons ;\n  try rewrite <- !fset0E ;\n  try rewrite !fsetU0 ;\n  try rewrite !fset0U ;\n  try rewrite !fset1E ;\n  repeat (match goal with\n          | |- context [?a :|: ?b :|: ?c] =>\n              replace (a :|: b :|: c) with (a :|: (b :|: c)) by apply fsetUA\n          end\n          || match goal with\n            | |- context [?a :|: (?a :|: ?b)] =>\n                rewrite (fsetUA a a b) ; rewrite (fsetUid a)\n            end\n          || match goal with\n            | |- context [?a :|: (?b :|: (?a :|: (?b :|: ?c)))] =>\n                rewrite (fsetUA a b (a :|: (b :|: c))) ;\n                rewrite (fsetUA a b c) ;\n                rewrite (fsetUA (a :|: b) (a :|: b) c) ;\n                rewrite (fsetUid (a :|: b))\n            end).\n\nLtac solve_match :=\n  try set (fset _) ;\n  (lazymatch goal with\n   | |- context [ fsubset ?a (?a :|: _) ] => apply fsubsetUl\n   | |- context [ fsubset ?a (_ :|: ?a) ] => apply fsubsetUr\n   | |- context [ fsubset fset0 _ ] => apply fsub0set\n   | |- context [ fsubset ?a ?a ] => apply fsubsetxx\n   end).\n\nLtac split_fsubset_lhs :=\n  repeat (rewrite !is_true_split_and || 
rewrite !fsubUset) ;\n  repeat (try rewrite !andb_true_intro ; split).\n\nLtac solve_single_fset_fsubset :=\n  repeat (solve_match || apply fsubsetU ; rewrite is_true_split_or ; (left ; solve_match) || right).\n\nLtac solve_is_true :=\n  now normalize_fset ;\n  split_fsubset_lhs ;\n  solve_single_fset_fsubset.\n\nLtac left_assoc :=\n  repeat (match goal with\n          | |- context [?a :|: ?b :|: ?c] =>\n              replace (a :|: b :|: c) with (a :|: (b :|: c)) by apply fsetUA\n          end).\n\nLtac solve_in_fset :=\n  match goal with\n  | [ |- context [ is_true (fsubset _ _) ] ] => solve_is_true\n  | [ |- context [ fsubset _ _ = true ] ] => solve_is_true\n  end.\n\nLtac solve_fset_eq :=\n  apply (ssrbool.elimT eqtype.eqP) ;\n  rewrite eqEfsubset ;\n  rewrite is_true_split_and ; split ;\n  solve_in_fset.\n\nLtac fset_equality :=\n  repeat\n    match goal with\n    | H : fsubset (?x :|: ?y) ?z = true |- _ =>\n        rewrite fsubUset in H ;\n        apply andb_prop in H ;\n        destruct H\n    end ;\n  match goal with\n  | [ |- context [ @eq (fset_of _) _ _ ] ] =>\n      solve_fset_eq\n  | [ |- context [ @eq Interface _ _ ] ] =>\n      solve_fset_eq\n  | [ |- context [ @Logic.eq (fset_of _) _ _ ] ] =>\n      solve_fset_eq\n  | [ |- context [ @Logic.eq Interface _ _ ] ] =>\n      solve_fset_eq\n  end.\n\nNotation \"prod_ce( a , b )\" := ((a , b) : chProd _ _) : hacspec_scope.\nNotation \"prod_ce( a , b , .. , c )\" := ((.. ((a , b) : chProd _ _) .. 
, c) : chProd _ _) : hacspec_scope.\n\nDefinition lift_to_code {ce L I} (x : choice.Choice.sort ce) : code L I ce :=\n  {code ret x}.\n\nDefinition pre_to_post (P : precond) {A} : postcond A A :=\n  fun '(a, h₀) '(b, h₁) => a = b /\\ P (h₀ , h₁).\nDefinition pre_to_post_ret (P : precond) {A} v : postcond A A :=\n  fun '(a, h₀) '(b, h₁) => (a = b /\\ b = v) /\\ P (h₀ , h₁).\n\nDefinition true_precond : precond := fun _ => True.\n\nTheorem forget_precond {B} (x y : raw_code B) P Q :\n  ⊢ ⦃ true_precond ⦄ x ≈ y ⦃ Q ⦄ ->\n  ⊢ ⦃ P ⦄ x ≈ y ⦃ Q ⦄.\nProof.\n  intros.\n  now apply (rpre_weaken_rule _ _ _ H).\nQed.\n\nSection Both.\n  Context (A : choice_type).\n\n  Class raw_both :=\n    {\n      is_pure : choice.Choice.sort A ;\n      is_state : raw_code A ;\n    }.\n  Arguments is_pure raw_both.\n  Arguments is_state raw_both.\n\n  Inductive valid_both :\n    forall (b : raw_both), Prop :=\n  | both_valid_ret :\n    forall x, valid_both {| is_pure := x ; is_state := ret x |}.\n\n  Class ValidBoth (p : raw_both) :=\n    { is_valid_code : ValidCode fset0 fset0 (@is_state p) ;\n      is_valid_both : @valid_both p ;\n    }.\n  Arguments is_valid_code {_} ValidBoth.\n  Arguments is_valid_both {_} ValidBoth.\n\n  Record both : Type :=\n    mk2prog {\n        both_prog :> raw_both ;\n        both_prog_valid : @ValidBoth both_prog ;\n        p_eq : forall P, ⊢ ⦃ P ⦄ (@is_state both_prog) ≈ ret (@is_pure both_prog) ⦃ pre_to_post_ret P (@is_pure both_prog) ⦄ ;\n      }.\n  Arguments both_prog b.\n  Arguments both_prog_valid b.\n  Arguments p_eq b.\n\nEnd Both.\n\nArguments is_pure {_} raw_both.\nArguments is_state {_} raw_both.\n\nArguments valid_both {_}.\nArguments both_valid_ret {_}.\n\nArguments ValidBoth {_} p.\nArguments is_valid_code {_} {_} ValidBoth.\nArguments is_valid_both {_} {_} ValidBoth.\n\nArguments both_prog {_} b.\nArguments both_prog_valid {_} b.\nArguments p_eq {_} b.\n\nSection Both_helper.\n\n  Lemma valid_both_eta :\n    forall {A : choice_type} {x : raw_both 
A},\n      valid_both x ->\n      valid_both {| is_pure := is_pure x ; is_state := is_state x |}.\n  Proof.\n    now intros ? [] ?.\n  Defined.\n\n  Lemma ValidBoth_eta :\n    forall {A : choice_type} {x : both A},\n      ValidBoth x ->\n      ValidBoth {| is_pure := is_pure x ; is_state := is_state x |}.\n  Proof.\n    now intros ? [[] ? ?] ?.\n  Defined.\n\n  Definition bind_raw_both {A B} (c : raw_both A) (k : A -> raw_both B) : raw_both B :=\n    {|\n      is_pure := let x := (is_pure c) in is_pure (k x) ;\n      is_state := bind (is_state c) (fun x => is_state (k x))\n    |}.\n\n  Lemma valid_bind_both_ :\n    forall A B c k,\n      valid_both c ->\n      (forall x, valid_both {| is_pure := is_pure (k x) ; is_state := is_state (k x) |}) ->\n      valid_both (@bind_raw_both A B c k).\n  Proof.\n    intros A B c k Hc Hk.\n    induction Hc ; intros.\n    apply Hk.\n  Qed.\n\n  Lemma valid_bind_both :\n    forall A B c k,\n      ValidBoth c ->\n      (forall x, ValidBoth (k x)) ->\n      ValidBoth (@bind_raw_both A B c k).\n  Proof.\n    intros A B c k Hc Hk.\n    constructor ; simpl.\n    - apply valid_bind.\n      apply (is_valid_code Hc).\n      apply (fun x => is_valid_code (Hk x)).\n    - eapply valid_bind_both_.\n      apply (is_valid_both Hc).\n      intros.\n      apply valid_both_eta.\n      apply (fun x => is_valid_both (Hk x)).\n  Qed.\n\n  Definition both_ret {A : choice_type} (x : A) : raw_both A :=\n    {| is_pure := x ; is_state := ret x |} .\n\n  Program Definition both_ret_valid {A : choice_type} (x : A) : ValidBoth (both_ret x) :=\n    {| is_valid_code := valid_ret _ _ _ ; is_valid_both := both_valid_ret _ |} \n    .\n  Fail Next Obligation.\n\nEnd Both_helper.\n\nProgram Definition ret_both {A : choice_type} (x : A) : both A :=\n  {|\n    both_prog := {| is_pure := x ; is_state := ret x |} ;\n    both_prog_valid := {|\n                        is_valid_code := valid_ret fset0 fset0 x ;\n                        is_valid_both := both_valid_ret x 
;\n                      |} ;\n    p_eq := fun P => r_ret _ _ _ _ _ ;\n  |}.\nFail Next Obligation.\n\nLtac pattern_both Hx Hf Hg :=\n  (match goal with\n   | [ |- context [ @is_state _ ?x : both _ _ _ ] ] =>\n       set (Hx := x)\n       ; try change (@is_pure _ _) with (@is_pure _ Hx)\n       ; match goal with\n         | [ |- context [ ⊢ ⦃ _ ⦄ bind _ ?fb ≈ ?os ⦃ _ ⦄ ] ] =>\n             let H := fresh in\n             set (H := os)\n             ; pattern (@is_pure _ Hx) in H\n             ; subst H\n             ; set (Hf := fb)\n             ; match goal with\n               | [ |- context [ ⊢ ⦃ _ ⦄ _ ≈ ?gb _ ⦃ _ ⦄ ] ] =>\n                   set (Hg := gb)\n               end\n         end\n   end).\n\nLtac pattern_both_fresh :=\n  let x := fresh in\n  let y := fresh in\n  let z := fresh in\n  pattern_both x y z.\n\nTheorem r_bind_trans :\n  forall {B C : choice_type}\n     (f : choice.Choice.sort B -> raw_code C)\n    (g : choice.Choice.sort B -> raw_code C) (x : raw_code B) (y : choice.Choice.sort B),\n  forall (P P_mid : precond) (Q : postcond (choice.Choice.sort C) (choice.Choice.sort C)),\n  forall (H_x_is_y : ⊢ ⦃ P ⦄ x  ≈ ret y ⦃ pre_to_post_ret P_mid (y) ⦄),\n    (⊢ ⦃ P_mid ⦄ f (y)  ≈ g y ⦃ Q ⦄) ->\n    ⊢ ⦃ P ⦄ temp ← x ;; f temp ≈ g y ⦃ Q ⦄.\nProof.\n  intros.\n  replace (g y) with (temp ← ret y ;; g temp) by reflexivity.\n\n  pose @r_bind.\n  specialize r with (f₀ := f) (f₁ := fun x => g x).\n  specialize r with (m₀ := x) (m₁ := (ret y)).\n  specialize r with (pre := P) (mid := pre_to_post_ret P_mid y ) (post := Q).\n  apply r ; clear r.\n\n  - apply H_x_is_y.\n  - intros.\n    eapply rpre_hypothesis_rule.\n    intros ? ? [[] ?]. subst.\n    eapply rpre_weaken_rule.\n    cbn in H2.\n    subst.\n    apply H.\n    intros ? ? []. subst. 
apply H2.\nQed.\n\nTheorem r_bind_trans_both : forall {B C : choice_type} {f : choice.Choice.sort B -> raw_code C} {g : choice.Choice.sort B -> raw_code C} (b : both B),\n  forall (P : precond) (Q : postcond _ _),\n    (⊢ ⦃ true_precond ⦄ f ((is_pure b))  ≈ g (is_pure b) ⦃ Q ⦄) ->\n    ⊢ ⦃ P ⦄ temp ← is_state b ;; f temp ≈ g (is_pure b) ⦃ Q ⦄.\nProof.\n  intros.\n  apply r_bind_trans with (P_mid := true_precond).\n\n  eapply rpre_weaken_rule.\n  apply p_eq.\n  reflexivity.\n\n  apply H.\nQed.\n\nLtac match_bind_trans_both :=\n  let Hx := fresh in\n  let Hf := fresh in\n  let Hg := fresh in\n  pattern_both Hx Hf Hg\n  ; apply (@r_bind_trans_both) with (b := Hx) (f := Hf) (g := Hg)\n  ; intros ; subst Hf ; subst Hg ; subst Hx ; hnf.\n\nLtac r_bind_both a :=\n  eapply r_bind ; [ apply (p_eq a) | ] ;\n  intros ;\n  apply rpre_hypothesis_rule ;\n  intros ? ? [[] []] ; subst ;\n  apply forget_precond.\n\nLtac r_subst_both a :=\n  let x := fresh in\n  let y := fresh in\n  let z := fresh in\n  pattern_both x y z ;\n  change (z _) with (temp ← ret (is_pure x) ;; z temp) ;\n  r_bind_both a ;\n  subst x y z ; hnf.\n\nProgram Definition bind_both {A B} (c : both A) (k : A -> both B) : both B :=\n  {|\n    both_prog := bind_raw_both (both_prog c) (fun x => both_prog (k x)) ;\n    both_prog_valid := valid_bind_both A B c k (both_prog_valid c) (fun x => both_prog_valid (k x)) ;\n  |}.\nNext Obligation.\n  intros.\n  let x := fresh in\n  let y := fresh in\n  let z := fresh in\n  pattern_both x y z ;\n  change (z _) with (temp ← ret (is_pure x) ;; z temp).\n\n  eapply r_bind ; [ apply (p_eq _) | ].\n  intros ;\n  apply rpre_hypothesis_rule.\n  intros ? ? [[]].\n  eapply rpre_weaken_rule.\n  2:{\n    simpl ; intros ? ? [].\n    subst.\n    apply H4.\n  }\n  subst a₀ a₁ ; hnf.\n  apply (k (is_pure c)).\nQed.\n\nLemma both_eq : forall {A : choice_type} (a b : both A),\n    both_prog a = both_prog b ->\n    a = b.\nProof.\n  intros.\n  destruct a , b.\n  cbn in *. 
subst.\n  f_equal ; apply proof_irrelevance.\nQed.\n\nLemma bind_ret_both : forall {A B : choice_type} (f : A -> both B) (x : A),\n    (bind_both (ret_both x) f) = f x.\nProof.\n  intros.\n  apply both_eq.\n  simpl.\n  unfold bind_raw_both.\n  simpl.\n  destruct (f x). simpl.\n  destruct both_prog0. simpl.\n  reflexivity.\nQed.\n\nDefinition lift_both {A} (x : both A) : both A :=\n    {| both_prog := x ;\n      both_prog_valid := (both_prog_valid x) ;\n      p_eq := p_eq x |}.\n\nNotation \"'solve_lift' x\" := (lift_both x) (at level 100).\n\nEquations lift1_both {A B : choice_type} (f : A -> B) (x : both A) : both B\n  :=\n  lift1_both f x := bind_both x (fun x' => solve_lift (ret_both (f x'))).\nSolve All Obligations with intros ; solve_in_fset.\nFail Next Obligation.\n\nEquations lift2_both {A B C : choice_type} (f : A -> B -> C) (x : both A) (y : both B)\n  : both C\n  :=\n  lift2_both f x y :=\n    bind_both x (fun x' =>\n    bind_both y (fun y' =>\n    solve_lift (ret_both (f x' y')))).\nSolve All Obligations with intros ; solve_in_fset.\nFail Next Obligation.\n\nEquations lift3_both {A B C D : choice_type} (f : A -> B -> C -> D) (x : both A) (y : both B) (z : both C)\n  : both D :=\n  lift3_both f x y z :=\n  bind_both x (fun x' => lift_both (lift2_both (f x') y z)).\nSolve All Obligations with intros ; solve_in_fset.\nFail Next Obligation.\n\nDefinition choice_type_size (ce : choice_type) : nat.\nProof.\n  induction ce.\n  1, 2, 3, 4, 8, 9: exact 1.\n  - refine (S (IHce1 + IHce2))%nat.\n  - refine (S (S (S (IHce1 + IHce2))))%nat.\n  - refine (S (IHce))%nat.\n  - refine (S (IHce))%nat.\n  - refine (S (IHce1 + IHce2))%nat.\nDefined.\n\nFixpoint ce_to_chElement_ordType_ce (ce : choice_type) (X : chElement_ordType ce) : ce :=\n  match ce as A return chElement_ordType A -> A with\n  | 'unit | 'nat | 'int | 'bool | chFin _ | 'word _ => id\n  | Y × Z => fun '(y,z) => (ce_to_chElement_ordType_ce Y y, ce_to_chElement_ordType_ce Z z)\n  | chMap Y Z => fun y => mkfmap 
(seq.zip (seq.unzip1 (FMap.fmval y)) (List.map (ce_to_chElement_ordType_ce Z) (seq.unzip2 (FMap.fmval y))))\n  | 'option Y => (fun y => match y with\n                 | None => None\n                 | Some z => Some (ce_to_chElement_ordType_ce Y z)\n                       end)\n  | chList Y => List.map (ce_to_chElement_ordType_ce Y)\n  | Y ∐ Z => (fun y => match y with\n            | inl z => inl (ce_to_chElement_ordType_ce Y z)\n            | inr z => inr (ce_to_chElement_ordType_ce Z z)\n            end)\n  end X.\n\nFixpoint chElement_ordType_ce_to_ce (ce : choice_type) (X : ce) : chElement_ordType ce :=\n  match ce as A return A -> chElement_ordType A with\n  | 'unit | 'nat | 'int | 'bool | chFin _ | 'word _ => id\n  | Y × Z => fun '(y,z) => (chElement_ordType_ce_to_ce Y y,\n                        chElement_ordType_ce_to_ce Z z)\n  | chMap Y Z => fun y => mkfmap (seq.zip (seq.unzip1 (FMap.fmval y)) (List.map (chElement_ordType_ce_to_ce Z) (seq.unzip2 (FMap.fmval y))))\n  | 'option Y => (fun y => match y with\n                 | None => None\n                 | Some z => Some (chElement_ordType_ce_to_ce Y z)\n                       end)\n  | chList Y => List.map (chElement_ordType_ce_to_ce Y)\n  | Y ∐ Z => (fun y => match y with\n            | inl z => inl (chElement_ordType_ce_to_ce Y z)\n            | inr z => inr (chElement_ordType_ce_to_ce Z z)\n            end)\n  end X.\n\nEquations prod_both {ceA ceB : choice_type} (a : both ceA) (b : both ceB) : both (ceA × ceB) :=\n  prod_both a b :=\n    bind_both a (fun a' =>\n    bind_both b (fun b' =>\n                   solve_lift (ret_both ((a', b') : _ × _)))).\nSolve All Obligations with intros ; solve_in_fset.\nFail Next Obligation.\n\nNotation \"'prod_b' ( a , b )\" := (prod_both a b) : hacspec_scope.\nNotation \"'prod_b' ( a , b , .. , c )\" := (prod_both .. (prod_both a b) .. 
c) : hacspec_scope.\n\nLtac ssprove_valid_program :=\n  try (apply prog_valid) ;\n  try (apply valid_scheme ; try rewrite <- fset.fset0E ; apply prog_valid).\n\nLtac destruct_choice_type_prod :=\n  try match goal with\n  | H : choice.Choice.sort (chElement (loc_type ?p)) |- _ =>\n      unfold p in H ;\n      unfold loc_type in H ;\n      unfold projT1 in H\n  end ;\n  repeat match goal with\n  | H : (chProd _ _) |- _ =>\n      destruct H\n  end ;\n  repeat match goal with\n  | H : choice.Choice.sort\n         (chElement\n            (choice.Choice.sort\n               (chProd _ _))) |- _ =>\n      destruct H\n  end ;\n  repeat match goal with\n         | H : prod _ _ |- _ => destruct H\n         end ;\n  cbv zeta.\n\nTheorem tag_leq_simplify :\n  forall (a b : Location),\n    is_true (ssrfun.tag a <= ssrfun.tag b)%ord ->\n    is_true (ssrfun.tagged a <= ssrfun.tagged b)%ord ->\n    is_true (tag_leq (I:=choice_type_choice_type__canonical__Ord_Ord) (T_:=fun _ : choice_type => Datatypes_nat__canonical__Ord_Ord) a b).\nProof.\n  intros [] [].\n\n  unfold tag_leq.\n  unfold eqtype.tagged_as, ssrfun.tagged , ssrfun.tag , projT1 , projT2.\n\n  intro.\n  rewrite Ord.leq_eqVlt in H.\n  rewrite is_true_split_or in H.\n  destruct H.\n  - apply Couplings.reflection_nonsense in H ; subst.\n\n    rewrite Ord.ltxx.\n    rewrite Bool.orb_false_l.\n    rewrite eqtype.eq_refl.\n    rewrite Bool.andb_true_l.\n\n    destruct eqtype.eqP.\n    + unfold eq_rect_r , eq_rect ; destruct eq_sym.\n      trivial.\n    + contradiction.\n  - rewrite H ; clear H.\n    reflexivity.\nQed.\n\nTheorem tag_leq_inverse :\n  forall a b,\n    tag_leq (I:=choice_type_choice_type__canonical__Ord_Ord) (T_:=fun _ : choice_type => Datatypes_nat__canonical__Ord_Ord) a b\n    =\n      (negb (tag_leq (I:=choice_type_choice_type__canonical__Ord_Ord) (T_:=fun _ : choice_type => Datatypes_nat__canonical__Ord_Ord)\n                    b a) ||\n           eqtype.eq_op (ssrfun.tag a) (ssrfun.tag b) &&\n        
eqtype.eq_op (ssrfun.tagged a) (ssrfun.tagged b))%bool.\nProof.\n  intros [a b] [c d].\n  unfold tag_leq.\n\n  rewrite Bool.negb_orb.\n  rewrite Bool.negb_andb.\n  rewrite Bool.andb_orb_distrib_r.\n\n  unfold eqtype.tagged_as.\n  unfold ssrfun.tagged , ssrfun.tag , projT1 , projT2.\n  rewrite <- Bool.orb_assoc.\n\n  f_equal.\n  - rewrite <- Bool.negb_orb.\n    rewrite <- Bool.orb_comm.\n    rewrite <- Ord.leq_eqVlt.\n    rewrite <- Ord.ltNge.\n    reflexivity.\n  - destruct (eqtype.eq_op a c) eqn:a_eq_c.\n    + apply Couplings.reflection_nonsense in a_eq_c.\n      subst.\n      do 2 rewrite Bool.andb_true_l.\n\n      destruct eqtype.eqP. 2: contradiction.\n\n      unfold eq_rect_r , eq_rect.\n      destruct eq_sym.\n\n      rewrite Ord.leq_eqVlt.\n      rewrite Bool.orb_comm.\n\n      f_equal.\n      rewrite <- Ord.ltNge.\n      rewrite Ord.ltxx.\n      reflexivity.\n    + do 2 rewrite Bool.andb_false_l.\n      rewrite Bool.orb_false_r.\n      symmetry.\n\n      destruct eqtype.eqP.\n      { subst. rewrite eqtype.eq_refl in a_eq_c. discriminate a_eq_c. }\n\n      rewrite Ord.eq_leq by reflexivity.\n      rewrite Bool.andb_false_r.\n      reflexivity.\nQed.\n\nLtac valid_program :=\n  apply prog_valid\n  || (apply valid_scheme ; try rewrite <- fset.fset0E ; apply prog_valid).\n\n\nDefinition heap_ignore_post fset {A} : postcond A A :=\n  pre_to_post (heap_ignore fset).\n\nTheorem heap_ignore_refl :\n  forall {fset} h, heap_ignore fset (h, h).\nProof.\n  intros fset h ℓ ?.\n  reflexivity.\nQed.\n\nTheorem heap_ignore_post_refl :\n  forall {fset A} (x : A * heap), heap_ignore_post fset x x.\nProof.\n  intros fset A [].\n  split. 
reflexivity.\n  apply heap_ignore_refl.\nQed.\n\nLemma heap_ignore_weaken :\n  forall fset fset', is_true (fsubset fset fset') ->\n  forall x, heap_ignore fset x -> heap_ignore fset' x.\nProof.\n  intros.\n  destruct x as [h h0].\n  pose (INV'_heap_ignore fset fset' fset0).\n  rewrite fsetU0 in i.\n  unfold INV' in i.\n  specialize (i H h h0).\n  destruct i as [? _].\n  intros l ?.\n  specialize (H1 H0 l H2 ltac:(easy)).\n  rewrite H1.\n  reflexivity.\nQed.\n\nLemma rpost_heap_ignore_weaken :\n  forall {A} fset fset', is_true (fsubset fset fset') ->\n  forall (x y : raw_code A),\n    ⊢ ⦃ (fun '(h0, h1) => heap_ignore fset (h0, h1)) ⦄\n        x ≈ y\n      ⦃ heap_ignore_post fset ⦄ ->\n    ⊢ ⦃ (fun '(h0, h1) => heap_ignore fset (h0, h1)) ⦄\n        x ≈ y\n        ⦃ heap_ignore_post fset' ⦄.\nProof.\n  intros.\n  eapply rpost_weaken_rule.\n  apply H0.\n\n  intros [] [] []. subst.\n  split. reflexivity.\n  apply (heap_ignore_weaken fset) ; assumption.\nQed.\n\n\nLemma rpre_heap_ignore_weaken :\n  forall {A} fset fset', is_true (fsubset fset fset') ->\n  forall (x y : raw_code A),\n    ⊢ ⦃ (fun '(h0, h1) => heap_ignore fset' (h0, h1)) ⦄\n        x ≈ y\n      ⦃ heap_ignore_post fset ⦄ ->\n    ⊢ ⦃ (fun '(h0, h1) => heap_ignore fset (h0, h1)) ⦄\n        x ≈ y\n        ⦃ heap_ignore_post fset ⦄.\nProof.\n  intros.\n  eapply rpre_weaken_rule.\n  apply H0.\n  intros. cbn.\n  apply (heap_ignore_weaken fset fset') ; assumption.\nQed.\n\nTheorem bind_rewrite : forall A B x f, @bind A B (ret x) f = f x.\nProof.\n  intros.\n  unfold bind.\n  reflexivity.\nQed.\n\nTheorem r_bind_eq : forall {B C : choice_type} (y : choice.Choice.sort B) (g : choice.Choice.sort B  -> raw_code C), (temp ← ret y ;; g temp) = g y.\nProof. reflexivity. 
Qed.\n\nTheorem r_bind_trans' :\n  forall {B C : choice_type}\n     (f : choice.Choice.sort B -> raw_code C)\n    (g : choice.Choice.sort B -> raw_code C) (x : raw_code B) (y : choice.Choice.sort B),\n  forall (P : precond) (Q : postcond (choice.Choice.sort C) (choice.Choice.sort C)),\n  forall (H_x_is_y : ⊨ repr x ≈ repr (ret y) [{retW (y, y)}]),\n    (⊢ ⦃ P ⦄ f ( y)  ≈ g y ⦃ Q ⦄) ->\n    ⊢ ⦃ P ⦄ temp ← x ;; f temp ≈ g y ⦃ Q ⦄.\nProof.\n  intros.\n\n  replace (g y) with (temp ← ret y ;; g temp) by reflexivity.\n\n  pose @r_bind.\n  specialize r with (f₀ := f) (f₁ := fun x => g x).\n  specialize r with (m₀ := x) (m₁ := (ret y)).\n  specialize r with (pre := P) (mid := fun s0 s1 => pre_to_post P s0 s1 /\\ fst s1 = y) (post := Q).\n  apply r ; clear r.\n\n  - eapply from_sem_jdg.\n    eapply (RulesStateProb.weaken_rule (retW (y , y))).\n    + apply H_x_is_y.\n    + unfold retW.\n      intros [] X [? πa1a2] ; cbn in X.\n      specialize (fun x => πa1a2 (x, s) (y, s0)).\n\n      unfold proj1_sig.\n\n      unfold RulesStateProb.WrelSt.\n      unfold θ.\n      unfold StateTransformingLaxMorph.rlmm_codomain ; simpl.\n\n      apply πa1a2.\n      split.\n      cbn.\n      split.\n      reflexivity.\n      2: { reflexivity. }\n      apply H0.\n  - intros.\n    eapply rpre_hypothesis_rule.\n    intros ? ? [[] ?]. subst.\n    eapply rpre_weaken_rule.\n    2: { intros ? ? []. subst. apply H1. }\n    clear H1.\n    apply H.\nQed.\n\nLtac solve_post_from_pre :=\n  let H := fresh in\n  intros ? ? H\n  ; split\n  ; [reflexivity | ]\n  ; ( assumption\n      || (apply restore_set_lhs in H\n         ; [ assumption\n           | intros ? ? 
] )).\n\nCorollary better_r :\n  forall {A B : choice.Choice.type}\n    (r₀ : raw_code A)\n    (r₁ : raw_code B) (pre : precond)\n    (post : postcond (choice.Choice.sort A) (choice.Choice.sort B)),\n    ⊢ ⦃ fun '(s₀, s₁) => pre (s₀, s₁) ⦄ r₀ ≈ r₁ ⦃ post ⦄ <->\n      ⊢ ⦃ pre ⦄ r₀ ≈ r₁ ⦃ post ⦄.\nProof.\n  split ; intros ; (eapply rpre_hypothesis_rule ; intros ; eapply rpre_weaken_rule ; [ apply H | intros ? ? [] ; subst ; easy ]).\nQed.\n\nCorollary better_r_put_lhs : forall {A B : choice.Choice.type} (ℓ : Location)\n       (v : choice.Choice.sort (Value (projT1 ℓ))) (r₀ : raw_code A)\n       (r₁ : raw_code B) (pre : precond)\n       (post : postcond (choice.Choice.sort A) (choice.Choice.sort B)),\n     ⊢ ⦃ set_lhs ℓ v pre ⦄ r₀ ≈ r₁ ⦃ post ⦄ ->\n     ⊢ ⦃ pre ⦄ #put ℓ := v ;; r₀ ≈ r₁ ⦃ post ⦄.\nProof.\n  intros ; now apply better_r, r_put_lhs, better_r.\nQed.\n\nCorollary better_r_put_rhs : forall {A B : choice.Choice.type} (ℓ : Location)\n                               (v : choice.Choice.sort (Value (projT1 ℓ))) (r₀ : raw_code A)\n                               (r₁ : raw_code B) (pre : precond)\n                               (post : postcond (choice.Choice.sort A) (choice.Choice.sort B)),\n    ⊢ ⦃ set_rhs ℓ v pre ⦄ r₀ ≈ r₁ ⦃ post ⦄ ->\n    ⊢ ⦃ pre ⦄ r₀ ≈ #put ℓ := v ;; r₁ ⦃ post ⦄.\nProof.\n  intros ; now apply better_r, r_put_rhs, better_r.\nQed.\n\nCorollary better_r_put_get_lhs : forall (A : choice.Choice.type) (B : choice.Choice.type) (ℓ : Location) (v : choice.Choice.sort ℓ) (r : choice.Choice.sort ℓ -> raw_code A) rhs (pre : precond) (post : postcond (choice.Choice.sort A) (choice.Choice.sort B)),\n    ⊢ ⦃ pre ⦄\n     #put ℓ := v ;;\n     r v ≈ rhs ⦃ post ⦄ ->\n    ⊢ ⦃ pre ⦄\n        #put ℓ := v ;;\n        x ← get ℓ ;;\n        r x ≈ rhs ⦃ post ⦄.\nProof.\n  intros.\n  apply (r_transL (#put ℓ := v ;; r v )).\n  apply r_put_get.\n  apply H.\nQed.\n\nCorollary better_r_put_get_rhs : forall (A : choice.Choice.type) (B : choice.Choice.type) (ℓ : Location) (v : 
choice.Choice.sort ℓ) (r : choice.Choice.sort ℓ -> raw_code B) lhs (pre : precond) (post : postcond (choice.Choice.sort A) (choice.Choice.sort B)),\n    ⊢ ⦃ pre ⦄\n        lhs ≈\n        #put ℓ := v ;;\n        r v ⦃ post ⦄ ->\n    ⊢ ⦃ pre ⦄\n        lhs ≈\n        #put ℓ := v ;;\n        x ← get ℓ ;;\n        r x ⦃ post ⦄.\nProof.\n  intros.\n  apply (r_transR _ (#put ℓ := v ;; r v )).\n  apply r_put_get.\n  apply H.\nQed.\n\nCorollary better_r_get_remind_lhs : forall {A B : choice.Choice.type} (ℓ : Location)\n       (v : choice.Choice.sort (Value (projT1 ℓ)))\n       (r₀ : choice.Choice.sort (Value (projT1 ℓ)) -> raw_code A) (r₁ : raw_code B)\n       (pre : precond) (post : postcond (choice.Choice.sort A) (choice.Choice.sort B)),\n     Remembers_lhs ℓ v pre ->\n     ⊢ ⦃ pre ⦄ r₀ v ≈ r₁ ⦃ post ⦄ ->\n     ⊢ ⦃ pre ⦄ x ← get ℓ ;; r₀ x ≈ r₁ ⦃ post ⦄.\nProof.\n  intros.\n  apply better_r.\n  eapply r_get_remind_lhs.\n  apply H.\n  apply better_r.\n  apply H0.\nQed.\n\nLemma getr_set_lhs :\n  forall {A B} ℓ v pre post (a : _ -> raw_code A) (b : raw_code B),\n  ⊢ ⦃ set_lhs ℓ v pre ⦄\n     a v\n  ≈\n     b\n  ⦃ post ⦄ ->\n  ⊢ ⦃ set_lhs ℓ v pre ⦄\n     x ← get ℓ ;;\n     a x\n  ≈\n     b\n  ⦃ post ⦄.\nProof.\n  clear.\n  intros.\n\n  eapply better_r_get_remind_lhs.\n  unfold Remembers_lhs.\n  intros ? ? [? []]. 
subst.\n  unfold rem_lhs.\n  rewrite get_set_heap_eq.\n  reflexivity.\n  apply H.\nQed.\n\nEquations prod_to_prod {A B} (x : both (A × B)) : (both A * both B) :=\n  prod_to_prod x :=\n  (bind_both x (fun x' => solve_lift (ret_both (fst x'))) ,\n   bind_both x (fun x' => solve_lift (ret_both (snd x')))).\nSolve All Obligations with intros ; solve_in_fset.\nFail Next Obligation.\n\nEquations let_both {A B} (x : both A) (f : both A -> both B) : both B :=\n  let_both x f := f x.\n\nNotation \"'letb' x ':=' y 'in' f\" :=\n  (let_both y (fun x => f)) (at level 100, x pattern, right associativity).\nNotation \"'letb' ''' x ':=' y 'in' f\" :=\n  (let_both y (fun x => f)) (at level 100, x pattern, right associativity).\n\nFixpoint split_type (F : choice_type -> Type) (A : choice_type) : Type :=\n  match A with\n  | C × D => split_type F C * split_type F D\n  | _ => F A\n  end.\n\nFixpoint split_both {A} (x : both A) : (split_type (both) A) :=\n   match A as c return (both c -> split_type (both) c) with\n   | _ × _ => fun y => (split_both (fst (prod_to_prod y)) , split_both (snd (prod_to_prod y)))\n   | _ => fun y : both _ => y\n   end x.\n\nFixpoint unsplit_both {A} (s : split_type (both) A) : both A :=\n  match A as c return (split_type (both) c -> both c) with\n  | _ × _ =>\n      fun y => prod_both ( unsplit_both (fst y)) ((unsplit_both (snd y)))\n  | _ => fun y => y\n  end s.\n\nNotation \"'unsplit_both_all' ( a , b , .. , c )\" := ((.. ((unsplit_both a , unsplit_both b)) .. , unsplit_both c)).\n\n\n(* Handle products of size 2 - 4 for letb *)\n\nFixpoint prod_to_prod_n_ty (n : nat) (F : choice_type -> Type) (A : choice_type) : Type :=\n  match n with\n  | O => F A\n  | S n' =>\n      match A with\n      | B × C => (prod_to_prod_n_ty n' F B) * F C\n      | _ => F A\n      end\n  end.\nEval simpl in prod_to_prod_n_ty 2 (both) ('nat × 'bool).\n\n(* TODO: Currently duplicates code, due to prod_to_prod, should only evaluate and project the result ! 
*)\nFixpoint prod_to_prod_n {A} (n : nat) (x : both A) : prod_to_prod_n_ty n  (both) A :=\n  match n as m return prod_to_prod_n_ty m (both) A with\n  | O => x\n  | S n' =>\n      match A as B return both B -> prod_to_prod_n_ty (S n') (both) B with\n      | B × C => fun y => (prod_to_prod_n n' (fst (prod_to_prod y)), snd (prod_to_prod y))\n      | _ => fun y => y\n      end x\n  end.\n\nEquations lift_n {A B} (n : nat) (z : both A) (f : prod_to_prod_n_ty n (both) A -> both B) : both B :=\n  lift_n n z f :=\n  (bind_both z (fun z' => f (prod_to_prod_n n (solve_lift (ret_both z'))))).\nSolve All Obligations with intros ; solve_in_fset.\nFail Next Obligation.\n\nNotation \"'letb' ' '(' a ',' b ')' ':=' z 'in' f\" :=\n  (lift_n 1 z (fun '(a, b) => f))\n    (at level 100).\n\nNotation \"'letb' ' '(' a ',' b ',' c ')' ':=' z 'in' f\" :=\n  (lift_n 2 z (fun '(a, b, c) => f))\n    (at level 100).\n\nNotation \"'letb' ' '(' a ',' b ',' c ',' d ')' ':=' z 'in' f\" :=\n  (lift_n 3 z (fun '(a, b, c, d) => f))\n    (at level 100).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/ConCertLib.v",
    "content": "Require Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nFrom mathcomp Require Import ssrZ word.\n(* From Jasmin Require Import word. *)\nFrom Crypt Require Import jasmin_word.\n\nFrom Coq Require Import ZArith List.\nImport List.ListNotations.\n\nImport choice.Choice.Exports.\n\nOpen Scope Z_scope.\n\nFrom Hacspec Require Import Hacspec_Lib_Pre.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom ConCert.Execution Require Import Serializable.\n\nProgram Definition serialize_by_other {A B} (f_to : B -> A) (f_from : A -> B)  `(forall m, f_from (f_to m) = m) `{Serializable A} : Serializable B :=\n  {|\n      serialize m := serialize (f_to m);\n      deserialize m := option_map f_from (deserialize m) ;\n  |}.\nNext Obligation.\n  intros. hnf. rewrite deserialize_serialize.\n  unfold option_map. now f_equal.\nDefined.\n\nProgram Definition serialize_by_other_option {A B} (f_to : B -> Datatypes.option A) (f_from : Datatypes.option A -> Datatypes.option B)  `(forall m, f_from (f_to m) = Some m) `{Serializable A} : Serializable B :=\n  {|\n      serialize m := serialize (f_to m);\n    deserialize m := match (deserialize m) with\n                     | Some m => f_from m\n                     | None => None\n                     end;\n  |}.\nNext Obligation.\n  intros. hnf. simpl. rewrite deserialize_serialize. 
now f_equal.\nDefined.\n\n#[global] Instance hacspec_int_serializable {ws : wsize} : Serializable (int ws) := serialize_by_other (unsigned) (@repr ws) (@wrepr_unsigned ws).\n\nLemma eqtype_ord_ext :\n  forall n, forall x y : fintype.ordinal n, (@eqtype.eq_op\n        (fintype_ordinal__canonical__eqtype_Equality\n           _ (* (@ord.Ord.clone _ *)\n           (*    (ord.ordinal_ordType n) *)\n           (*    _ *)\n           (*    id) *)) x y) = (@eqtype.eq_op ssrnat.Datatypes_nat__canonical__eqtype_Equality (nat_of_ord x) (nat_of_ord y)).\nProof.\n  intros.\n  destruct x.\n  simpl.\n  destruct y.\n  simpl.\n  reflexivity.\nQed.\n\nTheorem lift_set_commute :\n  forall {A : choice_type} {len} (a : nseq_ A (S len)) (b : fintype.ordinal (S len)) (c : A),\n    @lift_nseq A (S _) (fmap.setm a b c) =\n      fmap.setm (@lift_nseq A (S _) a) (lift_ordinal _ b) c.\nProof.\n  clear ; intros ; fold chElement in *.\n  simpl in b.\n  unfold lift_nseq.\n  apply fmap.eq_fmap. intros x ; simpl in x.\n  rewrite fmap.setmE.\n  unfold fmap.getm.\n  simpl fmap.FMap.fmval.\n  destruct a ; induction fmval ; simpl lift_fval.\n  - now rewrite (lift_fval_equation_2 _ (len) (b, c) nil).\n  - {\n      destruct x , b.\n      rewrite (eqtype_ord_ext (S (S (len)))).\n      simpl eqtype.eq_op.\n      destruct eqtype.eq_op eqn:eq_o at 2.\n      + apply (ssrbool.elimT eqtype.eqP) in eq_o.\n        subst.\n        destruct ord.Ord.lt.\n        * simpl.\n          rewrite (lift_fval_equation_2 _ (len)).\n          simpl.\n          rewrite (eqtype_ord_ext (S (S ( len)))).\n          simpl.\n          rewrite eqtype.eq_refl.\n          reflexivity.\n        * rewrite (eqtype_ord_ext (S (len))).\n          simpl.\n          set (eqtype.eq_op _ _).\n          destruct b eqn:eq_b_o ; subst b.\n          -- apply (ssrbool.elimT eqtype.eqP) in eq_b_o.\n             subst.\n             rewrite (lift_fval_equation_2 _ (len)).\n             simpl.\n             rewrite (eqtype_ord_ext (S (S (len)))).\n    
         simpl.\n             rewrite eqtype.eq_refl.\n             reflexivity.\n          -- rewrite (lift_fval_equation_2 _ (len)).\n             simpl.\n             rewrite (eqtype_ord_ext (S (S (len)))).\n             simpl.\n             destruct (fst _).\n             simpl in *.\n             rewrite ssrnat.eqSS.\n             rewrite eq_b_o.\n\n             rewrite IHfmval.\n             rewrite (eqtype_ord_ext (S (S (len)))).\n             simpl.\n             rewrite eqtype.eq_refl.\n             reflexivity.\n\n             (* apply (path_sorted_tl _). *)\n             {\n               intros.\n               destruct fmval. reflexivity.\n               - cbn.\n                 cbn in i.\n                 destruct (seq.unzip1 fmval).\n                 + reflexivity.\n                 + cbn in i.\n                   now rewrite LocationUtility.is_true_split_and in i.\n             }\n      + destruct ord.Ord.lt.\n        * simpl.\n          rewrite (lift_fval_equation_2 _ (len)).\n          simpl.\n          rewrite (eqtype_ord_ext (S (S (len)))).\n          simpl.\n          rewrite eq_o.\n          reflexivity.\n        * rewrite (eqtype_ord_ext (S (len))).\n          simpl.\n          set (eqtype.eq_op _ _).\n          destruct b eqn:eq_b_o ; subst b.\n          -- apply (ssrbool.elimT eqtype.eqP) in eq_b_o.\n             subst.\n             rewrite (lift_fval_equation_2 _ (len)).\n             simpl.\n             rewrite (eqtype_ord_ext (S (S (len)))).\n             simpl.\n             rewrite eq_o.\n             rewrite (lift_fval_equation_2 _ (len)).\n             simpl.\n             rewrite (eqtype_ord_ext (S (S (len)))).\n             simpl.\n             unfold lift_ordinal.\n             destruct (fst _).\n             simpl.\n             simpl in eq_o.\n             rewrite eq_o.\n             reflexivity.\n          -- rewrite (lift_fval_equation_2 _ (len)).\n             simpl.\n             rewrite (eqtype_ord_ext (S (S (len)))).\n   
          simpl.\n             destruct a.\n             destruct s.\n             simpl in *.\n             set (b := eqtype.eq_op _ _) ; destruct b eqn:eq_m_o ; subst b.\n             ++ apply (ssrbool.elimT eqtype.eqP) in eq_m_o.\n                subst.\n                rewrite (lift_fval_equation_2 _ (len)).\n                simpl.\n                rewrite (eqtype_ord_ext (S (S (len)))).\n                simpl.\n                now rewrite eqtype.eq_refl.\n             ++ rewrite IHfmval.\n                rewrite (eqtype_ord_ext (S (S (len)))).\n                simpl.\n                rewrite eq_o.\n                rewrite (lift_fval_equation_2 _ (len)).\n                simpl.\n                rewrite (eqtype_ord_ext (S (S (len)))).\n                simpl.\n                rewrite eq_m_o.\n                reflexivity.\n                (* apply (path_sorted_tl _). *)\n                {\n                  intros.\n                  destruct fmval. reflexivity.\n                  - cbn.\n                    cbn in i.\n                    destruct (seq.unzip1 fmval).\n                    + reflexivity.\n                    + cbn in i.\n                      now rewrite LocationUtility.is_true_split_and in i.\n                }\n    }\nQed.\n\nTheorem array_from_list_helper_inverse : forall {A} len (m : nseq_ A (S len)),\n    array_from_option_list_helper\n      (nseq_hd_option m)\n      (array_to_option_list (nseq_tl m)) len = m.\nProof.\n  intros.\n  induction len.\n  - unfold nseq_tl.\n    unfold nseq_hd_option.\n    rewrite array_to_option_list_equation_1.\n    destruct m, fmval.\n    + now apply fmap.eq_fmap.\n    + apply fmap.eq_fmap. 
intros x ; simpl in x.\n\n      unfold fmap.getm at 2 ; simpl.\n      destruct (fst _), m ; [ | discriminate ] ; simpl.\n      rewrite array_from_option_list_helper_equation_1.\n      unfold setm_option.\n      rewrite fmap.setmE.\n      now destruct x , m ; [ | discriminate ] ; simpl.\n  - rewrite array_to_option_list_equation_2.\n\n    assert (forall (T : ordType) (S : choice_type)\n         (m : @fmap.FMap.fmap_of T S\n                (ssreflect.Phant (Ord.Ord.sort T -> S)))\n         (k : Ord.Ord.sort T) (v : chOption S) (k' : Ord.Ord.sort T),\n       @fmap.getm T S (setm_option m k v) k' =\n         match v with\n         | Some v => @fmap.getm T S (fmap.setm m k v) k'\n         | None => @fmap.getm T S m k'\n         end) by now destruct v.\n\n    rewrite array_from_option_list_helper_equation_3.\n    rewrite (IHlen (nseq_tl m)).\n\n    clear.\n\n    apply fmap.eq_fmap.\n    intros x ; simpl in x.\n    destruct m ; induction fmval.\n    + now unfold fmap.getm ; cbn ; rewrite lift_fval_equation_1.\n    + {\n        specialize (IHfmval (path_sorted_tl i)).\n        unfold nseq_hd_option in *.\n        simpl.\n        destruct a.\n        destruct s.\n        unfold fmap.getm at 2.\n        simpl.\n        destruct m.\n        {\n          setoid_rewrite <- IHfmval ; clear.\n\n          setoid_rewrite fmap.setmE.\n          rewrite !(eqtype_ord_ext (S (S len))).\n          simpl eqtype.eq_op.\n          replace (_ - _)%nat with O by (set (temp := nseq_tl _) ; rewrite <- (array_to_length_option_list_is_len A len temp) at 1; now rewrite Nat.sub_diag).\n\n          destruct x , m ; [ reflexivity | ].\n          rewrite tl_fmap_equation_2.\n          unfold setm_option.\n          destruct fmval ; [reflexivity | ].\n          simpl.\n          destruct p, s.\n          simpl.\n          destruct m0 ; [ discriminate | ].\n\n          rewrite tl_fmap_equation_3.\n\n          unfold fmap.getm.\n          simpl.\n\n          set (@fmap.getm_def _ _).\n          set 
(lift_fval _).\n          set (lift_fval _).\n          assert (l = l0) ; [ subst l l0 | now rewrite H ].\n          f_equal.\n\n          now apply lower_fval_ext_list.\n        }\n        {\n          setoid_rewrite <- IHfmval ; clear.\n          unfold setm_option.\n          unfold fmap.getm.\n          simpl.\n\n          rewrite tl_fmap_equation_3.\n          destruct (eqtype.eq_op _ _) eqn:eq_o.\n          - apply (ssrbool.elimT eqtype.eqP) in eq_o.\n            rewrite eq_o.\n\n            subst.\n            simpl.\n\n            rewrite lower_fval_equation_2.\n            rewrite lift_fval_equation_2.\n            simpl.\n\n            rewrite !(eqtype_ord_ext (S (S len))).\n            simpl.\n            rewrite eqtype.eq_refl.\n            reflexivity.\n          - unfold setm_option.\n            destruct fmval.\n            + (* discriminate. *)\n              rewrite tl_fmap_equation_1.\n              simpl.\n\n              rewrite lower_fval_equation_2.\n              rewrite lift_fval_equation_2.\n              simpl.\n\n              rewrite lower_fval_equation_1.\n              simpl.\n\n              rewrite !(eqtype_ord_ext (S (S len))).\n              simpl.\n              rewrite !(eqtype_ord_ext (S (S len))) in eq_o.\n              simpl in eq_o.\n              rewrite eq_o.\n              simpl.\n              reflexivity.\n            + destruct p , s.\n              destruct m0 ; [ discriminate | ].\n              simpl.\n\n              rewrite lower_fval_equation_2.\n              rewrite lift_fval_equation_2.\n              simpl.\n\n              rewrite lower_fval_equation_2.\n              rewrite lift_fval_equation_2.\n              simpl.\n\n              rewrite tl_fmap_equation_3.\n              simpl.\n\n              rewrite lower_fval_equation_2.\n              rewrite lift_fval_equation_2.\n              simpl.\n\n              rewrite !(eqtype_ord_ext (S (S len))).\n              simpl.\n\n              rewrite 
(eqtype_ord_ext (S (S len))) in eq_o.\n              simpl in eq_o.\n              rewrite eq_o.\n\n              apply (ssrbool.elimF eqtype.eqP) in eq_o.\n\n              destruct (eqtype.eq_op _ _) eqn:eq_o2 ; [ reflexivity | ].\n\n\n              simpl.\n\n              set (@fmap.getm_def _ _).\n              set (lift_fval _).\n              set (lift_fval _).\n              assert (l = l0) ; [ subst l l0 | now rewrite H ].\n              f_equal.\n              apply lower_fval_ext_list.\n              apply (path_sorted_tl (path_sorted_tl i)).\n              apply (path_sorted_tl (path_sorted_tl i)).\n              reflexivity.\n        }\n      }\nQed.\n\nTheorem array_from_list_to_list_unit : forall {A} len (m : nseq_ A len),\n    array_from_option_list' (array_to_option_list m) len = m.\nProof.\n  intros.\n  induction len.\n  - now destruct m. (* unit element equailty *)\n  - simpl.\n    pose (resize_to_length_idemp (array_to_option_list m)).\n    rewrite (array_to_length_option_list_is_len A (S len) m) in e.\n    rewrite <- e ; clear e.\n    rewrite array_to_option_list_equation_2.\n    specialize (IHlen (nseq_tl m)).\n    apply array_from_list_helper_inverse.\nQed.\n\nDefinition defaulted_nseq {A len} (m : nseq_ A (S len)) :=\n  forall i, match fmap.getm m i with\n       | Some x => x <> chCanonical A\n       | None => True\n       end.\n\n#[global] Instance nseq_serializable {A : choice_type} {len} `{Serializable A} : Serializable (nseq_ A len) :=\n  serialize_by_other (array_to_option_list) (fun x => array_from_option_list' x len) (array_from_list_to_list_unit len).\n\nLtac serialize_enum := intros ; autounfold ; repeat apply @product_serializable ; fold chElement.\n\nFrom ConCert.Execution Require Import Blockchain.\n\n#[global] Instance BaseTypes : ConCert.Execution.Blockchain.ChainBase :=\n  {|\n    Address := nat;\n    address_eqb := Nat.eqb ;\n    address_eqb_spec := Nat.eqb_spec;\n    address_is_contract := Nat.even;\n  |}.\n\nFrom Hacspec 
Require Import ChoiceEquality.\n(* From Hacspec Require Import Hacspec_Lib. *)\n\nTheorem both_ext_prog :\n  forall {A} (x y : both A), both_prog x = both_prog y <-> x = y.\nProof.\n  intros A [both_x valid_x eq_x] [both_y valid_y eq_y] ; simpl.\n  split.\n  - intros ; subst.\n    f_equal ; easy.\n  - easy.\nQed.\n\nPrint pkg_core_definition.typed_raw_function.\n\n(* Instance serializable_code {L I} {A : choice_type} `{Serializable A} : Serializable (pkg_core_definition.code L I A). *)\n(* Proof. *)\n(* Admitted. *)\n\n(* Instance serializable_both {A : choice_type} `{Serializable A} : Serializable (both A). *)\n(* Proof. *)\n(* Admitted. *)\n\nLemma fmap_ext : forall {T : ordType} {S : Type} (m : {fmap T -> S}), mkfmap (FMap.fmval m) = m.\nProof.\n  intros.\n  apply fmap.eq_fmap.\n  intros ?.\n  rewrite (@mkfmapE T S _ x).\n  reflexivity.\nQed.\n\nInstance serializable_choice_ordType {C : choice_type} : Serializable (chElement_ordType C).\nProof.\n  induction C.\n  - exact unit_serializable.\n  - exact nat_serializable.\n  - exact int_serializable.\n  - exact bool_serializable.\n  - now apply product_serializable.\n  - refine (@serialize_by_other _ _ (fun x => FMap.fmval x) (mkfmap) _ list_serializable).\n    apply fmap_ext.\n  - now apply option_serializable.\n  - destruct n as [[] ?] 
; [discriminate | ].\n    eapply (serialize_by_other (fun x => nat_of_ord x) (fun x => Ordinal (n := S n) (m := x mod S n) (ssrbool.introT ssrnat.ltP (Nat.mod_upper_bound x (S n) (Nat.neq_succ_0 n))))).\n    intros.\n    destruct m.\n    apply ord_ext.\n    rewrite Nat.mod_small ; [ reflexivity | simpl ; easy ].\n    exact nat_serializable.\n  - apply hacspec_int_serializable.\n  - now apply list_serializable.\n  - now apply sum_serializable.\nDefined.\n\nInstance serializable_choice {C : choice_type} : Serializable.Serializable C.\nProof.\n  induction C.\n  - exact unit_serializable.\n  - exact nat_serializable.\n  - exact int_serializable.\n  - exact bool_serializable.\n  - now apply product_serializable.\n  - refine (@serialize_by_other (list (chElement_ordType C1 * C2)) (chMap C1 C2) (fun x => FMap.fmval x) (mkfmap) _ list_serializable).\n    apply fmap_ext.\n  - now apply option_serializable.\n  - destruct n as [[] ?] ; [discriminate | ].\n    eapply (serialize_by_other (fun x => nat_of_ord x) (fun x => Ordinal (n := S n) (m := x mod S n) (ssrbool.introT ssrnat.ltP (Nat.mod_upper_bound x (S n) (Nat.neq_succ_0 n))))).\n    intros.\n    destruct m.\n    apply ord_ext.\n    rewrite Nat.mod_small ; [ reflexivity | simpl ; easy ].\n    exact nat_serializable.\n  - apply hacspec_int_serializable.\n  - now apply list_serializable.\n  - now apply sum_serializable.\nDefined.\n\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\n(********************************************************)\n(*   Implementation of all Hacspec library functions    *)\n(* for Both types.                                      *)\n(********************************************************)\n\nDeclare Scope hacspec_scope.\n\nFrom Hacspec Require Import ChoiceEquality. Export ChoiceEquality.\nFrom Hacspec Require Import LocationUtility. Export LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable. Export Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre. Export Hacspec_Lib_Pre.\n\nOpen Scope bool_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\nOpen Scope list_scope.\n\nFrom Hacspec Require Import Hacspec_Lib_Integers. Export Hacspec_Lib_Integers.\nFrom Hacspec Require Import Hacspec_Lib_Loops. Export Hacspec_Lib_Loops.\nFrom Hacspec Require Import Hacspec_Lib_Seq. Export Hacspec_Lib_Seq.\nFrom Hacspec Require Import Hacspec_Lib_Natmod. Export Hacspec_Lib_Natmod.\nFrom Hacspec Require Import Hacspec_Lib_Coercions. Export Hacspec_Lib_Coercions.\nFrom Hacspec Require Import Hacspec_Lib_Eq. Export Hacspec_Lib_Eq.\nFrom Hacspec Require Import Hacspec_Lib_Monad. Export Hacspec_Lib_Monad.\nFrom Hacspec Require Import Hacspec_Lib_Ltac. Export Hacspec_Lib_Ltac.\nFrom Hacspec Require Import Hacspec_Lib_Controlflow. Export Hacspec_Lib_Controlflow.\nFrom Hacspec Require Import Hacspec_Lib_Notation. Export Hacspec_Lib_Notation.\nFrom Hacspec Require Import Hacspec_Lib_TODO. Export Hacspec_Lib_TODO.\nFrom Hacspec Require Import ConCertLib. Export ConCertLib.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Coercions.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\nRequire Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nFrom Coq Require Import ZArith List.\nImport List.ListNotations.\n\nImport choice.Choice.Exports.\n\n(********************************************************)\n(*   Implementation of all Hacspec library functions    *)\n(* for Both types.                                      *)\n(********************************************************)\n\nDeclare Scope hacspec_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\n\nOpen Scope bool_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\nOpen Scope list_scope.\n\nFrom Hacspec Require Import Hacspec_Lib_Natmod.\n\n(**** Integers to arrays *)\nDefinition uint16_to_le_bytes (n : int16) : both ((nseq_ int8 2)) := ret_both (uint16_to_le_bytes n).\nDefinition uint16_to_be_bytes (n : int16) : both ((nseq_ int8 2)) := ret_both (uint16_to_be_bytes n).\nDefinition uint16_from_le_bytes (n : (nseq_ int8 2)) : both ((int16)) := ret_both (uint16_from_le_bytes n).\nDefinition uint16_from_be_bytes (n : (nseq_ int8 2)) : both ((int16)) := ret_both (uint16_from_be_bytes n).\nDefinition uint32_to_le_bytes (n : int32) : both ((nseq_ int8 4)) := ret_both (uint32_to_le_bytes n).\nDefinition uint32_to_be_bytes (n : int32) : both ((nseq_ int8 4)) := ret_both (uint32_to_be_bytes n).\nDefinition uint32_from_le_bytes (n : (nseq_ int8 4)) : both ((int32)) := ret_both 
(uint32_from_le_bytes n).\nDefinition uint32_from_be_bytes (n : (nseq_ int8 4)) : both ((int32)) := ret_both (uint32_from_be_bytes n).\nDefinition uint64_to_le_bytes (n : int64) : both ((nseq_ int8 8)) := ret_both (uint64_to_le_bytes n).\nDefinition uint64_to_be_bytes (n : int64) : both ((nseq_ int8 8)) := ret_both (uint64_to_be_bytes n).\nDefinition uint64_from_le_bytes (n : (nseq_ int8 8)) : both ((int64)) := ret_both (uint64_from_le_bytes n).\nDefinition uint64_from_be_bytes (n : (nseq_ int8 8)) : both ((int64)) := ret_both (uint64_from_be_bytes n).\nDefinition uint128_to_le_bytes (n : int128) : both ((nseq_ int8 16)) := ret_both (uint128_to_le_bytes n).\nDefinition uint128_to_be_bytes (n : int128) : both ((nseq_ int8 16)) := ret_both (uint128_to_be_bytes n).\nDefinition uint128_from_le_bytes (n : (nseq_ int8 16)) : both (int128) := ret_both (uint128_from_le_bytes n).\nDefinition uint128_from_be_bytes (n : (nseq_ int8 16)) : both ((int128)) := ret_both (uint128_from_be_bytes n).\nDefinition u32_to_le_bytes (n : int32) : both ((nseq_ int8 4)) := ret_both (u32_to_le_bytes n).\nDefinition u32_to_be_bytes (n : int32) : both ((nseq_ int8 4)) := ret_both (u32_to_be_bytes n).\nDefinition u32_from_le_bytes (n : (nseq_ int8 4)) : both ((int32)) := ret_both (u32_from_le_bytes n).\nDefinition u32_from_be_bytes (n : (nseq_ int8 4)) : both ((int32)) := ret_both (u32_from_be_bytes n).\nDefinition u64_to_le_bytes (n : int64) : both ((nseq_ int8 8)) := ret_both (u64_to_le_bytes n).\nDefinition u64_from_le_bytes (n : (nseq_ int8 8)) : both ((int64)) := ret_both (u64_from_le_bytes n).\nDefinition u128_to_le_bytes (n : int128) : both ((nseq_ int8 16)) := ret_both (u128_to_le_bytes n).\nDefinition u128_to_be_bytes (n : int128) : both ((nseq_ int8 16)) := ret_both (u128_to_be_bytes n).\nDefinition u128_from_le_bytes (n : (nseq_ int8 16)) : both ((int128)) := ret_both (u128_from_le_bytes n).\nDefinition u128_from_be_bytes (n : (nseq_ int8 16)) : both ((int128)) := ret_both 
(u128_from_be_bytes n).\n\n(*** Casting *)\n\nSection TodoSection2.\n\nDefinition uint128_from_usize (n : uint_size) : both int128 := ret_both (repr _ (unsigned n)).\nDefinition uint64_from_usize (n : uint_size) : both int64 := ret_both (repr _ (unsigned n)).\nDefinition uint32_from_usize (n : uint_size) : both int32 := ret_both (repr _ (unsigned n)).\nDefinition uint16_from_usize (n : uint_size) : both int16 := ret_both (repr _ (unsigned n)).\nDefinition uint8_from_usize (n : uint_size) : both int8 := ret_both (repr _ (unsigned n)).\n\nDefinition uint128_from_uint8 (n : int8) : both int128 := ret_both (repr _ (unsigned n)).\nDefinition uint64_from_uint8 (n : int8) : both int64 := ret_both (repr _ (unsigned n)).\nDefinition uint32_from_uint8 (n : int8) : both int32 := ret_both (repr _ (unsigned n)).\nDefinition uint16_from_uint8 (n : int8) : both int16 := ret_both (repr _ (unsigned n)).\nDefinition usize_from_uint8 (n : int8) : both uint_size := ret_both (repr _ (unsigned n)).\n\nDefinition uint128_from_uint16 (n : int16) : both int128 := ret_both (repr _ (unsigned n)).\nDefinition uint64_from_uint16 (n : int16) : both int64 := ret_both (repr _ (unsigned n)).\nDefinition uint32_from_uint16 (n : int16) : both int32 := ret_both (repr _ (unsigned n)).\nDefinition uint8_from_uint16 (n : int16) : both int8 := ret_both (repr _ (unsigned n)).\nDefinition usize_from_uint16 (n : int16) : both uint_size := ret_both (repr _ (unsigned n)).\n\nDefinition uint128_from_uint32 (n : int32) : both int128 := ret_both (repr _ (unsigned n)).\nDefinition uint64_from_uint32 (n : int32) : both int64 := ret_both (repr _ (unsigned n)).\nDefinition uint16_from_uint32 (n : int32) : both int16 := ret_both (repr _ (unsigned n)).\nDefinition uint8_from_uint32 (n : int32) : both int8 := ret_both (repr _ (unsigned n)).\nDefinition usize_from_uint32 (n : int32) : both uint_size := ret_both (repr _ (unsigned n)).\n\nDefinition uint128_from_uint64 (n : int64) : both int128 := ret_both (repr _ 
(unsigned n)).\nDefinition uint32_from_uint64 (n : int64) : both int32 := ret_both (repr _ (unsigned n)).\nDefinition uint16_from_uint64 (n : int64) : both int16 := ret_both (repr _ (unsigned n)).\nDefinition uint8_from_uint64 (n : int64) : both int8 := ret_both (repr _ (unsigned n)).\nDefinition usize_from_uint64 (n : int64) : both uint_size := ret_both (repr _ (unsigned n)).\n\nDefinition uint64_from_uint128 (n : int128) : both int64 := ret_both (repr _ (unsigned n)).\nDefinition uint32_from_uint128 (n : int128) : both int32 := ret_both (repr _ (unsigned n)).\nDefinition uint16_from_uint128 (n : int128) : both int16 := ret_both (repr _ (unsigned n)).\nDefinition uint8_from_uint128 (n : int128) : both int8 := ret_both (repr _ (unsigned n)).\nDefinition usize_from_uint128 (n : int128) : both uint_size := ret_both (repr _ (unsigned n)).\n\nEnd TodoSection2.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Comparable.v",
    "content": "From Coq Require Import ZArith List.\nFrom Crypt Require Import Package.\n\n(************************************************)\n(*   Implementation of comparision functions    *)\n(************************************************)\n\n(* Typeclass handling of default elements, for use in sequences/arrays.\n   We provide instances for the library integer types *)\nClass Default (A : Type) := {\n  default : A\n}.\nGlobal Arguments default {_} {_}.\n\nClass EqDec (A : Type) :=\n  { eqb : A -> A -> bool ;\n    eqb_leibniz : forall x y, is_true (eqb x y) <-> x = y }.\n\nInfix \"=.?\" := eqb (at level 40) : hacspec_scope.\nInfix \"!=.?\" := (fun a b => negb (eqb a b)) (at level 40) : hacspec_scope.\n\nClass Comparable (A : Type) := {\n  ltb : A -> A -> bool;\n  leb : A -> A -> bool;\n  gtb : A -> A -> bool;\n  geb : A -> A -> bool;\n}.\nInfix \"<.?\" := ltb (at level 42) : hacspec_scope.\nInfix \"<=.?\" := leb (at level 42) : hacspec_scope.\nInfix \">.?\" := gtb (at level 42) : hacspec_scope.\nInfix \">=.?\" := geb (at level 42) : hacspec_scope.\n\nInstance eq_dec_lt_Comparable {A : Type} `{EqDec A} (ltb : A -> A -> bool) : Comparable A := {\n    ltb := ltb;\n    leb a b := if eqb a b then true else ltb a b ;\n    gtb a b := ltb b a;\n    geb a b := if eqb a b then true else ltb b a;\n  }.\n\nInstance eq_dec_le_Comparable {A : Type} `{EqDec A} (leb : A -> A -> bool) : Comparable A := {\n    ltb a b := if eqb a b then false else leb a b;\n    leb := leb ;\n    gtb a b := if eqb a b then false else leb b a;\n    geb a b := leb b a;\n  }.\n\nTheorem eqb_refl : forall {A} {H : EqDec A} (x : A), (@eqb A H x x) = true.\nProof.\n  intros.\n  now apply eqb_leibniz.\nQed.\n\nTheorem eqbP : forall {A} {H : EqDec A} (x y : A), ssrbool.reflect (x = y) (@eqb A H x y).\nProof.\n  intros.\n  apply Bool.iff_reflect.\n  rewrite <- eqb_leibniz.\n  reflexivity.\nQed.\n\nTheorem neqb_leibniz : forall {A} {H : EqDec A} x y, eqb x y = false <-> x <> y .\nProof.\n  intros.\n  
rewrite (ssrbool.rwP ssrbool.negPf).\n  rewrite <- (ssrbool.rwP (@ssrbool.negP (eqb x y))).\n  apply not_iff_compat.\n  apply eqb_leibniz.\nQed.\n\n\nGlobal Program Instance nat_eqdec : EqDec nat := {\n  eqb := Nat.eqb;\n  eqb_leibniz := Nat.eqb_eq ;\n}.\n\nGlobal Instance nat_comparable : Comparable nat := {\n  ltb := Nat.ltb;\n  leb := Nat.leb;\n  gtb a b := Nat.ltb b a;\n  geb a b := Nat.leb b a;\n}.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Controlflow.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\nRequire Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nFrom Coq Require Import ZArith List.\nImport List.ListNotations.\n\nImport choice.Choice.Exports.\n\n(********************************************************)\n(*   Implementation of all Hacspec library functions    *)\n(* for Both types.                                      *)\n(********************************************************)\n\nDeclare Scope hacspec_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\n\nOpen Scope bool_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\nOpen Scope list_scope.\n\nFrom Hacspec Require Import Hacspec_Lib_Integers.\nFrom Hacspec Require Import Hacspec_Lib_Loops.\nFrom Hacspec Require Import Hacspec_Lib_Monad.\nFrom Hacspec Require Import Hacspec_Lib_Ltac.\n\n(* Handle products of size 1 - 4 for foldi_both' *)\nNotation \"'ssp' ( 'fun' a => f )\" :=\n  (((fun (a : both _) => f))) (at level 100, f at next level, a at next level).\n\nNotation \"'ssp' ( 'fun' ' ( a , b ) => f )\" :=\n  (fun (temp : both (_ × _)) => lift_n 1 temp (fun '(a, b) => f)) (at level 100, f at next level, a at next level, b at next level).\n\nNotation \"'ssp' ( 'fun' ' ( a , b , c ) => f )\" :=\n  (fun (temp : both (_ × _ × _)) => lift_n 2 temp (fun '(a, b, c) => f)) (at level 100, f at next level, a at next level, b at next level, c at next level).\n\nNotation \"'ssp' ( 'fun' ' 
( a , b , c , d ) => f )\" :=\n  (fun (temp : both (_ × _ × _ × _)) => lift_n 3 temp (fun '(a, b, c, d) => f)) (at level 100, f at next level, a at next level, b at next level, c at next level, d at next level).\n\n(* eq_fset *)\n(* finmap.finSet *)\n(* https://coq.zulipchat.com/#narrow/stream/237977-Coq-users/topic/aac-tactics.2C.20fset.20automation.2C.20universes *)\n(* Display map / exponenetial maps *)\n\nEquations foldi_both\n        {acc: choice_type}\n        (lo_hi: both uint_size * both uint_size)\n        (f: both uint_size ->\n            both acc ->\n            both acc)\n        (init: both acc)\n         : both (acc) :=\n  foldi_both lo_hi f init :=\n    foldi (fst lo_hi) (snd lo_hi) (@f) (init).\nSolve All Obligations with intros ; solve_fsubset_trans.\nFail Next Obligation.\n\nEquations foldi_both_list\n           {acc B: choice_type}\n        (l : both (chList B))\n        (f: both B ->\n            both acc ->\n            both acc)\n        (init: both acc)\n  : both (acc) :=\n  foldi_both_list l f init :=\n  bind_both l (fun l' => List.fold_left (fun x y => solve_lift @f (solve_lift ret_both y) (x) : both _) l' (solve_lift init)).\nSolve All Obligations with intros ; solve_fsubset_trans.\nSolve All Obligations with intros ; solve_ssprove_obligations.\nFail Next Obligation.\n\nProgram Definition if_both {A} (c : both 'bool) (e_then : both A) (e_else : both A) : both A :=\n  bind_both c (fun b => if b then lift_both e_then else lift_both e_else).\nSolve All Obligations with solve_ssprove_obligations.\nFail Next Obligation.\n\nNotation \"'ifb' b 'then' et 'else' ee\" :=\n  (if_both b et ee) (at level 100).\n\nEquations match_both_option {A B} (x : both (option A)) (fa : both A -> both B) (fb : both B) : both B :=\n  match_both_option x fa fb :=\n  bind_both x (fun y => match y with\n                     | Some a => solve_lift (fa (solve_lift (ret_both a)))\n                     | None => solve_lift fb\n                     end).\nSolve All 
Obligations with solve_ssprove_obligations.\nFail Next Obligation.\n\nNotation \"'matchb' x 'with' '|' 'Option_Some' a '=>' va '|' 'Option_None' '=>' vb 'end'\" :=\n  (match_both_option x (fun a => va) vb).\n\nNotation \"'matchb' x 'with' '|' 'Option_Some' a '=>' va '|' '_' '=>' vb 'end'\" :=\n  (match_both_option x (fun a => va) vb).\n\nProgram Definition foldi_both0_\n        {acc : choice_type}\n        (fuel : nat)\n        (i : both uint_size)\n        (f: both (uint_size) -> both acc -> both (acc))\n        (cur : both acc) : both (acc) :=\n  foldi_ fuel i (@f) (lift_both cur).\nSolve All Obligations with (intros ; (fset_equality || solve_in_fset)).\nFail Next Obligation.\n\nEquations foldi0\n          {acc: choice_type}\n          (lo: both uint_size)\n          (hi: both uint_size) (* {lo <= hi} *)\n          (f: both (uint_size) -> both acc -> both (acc)) (* {i < hi} *)\n          (init: both acc) : both (acc) :=\n  foldi0 lo hi f init :=\n    bind_both lo (fun lo =>\n                    bind_both hi (fun hi =>\n                                    match Z.sub (unsigned hi) (unsigned lo) with\n                                    | Z0 => lift_both init\n                                    | Zneg p => lift_both init\n                                    | Zpos p => foldi_both0_ (Pos.to_nat p) (solve_lift (ret_both lo)) (@f) init\n                                    end))\n.\nSolve All Obligations with (intros ; (fset_equality || solve_in_fset)).\nFail Next Obligation.\n\nDefinition foldi_both0\n        {acc: choice_type}\n        (lo_hi: both uint_size * both uint_size)\n        (f: both uint_size -> both acc -> both (acc)) (* {i < hi} *)\n        (init: both acc)\n  : both (acc) :=\n  foldi0 (fst lo_hi) (snd lo_hi) f init.\n\nEquations foldi_both0_list\n           {acc B: choice_type}\n        (l : both (chList B))\n        (f: both B -> both acc -> both (acc)) (* {i < hi} *)\n        (init: both acc)\n  : both (acc) :=\n  foldi_both0_list l f init :=\n    
bind_both l (fun l' => List.fold_left (fun x y => solve_lift @f (solve_lift ret_both y) (x) : both _) l' (solve_lift init : both _)).\nFail Next Obligation.\n\nNotation \"'f_fold'\" :=\n  (fun lo_hi init f => foldi_both_list lo_hi f init).\n\nProgram Definition if_both0 {A} (c : both 'bool) (e_then : both A) (e_else : both A) : both A :=\n  bind_both c (fun b => if b then lift_both e_then else lift_both  e_else).\nSolve All Obligations with solve_ssprove_obligations.\nFail Next Obligation.\n\nNotation \"'ifb0' b 'then' et 'else' ee\" :=\n  (if_both0 b et ee) (at level 100).\n\nNotation \"'letm[' bind_code_mnd ']' x ':=' y 'in' z\" := (choice_typeMonad.monad_bind_both (BindCode := bind_code_mnd) y (fun x => z)) (at level 100, x pattern).\nNotation \"'letm[' bind_code_mnd ']' ( x : t ) ':=' y 'in' z\" := (choice_typeMonad.monad_bind_both (BindCode := bind_code_mnd) y (fun x => z)) (at level 100, x pattern).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Eq.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\nRequire Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nFrom mathcomp Require Import ssrZ word.\n(* From Jasmin Require Import word. *)\nFrom Crypt Require Import jasmin_word.\n\nFrom Coq Require Import ZArith List.\nImport List.ListNotations.\n\nImport choice.Choice.Exports.\n\n(********************************************************)\n(*   Implementation of all Hacspec library functions    *)\n(* for Both types.                                      *)\n(********************************************************)\n\nDeclare Scope hacspec_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\n\nOpen Scope bool_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\nOpen Scope list_scope.\n\nFrom Hacspec Require Import Hacspec_Lib_Natmod.\n\n(* Comparisons, boolean equality, and notation *)\n\nGlobal Instance int_eqdec `{WS : wsize}: EqDec (@int WS) := {\n  eqb := eqtype.eq_op ;\n  eqb_leibniz := int_eqb_eq ;\n}.\n\nGlobal Instance int_comparable `{WS : wsize} : Comparable (@int WS) :=\n    eq_dec_lt_Comparable (wlt Unsigned).\n\nDefinition uint8_equal (x y : int8) : both 'bool := ret_both (eqb x y : 'bool).\n\nTheorem nat_mod_eqb_spec : forall {p} (a b : nat_mod p),\n    is_pure (nat_mod_equal a b) = true <-> a = b.\nProof.\n  symmetry ; apply (ssrbool.rwP nat_mod_equal_reflect).\nQed.\n\nGlobal Instance nat_mod_eqdec {p} : EqDec (nat_mod p) := {\n  eqb 
a b := is_pure (nat_mod_equal a b);\n  eqb_leibniz := nat_mod_eqb_spec;\n}.\n\nDefinition nat_mod_rem {n : Z} (a:nat_mod n) (b:nat_mod n) : both (nat_mod n) :=\n  ret_both (nat_mod_rem a b).\n\n\nInfix \"rem\" := nat_mod_rem (at level 33) : hacspec_scope.\n\nGlobal Instance bool_eqdec : EqDec bool := {\n  eqb := Bool.eqb;\n  eqb_leibniz := Bool.eqb_true_iff;\n}.\n\nGlobal Instance string_eqdec : EqDec String.string := {\n  eqb := String.eqb;\n  eqb_leibniz := String.eqb_eq ;\n}.\n\nFixpoint list_eqdec {A} `{EqDec A} (l1 l2 : list A) : bool :=\n  match l1, l2 with\n  | x::xs, y::ys => if eqb x y then list_eqdec xs ys else false\n  | [], [] => true\n  | _,_ => false\n  end.\n\nLemma list_eqdec_refl : forall {A} `{EqDec A} (l1 : list A), list_eqdec l1 l1 = true.\nProof.\n  intros ; induction l1 ; cbn ; try rewrite eqb_refl ; easy.\nQed.\n\nLemma list_eqdec_sound : forall {A} `{EqDec A} (l1 l2 : list A), list_eqdec l1 l2 = true <-> l1 = l2.\nProof.\n  intros A H l1.\n  induction l1 ; induction l2 ; split ; intros ; simpl in * ; try easy ; try inversion H0.\n  - (* inductive case *)\n    apply Field_theory.if_true in H0; destruct H0.\n    f_equal.\n    (* show heads are equal *)\n    + apply (proj1 (eqb_leibniz a a0) H0).\n    (* show tails are equal using induction hypothesis *)\n    + apply IHl1. 
assumption.\n  - rewrite eqb_refl.\n    apply list_eqdec_refl.\nQed.\n\nGlobal Instance List_eqdec {A} `{EqDec A} : EqDec (list A) := {\n  eqb := list_eqdec;\n  eqb_leibniz := list_eqdec_sound;\n}.\n\nLemma vector_eqb_sound : forall {A : Type} {n : nat} `{EqDec A} (v1 v2 : VectorDef.t A n), Vector.eqb _ eqb v1 v2 = true <-> v1 = v2.\nProof.\n  intros.\n  apply Vector.eqb_eq.\n  intros.\n  apply eqb_leibniz.\nQed.\n\nGlobal Program Instance Vector_eqdec {A n} `{EqDec A}: EqDec (VectorDef.t A n) := {\n  eqb := Vector.eqb _ eqb;\n  eqb_leibniz := vector_eqb_sound;\n}.\n\nGlobal Program Instance Dec_eq_prod (A B : Type) `{EqDec A} `{EqDec B} : EqDec (A * B) := {\n  eqb '(a0, b0) '(a1, b1) := andb (eqb a0 a1) (eqb b0 b1)\n}.\nNext Obligation.\n  split ; intros ; destruct x ; destruct y.\n  - (* symmetry in H1. *)\n    (* apply Bool.andb_true_eq in H1. destruct H1. *)\n    rewrite is_true_split_and in H1. destruct H1.\n    rewrite (eqb_leibniz) in H1.\n    rewrite (eqb_leibniz) in H2. now subst.\n  - inversion_clear H1. 
now do 2 rewrite eqb_refl.\nDefined.\n\nFixpoint array_eq_\n  {a: choice_type}\n  {len: nat}\n  (eq: ( (a)) -> ( (a)) -> bool)\n  (s1: ( (nseq_ a len)))\n  (s2 : ( (nseq_ a len)))\n  {struct len}\n  : bool.\nProof.\n  destruct len ; cbn in *.\n  - exact  true.\n  - destruct (getm s1 (fintype.Ordinal (m := len) (ssrnat.ltnSn _))) as [s | ].\n    + destruct (getm s2 (fintype.Ordinal (m := len) (ssrnat.ltnSn _))) as [s0 | ].\n      * exact (eq s s0).\n      * exact false.\n    + exact false.\nDefined.\n\nInfix \"array_xor\" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_xor)) (at level 33) : hacspec_scope.\nInfix \"array_add\" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_add)) (at level 33) : hacspec_scope.\nInfix \"array_minus\" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_sub)) (at level 33) : hacspec_scope.\nInfix \"array_mul\" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_mul)) (at level 33) : hacspec_scope.\nInfix \"array_div\" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_div)) (at level 33) : hacspec_scope.\nInfix \"array_or\" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_or)) (at level 33) : hacspec_scope.\nInfix \"array_and\" := (@array_join_map (int _) _ _ _ _ _ (fun _ _ _ _ => int_and)) (at level 33) : hacspec_scope.\n\nInfix \"array_eq\" := (array_eq_ eq) (at level 33) : hacspec_scope.\nInfix \"array_neq\" := (fun s1 s2 => negb (array_eq_ eq s1 s2)) (at level 33) : hacspec_scope.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Integers.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\nRequire Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nFrom Coq Require Import ZArith List.\nImport List.ListNotations.\n\n(*** Integers *)\n\nDeclare Scope hacspec_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\n\nOpen Scope bool_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\nOpen Scope list_scope.\n\nImport choice.Choice.Exports.\n\nEquations int_add {WS} (x : both (int WS)) (y : both (int WS)) : both (int WS) :=\n  int_add := lift2_both (Hacspec_Lib_Pre.int_add).\nFail Next Obligation.\n\nEquations int_sub {WS}\n  (x : both (int WS)) (y : both (int WS))\n  : both (int WS) :=\n  int_sub := (lift2_both (Hacspec_Lib_Pre.int_sub)).\nFail Next Obligation.\n\nEquations int_opp {WS} (x : both (int WS)) : both (int WS) :=\n  int_opp := (lift1_both (Hacspec_Lib_Pre.int_opp)).\nFail Next Obligation.\n\nEquations int_mul {WS}\n  (x : both (int WS)) (y : both (int WS))\n  : both (int WS) :=\n  int_mul := (lift2_both (Hacspec_Lib_Pre.int_mul)).\nFail Next Obligation.\n\nEquations int_div {WS}\n  (x : both (int WS)) (y : both (int WS))\n  : both (int WS) :=\n  int_div := (lift2_both (Hacspec_Lib_Pre.int_div : int _ -> int _ -> int _)).\nFail Next Obligation.\n\nEquations int_mod {WS}\n  (x : both (int WS)) (y : both (int WS))\n  : both (int WS) :=\n  int_mod := (lift2_both (Hacspec_Lib_Pre.int_mod : int _ -> int _ -> int _)).\nFail Next 
Obligation.\n\nEquations int_xor {WS}\n  (x : both (int WS)) (y : both (int WS))\n  : both (int WS) :=\n  int_xor := (lift2_both (Hacspec_Lib_Pre.int_xor : int _ -> int _ -> int _)).\nFail Next Obligation.\n\nEquations int_and {WS}\n  (x : both (int WS)) (y : both (int WS))\n  : both (int WS) :=\n  int_and := (lift2_both (Hacspec_Lib_Pre.int_and : int _ -> int _ -> int _)).\nFail Next Obligation.\n\nEquations int_or {WS}\n  (x : both (int WS)) (y : both (int WS))\n  : both (int WS) :=\n  int_or := (lift2_both (Hacspec_Lib_Pre.int_or : int _ -> int _ -> int _)).\nFail Next Obligation.\n\nEquations int_not {WS} (x : both (int WS)) : both (int WS) :=\n  int_not := (lift1_both (Hacspec_Lib_Pre.int_not : int _ -> int _)).\nFail Next Obligation.\n\nEquations cast_int {WS1 WS2} (x : both (int WS1)) : both (int WS2) :=\n  cast_int := (lift1_both (fun (n : int _) => repr _ (unsigned n))).\nFail Next Obligation.\n(* End IntType. *)\n\nNotation secret := (lift1_both secret).\n\nInfix \".%%\" := int_modi (at level 40, left associativity) : Z_scope.\nInfix \".+\" := int_add (at level 77) : hacspec_scope.\nInfix \".-\" := int_sub (at level 77) : hacspec_scope.\nNotation \"-\" := int_opp (at level 77) : hacspec_scope.\nInfix \".*\" := int_mul (at level 77) : hacspec_scope.\nInfix \"./\" := int_div (at level 77) : hacspec_scope.\nInfix \".%\" := int_mod (at level 77) : hacspec_scope.\nInfix \".^\" := int_xor (at level 77) : hacspec_scope.\nInfix \".&\" := int_and (at level 77) : hacspec_scope.\nInfix \".|\" := int_or (at level 77) : hacspec_scope.\nNotation \"'not'\" := int_not (at level 77) : hacspec_scope.\n\n(* Section Uint. 
*)\nNotation uint8_declassify := (lift1_both uint8_declassify).\nNotation int8_declassify := (lift1_both int8_declassify).\nNotation uint16_declassify := (lift1_both uint16_declassify).\nNotation int16_declassify := (lift1_both int16_declassify).\nNotation uint32_declassify := (lift1_both uint32_declassify).\nNotation int32_declassify := (lift1_both int32_declassify).\nNotation uint64_declassify := (lift1_both uint64_declassify).\nNotation int64_declassify := (lift1_both int64_declassify).\nNotation uint128_declassify := (lift1_both uint128_declassify).\nNotation int128_declassify := (lift1_both int128_declassify).\n\nNotation uint8_classify := (lift1_both uint8_classify).\nNotation int8_classify := (lift1_both int8_classify).\nNotation uint16_classify := (lift1_both uint16_classify).\nNotation int16_classify := (lift1_both int16_classify).\nNotation uint32_classify := (lift1_both uint32_classify).\nNotation int32_classify := (lift1_both int32_classify).\nNotation uint64_classify := (lift1_both uint64_classify).\nNotation int64_classify := (lift1_both int64_classify).\nNotation uint128_classify := (lift1_both uint128_classify).\nNotation int128_classify := (lift1_both int128_classify).\n\n(* CompCert integers' signedness is only interpreted through 'signed' and 'unsigned',\n   and not in the representation. 
Therefore, uints are just names for their respective ints.\n *)\n\nNotation declassify_usize_from_uint8 := (lift1_both declassify_usize_from_uint8).\nNotation declassify_u32_from_uint32 := (lift1_both declassify_u32_from_uint32).\n\nNotation uint8_rotate_left := (lift2_both uint8_rotate_left).\n\nNotation uint8_rotate_right := (lift2_both uint8_rotate_right).\n\nNotation uint16_rotate_left := (lift2_both uint16_rotate_left).\n\nNotation uint16_rotate_right := (lift2_both uint16_rotate_right).\n\nNotation uint32_rotate_left := (lift2_both uint32_rotate_left).\n\nNotation uint32_rotate_right := (lift2_both uint32_rotate_right).\n\nNotation uint64_rotate_left := (lift2_both uint64_rotate_left).\n\nNotation uint64_rotate_right := (lift2_both uint64_rotate_right).\n\nNotation uint128_rotate_left := (lift2_both uint128_rotate_left).\n\nNotation uint128_rotate_right := (lift2_both uint128_rotate_right).\nNotation usize_shift_right_ := (lift2_both (fun u s => u usize_shift_right s)).\n\nNotation usize_shift_left_ :=\n  (fun (u: both (fset []) ([interface]) uint_size) (s: both (fset []) ([interface]) int32) =>\n     {|\n       is_pure := (is_pure u) usize_shift_left (is_pure s) ;\n       is_state :=\n         {code\n            temp_u ← is_state u ;;\n          temp_s ← is_state s ;;\n          ret (temp_u usize_shift_left temp_s)\n         }\n     |}).\n\n(**** Operations *)\n\nNotation shift_left_ := (lift2_both shift_left_).\nNotation shift_right_ := (lift2_both shift_right_).\n\n(* End Uint. *)\n\nInfix \"usize_shift_right\" := (usize_shift_right_) (at level 77) : hacspec_scope.\nInfix \"usize_shift_left\" := (usize_shift_left_) (at level 77) : hacspec_scope.\n\nInfix \"shift_left\" := (shift_left_) (at level 77) : hacspec_scope.\nInfix \"shift_right\" := (shift_right_) (at level 77) : hacspec_scope.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Loops.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\nRequire Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nFrom Coq Require Import ZArith List.\nImport List.ListNotations.\nImport choice.Choice.Exports.\n\n(********************************************************)\n(*   Implementation of all Hacspec library functions    *)\n(* for Both types.                                      *)\n(********************************************************)\n\nDeclare Scope hacspec_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\n\nOpen Scope bool_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\nOpen Scope list_scope.\n\nFrom Hacspec Require Import Hacspec_Lib_Integers.\n\n(*** Loops *)\n\nSection Loops.\n\n  Program Fixpoint foldi_\n           {acc : choice_type}\n           (fuel : nat)\n           (i : both uint_size)\n           (f: both uint_size -> both acc -> both (acc))\n           (cur : both acc)\n           {struct fuel} : both (acc) :=\n    match fuel with\n    | 0 => lift_both cur\n    | S n' => foldi_ n' (int_add i (ret_both one)) f (f i cur)\n    end.\n  Solve All Obligations with (intros ; (fset_equality || solve_in_fset)).\n  Fail Next Obligation.\n\n  Equations foldi_both_\n           {acc : choice_type}\n           (fuel : nat)\n           (i : both uint_size)\n           (f: both (uint_size) -> both acc -> both (acc))\n           (cur : both acc) : both (acc) :=\n    foldi_both_ fuel i 
f cur :=\n      match fuel with\n      | 0 => lift_both cur\n      | S n' => solve_lift foldi_both_ n' (int_add i (ret_both one)) (fun x y => solve_lift f (solve_lift x) y) (f i (solve_lift cur))\n      end.\n  Solve All Obligations with (intros ; (fset_equality || solve_in_fset)).\n  Fail Next Obligation.\n\n  Equations foldi\n             {acc: choice_type}\n             (lo: both uint_size)\n             (hi: both uint_size) (* {lo <= hi} *)\n           (f: both (uint_size) -> both acc -> both (acc)) (* {i < hi} *)\n             (init: both acc) : both (acc) :=\n    foldi lo hi f init :=\n      bind_both lo (fun lo =>\n      bind_both hi (fun hi =>\n      match Z.sub (unsigned hi) (unsigned lo) with\n      | Z0 => lift_both init\n      | Zneg p => lift_both init\n      | Zpos p => foldi_both_ (Pos.to_nat p) (solve_lift (ret_both lo)) (@f) init\n      end))\n  .\n  Solve All Obligations with (intros ; (fset_equality || solve_in_fset)).\n  Fail Next Obligation.\n\n  (* Fold done using natural numbers for bounds *)\n  Fixpoint foldi_nat_\n           {acc : choice_type}\n           (fuel : nat)\n           (i : nat)\n           (f : nat -> chElement acc -> raw_code (acc))\n           (cur : acc) : raw_code (acc) :=\n    match fuel with\n    | O => ret (cur)\n    | S n' =>\n        cur' ← f i cur ;;\n        foldi_nat_ n' (S i) f (cur')\n    end.\n  Definition foldi_nat\n             {acc: choice_type}\n             (lo: nat)\n             (hi: nat) (* {lo <= hi} *)\n             (f: nat -> acc -> raw_code (acc)) (* {i < hi} *)\n             (init: acc) : raw_code (acc) :=\n    match Nat.sub hi lo with\n    | O => ret (init)\n    | S n' => foldi_nat_ (S n') lo f init\n    end.\n\n  Lemma foldi__nat_move_S :\n    forall {acc: choice_type}\n      (fuel : nat)\n      (i : nat)\n      (f : nat -> acc -> raw_code (acc))\n      (cur : acc),\n      (cur' ← f i cur ;; foldi_nat_ fuel (S i) f (cur')) = foldi_nat_ (S fuel) i f cur.\n  Proof. reflexivity. 
Qed.\n\n  Lemma foldi__nat_move_S_append :\n    forall {acc: choice_type}\n      (fuel : nat)\n      (i : nat)\n      (f : nat -> acc -> raw_code (acc))\n      (cur : acc),\n      (cur' ← foldi_nat_ fuel i f (cur) ;; f (i + fuel) (cur')) = foldi_nat_ (S fuel) i f cur.\n  Proof.\n\n    induction fuel ; intros.\n    - rewrite <- foldi__nat_move_S.\n      unfold foldi_nat_.\n      replace (fun cur' => @ret acc ((cur'))) with (fun cur' => @ret acc cur').\n      2: {\n        apply functional_extensionality.\n        reflexivity.\n      }\n      rewrite bind_ret.\n      unfold bind at 1.\n      rewrite Nat.add_0_r.\n      reflexivity.\n    - rewrite <- foldi__nat_move_S.\n      rewrite <- foldi__nat_move_S.\n      rewrite bind_assoc.\n      f_equal.\n      apply functional_extensionality.\n      intros.\n      replace (i + S fuel) with (S i + fuel) by lia.\n      rewrite IHfuel.\n      reflexivity.\n  Qed.\n\n  Lemma foldi__nat_move_to_function :\n    forall {acc: choice_type}\n      (fuel : nat)\n      (i : nat)\n      (f : nat -> acc -> raw_code (acc))\n      (cur : acc),\n      foldi_nat_ fuel i (fun x => f (S x)) (cur) = foldi_nat_ fuel (S i) f cur.\n  Proof.\n    induction fuel ; intros.\n    - reflexivity.\n    - cbn.\n      f_equal.\n      apply functional_extensionality.\n      intros.\n      rewrite IHfuel.\n      reflexivity.\n  Qed.\n\n  Lemma foldi__nat_move_to_function_add :\n    forall {acc: choice_type}\n      (fuel : nat)\n      (i j : nat)\n      (f : nat -> acc -> raw_code (acc))\n      (cur : acc),\n      foldi_nat_ fuel i (fun x => f (x + j)) (cur) = foldi_nat_ fuel (i + j) f cur.\n  Proof.\n    intros acc fuel i j. 
generalize dependent i.\n    induction j ; intros.\n    - rewrite Nat.add_0_r.\n      replace (fun x : nat => f (x + 0)) with f.\n      reflexivity.\n      apply functional_extensionality.\n      intros.\n      now rewrite Nat.add_0_r.\n    - replace (i + S j) with (S i + j) by lia.\n      rewrite <- IHj.\n      rewrite <- foldi__nat_move_to_function.\n      f_equal.\n      apply functional_extensionality.\n      intros.\n      f_equal.\n      lia.\n  Qed.\n\n  Lemma bind_raw_both_ret :\n    forall {A B : choice_type} (x : A) (f : A -> both B), bind_raw_both (both_ret x) f = f x.\n  Proof.\n    intros.\n    unfold bind_raw_both.\n    simpl.\n    destruct (f x).\n    destruct both_prog.\n    simpl.\n    reflexivity.\n  Qed.\n\n  Lemma bind_raw_both_assoc :\n    forall {A B C : choice_type} (v : raw_both A) (k1 : A -> raw_both B) (k2 : B -> raw_both C),\n  (bind_raw_both (bind_raw_both v k1) k2 = (bind_raw_both v (fun x => bind_raw_both (k1 x) k2))).\n  Proof.\n    intros.\n    unfold bind_raw_both.\n    simpl.\n    rewrite bind_assoc.\n    reflexivity.\n  Qed.\n\n  Lemma valid_remove_back :\n    forall x (xs : {fset Location}) I {ct} c,\n      ValidCode (fset xs) I c ->\n      @ValidCode (fset (xs ++ [x])) I ct c.\n  Proof.\n    intros.\n    apply (valid_injectLocations) with (L1 := fset xs).\n    - rewrite fset_cat.\n      apply fsubsetUl.\n    - apply H.\n  Qed.\n\n  Lemma list_constructor : forall {A : Type} (x : A) (xs : list A) (l : list A) (H : (x :: xs) = l), (l <> []).\n  Proof.\n    intros.\n    subst.\n    easy.\n  Qed.\n\n  Definition pop_back {A : Type} (l : list A) :=\n    match (rev l) with\n    | [] => []\n    | (x :: xs) => rev xs ++ [x]\n    end.\n\n  Theorem pop_back_ignore_front : forall {A} (a : A) (l : list A), pop_back (a :: l) = a :: pop_back l.\n  Proof.\n    intros.\n    induction l ; intros.\n    - reflexivity.\n    - unfold pop_back.\n      destruct (rev (a :: a0 :: l)) eqn:orev.\n      { apply f_equal with (f := @rev A) in orev.\n        
rewrite (rev_involutive) in orev.\n        discriminate orev.\n      }\n      cbn in orev.\n\n      destruct (rev (a0 :: l)) eqn:orev2.\n      { apply f_equal with (f := @rev A) in orev2.\n        rewrite (rev_involutive) in orev2.\n        discriminate orev2.\n      }\n      cbn in orev2.\n      rewrite orev2 in orev ; clear orev2.\n\n      inversion_clear orev.\n      rewrite rev_unit.\n      reflexivity.\n  Qed.\n\n  Theorem pop_back_is_id : forall {A} (l : list A), l = pop_back l.\n  Proof.\n    intros.\n    induction l.\n    - reflexivity.\n    - destruct l.\n      + reflexivity.\n      + rewrite pop_back_ignore_front.\n        rewrite <- IHl.\n        reflexivity.\n  Qed.\n\n  Ltac valid_remove_back' :=\n    match goal with\n    | _ : _ |- (ValidCode (fset (?l)) _ _) =>\n        rewrite (@pop_back_is_id _ l)\n    end ;\n    apply valid_remove_back.\n\n\n  Lemma valid_remove_front :\n    forall x xs I {ct} c,\n      ValidCode (fset xs) I c ->\n      @ValidCode (fset (x :: xs)) I ct c.\n  Proof.\n    intros.\n    apply (@valid_injectLocations) with (L1 := fset xs).\n    - replace (x :: xs) with (seq.cat [x] xs) by reflexivity.\n      rewrite fset_cat.\n      apply fsubsetUr.\n    - apply H.\n  Qed.\n\nTheorem for_loop_unfold :\n  forall c n,\n  for_loop (fun m : nat => c m) (S n) =\n    (c 0 ;; for_loop (fun m : nat => c (S m)) (n) ).\n  cbn.\n  induction n ; intros.\n  - reflexivity.\n  - unfold for_loop ; fold for_loop.\n    cbn.\n    rewrite IHn.\n    rewrite bind_assoc.\n    reflexivity.\nQed.\n\nEnd Loops.\n\n\n(*** For loop again *)\n\n(* SSProve for loop is inclusive upperbound, while hacspec is exclusive upperbound *)\nDefinition for_loop_range\n  (lo: nat)\n  (hi: nat)\n  (f : nat -> raw_code 'unit) : raw_code 'unit :=\n  match hi - lo with\n  | O => @ret 'unit tt\n  | S i => for_loop (fun n => f (n + lo)) i\n  end.\n\nFixpoint list_types_ (l : list choice_type) (init : choice_type) : choice_type  :=\n  match l with\n  | (t :: ts) => list_types_ ts t × 
init\n  | [] => init\n  end.\n\nDefinition list_types (l : list choice_type) : choice_type :=\n  match l with\n  | [] => 'unit\n  | (t :: ts) => list_types_ ts t\n  end.\n\nProgram Fixpoint vars_to_tuple (vars : list (∑ (t : choice_type), t)) {measure (length vars)} : list_types (seq.map (fun '(x ; y) => x) vars) :=\n  match vars with\n  | [] => tt\n  | [x] => _\n  | (x :: s :: xs) => (vars_to_tuple (s :: xs) , _)\n  end.\n\nFixpoint for_loop_return_ (ℓ : list Location) (vars : list (∑ (t : choice_type), t)) : raw_code (list_types (seq.cat (seq.map (fun '(x ; y) => x) vars) (seq.map (fun '(x ; y) => x) ℓ) )).\n\n  destruct ℓ as [ | l ls ].\n  - rewrite seq.cats0.\n    pose (ret (vars_to_tuple vars)).\n    replace (fun pat : ∑ t : choice_type, t => _) with\n      (fun pat : @sigT choice_type\n       (fun t : choice_type => t) =>\n         match pat return choice_type with\n         | @existT _ _ x _ => x\n         end)\n      in r by (apply functional_extensionality ; now intros []).\n    apply r.\n  - apply (getr (l)).\n    intros x.\n    destruct l.\n    cbn in x.\n    pose (for_loop_return_ ls (vars ++ [(x0 ; x)])).\n    rewrite seq.map_cat in r.\n    cbn in r.\n    rewrite <- seq.catA in r.\n    cbn in r.\n    apply r.\nDefined.\n\nDefinition for_loop_return (ℓ : list Location) : raw_code (list_types (seq.map (fun '(x ; y) => x) ℓ)) := for_loop_return_ ℓ [].\n\nDefinition for_loop_locations\n           (lo: nat)\n           (hi: nat)\n           (ℓ : list Location)\n           (f : nat -> raw_code 'unit) :=\n  match hi - lo with\n  | O => @ret 'unit tt\n  | S i => for_loop (fun n => f (n + lo)) i\n  end  ;; for_loop_return ℓ.\n\nTheorem empty_put {B} ℓ v (k h : raw_code B) :\n  ⊢ ⦃ true_precond ⦄ k ≈ h ⦃ pre_to_post true_precond ⦄ ->\n  ⊢ ⦃ true_precond ⦄ #put ℓ := v ;; k ≈ h ⦃ pre_to_post true_precond ⦄.\nProof.\n  intros.\n  apply better_r_put_lhs.\n  eapply rpre_weaken_rule.\n  apply H.\n  intros.\n  reflexivity.\nQed.\n\nTheorem length_merge_sort_pop : 
forall {A} leb (l1 : list (list A)) (l2 : list A),\n    length (path.merge_sort_pop leb l2 l1) = length (seq.cat (seq.flatten l1) l2).\nProof.\n  intros.\n  generalize dependent l2.\n  induction l1 ; intros.\n  - cbn.\n    reflexivity.\n  - cbn.\n    rewrite IHl1.\n    rewrite seq.size_cat.\n    rewrite seq.size_cat.\n    rewrite seq.size_cat.\n    rewrite path.size_merge.\n    rewrite seq.size_cat.\n    rewrite ssrnat.addnA.\n    f_equal.\n    rewrite ssrnat.addnC.\n    reflexivity.\nQed.\n\nTheorem length_sort_even : forall {A} leb a x (l1 : list (list A)) (l2 : list A),\n    length (path.merge_sort_rec leb l1 (a :: x :: l2)) =\n    length (path.merge_sort_rec leb\n        (path.merge_sort_push leb (if leb a x then [a; x] else [x; a]) l1) l2).\nProof.\n  reflexivity.\nQed.\n\nTheorem length_sort_is_length' : forall {A} leb (l1 : list (list A)),\n    length (path.merge_sort_rec leb l1 []) = length (seq.flatten l1).\nProof.\n  destruct l1.\n  + cbn.\n    reflexivity.\n  + cbn.\n    rewrite length_merge_sort_pop.\n    rewrite seq.size_cat.\n    rewrite seq.size_cat.\n    rewrite path.size_merge.\n    rewrite seq.cats0.\n    rewrite ssrnat.addnC.\n    reflexivity.\nQed.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Ltac.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\nRequire Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nFrom mathcomp Require Import ssrZ word.\n(* From Jasmin Require Import word. *)\nFrom Crypt Require Import jasmin_word.\n\nFrom Coq Require Import ZArith List.\nImport List.ListNotations.\n\nImport choice.Choice.Exports.\n\n(********************************************************)\n(*   Implementation of all Hacspec library functions    *)\n(* for Both types.                                      *)\n(********************************************************)\n\nDeclare Scope hacspec_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\n\nOpen Scope bool_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\nOpen Scope list_scope.\n\nLtac pattern_foldi_both Hx Hf Hg :=\n  match goal with\n    | [ |- context [ ⊢ ⦃ _ ⦄ bind _ (foldi _ _ _ ?fb) ≈ ?os ⦃ _ ⦄ ] ] =>\n        let H := fresh in\n        set (H := os)\n        ; set (Hx := Hacspec_Lib_Pre.foldi _ _ _ _) in H\n        ; pattern Hx in H\n        ; subst H\n        ; set (Hf := fb)\n        ; match goal with\n          | [ |- context [ ⊢ ⦃ _ ⦄ _ ≈ ?gb _ ⦃ _ ⦄ ] ] =>\n              set (Hg := gb)\n          end\n  | [ |- context [ ⊢ ⦃ _ ⦄ prog (foldi _ _ _ ?fb) ≈ ?os ⦃ _ ⦄ ] ] =>\n        let H := fresh in\n        set (H := os)\n        ; set (Hx := Hacspec_Lib_Pre.foldi _ _ _ _) in H\n        ; pattern Hx in H\n        ; subst H\n        ; 
set (Hf := fb)\n        ; match goal with\n          | [ |- context [ ⊢ ⦃ _ ⦄ _ ≈ ?gb _ ⦃ _ ⦄ ] ] =>\n              set (Hg := gb)\n          end\n    end.\n\nLtac pattern_foldi_both_fresh :=\n  let Hx := fresh in\n  let Hf := fresh in\n  let Hg := fresh in\n  pattern_foldi_both Hx Hf Hg.\n\nTheorem r_bind_trans_as_both : forall {B C : choice_type} {L I} (f : choice.Choice.sort B -> raw_code C) (g : B -> raw_code C) (state : code L I (B))\n    (pure : B),\n  forall (P : precond) (Q : postcond _ _),\n    (⊢ ⦃ true_precond ⦄\n        state ≈ lift_to_code (L := L) (I := I) (pure)\n    ⦃ pre_to_post_ret true_precond (pure) ⦄) ->\n    (⊢ ⦃ true_precond ⦄ f (pure)  ≈ g pure ⦃ Q ⦄) ->\n    (⊢ ⦃ P ⦄ temp ← state ;; f temp ≈ g (pure) ⦃ Q ⦄).\nProof.\n  intros.\n  eapply r_bind_trans with (P_mid := true_precond).\n\n  eapply rpre_weaken_rule.\n  apply H.\n\n  reflexivity.\n\n  intros.\n  apply H0.\nQed.\n\nLtac progress_step_code :=\n  match_foldi_both\n  || (match_bind_trans_both)\n  || match goal with\n    | [ |- context [ ⊢ ⦃ _ ⦄ (#put ?l := ?x ;; (getr ?l ?a)) ≈ _ ⦃ _ ⦄ ]] =>\n        apply better_r_put_get_lhs\n    end\n  ||\n  match goal with\n  | [ |- context [ ⊢ ⦃ _ ⦄ (#put ?l := ?x ;; (putr ?l ?y ?a)) ≈ _ ⦃ _ ⦄ ]] =>\n      apply (r_transL (#put l := y ;; a )) ;\n      [ apply contract_put | ]\n  end\n  ||\n  match goal with\n  | [ |- context [ ⊢ ⦃ _ ⦄ (#put ?l := ?x ;; ?a) ≈ ?b ⦃ _ ⦄ ]] =>\n      apply (better_r_put_lhs l x a b)\n  end\n  ||\n  (unfold lift_to_code ; apply r_ret)\n  ||\n  (rewrite bind_assoc)\n    with\n    match_foldi_both :=\n    let Hx := fresh in\n    let Hf := fresh in\n    let Hg := fresh in\n    pattern_foldi_both Hx Hf Hg\n    ; try (apply (@r_bind_trans_as_both) with (f := Hf) (g := Hg))\n    ; intros ; subst Hf ; subst Hg ; subst Hx ; hnf\n    (* ; [apply foldi_as_both ; [ try (cbn ; Lia.lia) | intros ; unfold lift_to_code ; unfold prog ] | step_code] *)\n    with\n    step_code :=\n      repeat (clear_bind || progress_step_code) ; try 
easy\n        with\n        clear_bind :=\n        (unfold lift_to_code ;\n         match goal with\n         | [ |- context [ bind ?y (fun x => ret (_)) ] ] =>\n             let H := fresh in\n             set (H := y)\n\n             ; rewrite bind_ret\n             ; subst H\n         | [ |- context [ bind ?y (fun x => ret _) ] ] =>\n             let H := fresh in\n             set (H := y)\n\n             ; rewrite bind_ret\n             ; subst H\n         end)\n        ||\n        (repeat (rewrite bind_assoc)\n        ; match goal with\n          | [ |- context [ bind (ret (?y)) (fun x => _) ] ] =>\n              let H := fresh in\n              set (H := y)\n\n              ; rewrite bind_rewrite\n              ; subst H\n          | [ |- context [ bind (ret ?y) (fun x => _) ] ] =>\n              let H := fresh in\n              set (H := y)\n              ; rewrite bind_rewrite\n              ; subst H\n          end).\n\nLtac init_both_proof b_state b_pure :=\n  intros ;\n  unfold lift_to_code ;\n  cbv delta [b_state] ;\n  cbn beta ;\n  let H := fresh in\n  match goal with\n  | [ |- context [(prog {code ?x})] ] =>\n      set (H := x)\n  end ;\n  unfold prog ;\n  cbv delta [b_pure] ;\n  subst H ;\n  cbn beta.\n\nLtac f_equal_fun_ext :=\n  repeat (apply f_equal ; try (apply Coq.Logic.FunctionalExtensionality.functional_extensionality ; intros)).\n\nLtac ssprove_valid_step :=\n  (progress\n     (\n       cbv zeta\n       || unfold prog\n       || (match goal with | [ |- context[ @bind ?A ?B (ret ?x) ?f ]] => rewrite bind_rewrite end)\n       || match goal with\n         | [ |- context[match ?x with | true => _ | false => _ end] ] =>\n             destruct x\n         end\n       || match goal with\n         | [ |- context[match ?x with | tt => _ end] ] =>\n             destruct x\n         end\n       || match goal with\n         | [ |- context[match ?x with | inl _ => _ | inr _ => _ end] ] =>\n             destruct x\n         end\n       || (match goal with 
| [ |- context[bind (bind ?v ?k1) ?k2] ] => rewrite bind_assoc end)\n       || (apply valid_bind ; [apply valid_scheme ; try rewrite <- fset.fset0E ; apply prog_valid | intros])\n       || (apply valid_bind ; [valid_program | intros])\n       || (apply valid_bind ; [repeat ssprove_valid_step | intros])\n       || (apply valid_opr ; [ (* ssprove_valid_opsig *) | intros ] )\n       ||  match goal with\n         | [ |- context [ putr _ _ _ ] ] => (apply valid_putr ; [ (* ssprove_valid_location *) | ])\n\n         end\n\n       || match goal with\n         | [ |- context [ getr _ _ ] ] => (apply valid_getr ; [ (* ssprove_valid_location *) | intros])\n         end\n       || (match goal with\n          | [ |- context [ValidCode (fset ?ys) _ (@prog _ _ _ (@foldi _ ?lo ?hi (fset ?xs) _ ?f ?v))] ] =>\n              simpl (* !! TODO !! *)\n              (* eapply (valid_subset_fset xs ys) ; [ | apply foldi ] *)\n              (* ; loc_incl_compute *)\n          end)\n       || apply valid_ret\n       || (hnf in * ; destruct_choice_type_prod)\n  )).\n\n\nLtac ssprove_valid'_2 :=\n  repeat ssprove_valid_step\n  ; ssprove_valid_program\n  (* ; try ssprove_valid_location *).\n\nLtac ssprove_valid_package :=\n  (repeat apply valid_package_cons ; [ apply valid_empty_package | .. 
| try (rewrite <- fset0E ; setoid_rewrite @imfset0 ; rewrite in_fset0 ; reflexivity) ] ; intros ; progress unfold prog).\n\nLtac solve_zero :=\n  match goal with\n  | [ |- context [ (_ <= _)%Z ] ] =>\n      cbn ;\n      match goal with\n      | [ |- context [ (0 <= toword ?x)%Z ] ] =>\n          let H := fresh in\n          let H_zero := fresh in\n          let H_succ := fresh in\n          set (H := x)\n          ; destruct_uint_size_as_nat_named H H_zero H_succ\n          ; [ reflexivity | cbn in H_succ ; cbn ; try rewrite H_succ ; Lia.lia ]\n      end\n  end.\n\nLtac solve_in_mem :=\n  normalize_fset ;\n  repeat (rewrite (@in_fsetU loc_ordType) ; rewrite (is_true_split_or_)) ; try rewrite <- !fset1E ; try rewrite (ssrbool.introT (fset1P _ _) eq_refl) ; repeat (reflexivity || (left ; reflexivity) || right).\n\nLtac solve_ssprove_obligations :=\n  repeat (\n      intros ; autounfold ; normalize_fset ;\n      (now solve_in_mem) (* TODO: add match goal *)\n      || (now fset_equality) (* TODO: add match goal *)\n      || (now solve_in_fset) (* TODO: add match goal *)\n      || (ssprove_valid'_2)\n      || ((now (* try *) (Tactics.program_simpl; fail)))).\n\nLtac solve_fsubset_trans :=\n  now (solve_match || (refine (fsubset_trans _ _) ; [ | eassumption ] ; solve_ssprove_obligations)).\n\nLtac solve_foldi_fsubset_trans :=\n  normalize_fset ;\n  repeat (rewrite is_true_split_and || rewrite fsubUset) ;\n  repeat (try rewrite andb_true_intro ; split) ;\n  repeat (solve_fsubset_trans || apply fsubsetU ; rewrite is_true_split_or ; ((left ; solve_fsubset_trans) || right)).\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Monad.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\nRequire Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nFrom Coq Require Import ZArith List.\nImport List.ListNotations.\n\nImport choice.Choice.Exports.\n\n(********************************************************)\n(*   Implementation of all Hacspec library functions    *)\n(* for Both types.                                      *)\n(********************************************************)\n\nDeclare Scope hacspec_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\n\nOpen Scope bool_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\nOpen Scope list_scope.\n\n(*** Monad / Bind *)\n\nDefinition result_unwrap {a b} (x : result b a) : both (a) :=\n  ret_both (result_unwrap x).\nDefinition result_unwrap_safe {a b} (x : result b a) `{match x with inl _ => True | inr _ => False end} : both (a) :=\n  ret_both (result_unwrap_safe x (H := H)).\n\nModule choice_typeMonad.\n\n  Class BindCode :=\n    {\n      mnd :> choice_typeMonad.CEMonad ;\n      monad_bind_both {A B : choice_type} (x : both (choice_typeMonad.M (CEMonad := mnd) A)) (f : both A -> both (choice_typeMonad.M (CEMonad := mnd) B)) : both (choice_typeMonad.M (CEMonad := mnd) B) ;\n    }.\n\n  #[global] Program Instance result_bind_code C : BindCode :=\n    {|\n      mnd := (@choice_typeMonad.result_monad C) ;\n      monad_bind_both _ _ x f := bind_both x (fun x => match x with\n             
                                       | inl s => f (solve_lift ret_both s)\n                                                    | inr s => solve_lift ret_both (Err s)\n                                                    end)\n    |}.\n  Solve All Obligations with (intros ; (fset_equality || solve_in_fset)).\n  Fail Next Obligation.\n\n  #[global] Program Instance option_bind_code : BindCode :=\n    {| mnd := choice_typeMonad.option_monad;\n      monad_bind_both A B x f :=\n      bind_both x (fun t_x =>\n       match t_x with\n       | Some s => f (solve_lift ret_both s)\n       | None => solve_lift ret_both (@None B : option B)\n       end) |}.\n  Solve All Obligations with (intros ; (fset_equality || solve_in_fset)).\n  Fail Next Obligation.\nEnd choice_typeMonad.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Natmod.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\nRequire Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\n\nFrom Coq Require Import ZArith List.\nImport List.ListNotations.\n\nImport choice.Choice.Exports.\n\n(********************************************************)\n(*   Implementation of all Hacspec library functions    *)\n(* for Both types.                                      *)\n(********************************************************)\n\nDeclare Scope hacspec_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\n\nOpen Scope bool_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\nOpen Scope list_scope.\n\n(*** Nats *)\n\n\nSection Todosection.\n\nDefinition nat_mod_equal {p} (a b : nat_mod p) : both 'bool :=\n  ret_both (@eqtype.eq_op (fintype_ordinal__canonical__eqtype_Equality (S (Init.Nat.pred (Z.to_nat p)))) a b : 'bool).\n\nDefinition nat_mod_equal_reflect {p} {a b} : Bool.reflect (a = b) (is_pure (@nat_mod_equal p a b)) :=\n  @eqtype.eqP (fintype_ordinal__canonical__eqtype_Equality (S (Init.Nat.pred (Z.to_nat p)))) a b.\n\nDefinition nat_mod_zero {p} : both ((nat_mod p)) := ret_both (nat_mod_zero).\nDefinition nat_mod_one {p} : both ((nat_mod p)) := ret_both (nat_mod_one).\nDefinition nat_mod_two {p} : both ((nat_mod p)) := ret_both (nat_mod_two).\n\nDefinition nat_mod_add {n : Z} (a : nat_mod n) (b : nat_mod n) : both (nat_mod n) := ret_both (nat_mod_add a b).\nDefinition 
nat_mod_mul {n : Z} (a:nat_mod n) (b:nat_mod n) : both (nat_mod n) := ret_both (nat_mod_mul a b).\nDefinition nat_mod_sub {n : Z} (a:nat_mod n) (b:nat_mod n) : both (nat_mod n) := ret_both (nat_mod_sub a b).\nDefinition nat_mod_div {n : Z} (a:nat_mod n) (b:nat_mod n) : both (nat_mod n) := ret_both (nat_mod_div a b).\n\nDefinition nat_mod_neg {n : Z} (a:nat_mod n) : both (nat_mod n) := ret_both (nat_mod_neg a).\n\nDefinition nat_mod_inv {n : Z} (a:nat_mod n) : both (nat_mod n) := ret_both (nat_mod_inv a).\n\nDefinition nat_mod_exp_def {p : Z} (a:nat_mod p) (n : nat) : both (nat_mod p) :=\n  ret_both (nat_mod_exp_def a n).\n\nDefinition nat_mod_exp {WS} {p} a n := @nat_mod_exp_def p a (Z.to_nat (@unsigned WS n)).\nDefinition nat_mod_pow {WS} {p} a n := @nat_mod_exp_def p a (Z.to_nat (@unsigned WS n)).\nDefinition nat_mod_pow_felem {p} (a n : nat_mod p) := @nat_mod_exp_def p a (Z.to_nat (nat_of_ord n)).\nDefinition nat_mod_pow_self {p} (a n : nat_mod p) := nat_mod_pow_felem a n.\n\nClose Scope nat_scope.\n\nDefinition nat_mod_from_secret_literal {m : Z} (x:int128) : both (nat_mod m) :=\n ret_both (@nat_mod_from_secret_literal m x).\n\nDefinition nat_mod_from_literal (m : Z) (x:int128) : both ((nat_mod m)) := nat_mod_from_secret_literal x.\n\nDefinition nat_mod_to_byte_seq_le {n : Z} (m : nat_mod n) : both (seq int8) := ret_both (nat_mod_to_byte_seq_le m).\nDefinition nat_mod_to_byte_seq_be {n : Z} (m : nat_mod n) : both (seq int8) := ret_both (nat_mod_to_byte_seq_be m).\nDefinition nat_mod_to_public_byte_seq_le (n : Z) (m : nat_mod n) : both (seq int8) := ret_both (nat_mod_to_public_byte_seq_le n m).\nDefinition nat_mod_to_public_byte_seq_be (n : Z) (m : nat_mod n) : both (seq int8) := ret_both (nat_mod_to_public_byte_seq_be n m).\n\nDefinition nat_mod_bit {n : Z} (a : nat_mod n) (i : uint_size) : both 'bool :=\n  ret_both (nat_mod_bit a i).\n\n(* Alias for nat_mod_bit *)\nDefinition nat_get_mod_bit {p} (a : nat_mod p) (i : uint_size) : both 'bool := ret_both 
(nat_get_mod_bit a i).\nDefinition nat_mod_get_bit {p} (a : nat_mod p) n : both (nat_mod p) :=\n  ret_both (nat_mod_get_bit a n).\n\nDefinition array_declassify_eq {A l} (x : nseq_ A l) (y : nseq_ A l) : both 'bool := ret_both (array_declassify_eq x y).\nDefinition array_to_le_uint32s {A l} (x : nseq_ A l) : both (seq uint32) := ret_both (array_to_le_uint32s x).\nDefinition array_to_be_uint32s {l} (x : nseq_ uint8 l) : both (seq uint32) := ret_both (array_to_be_uint32s x).\nDefinition array_to_le_uint64s {A l} (x : nseq_ A l) : both (seq uint64) := ret_both (array_to_le_uint64s x).\nDefinition array_to_be_uint64s {l} (x : nseq_ uint8 l) : both (seq uint64) := ret_both (array_to_be_uint64s x).\nDefinition array_to_le_uint128s {A l} (x : nseq_ A l) : both (seq uint128) := ret_both (array_to_le_uint128s x).\nDefinition array_to_be_uint128s {l} (x : nseq_ uint8 l) : both (seq uint128) := ret_both (array_to_be_uint128s x).\nDefinition array_to_le_bytes {A l} (x : nseq_ A l) : both (seq uint8) := ret_both (array_to_le_bytes x).\nDefinition array_to_be_bytes {A l} (x : nseq_ A l) : both (seq uint8) := ret_both (array_to_be_bytes x).\nDefinition nat_mod_from_byte_seq_le {A n} (x : seq A) : both (nat_mod n) := ret_both (nat_mod_from_byte_seq_le x).\nDefinition most_significant_bit {m} (x : nat_mod m) (n : uint_size) : both (uint_size) := ret_both (most_significant_bit x n).\n\n\n(* We assume 2^x < m *)\n\nDefinition nat_mod_pow2 (m : Z) {WS} (x : (@int WS)) : both ((nat_mod m)) :=\n  ret_both (nat_mod_pow2 m (Z.to_nat (unsigned x))).\n\nEnd Todosection.\n\nInfix \"+%\" := nat_mod_add (at level 33) : hacspec_scope.\nInfix \"*%\" := nat_mod_mul (at level 33) : hacspec_scope.\nInfix \"-%\" := nat_mod_sub (at level 33) : hacspec_scope.\nInfix \"/%\" := nat_mod_div (at level 33) : hacspec_scope.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Notation.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\nRequire Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nFrom Coq Require Import ZArith List.\nImport List.ListNotations.\n\nImport choice.Choice.Exports.\n\n(********************************************************)\n(*   Implementation of all Hacspec library functions    *)\n(* for Both types.                                      *)\n(********************************************************)\n\nDeclare Scope hacspec_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\n\nOpen Scope bool_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\nOpen Scope list_scope.\n\n(*** Notation *)\n\nSection TodoSection3.\nDefinition nat_mod_from_byte_seq_be {A n} (x : seq A) : both (nat_mod n) := ret_both (nat_mod_from_byte_seq_be x).\n\nEnd TodoSection3.\n\nDefinition neqb {A : choice_type} `{EqDec A} : both A -> both A -> both 'bool := lift2_both (fun x y => negb (eqb x y) : 'bool).\nDefinition eqb {A : choice_type} `{EqDec A} : both A -> both A -> both 'bool := lift2_both (fun x y => eqb x y : 'bool).\n\nDefinition ltb {A : choice_type} `{Comparable A} : both A -> both A -> both 'bool := lift2_both (fun x y => ltb x y : 'bool).\nDefinition leb {A : choice_type} `{Comparable A} : both A -> both A -> both 'bool := lift2_both (fun x y => leb x y : 'bool).\nDefinition gtb {A : choice_type} `{Comparable A} : both A -> both A -> both 'bool := lift2_both (fun x y => 
gtb x y : 'bool).\nDefinition geb {A : choice_type} `{Comparable A} : both A -> both A -> both 'bool := lift2_both (fun x y => geb x y : 'bool).\n\nInfix \"=.?\" := eqb (at level 40) : hacspec_scope.\nInfix \"!=.?\" := neqb (at level 40) : hacspec_scope.\nInfix \"<.?\" := ltb (at level 42) : hacspec_scope.\nInfix \"<=.?\" := leb (at level 42) : hacspec_scope.\nInfix \">.?\" := gtb (at level 42) : hacspec_scope.\nInfix \">=.?\" := geb (at level 42) : hacspec_scope.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Pre.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\nRequire Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nRequire Import ChoiceEquality.\n\nFrom mathcomp Require Import ssrZ word.\n(* From Jasmin Require Import word. *)\nFrom Crypt Require Import jasmin_word.\n\n\nFrom Coq Require Import ZArith List.\nImport ListNotations.\n\n(*****************************************************)\n(*   Implementation of all Hacspec library functions *)\n(* for choice_type types.                         *)\n(*****************************************************)\n\n(*** Integers *)\nDeclare Scope hacspec_scope.\n\nOpen Scope list_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\n\nRequire Import Hacspec_Lib_Comparable.\n\nImport choice.Choice.Exports.\n\n(* Section IntType. 
*)\n\n  Notation int := chWord.\n\n  Notation unsigned := wunsigned.\n  Notation signed := wsigned.\n  Notation repr := (fun WS x => wrepr WS x : int WS).\n\n  Notation rol := (fun u s => wrol u (unsigned s)).\n  Notation ror := (fun u s => wror u (unsigned s)).\n\n  Notation int8 := (@int U8).\n  Notation int16 := (@int U16).\n  Notation int32 := (@int U32).\n  Notation int64 := (@int U64).\n  Notation int128 := (@int U128).\n\n  Notation int_modi := wmodi.\n  Definition int_add {WS} : @int WS -> @int WS -> @int WS := @add_word WS.\n  Definition int_sub {WS} : @int WS -> @int WS -> @int WS := @sub_word WS.\n  Definition int_opp {WS} : @int WS -> @int WS := @opp_word WS.\n  Definition int_mul {WS} : @int WS -> @int WS -> @int WS := @mul_word WS.\n  Notation int_div := wdiv.\n  Notation int_mod := wmod.\n  Notation int_xor := wxor.\n  Notation int_and := wand.\n  Notation int_or := wor.\n\n  Definition int_not {WS : wsize} : (@int WS) -> (@int WS) := wnot.\n\n  Definition zero {WS : wsize} : ((@int WS)) := @word0 WS.\n  Definition one {WS : wsize} : ((@int WS)) := @word1 (pred WS).\n\n  Lemma add_zero_l : forall {WS : wsize} n, int_add (@zero WS) n = n.\n  Proof.\n    intros.\n    apply add0w.\n  Defined.\n\n  Lemma add_one_l : forall {WS : wsize} n, int_add one (repr WS n) = repr _ (Z.succ n).\n  Proof.\n    intros.\n    setoid_rewrite wrepr_add.\n    rewrite urepr_word.\n    replace (urepr (@one WS)) with 1%Z by reflexivity.\n    replace toword  with urepr by reflexivity.\n    setoid_rewrite ureprK.\n    rewrite ssralg.GRing.addrC.\n    now setoid_rewrite mkword1E.\n  Defined.\n\n  Lemma repr0_is_zero : forall {WS : wsize}, repr WS 0%Z = zero.\n  Proof.\n    intros.\n    now rewrite wrepr0.\n  Qed.\n\n  Lemma add_repr : forall {WS : wsize} (n m : Z), int_add (repr WS n) (repr WS m) = (repr WS (n + m)%Z).\n  Proof. intros ; now rewrite wrepr_add. Qed.\n\n(* End IntType. 
*)\n\nAxiom secret : forall {WS : wsize},  ((@int WS)) -> ((@int WS)).\n\nInfix \".%%\" := int_modi (at level 40, left associativity) : Z_scope.\nInfix \".+\" := int_add (at level 77) : hacspec_scope.\nInfix \".-\" := int_sub (at level 77) : hacspec_scope.\nNotation \"-\" := int_opp (at level 77) : hacspec_scope.\nInfix \".*\" := int_mul (at level 77) : hacspec_scope.\nInfix \"./\" := int_div (at level 77) : hacspec_scope.\nInfix \".%\" := int_mod (at level 77) : hacspec_scope.\nInfix \".^\" := int_xor (at level 77) : hacspec_scope.\nInfix \".&\" := int_and (at level 77) : hacspec_scope.\nInfix \".|\" := int_or (at level 77) : hacspec_scope.\n\nNotation \"'not'\" := int_not (at level 77) : hacspec_scope.\n\n(* Comparisons, boolean equality, and notation *)\n\nGlobal Program Instance nat_eqdec : EqDec nat := {\n    eqb := Nat.eqb;\n    eqb_leibniz := Nat.eqb_eq ;\n  }.\n\nGlobal Instance nat_comparable : Comparable nat := {\n    ltb := Nat.ltb;\n    leb := Nat.leb;\n    gtb a b := Nat.ltb b a;\n    geb a b := Nat.leb b a;\n  }.\n\nGlobal Instance N_eqdec : EqDec N := {\n    eqb := N.eqb;\n    eqb_leibniz := N.eqb_eq ;\n  }.\n\nGlobal Instance N_comparable : Comparable N := {\n    ltb := N.ltb;\n    leb := N.leb;\n    gtb a b := N.ltb b a;\n    geb a b := N.leb b a;\n  }.\n\nGlobal Instance Z_eqdec : EqDec Z := {\n    eqb := Z.eqb;\n    eqb_leibniz := Z.eqb_eq ;\n  }.\n\nGlobal Instance Z_comparable : Comparable Z := {\n    ltb := Z.ltb;\n    leb := Z.leb;\n    gtb a b := Z.ltb b a;\n    geb a b := Z.leb b a;\n  }.\n\nLemma int_eqb_eq : forall {WS : wsize} (a b : (@int WS)), eqtype.eq_op a b = true <-> a = b.\nProof.\n  symmetry ; exact (ssrbool.rwP (@eqtype.eqP _ a b)).\nQed.\n\nGlobal Instance int_eqdec `{WS : wsize}: EqDec ((@int WS)) := {\n    eqb := eqtype.eq_op;\n    eqb_leibniz := int_eqb_eq ;\n  }.\n\nGlobal Instance int_comparable `{WS : wsize} : Comparable ((@int WS)) :=\n  eq_dec_lt_Comparable (wlt Unsigned).\n\nAxiom uint8_declassify : int8 -> 
int8.\nAxiom int8_declassify : int8 -> int8.\nAxiom uint16_declassify : int16 -> int16.\nAxiom int16_declassify : int16 -> int16.\nAxiom uint32_declassify : int32 -> int32.\nAxiom int32_declassify : int32 -> int32.\nAxiom uint64_declassify : int64 -> int64.\nAxiom int64_declassify : int64 -> int64.\nAxiom uint128_declassify : int128 -> int128.\nAxiom int128_declassify : int128 -> int128.\n\nAxiom uint8_classify : int8 -> int8.\nAxiom int8_classify : int8 -> int8.\nAxiom uint16_classify : int16 -> int16.\nAxiom int16_classify : int16 -> int16.\nAxiom uint32_classify : int32 -> int32.\nAxiom int32_classify : int32 -> int32.\nAxiom uint64_classify : int64 -> int64.\nAxiom int64_classify : int64 -> int64.\nAxiom uint128_classify : int128 -> int128.\nAxiom int128_classify : int128 -> int128.\n\n\n(* CompCert integers' signedness is only interpreted through 'signed' and 'unsigned',\n   and not in the representation. Therefore, uints are just names for their respective ints.\n *)\n\nNotation uint8 := int8.\nNotation uint32 := int32.\nNotation uint64 := int64.\nNotation uint128 := int128.\n\nDefinition uint_size : choice_type := int32.\nDefinition int_size : choice_type := int32.\n\nAxiom declassify_usize_from_uint8 : uint8 -> uint_size.\nAxiom declassify_u32_from_uint32 : uint32 -> uint32.\n\n(* Represents any type that can be converted to uint_size and back *)\nClass UInt_sizeable (A : Type) := {\n    usize : A -> uint_size;\n    from_uint_size :> uint_size -> A;\n  }.\nArguments usize {_} {_}.\nArguments from_uint_size {_} {_}.\n\nDefinition from_uint_size_int (x : uint_size) : @int U32 := x.\nCoercion from_uint_size_int : choice.Choice.sort >-> choice.Choice.sort.\n\nGlobal Instance nat_uint_sizeable : UInt_sizeable nat := {\n    usize n := repr _ (Z.of_nat n);\n    from_uint_size n := Z.to_nat (unsigned n);\n  }.\n\nGlobal Instance N_uint_sizeable : UInt_sizeable N := {\n    usize n := repr _ (Z.of_N n);\n    from_uint_size n := Z.to_N (unsigned n);\n  }.\n\nGlobal 
Instance Z_uint_sizeable : UInt_sizeable Z := {\n    usize n := repr _ n;\n    from_uint_size n := unsigned n;\n  }.\n\n\n(* Same, but for int_size *)\nClass Int_sizeable (A : Type) := {\n    isize : A -> int_size;\n    from_int_size : int_size -> A;\n  }.\n\nArguments isize {_} {_}.\nArguments from_int_size {_} {_}.\n\nGlobal Instance nat_Int_sizeable : Int_sizeable nat := {\n    isize n := repr _ (Z.of_nat n);\n    from_int_size n := Z.to_nat (signed n);\n  }.\n\nGlobal Instance N_Int_sizeable : Int_sizeable N := {\n    isize n := repr _ (Z.of_N n);\n    from_int_size n := Z.to_N (signed n);\n  }.\n\nGlobal Instance Z_Int_sizeable : Int_sizeable Z := {\n    isize n := repr _ n;\n    from_int_size n := signed n;\n  }.\n\n(**** Public integers *)\n\nDefinition pub_u8 (n : uint_size) : int8 := repr _ (unsigned n).\nDefinition pub_i8 (n : uint_size) : int8 := repr _ (unsigned n).\nDefinition pub_u16 (n : uint_size) : int16 := repr _ (unsigned n).\nDefinition pub_i16 (n : uint_size) : int16 := repr _ (unsigned n).\nDefinition pub_u32 (n : uint_size) : int32 := repr _ (unsigned n).\nDefinition pub_i32 (n : uint_size) : int32 := repr _ (unsigned n).\nDefinition pub_u64 (n : uint_size) : int64 := repr _ (unsigned n).\nDefinition pub_i64 (n : uint_size) : int64 := repr _ (unsigned n).\nDefinition pub_u128 (n : uint_size) : int128 := repr _ (unsigned n).\nDefinition pub_i128 (n : uint_size) : int128 := repr _ (unsigned n).\n\n(**** Operations *)\n\nDefinition uint8_rotate_left (u: int8) (s: int8) : int8 := rol u s.\n\nDefinition uint8_rotate_right (u: int8) (s: int8) : int8 := ror u s.\n\nDefinition uint16_rotate_left (u: int16) (s: int16) : int16 :=\n  rol u s.\n\nDefinition uint16_rotate_right (u: int16) (s: int16) : int16 :=\n  ror u s.\n\nDefinition uint32_rotate_left (u: int32) (s: int32) : int32 :=\n  rol u s.\n\nDefinition uint32_rotate_right (u: int32) (s: int32) : int32 :=\n  ror u s.\n\nDefinition uint64_rotate_left (u: int64) (s: int64) : int64 :=\n  rol u 
s.\n\nDefinition uint64_rotate_right (u: int64) (s: int64) : int64 :=\n  ror u s.\n\nDefinition uint128_rotate_left (u: int128) (s: int128) : int128 :=\n  rol u s.\n\nDefinition uint128_rotate_right (u: int128) (s: int128) : int128 :=\n  ror u s.\n\nDefinition usize_shift_right (u: uint_size) (s: int32) : uint_size :=\n  wshr u (unsigned (@repr U32 (from_uint_size s))).\nInfix \"usize_shift_right\" := (usize_shift_right) (at level 77) : hacspec_scope.\n\nDefinition usize_shift_left (u: uint_size) (s: int32) : uint_size :=\n  (rol u s).\nInfix \"usize_shift_left\" := (usize_shift_left) (at level 77) : hacspec_scope.\n\nDefinition pub_uint128_wrapping_add (x y: int128) : int128 :=\n  x .+ y.\n\nDefinition shift_left_ `{WS : wsize} (i : (@int WS)) (j : uint_size) : (@int WS) :=\n  wshl i (unsigned (@repr WS (from_uint_size j))).\n\nDefinition shift_right_ `{WS : wsize} (i : (@int WS)) (j : uint_size) : (@int WS):=\n  wshr i (unsigned (@repr WS (from_uint_size j))) .\n\nInfix \"shift_left\" := (shift_left_) (at level 77) : hacspec_scope.\nInfix \"shift_right\" := (shift_right_) (at level 77) : hacspec_scope.\n\n(*** Positive util *)\n\nSection Util.\n\n  Fixpoint binary_representation_pre (n : nat) {struct n}: positive :=\n    match n with\n    | O => 1\n    | S O => 1\n    | S n => Pos.succ (binary_representation_pre n)\n    end%positive.\n  Definition binary_representation (n : nat) `(n <> O) := binary_representation_pre n.\n\n  Theorem positive_is_succs : forall n, forall (H : n <> O) (K : S n <> O),\n      @binary_representation (S n) K = Pos.succ (@binary_representation n H).\n  Proof. induction n ; [contradiction | reflexivity]. 
Qed.\n\n  (* Conversion of positive to binary representation *)\n  Theorem positive_to_positive_succs : forall p, binary_representation (Pos.to_nat p) (Nat.neq_sym _ _ (Nat.lt_neq _ _ (Pos2Nat.is_pos p))) = p.\n  Proof.\n    intros p.\n    generalize dependent (Nat.neq_sym 0 (Pos.to_nat p) (Nat.lt_neq 0 (Pos.to_nat p) (Pos2Nat.is_pos p))).\n\n    destruct Pos.to_nat eqn:ptno.\n    - contradiction.\n    - generalize dependent p.\n      induction n ; intros.\n      + cbn.\n        apply Pos2Nat.inj.\n        symmetry.\n        apply ptno.\n      + rewrite positive_is_succs with (H := Nat.neq_succ_0 n).\n        rewrite IHn with (p := Pos.of_nat (S n)).\n        * rewrite <- Nat2Pos.inj_succ by apply Nat.neq_succ_0.\n          rewrite <- ptno.\n          apply Pos2Nat.id.\n        * apply Nat2Pos.id.\n          apply Nat.neq_succ_0.\n  Qed.\n\n  (*** Uint size util *)\n\n  (* If a natural number is in bound then a smaller natural number is still in bound *)\n  Lemma range_of_nat_succ :\n    forall {WS : wsize},\n    forall i, (Z.pred 0 < Z.of_nat (S i) < modulus WS)%Z -> (Z.pred 0 < Z.of_nat i < modulus WS)%Z.\n  Proof. lia. Qed.\n\n  (* Conversion to equivalent bound *)\n  Lemma modulus_range_helper :\n    forall {WS : wsize},\n    forall i, (Z.pred 0 < i < modulus WS)%Z -> (0 <= i <= wmax_unsigned WS)%Z.\n  Proof.\n    intros.\n    unfold wmax_unsigned.\n    unfold wbase.\n    unfold nat_of_wsize in H.\n    lia.\n  Qed.\n\n  Definition unsigned_repr_alt {WS : wsize} (a : Z) `((Z.pred 0 < a < modulus WS)%Z) :\n    unsigned (@repr WS a) = a.\n  Proof.\n    apply wunsigned_repr_small.\n    intros.\n    unfold wbase.\n    unfold nat_of_wsize in H.\n    lia.\n  Qed.\n\n  Theorem zero_always_modulus {WS : wsize} : (Z.pred 0 < 0 < modulus WS)%Z.\n  Proof. easy. 
Qed.\n\n  (* any uint_size can be represented as a natural number and a bound *)\n  (* this is easier for proofs, however less efficient for computation *)\n  (* as Z uses a binary representation *)\n\n  Theorem uint_size_as_nat :\n    forall (us: uint_size),\n      { n : nat |\n        us = repr _ (Z.of_nat n) /\\ (Z.pred 0 < Z.of_nat n < @modulus U32)%Z}.\n  Proof.\n    intros.\n    exists (Z.to_nat (unsigned us)).\n    rewrite Z2Nat.id by apply (ssrbool.elimT (word_ssrZ.leZP _ _) (urepr_ge0 us)).\n    split.\n    - rewrite wrepr_unsigned.\n      reflexivity.\n    - pose (wunsigned_range us).\n      unfold wbase in a.\n      unfold nat_of_wsize.\n      cbn in *.\n      lia.\n  Qed.\n\n  (* destruct uint_size as you would a natural number *)\n  Definition destruct_uint_size_as_nat  (a : uint_size) : forall (P : uint_size -> Prop),\n    forall (zero_case : P (repr _ 0%Z)),\n    forall (succ_case : forall (n : nat), (Z.pred 0 < Z.of_nat n < @modulus U32)%Z -> P (repr _ (Z.of_nat n))),\n      P a.\n  Proof.\n    intros.\n    destruct (uint_size_as_nat a) as [ n y ] ; destruct y as [ya yb] ; subst.\n    destruct n.\n    - apply zero_case.\n    - apply succ_case.\n      apply yb.\n  Qed.\n\n\n  (* induction for uint_size as you would do for a natural number *)\n  Definition induction_uint_size_as_nat :\n    forall (P : uint_size -> Prop),\n      (P (repr _ 0%Z)) ->\n      (forall n,\n          (Z.pred 0 < Z.succ (Z.of_nat n) < @modulus U32)%Z ->\n          P (repr _ (Z.of_nat n)) ->\n          P (repr _ (Z.succ (Z.of_nat n)))) ->\n      forall (a : uint_size), P a.\n  Proof.\n    intros P H_zero H_ind a.\n    destruct (uint_size_as_nat a) as [ n y ] ; destruct y as [ya yb] ; subst.\n    induction n.\n    - apply H_zero.\n    - rewrite Nat2Z.inj_succ.\n      apply H_ind.\n      + rewrite <- Nat2Z.inj_succ.\n        apply yb.\n      + apply IHn.\n        lia.\n  Qed.\n\n  (* conversion of usize to positive or zero and the respective bound *)\n  Theorem 
uint_size_as_positive :\n    forall (us: uint_size),\n      { pu : unit + positive |\n        match pu with\n        | inl u => us = repr _ Z0\n        | inr p => us = repr _ (Z.pos p) /\\ (Z.pred 0 < Z.pos p < @modulus U32)%Z\n        end\n      }.\n  Proof.\n    intros.\n\n    destruct us as [val H_].\n    pose proof (H := H_).\n    apply Bool.andb_true_iff in H as [lt gt].\n    apply (ssrbool.elimT (word_ssrZ.leZP _ _)) in lt.\n    apply (ssrbool.elimT (word_ssrZ.ltZP _ _)) in gt.\n\n    destruct val.\n    - exists (inl tt). apply word_ext. reflexivity.\n    - exists (inr p).\n      split.\n      + apply word_ext.\n        rewrite Zmod_small by (unfold nat_of_wsize in gt ; lia).\n        reflexivity.\n      + cbn in gt.\n        unfold nat_of_wsize.\n        simpl.\n        lia.\n    - contradiction.\n  Defined.\n\n  (* destruction of uint_size as positive *)\n  Definition destruct_uint_size_as_positive  (a : uint_size) : forall (P : uint_size -> Prop),\n      (P (repr _ 0%Z)) ->\n      (forall b, (Z.pred 0 < Z.pos b < @modulus U32)%Z -> P (repr _ (Z.pos b))) ->\n      P a.\n  Proof.\n    intros P H_zero H_succ.\n    destruct (uint_size_as_positive a) as [ [ _ | b ] y ] ; [ subst | destruct y as [ya yb] ; subst ].\n    - apply H_zero.\n    - apply H_succ.\n      apply yb.\n  Qed.\n\n  (* induction of uint_size as positive *)\n  Definition induction_uint_size_as_positive :\n    forall (P : uint_size -> Prop),\n      (P (repr _ 0%Z)) ->\n      (P (repr _ 1%Z)) ->\n      (forall b,\n          (Z.pred 0 < Z.succ (Z.pos b) < @modulus U32)%Z ->\n          P (repr _ (Z.pos b)) ->\n          P (repr _ (Z.succ (Z.pos b)))) ->\n      forall (a : uint_size), P a.\n  Proof.\n    intros P H_zero H_one H_ind a.\n\n    destruct (uint_size_as_positive a) as [ [ _ | b ] y ] ; [ subst | destruct y as [ya yb] ; subst ].\n    - apply H_zero.\n    - pose proof (pos_succ_b := positive_to_positive_succs b)\n      ; symmetry in pos_succ_b\n      ; rewrite pos_succ_b in *\n      ; clear 
pos_succ_b.\n\n      generalize dependent (Nat.neq_sym 0 (Pos.to_nat b) (Nat.lt_neq 0 (Pos.to_nat b) (Pos2Nat.is_pos b))).\n\n      induction (Pos.to_nat b).\n      + contradiction.\n      + intros n_neq yb.\n        destruct n.\n        * apply H_one.\n        * rewrite (positive_is_succs _  (Nat.neq_succ_0 n) n_neq) in *.\n          rewrite Pos2Z.inj_succ in *.\n          apply H_ind.\n          -- apply yb.\n          -- apply IHn.\n             lia.\n  Qed.\n\nEnd Util.\n\nGlobal Ltac destruct_uint_size_as_nat_named a H_zero H_succ :=\n  generalize dependent a ;\n  intros a ;\n  apply (destruct_uint_size_as_nat a) ; [ pose proof (H_zero := @unsigned_repr_alt U32 0 zero_always_modulus) | let n := fresh in let H := fresh in intros n H ; pose proof (H_succ := @unsigned_repr_alt U32 _ H)] ; intros.\n\nGlobal Ltac destruct_uint_size_as_nat a :=\n  let H_zero := fresh in\n  let H_succ := fresh in\n  destruct_uint_size_as_nat_named a H_zero H_succ.\n\nGlobal Ltac induction_uint_size_as_nat var :=\n  generalize dependent var ;\n  intros var ;\n  apply induction_uint_size_as_nat with (a := var) ; [ pose proof (@unsigned_repr_alt U32 0 zero_always_modulus) | let n := fresh in let IH := fresh in intros n IH ; pose proof (@unsigned_repr_alt U32 _ IH)] ; intros.\n\n\n\n(*** Loops *)\n\nOpen Scope nat_scope.\nFixpoint foldi_\n         {acc : Type}\n         (fuel : nat)\n         (i : uint_size)\n         (f : uint_size -> acc -> acc)\n         (cur : acc) : acc :=\n  match fuel with\n  | 0 => cur\n  | S n' => foldi_ n' (i .+ one) f (f i cur)\n  end.\nClose Scope nat_scope.\nDefinition foldi\n           {acc: Type}\n           (lo: uint_size)\n           (hi: uint_size) (* {lo <= hi} *)\n           (f: (uint_size) -> acc -> acc) (* {i < hi} *)\n           (init: acc)\n  : acc :=\n  match Z.sub (unsigned hi) (unsigned lo) with\n  | Z0 => init\n  | Zneg p => init\n  | Zpos p => foldi_ (Pos.to_nat p) lo f init\n  end.\n\n(* Fold done using natural numbers for bounds 
*)\nFixpoint foldi_nat_\n         {acc : Type}\n         (fuel : nat)\n         (i : nat)\n         (f : nat -> acc -> acc)\n         (cur : acc) : acc :=\n  match fuel with\n  | O => cur\n  | S n' => foldi_nat_ n' (S i) f (f i cur)\n  end.\n\n\nFixpoint for_loop_\n         {acc : Type}\n         (fuel : nat)\n         (f : nat -> acc -> acc)\n         (cur : acc) : acc :=\n  match fuel with\n  | O => cur\n  | S n' => f n' (for_loop_ n' f cur)\n  end.\n\nDefinition foldi_nat\n           {acc: Type}\n           (lo: nat)\n           (hi: nat) (* {lo <= hi} *)\n           (f: nat -> acc -> acc) (* {i < hi} *)\n           (init: acc) : acc :=\n  match Nat.sub hi lo with\n  | O => init\n  | S n' => foldi_nat_ (S n') lo f init\n  end.\n\nDefinition for_loop_range\n           {acc: Type}\n           (lo: nat)\n           (hi: nat) (* {lo <= hi} *)\n           (f: nat -> acc -> acc) (* {i < hi} *)\n           (init: acc) : acc :=\n  match Nat.sub hi lo with\n  | O => init\n  | S n' => for_loop_ (S n') (fun x => f (x + lo)%nat)  init\n  end.\n\nDefinition for_loop_usize {acc : Type} (lo hi : uint_size) (f : uint_size -> acc -> acc) init : acc :=\n  for_loop_range (from_uint_size lo) (from_uint_size hi) (fun x => f (usize x)) init.\n\n\nLemma foldi__move_S :\n  forall {acc: Type}\n         (fuel : nat)\n         (i : uint_size)\n         (f : uint_size -> acc -> acc)\n         (cur : acc),\n    foldi_ fuel (i .+ one) f (f i cur) = foldi_ (S fuel) i f cur.\nProof. reflexivity. Qed.\n\nLemma foldi__nat_move_S :\n  forall {acc: Type}\n         (fuel : nat)\n         (i : nat)\n         (f : nat -> acc -> acc)\n         (cur : acc),\n    foldi_nat_ fuel (S i) f (f i cur) = foldi_nat_ (S fuel) i f cur.\nProof. reflexivity. 
Qed.\n\nLemma foldi__nat_move_S_append :\n  forall {acc: Type}\n         (fuel : nat)\n         (i : nat)\n         (f : nat -> acc -> acc)\n         (cur : acc),\n    f (i + fuel)%nat (foldi_nat_ fuel i f cur) = foldi_nat_ (S fuel) i f cur.\nProof.\n  induction fuel ; intros.\n  - rewrite <- foldi__nat_move_S.\n    unfold foldi_nat_.\n    rewrite Nat.add_0_r.\n    reflexivity.\n  - rewrite <- foldi__nat_move_S.\n    rewrite <- foldi__nat_move_S.\n    replace (i + S fuel)%nat with (S i + fuel)%nat by lia.\n    rewrite IHfuel.\n    reflexivity.\nQed.\n\nTheorem foldi_for_loop_eq :\n  forall {acc} fuel f (cur : acc),\n    foldi_nat_ fuel 0 f cur\n    =\n      for_loop_ fuel f cur.\nProof.\n  induction fuel ; intros.\n  - reflexivity.\n  - unfold for_loop_ ; fold (@for_loop_ acc).\n    rewrite <- foldi__nat_move_S_append.\n    rewrite <- IHfuel.\n    reflexivity.\nQed.\n\nLemma foldi__nat_move_to_function :\n  forall {acc: choice_type}\n         (fuel : nat)\n         (i : nat)\n         (f : nat -> acc -> acc)\n         (cur : acc),\n    foldi_nat_ fuel i (fun x => f (S x)) (cur) = foldi_nat_ fuel (S i) f cur.\nProof.\n  induction fuel ; intros.\n  - reflexivity.\n  - cbn.\n    rewrite IHfuel.\n    reflexivity.\nQed.\n\nLemma foldi__nat_move_to_function_add :\n  forall {acc: choice_type}\n         (fuel : nat)\n         (i j : nat)\n         (f : nat -> acc ->  acc)\n         (cur : acc),\n    foldi_nat_ fuel i (fun x => f (x + j)%nat) (cur) = foldi_nat_ fuel (i + j) f cur.\nProof.\n  intros acc fuel i j. 
generalize dependent i.\n  induction j ; intros.\n  - rewrite Nat.add_0_r.\n    replace (fun x : nat => f (x + 0)%nat) with f.\n    reflexivity.\n    apply functional_extensionality.\n    intros.\n    now rewrite Nat.add_0_r.\n  - replace (i + S j)%nat with (S i + j)%nat by lia.\n    rewrite <- IHj.\n    rewrite <- foldi__nat_move_to_function.\n    f_equal.\n    apply functional_extensionality.\n    intros.\n    f_equal.\n    lia.\nQed.\n\nTheorem foldi_for_loop_range_eq :\n  forall {acc : choice_type} lo hi f (cur : acc),\n    foldi_nat lo hi f cur\n    =\n      for_loop_range lo hi f cur.\nProof.\n  unfold foldi_nat.\n  unfold for_loop_range.\n  intros.\n\n  destruct (hi - lo)%nat.\n  - reflexivity.\n  - rewrite <- foldi_for_loop_eq.\n    induction lo.\n    + f_equal.\n      apply functional_extensionality.\n      intros.\n      now rewrite Nat.add_0_r.\n    + replace (fun x : nat => f (x + S lo)%nat) with (fun x : nat => f (S (x + lo))%nat).\n      2:{\n        apply functional_extensionality.\n        intros.\n        f_equal.\n        lia.\n      }\n\n      rewrite (foldi__nat_move_to_function (S n) 0 (fun x => f (x + lo)%nat)).\n      rewrite foldi__nat_move_to_function_add.\n      reflexivity.\nQed.\n\n(* You can do one iteration of the fold by burning a unit of fuel *)\nLemma foldi__move_S_fuel :\n  forall {acc: Type}\n         (fuel : nat)\n         (i : uint_size)\n         (f : uint_size -> acc -> acc)\n         (cur : acc),\n    (0 <= Z.of_nat fuel <= wmax_unsigned U32)%Z ->\n    f ((repr _ (Z.of_nat fuel)) .+ i) (foldi_ (fuel) i f cur) = foldi_ (S (fuel)) i f cur.\nProof.\n  intros acc fuel.\n  induction fuel ; intros.\n  - cbn.\n    replace (repr _ 0%Z) with (@zero U32) by (rewrite wrepr0 ; reflexivity).\n    rewrite add_zero_l.\n    reflexivity.\n  - do 2 rewrite <- foldi__move_S.\n    replace (int_add (repr _ (Z.of_nat (S fuel))) i)\n      with (int_add (repr _ (Z.of_nat fuel)) (int_add i one)).\n    2 : {\n      unfold int_add.\n      
setoid_rewrite addwA.\n      rewrite addwC.\n      rewrite addwA.\n      f_equal.\n\n      rewrite Nat2Z.inj_succ.\n      (* unfold repr. *)\n      unfold add_word.\n      unfold wrepr.\n      f_equal.\n      rewrite urepr_word.\n\n      replace (@toword (nat_of_wsize U32) (@one U32))%Z with 1%Z by reflexivity.\n      (* unfold urepr. *)\n      (* unfold eqtype.val. *)\n      (* (* unfold word_subType. *) *)\n      (* unfold toword. *)\n      (* unfold mkword. *)\n      \n      rewrite Z.add_1_l.\n      f_equal.\n      rewrite mkwordK.\n      rewrite Zmod_small.\n        \n      reflexivity.\n\n      clear -H.\n      unfold modulus.\n      unfold two_power_nat.\n      cbn in *.\n      lia.\n    }\n    rewrite IHfuel.\n    reflexivity.\n    lia.\nQed.\n\n(* You can do one iteration of the fold by burning a unit of fuel *)\nLemma foldi__nat_move_S_fuel :\n  forall {acc: Type}\n         (fuel : nat)\n         (i : nat)\n         (f : nat -> acc -> acc)\n         (cur : acc),\n    (0 <= Z.of_nat fuel <= @wmax_unsigned U32)%Z ->\n    f (fuel + i)%nat (foldi_nat_ fuel i f cur) = foldi_nat_ (S fuel) i f cur.\nProof.\n  induction fuel ; intros.\n  - reflexivity.\n  - do 2 rewrite <- foldi__nat_move_S.\n    replace (S fuel + i)%nat with (fuel + (S i))%nat by (symmetry ; apply plus_Snm_nSm).\n    rewrite IHfuel.\n    + reflexivity.\n    + lia.\nQed.\n\n(* folds and natural number folds compute the same thing *)\nLemma foldi_to_foldi_nat :\n  forall {acc: Type}\n         (lo: uint_size)\n         (hi: uint_size) (* {lo <= hi} *)\n         (f: (uint_size) -> acc -> acc) (* {i < hi} *)\n         (init: acc),\n    (unsigned lo <= unsigned hi)%Z ->\n    foldi lo hi f init = foldi_nat (Z.to_nat (unsigned lo)) (Z.to_nat (unsigned hi)) (fun x => f (repr _ (Z.of_nat x))) init.\nProof.\n  intros.\n\n  unfold foldi.\n  unfold foldi_nat.\n\n  destruct (uint_size_as_nat hi) as [ hi_n [ hi_eq hi_H ] ] ; subst.\n  rewrite (@unsigned_repr_alt U32 _ hi_H) in *.\n  rewrite Nat2Z.id.\n\n  
destruct (uint_size_as_nat lo) as [ lo_n [ lo_eq lo_H ] ] ; subst.\n  rewrite (@unsigned_repr_alt U32 _ lo_H) in *.\n  rewrite Nat2Z.id.\n\n  remember (hi_n - lo_n)%nat as n.\n  apply f_equal with (f := Z.of_nat) in Heqn.\n  rewrite (Nat2Z.inj_sub) in Heqn by (apply Nat2Z.inj_le ; apply H).\n  rewrite <- Heqn.\n\n  assert (H_bound : (Z.pred 0 < Z.of_nat n < @modulus U32)%Z) by lia.\n\n  clear Heqn.\n  induction n.\n  - reflexivity.\n  - pose proof (H_max_bound := modulus_range_helper _ (range_of_nat_succ _ H_bound)).\n    rewrite <- foldi__nat_move_S_fuel by apply H_max_bound.\n    cbn.\n    rewrite SuccNat2Pos.id_succ.\n    rewrite <- foldi__move_S_fuel by apply H_max_bound.\n\n    destruct n.\n    + cbn.\n      replace (repr _ 0%Z) with (@zero U32) by (rewrite wrepr0 ; reflexivity).\n      rewrite add_zero_l.\n      reflexivity.\n    + cbn in *.\n      assert (H_bound_pred: (Z.pred 0 < Z.pos (Pos.of_succ_nat n) < @modulus U32)%Z) by lia.\n      rewrite <- (IHn H_bound_pred) ; clear IHn.\n      f_equal.\n      * rewrite add_repr.\n        do 2 rewrite Zpos_P_of_succ_nat.\n        rewrite Z.add_succ_l.\n        rewrite Nat2Z.inj_add.\n        reflexivity.\n      * rewrite SuccNat2Pos.id_succ.\n        rewrite foldi__move_S.\n        reflexivity.\nQed.\n\n(* folds can be computed by doing one iteration and incrementing the lower bound *)\nLemma foldi_nat_split_S :\n  forall {acc: Type}\n         (lo: nat)\n         (hi: nat) (* {lo <= hi} *)\n         (f: nat -> acc -> acc) (* {i < hi} *)\n         (init: acc),\n    (lo < hi)%nat ->\n    foldi_nat lo hi f init = foldi_nat (S lo) hi f (foldi_nat lo (S lo) f init).\nProof.\n  unfold foldi_nat.\n  intros.\n\n  assert (succ_sub_diag : forall n, (S n - n = 1)%nat) by lia.\n  rewrite (succ_sub_diag lo).\n\n  induction hi ; [ lia | ].\n  destruct (S hi =? 
S lo)%nat eqn:hi_eq_lo.\n  - apply Nat.eqb_eq in hi_eq_lo ; rewrite hi_eq_lo in *.\n    rewrite (succ_sub_diag lo).\n    rewrite Nat.sub_diag.\n    reflexivity.\n  - apply Nat.eqb_neq in hi_eq_lo.\n    apply Nat.lt_gt_cases in hi_eq_lo.\n    destruct hi_eq_lo.\n    + lia.\n    + rewrite (Nat.sub_succ_l (S lo)) by apply (Nat.lt_le_pred _ _ H0).\n      rewrite Nat.sub_succ_l by apply (Nat.lt_le_pred _ _ H).\n      replace ((S (hi - S lo))) with (hi - lo)%nat by lia.\n      reflexivity.\nQed.\n\n(* folds can be split at some valid offset from lower bound *)\nLemma foldi_nat_split_add :\n  forall (k : nat),\n  forall {acc: Type}\n         (lo: nat)\n         (hi: nat) (* {lo <= hi} *)\n         (f: nat -> acc -> acc) (* {i < hi} *)\n         (init: acc),\n  forall {guarantee: (lo + k <= hi)%nat},\n    foldi_nat lo hi f init = foldi_nat (k + lo) hi f (foldi_nat lo (k + lo) f init).\nProof.\n  induction k ; intros.\n  - cbn.\n    unfold foldi_nat.\n    rewrite Nat.sub_diag.\n    reflexivity.\n  - rewrite foldi_nat_split_S by lia.\n    replace (S k + lo)%nat with (k + S lo)%nat by lia.\n    specialize (IHk acc (S lo) hi f (foldi_nat lo (S lo) f init)).\n    rewrite IHk by lia.\n    f_equal.\n    rewrite <- foldi_nat_split_S by lia.\n    reflexivity.\nQed.\n\n(* folds can be split at some midpoint *)\nLemma foldi_nat_split :\n  forall (mid : nat), (* {lo <= mid <= hi} *)\n  forall {acc: Type}\n         (lo: nat)\n         (hi: nat) (* {lo <= hi} *)\n         (f: nat -> acc -> acc) (* {i < hi} *)\n         (init: acc),\n  forall {guarantee: (lo <= mid <= hi)%nat},\n    foldi_nat lo hi f init = foldi_nat mid hi f (foldi_nat lo mid f init).\nProof.\n  intros.\n  assert (mid_is_low_plus_constant : {k : nat | (mid = lo + k)%nat})  by (exists (mid - lo)%nat ; lia).\n  destruct mid_is_low_plus_constant ; subst.\n  rewrite Nat.add_comm.\n  apply foldi_nat_split_add.\n  apply guarantee.\nQed.\n\n(* folds can be split at some midpoint *)\nLemma foldi_split :\n  forall (mid : 
uint_size), (* {lo <= mid <= hi} *)\n  forall {acc: Type}\n         (lo: uint_size)\n         (hi: uint_size) (* {lo <= hi} *)\n         (f: uint_size -> acc -> acc) (* {i < hi} *)\n         (init: acc),\n  forall {guarantee: (unsigned lo <= unsigned mid <= unsigned hi)%Z},\n    foldi lo hi f init = foldi mid hi f (foldi lo mid f init).\nProof.\n  intros.\n  do 3 rewrite foldi_to_foldi_nat by lia.\n  apply foldi_nat_split ; lia.\nQed.\n\n(*** Path / Sorted util *)\n\nLemma path_sorted_tl :\n  forall {T : ordType} {A} {e} {fmval : list (T * A)},\n  is_true (path.sorted e (seq.unzip1 fmval)) ->\n  is_true (path.sorted e (seq.unzip1 (tl fmval))).\nProof.\n  intros.\n  destruct fmval.\n  - easy.\n  - cbn.\n    cbn in H.\n    destruct (seq.unzip1 fmval).\n    + reflexivity.\n    + cbn in H.\n      now rewrite LocationUtility.is_true_split_and in H.\nQed.\n\nCorollary path_path_tl :\n  forall {T : ordType} {A} {e} {x : T} {fmval : list (T * A)},\n    is_true (path.path e x (seq.unzip1 fmval)) ->\n    is_true (path.sorted e (seq.unzip1 (fmval))).\nProof.\n  intros.\n  destruct fmval. reflexivity.\n  apply (path_sorted_tl (fmval := (x, snd p) :: p :: fmval)).\n  apply H.\nQed.\n\nLemma path_sorted_remove :\n  forall {A : ordType} {B} {e} (x y : A * B) (l : list (A * B)),\n    ssrbool.transitive e ->\n  is_true\n    (path.sorted e\n        (seq.unzip1\n           (x :: y :: l))) ->\n  is_true\n    (path.sorted e\n        (seq.unzip1\n           (x :: l))).\nProof.\n  intros.\n  cbn.\n  induction l.\n  reflexivity.\n  cbn.\n  cbn in *.\n  rewrite !LocationUtility.is_true_split_and in H0.\n  destruct H0 as [? 
[]].\n  rewrite H0 in IHl.\n\n  rewrite !LocationUtility.is_true_split_and.\n  split.\n  - eapply H.\n    apply H0.\n    apply H1.\n  - apply H2.\nQed.\n\nCorollary path_path_remove :\n  forall {A : ordType} {B} {e} (x : A) (y : A * B) (l : list (A * B)),\n    ssrbool.transitive (T:=A) e ->\n  is_true (path.path e (x) (seq.unzip1 (y :: l))) ->\n  is_true (path.path e (x) (seq.unzip1 l)).\nProof.\n  intros.\n  apply (path_sorted_remove (x, snd y) y l H).\n  apply H0.\nQed.\n\nLemma path_sorted_rev_last :\n  forall {A : ordType} {B} {e} (a0 : A * B) (l : list (A * B)),\n  is_true (path.sorted e (seq.unzip1 (seq.rev (a0 :: l)))) ->\n  is_true (path.sorted e (seq.unzip1 (seq.rev l))).\nProof.\n  intros.\n\n  unfold seq.unzip1 ; rewrite seq.map_rev ; fold (seq.unzip1 l).\n  rewrite path.rev_sorted.\n  apply (path_sorted_tl (fmval := (a0 :: l))).\n  rewrite <- path.rev_sorted.\n  unfold seq.unzip1 ; rewrite <- seq.map_rev ; fold (seq.unzip1 (seq.rev (a0 :: l))).\n  assumption.\nQed.\n\n(*** Seq *)\n\nDefinition nseq_ (A: choice_type) (len : nat) : choice_type :=\n  match len with\n  | O => chUnit\n  | S n => chMap ('fin (S n)) (A)\n  end.\nNotation \"'nseq'\" := (fun (A: choice_type) (len : choice.Choice.sort uint_size) => nseq_ A (from_uint_size (UInt_sizeable := nat_uint_sizeable) len)).\n\n(* Definition nseq_type (A: choice_type) (len : nat) : Type := *)\n(*   match len with *)\n(*   | 0%nat => unit *)\n(*   | S n => { fmap ('I_len) -> A } *)\n(*   end. *)\n\nDefinition seq (A : choice_type) : choice_type := chMap 'nat (A).\n(* Definition seq_type (A : choice_type) : Type := FMap.fmap_type nat_ordType (A). 
*)\n\nDefinition public_byte_seq := seq int8.\nDefinition byte_seq := seq int8.\nDefinition list_len := length.\n\nDefinition seq_index_nat {A: choice_type} (s: (seq A)) (i : nat) : A :=\n  match getm s i with\n  | Some a => a\n  | None => chCanonical A\n  end.\n\nDefinition seq_index {A: choice_type} (s: (seq A)) (i : uint_size) : A :=\n  seq_index_nat s (from_uint_size i).\n\nDefinition seq_len_nat {A: choice_type} (s: (seq A)) : nat :=\n  match (FMap.fmval s) with\n  | [] => 0\n  | (x :: xs) => S (fst (seq.last x xs))\n  end.\n\nDefinition seq_len {A: choice_type} (s: (seq A)) : (uint_size) :=\n  usize (seq_len_nat s).\n\nDefinition seq_to_list (A: choice_type) (s : (seq A)) : list (A) :=\n  seq.map (fun n => seq_index_nat s n) (seq.iota 0 (seq_len_nat s)).\n\nDefinition seq_from_list (A : choice_type) (l : list (A)) : (seq A) :=\n  fmap_of_seq l.\n\nLemma seq_from_list_cat : forall A l a, seq_from_list A (l ++ [a]) = setm (seq_from_list A l) (seq.size l) a.\nProof.\n  clear ; intros.\n  unfold seq_from_list.\n  apply eq_fmap.\n\n  intros i.\n  rewrite fmap_of_seqE.\n  rewrite setmE.\n\n  destruct eqtype.eq_op eqn:i_size_l.\n  - apply (ssrbool.elimT eqtype.eqP) in i_size_l.\n    subst.\n\n    rewrite (seq.nth_map a).\n    2:{\n      rewrite seq.size_cat.\n      now rewrite ssrnat.addn1.\n    }\n    rewrite seq.nth_cat.\n    rewrite ssrnat.ltnn.\n    rewrite ssrnat.subnn.\n    reflexivity.\n  - rewrite fmap_of_seqE.\n    destruct (ssrnat.leq (seq.size (l ++ [a])) i) eqn:i_in_l.\n    + rewrite seq.nth_default.\n      2:{\n        rewrite seq.size_map.\n        apply i_in_l.\n      }\n      rewrite seq.nth_default.\n      2:{\n        rewrite seq.size_map.\n        eapply ssrnat.leq_trans.\n        apply ssrnat.leqnSn.\n        rewrite seq.size_cat in i_in_l.\n        rewrite ssrnat.addn1 in i_in_l.\n        apply i_in_l.\n      }\n      reflexivity.\n    + assert (is_true (ssrnat.leq (S i) (seq.size l))).\n      {\n        rewrite ssrnat.leqNgt.\n        rewrite 
ssrnat.ltnS.\n        rewrite ssrnat.leq_eqVlt.\n        rewrite Bool.negb_orb.\n        rewrite eqtype.eq_sym.\n        setoid_rewrite i_size_l.\n        rewrite seq.size_cat in i_in_l.\n        rewrite ssrnat.addn1 in i_in_l.\n        rewrite i_in_l.\n        reflexivity.\n      }\n\n      rewrite <- (@seq.nth_take (seq.size l) (option (A)) None i H (seq.map (fun x : A => Some x) (l ++ [a]))).\n      rewrite <- seq.map_take.\n      rewrite seq.take_size_cat ; [ | reflexivity ].\n      reflexivity.\nQed.\n\nLemma sorted_last_leq :\n  forall {A : ordType }{B} (a0 : A * B) (l : list (A * B)),\n    is_true (path.sorted Ord.lt (seq.unzip1 (a0 :: l))) ->\n    is_true (fst a0 <= (fst (seq.last a0 l)))%ord.\nProof.\n  intros ? ? a0 fmval i.\n\n  generalize dependent a0.\n  induction fmval ; intros.\n  - apply Ord.leqxx.\n  - simpl.\n    specialize (IHfmval a0 (path_sorted_remove (e := Ord.lt) _ _ _ (@Ord.lt_trans _) i)).\n    erewrite Ord.leq_trans.\n    reflexivity.\n    apply IHfmval.\n    destruct fmval.\n    + simpl.\n      simpl in i.\n      rewrite Bool.andb_true_r in i.\n      unfold Ord.lt in i.\n      rewrite LocationUtility.is_true_split_and in i.\n      apply i.\n    + simpl.\n      apply Ord.leqxx.\nQed.\n\nCorollary sorted_last_nat_lt :\n   forall {B} (a0 : nat * B) (l : list (nat * B)),\n    is_true (path.sorted Ord.lt (seq.unzip1 (a0 :: l))) ->\n    is_true (fst a0 < S (fst (seq.last a0 l)))%ord.\nProof.\n  intros.\n  pose (sorted_last_leq a0 l H).\n  rewrite Ord.lt_neqAle.\n  rewrite (Ord.leq_trans _ _ _ i) ; [ | easy ].\n\n  destruct (eqtype.eq_op _ _) eqn:p_eq_last.\n  - apply (ssrbool.elimT eqtype.eqP) in p_eq_last.\n    setoid_rewrite p_eq_last in i.\n    cbn in i.\n    rewrite <- ssrnat.subnE in i.\n    rewrite ssrnat.subSnn in i.\n    discriminate.\n  - easy.\nQed.\n\nTheorem ord_lt_nleq_and_neq :\n   forall {A : ordType} {a b : A},\n     is_true (a < b)%ord -> (b < a)%ord = false /\\ (eqtype.eq_op b a) = false.\nProof.\n  intros.\n\n  rewrite 
Ord.ltNge in H.\n  rewrite Ord.leq_eqVlt in H.\n  rewrite Bool.negb_orb in H.\n  rewrite LocationUtility.is_true_split_and in H.\n  destruct H.\n  apply ssrbool.negbTE in H.\n  apply ssrbool.negbTE in H0.\n  rewrite H , H0 ; clear H H0.\n  easy.\nQed.\n\nCorollary sorted_last_is_last :\n   forall {B} (a0 : nat * B) (l : list (nat * B)),\n    is_true (path.sorted Ord.lt (seq.unzip1 (a0 :: l))) ->\n        (S (fst (seq.last a0 l)) < fst a0)%ord = false /\\\n          (@eqtype.eq_op (nat : ordType) (S (fst (seq.last a0 l))) (fst a0) = false).\nProof.\n  intros.\n\n  pose (i0 := sorted_last_nat_lt a0 l H).\n  destruct (ord_lt_nleq_and_neq i0).\n  easy.\nQed.\n\nTheorem ord_leq_lt_trans :\n  forall {A : ordType} {a b c : A}, is_true (a <= b)%ord -> is_true (b < c)%ord -> is_true (a < c)%ord.\nProof.\n  intros.\n  pose proof (Ord.leq_trans _ _ _ H (Ord.ltW H0)).\n  rewrite Ord.leq_eqVlt in H1.\n  rewrite LocationUtility.is_true_split_or in H1.\n  destruct H1.\n  - apply (ssrbool.elimT eqtype.eqP) in H1.\n    subst.\n    rewrite Ord.leq_eqVlt in H.\n    rewrite LocationUtility.is_true_split_or in H.\n    destruct H.\n    + apply (ssrbool.elimT eqtype.eqP) in H.\n      subst.\n      now rewrite Ord.ltxx in H0.\n    + pose proof (Ord.lt_trans H H0).\n      now rewrite Ord.ltxx in H1.\n  - apply H1.\nQed.\n\nTheorem ord_lt_leq_trans :\n  forall {A : ordType} {a b c : A}, is_true (a < b)%ord -> is_true (b <= c)%ord -> is_true (a < c)%ord.\nProof.\n  intros.\n  pose proof (Ord.leq_trans _ _ _ (Ord.ltW H) H0).\n  rewrite Ord.leq_eqVlt in H1.\n  rewrite LocationUtility.is_true_split_or in H1.\n  destruct H1.\n  - apply (ssrbool.elimT eqtype.eqP) in H1.\n    subst.\n    rewrite Ord.leq_eqVlt in H0.\n    rewrite LocationUtility.is_true_split_or in H0.\n    destruct H0.\n    + apply (ssrbool.elimT eqtype.eqP) in H0.\n      subst.\n      now rewrite Ord.ltxx in H.\n    + pose proof (Ord.lt_trans H H0).\n      now rewrite Ord.ltxx in H1.\n  - apply H1.\nQed.\n\nTheorem ord_lt_is_leq 
:\n  forall {a b : nat}, is_true (a < b)%ord -> is_true (S a <= b)%ord.\nProof.\n  intros.\n  generalize dependent a.\n  induction b ; intros.\n  - destruct a ; easy.\n  - destruct a ; [ easy | ].\n    cbn.\n    cbn in IHb.\n    apply IHb.\n    apply H.\nQed.\n\nTheorem seq_len_nat_setm : forall {A} (l : (seq A)) a,\n    forall x, is_true (seq_len_nat l <= x)%ord ->\n    seq_len_nat (setm l x a) = S x.\nProof.\n  intros.\n  destruct l.\n  destruct fmval.\n  -     reflexivity.\n  - unfold seq_len_nat.\n    simpl.\n    destruct (ord_lt_nleq_and_neq (ord_lt_leq_trans (sorted_last_nat_lt p fmval i) H)).\n    setoid_rewrite H0.\n    setoid_rewrite H1.\n    clear H0 H1.\n\n    generalize dependent p.\n    induction fmval ; intros.\n    * reflexivity.\n    * simpl.\n\n      destruct (ord_lt_nleq_and_neq (ord_lt_leq_trans (sorted_last_nat_lt a0 fmval (path_sorted_tl i)) H)).\n      setoid_rewrite H0.\n      setoid_rewrite H1.\n      clear H0 H1.\n      simpl.\n\n      rewrite (IHfmval a0 (path_sorted_tl i)).\n      reflexivity.\n      apply H.\nQed.\n\nCorollary seq_len_nat_setm_len : forall {A} (l : (seq A)) a,\n    seq_len_nat (setm l (seq_len_nat l) a) = S (seq_len_nat l).\nProof.\n  intros.\n  apply seq_len_nat_setm.\n  easy.\nQed.\n\nTheorem seq_from_list_size : forall A l,\n    seq.size l = seq_len_nat (seq_from_list A l).\nProof.\n  intros.\n  rewrite <- (rev_involutive l).\n  induction (rev l).\n  - reflexivity.\n  - simpl.\n    rewrite seq_from_list_cat.\n    rewrite seq.size_cat.\n    rewrite IHl0 ; clear IHl0.\n    rewrite ssrnat.addn1.\n\n    now erewrite (seq_len_nat_setm (seq_from_list A (rev l0))).\nQed.\n\n\nLemma destruct_fmap_last :\n  forall {A : ordType} {B} (a0 : A * B) (l : list (A * B)) i,\n    (FMap.FMap (fmval:=seq.rev (a0 :: l)) i = setm (FMap.FMap (fmval:=seq.rev l) (path_sorted_rev_last a0 l i)) (fst a0) (snd a0)).\nProof.\n  intros.\n  apply eq_fmap.\n  intros v.\n  rewrite setmE.\n\n  destruct (eqtype.eq_op v (fst a0)) eqn:v_eq_a0.\n  - apply 
(ssrbool.elimT eqtype.eqP) in v_eq_a0.\n    subst.\n\n    generalize dependent l.\n    intros l.\n    rewrite seq.rev_cons.\n    intros.\n    unfold getm ; simpl.\n\n    induction (seq.rev l).\n    + simpl. now rewrite eqtype.eq_refl.\n    + simpl.\n      rewrite IHl0 ; clear IHl0.\n      * simpl in i.\n        unfold seq.unzip1 in i.\n        rewrite seq.map_rcons in i.\n        rewrite path.rcons_path in i.\n        rewrite LocationUtility.is_true_split_and in i.\n        destruct i.\n        pose (H1 := sorted_last_leq a l0 H).\n        rewrite seq.last_map in H0.\n        epose (ord_leq_lt_trans H1 H0).\n        rewrite Ord.lt_neqAle in i.\n        rewrite LocationUtility.is_true_split_and in i.\n        destruct i.\n        apply ssrbool.negbTE in H2.\n        rewrite eqtype.eq_sym.\n        rewrite H2.\n        reflexivity.\n\n        destruct l0.\n        -- reflexivity.\n        -- simpl.\n           simpl in i.\n           rewrite LocationUtility.is_true_split_and in i.\n           apply i.\n      * unfold getm.\n        simpl.\n        unfold seq.rev at 1.\n        simpl.\n        rewrite seq.catrevE.\n        induction (seq.rev l) ; simpl.\n        -- now rewrite v_eq_a0.\n        -- now rewrite IHl0.\nQed.\n\nLemma seq_to_list_setm : forall {A : choice_type} (l : (seq A)) a,\n  seq_to_list A (setm l (seq_len_nat l) a) = seq_to_list A l ++ [a].\nProof.\n  intros.\n\n  unfold seq_to_list.\n  rewrite seq_len_nat_setm.\n  rewrite <- ssrnat.addn1.\n  rewrite seq.iotaD.\n  rewrite ssrnat.add0n.\n  simpl.\n  rewrite seq.map_cat.\n  simpl.\n  unfold seq_index_nat.\n  rewrite setmE.\n  rewrite eqtype.eq_refl.\n\n  set (seq.map _ _).\n  set (seq.map _ _).\n\n  assert (l0 = l1) ; subst l0 l1.\n  {\n    set (seq_len_nat l) at 1.\n    assert (seq_len_nat l <= n)%nat by reflexivity.\n    generalize dependent n.\n    induction (seq_len_nat l) ; intros.\n    - reflexivity.\n    - rewrite <- ssrnat.addn1.\n      rewrite seq.iotaD.\n      rewrite <- ssrnat.addn1.\n      
rewrite seq.iotaD.\n      rewrite !ssrnat.add0n.\n      rewrite !ssrnat.addn0.\n      simpl.\n      rewrite seq.map_cat.\n      rewrite seq.map_cat.\n\n      f_equal.\n      {\n        setoid_rewrite IHn.\n        reflexivity.\n        lia.\n      }\n      {\n        simpl.\n        rewrite setmE.\n        replace (eqtype.eq_op _ _) with false.\n        2:{\n          clear -H.\n          cbn.\n          generalize dependent n0.\n          induction n ; intros.\n          - destruct n0 ; easy.\n          - destruct n0 ; [ easy | ].\n            simpl.\n            specialize (IHn n0).\n            rewrite IHn.\n            reflexivity.\n            lia.\n        }\n        reflexivity.\n      }\n  }\n\n  now rewrite H.\n  easy.\nQed.\n\nDefinition seq_from_list_id : forall {A : choice_type} (t : list (A)),\n    seq_to_list  A (seq_from_list A t) = t.\nProof.\n  intros.\n  rewrite <- (seq.revK t).\n  induction (seq.rev t).\n  - reflexivity.\n  - simpl.\n    rewrite seq.rev_cons.\n    set (h := seq.rev l) at 1 ; rewrite <- IHl ; subst h. 
clear IHl.\n    rewrite <- !seq.cats1.\n    rewrite seq_from_list_cat.\n    rewrite seq_from_list_size.\n    rewrite seq_to_list_setm.\n    reflexivity.\nQed.\n\nDefinition seq_to_list_size :\n  forall {A : choice_type} (t : (seq A)),\n    seq.size (seq_to_list A t) = seq_len_nat t.\nProof.\n  intros.\n  destruct t.\n  generalize dependent fmval.\n  intros fmval.\n  rewrite <- (seq.revK fmval).\n  intros.\n\n  induction (seq.rev fmval).\n  - reflexivity.\n  - rewrite destruct_fmap_last.\n\n    intros.\n\n    unfold seq_to_list in *.\n    rewrite seq_len_nat_setm.\n\n    rewrite <- ssrnat.addn1.\n    rewrite seq.iotaD.\n    rewrite ssrnat.add0n.\n    simpl.\n    rewrite seq.map_cat.\n    simpl.\n\n    rewrite ssrnat.addn1.\n\n    unfold seq_index_nat.\n\n    rewrite setmE.\n    rewrite eqtype.eq_refl.\n\n    rewrite seq.size_cat.\n    rewrite seq.size_map.\n    rewrite seq.size_iota.\n    simpl.\n    rewrite ssrnat.addn1.\n    reflexivity.\n\n    unfold seq_len_nat.\n    simpl.\n\n    clear -i.\n\n    rewrite seq.rev_cons in i.\n    rewrite <- seq.cats1 in i.\n\n    (* set seq.rev in i ; unfold Ord.sort, nat_ordType in l0 ; subst l0. 
*)\n    destruct (seq.rev _).\n    + easy.\n    + generalize dependent p.\n      induction l0 ; intros.\n      * simpl.\n        simpl in i.\n        rewrite Bool.andb_true_r in i.\n        now apply ord_lt_is_leq.\n      * simpl.\n        apply IHl0.\n        apply (path_sorted_tl i).\nQed.\n\nDefinition seq_new_ {A: choice_type} (init : A) (len: uint_size) : (seq A) :=\n  fmap_of_seq (repeat init (Z.to_nat (unsigned len))).\n\nDefinition seq_new {A: choice_type} (len: uint_size) : (seq A) :=\n  seq_new_ (chCanonical A) len.\n\nDefinition seq_create {A: choice_type} (len: uint_size) : (seq A) :=\n  seq_new len.\n\nDefinition repr_Z_succ : forall WS z, @repr WS (Z.succ z) = (repr _ z .+ one).\nProof.\n  intros.\n  replace one with (@repr WS 1%Z) by (unfold one ; now rewrite word1_zmodE).\n  now rewrite add_repr.\nQed.\n\nLemma lt_succ_diag_r_sub : forall x k, (x - k < S x)%nat.\nProof.\n  intros.\n  generalize dependent x.\n  induction k ; intros.\n  - rewrite Nat.sub_0_r.\n    apply Nat.lt_succ_diag_r.\n  - destruct x.\n    + apply Nat.lt_succ_diag_r.\n    + cbn.\n      apply Nat.lt_lt_succ_r.\n      apply (IHk x).\nQed.\n\nDefinition setm_leave_default {T : ordType} {S : choice_type}\n       (m : {fmap T -> S}) (i : T) (e : S) : {fmap T -> S} :=\n  if eqtype.eq_op e (chCanonical S)\n  then m\n  else setm m i e.\n\nEquations array_from_list_helper {A: choice_type} (x : A) (xs: list (A)) (k : nat) : (nseq_ A (S k)) :=\n  array_from_list_helper x [] k :=\n    setm\n      emptym\n      (Ordinal (ssrbool.introT ssrnat.ltP (lt_succ_diag_r_sub k O)))\n      x ;\n  array_from_list_helper x (y :: ys) k :=\n    setm\n      (array_from_list_helper y ys k)\n      (Ordinal (ssrbool.introT ssrnat.ltP (lt_succ_diag_r_sub k (length (y :: ys)))))\n      x.\n\nDefinition array_from_list {A: choice_type} (l: list (A))\n  : (nseq_ A (length l)) :=\n  match l with\n    nil => tt\n  | (x :: xs) => array_from_list_helper x xs (length xs)\n  end.\n\nDefinition resize_to_k {A : 
choice_type} (l : list A) k := List.rev (seq.drop (length l - k) (List.rev l)) ++ (List.repeat (chCanonical A) (k - length l)).\n\nTheorem length_resize_to_k : forall {A : choice_type} (l : list A) k, List.length (resize_to_k l k) = k.\nProof.\n  intros.\n  unfold resize_to_k.\n  rewrite List.app_length.\n  rewrite List.rev_length.\n  rewrite seq.size_drop.\n  rewrite List.repeat_length.\n  rewrite List.rev_length.\n  Lia.lia.\nDefined.\n\nTheorem resize_to_length_idemp : forall {A : choice_type} (l : list A), l = resize_to_k l (length l).\nProof.\n  intros.\n  induction l.\n  - reflexivity.\n  - unfold resize_to_k.\n    rewrite (Nat.sub_diag).\n    rewrite seq.drop0.\n    rewrite List.rev_involutive.\n    now rewrite List.app_nil_r.\nQed.\n\nDefinition array_from_list' {A: choice_type}  (l: list (A)) (k : nat)\n  : (nseq_ A k) :=\n  match k with\n  | O => (tt : (nseq_ A O))\n  | S k' =>\n      match resize_to_k l (S k') with\n        nil => fmap.emptym\n      | (x :: xs) => array_from_list_helper x xs k'\n      end\n  end.\n\nDefinition lift_ordinal n (x : 'I_n) : 'I_(S n).\nProof.\n  destruct x.\n  apply (Ordinal (m := S m)).\n  apply i.\nDefined.\n\nEquations lift_fval {A : choice_type} {n} (a : list ('I_(S n) * (A))) : list ('I_(S(S n)) * (A)) :=\n  lift_fval [] := [] ;\n  lift_fval (x :: xs) :=\n    (lift_ordinal (S n) (fst x) , snd x) :: lift_fval xs.\n\nLemma lift_is_sorted : forall  {A : choice_type} {n} (a : {fmap 'I_(S n) -> (A)}), is_true (path.sorted Ord.lt (seq.unzip1 (lift_fval a))).\nProof.\n  intros.\n  destruct a.\n  simpl.\n\n  induction fmval.\n  - reflexivity.\n  - destruct a.\n    simpl.\n    intros.\n    rewrite lift_fval_equation_2 ; simpl.\n    destruct fmval.\n    + reflexivity.\n    + pose proof i.\n      rewrite lift_fval_equation_2 ; simpl.\n\n      simpl in H.\n      rewrite LocationUtility.is_true_split_and in H.\n      destruct H.\n\n      rewrite LocationUtility.is_true_split_and.\n      split ; [ | ].\n      2:{\n        apply 
IHfmval.\n        apply H0.\n      }\n\n      unfold lift_ordinal.\n      destruct s.\n      destruct (fst _).\n      apply H.\nQed.\n\nDefinition lift_nseq {A: choice_type} {len : nat} (x: nseq_ A len) : (nseq_ A (S len)) :=\n  match len as k return nseq_ A k -> nseq_ A (S k) with\n  | O => fun _ => emptym\n  | S n =>\n      fun x => @FMap.FMap _ _ (lift_fval (FMap.fmval x)) (lift_is_sorted x)\n  end x.\n\nDefinition setm_option {T : ordType} {S : choice_type}\n       (m : {fmap T -> S}) (i : T) (e : chOption S) : {fmap T -> S} :=\n  match e with\n  | Some x => setm m i x\n  | None => m\n  end.\n\nEquations array_from_option_list_helper {A: choice_type} (x : chOption A) (xs: list (chOption A)) (k : nat) : (nseq_ A (S k)) :=\n  array_from_option_list_helper x (y :: ys) O :=\n      emptym ;\n  array_from_option_list_helper x [] k :=\n    setm_option\n      emptym\n      (Ordinal (ssrbool.introT ssrnat.ltP (lt_succ_diag_r_sub k O)))\n      x ;\n  array_from_option_list_helper x (y :: ys) (S k) :=\n    setm_option\n      (lift_nseq (array_from_option_list_helper y ys k))\n      (Ordinal (ssrbool.introT ssrnat.ltP (lt_succ_diag_r_sub (S k) (length (y :: ys)))))\n      x.\nFail Next Obligation.\n\nDefinition array_from_option_list' {A: choice_type}  (l: list (chOption A)) (k : nat)\n  : (nseq_ A k) :=\n  match k with\n  | O => (tt : (nseq_ A O))\n  | S k' =>\n      match resize_to_k l (S k') with\n        nil => fmap.emptym\n      | (x :: xs) => array_from_option_list_helper x xs k'\n      end\n  end.\n\nTheorem list_rev_is_seq_rev : forall T (x : list T), List.rev x = seq.rev x.\nProof.\n  intros.\n  induction x.\n  - reflexivity.\n  - simpl.\n    rewrite IHx.\n    replace (a :: nil) with (seq.rev (a :: nil)) by reflexivity.\n    now rewrite <- seq.rev_cat.\nQed.\n\nTheorem simple0_array_from_list : forall (A : choice_type) (x : list A), array_from_list' x (List.length x) = array_from_list x.\nProof.\n  intros.\n  subst.\n  simpl.\n  induction x.\n  - reflexivity.\n  - 
simpl.\n    unfold resize_to_k.\n    simpl.\n    rewrite (Nat.sub_diag (length x)).\n    setoid_rewrite seq.drop0.\n    change (List.rev _ ++ _ :: nil) with (List.rev (a :: x)).\n    rewrite List.rev_involutive.\n    now rewrite List.app_nil_r.\nDefined.\n\nTheorem simple_array_from_list : forall (A : choice_type) (x : list A) len (H : List.length x = len), array_from_list' x len = (eq_rect (length x) (fun n : nat => nseq_ A n) (array_from_list x) len H).\nProof.\n  intros.\n  subst.\n  apply simple0_array_from_list.\nDefined.\n\n(**** Array manipulation *)\n\nDefinition array_new_ {A: choice_type} (init:A) (len: nat) : (nseq_ A len) :=\n  match len with\n    O => (tt : (nseq_ A 0))\n  | (S n') => array_from_list_helper init (repeat init n') n'\n  end.\n\nEquations array_index {A: choice_type} {len : nat} (s: (nseq_ A len)) {WS} (i: (@int WS)) : A :=\n  array_index (len := 0) s i := (chCanonical A) ;\n  array_index (len := (S n)) s i with le_lt_dec (S n) (Z.to_nat (unsigned i)) := {\n    | right a with (@getm _ _ s (fintype.Ordinal (n := S n) (m := Z.to_nat (unsigned i)) ((ssrbool.introT ssrnat.ltP a)))) => {\n      | Some f => f\n      | None => (chCanonical A)\n      }\n    | left b => (chCanonical A)\n    }.\n\nEquations array_upd {A: choice_type} {len : nat} (s: (nseq_ A len)) {WS} (i: (@int WS)) (new_v: A) : (nseq_ A len) :=\n  array_upd s i new_v with len :=\n    {\n      array_upd s i new_v n with lt_dec (Z.to_nat (unsigned i)) n := {\n        array_upd s i new_v O (left l)  => ltac:(apply Nat.ltb_lt in l ; discriminate) ;\n        array_upd s i new_v (S n) (left l)  => (setm s (fintype.Ordinal (n := S n) (m := Z.to_nat (unsigned i)) (ssrbool.introT ssrnat.ltP l)) new_v) ;\n        array_upd s i new_v n (right _) => s\n      }\n    }.\n\nDefinition array_upd2 {A: choice_type} {len : nat} (s: (nseq_ A len)) {WS} (i: (@int WS)) (new_v: A) : (nseq_ A len).\nProof.\n  destruct (Z.to_nat (unsigned i) <? 
len)%nat eqn:v.\n  (* If i < len, update normally *)\n  - apply Nat.ltb_lt in v.\n    destruct len. { lia. }\n    apply (setm s (fintype.Ordinal (m := Z.to_nat (unsigned i)) (ssrbool.introT ssrnat.ltP v)) new_v).\n  (* otherwise return original array *)\n  - exact s.\nDefined.\n\n(* substitutes a sequence (nseq) into an array (nseq), given index interval  *)\nDefinition update_sub {A : choice_type} {len slen} (v : (nseq_ A len)) (i : nat) (n : nat) (sub : (nseq_ A slen)) : (nseq_ A len) :=\n  let fix rec x acc :=\n    match x with\n    | 0%nat => acc\n    | S x => rec x (array_upd acc (usize (i+x)%nat) (array_index sub (usize x)))\n    end in\n  rec (n - i + 1)%nat v.\n\nDefinition array_from_seq\n           {A: choice_type}\n           (out_len:nat)\n           (input: (seq A))\n  : (nseq_ A out_len) :=\n  let out := array_new_ (chCanonical A) out_len in\n  update_sub out 0 (out_len - 1) (@array_from_list A (@seq_to_list A input)).\n\nDefinition slice {A} (l : list A) (i j : nat) : list A :=\n  if (j <=? 
i)%nat then [] else firstn (j-i+1) (skipn i l).\n\nDefinition lower_ordinal n (x : 'I_(S n)) (H: is_true (ord0 < x)%ord) : 'I_n.\nProof.\n  destruct x.\n  apply (Ordinal (m := Nat.pred m)).\n  apply ssrnat.ltnSE.\n  rewrite (Nat.lt_succ_pred 0).\n  - apply i.\n  - destruct m.\n    + discriminate.\n    + lia.\nDefined.\n\n\n\nEquations lower_fval {A : choice_type} {n} (a : list ('I_(S(S n)) * (A))) (H : forall x, In x a -> is_true (ord0 < fst x)%ord ) : list ('I_(S n) * (A)) :=\n  lower_fval [] H := [] ;\n  lower_fval (x :: xs) H :=\n    (lower_ordinal (S n) (fst x) (H x (or_introl eq_refl)) , snd x)\n      :: lower_fval xs (fun y H0 => H y (in_cons x y xs H0)).\n\nLemma lower_keeps_value : forall  {A : choice_type} {n} (a : {fmap 'I_(S(S n)) -> (A)}) H, (seq.map snd a = seq.map snd (lower_fval a H)).\nProof.\n  intros.\n  destruct a.\n  simpl in *.\n  induction fmval.\n  - cbn.\n    reflexivity.\n  - destruct a.\n    rewrite seq.map_cons.\n    erewrite IHfmval.\n    rewrite lower_fval_equation_2 ; simpl.\n    f_equal.\n    apply (path_sorted_tl i).\nQed.\n\nLemma lower_is_sorted : forall  {A : choice_type} {n} (a : {fmap 'I_(S(S n)) -> (A)}) H, is_true (path.sorted Ord.lt (seq.unzip1 (lower_fval a H))).\nProof.\n  intros.\n  destruct a.\n  simpl.\n  induction fmval.\n  - reflexivity.\n  - destruct a.\n    simpl.\n    intros.\n    rewrite lower_fval_equation_2 ; simpl.\n    destruct fmval.\n    + reflexivity.\n    + pose proof i.\n      rewrite lower_fval_equation_2 ; simpl.\n      simpl in H0 |- *.\n      rewrite LocationUtility.is_true_split_and in H0 |- *.\n\n      destruct H0.\n      split ; [ | ].\n      destruct p.\n      simpl.\n      destruct s, s1.\n\n      pose proof (H (Ordinal (n:=S (S n)) (m:=m) i0, s0) (or_introl eq_refl)).\n      pose proof (H (Ordinal (n:=S (S n)) (m:=m0) i1, s2)\n          (in_cons (Ordinal (n:=S (S n)) (m:=m) i0, s0)\n             (Ordinal (n:=S (S n)) (m:=m0) i1, s2)\n             ((Ordinal (n:=S (S n)) (m:=m0) i1, s2) :: fmval)\n 
            (or_introl eq_refl))).\n\n      unfold Ord.lt in H0 |- *.\n      unfold Ord.leq in H0 |- *.\n      cbn.\n\n      clear -H0 H2 H3.\n      rewrite LocationUtility.is_true_split_and in H0 |- *.\n      destruct H0.\n      cbn in H , H0.\n      destruct m, m0 ; easy.\n\n      specialize (IHfmval (path_sorted_tl i) ( fun x H2 => H x (in_cons _ _ _  H2))).\n      rewrite lower_fval_equation_2 in IHfmval ; simpl in IHfmval.\n      simpl.\n      apply IHfmval.\nQed.\n\nCorollary lower_list_is_sorted : forall  {A : choice_type} {n} (a : list ('I_(S(S n)) * (A))) H, is_true (path.sorted Ord.lt (seq.unzip1 a)) -> is_true (path.sorted Ord.lt (seq.unzip1 (lower_fval a H))).\nProof.\n  intros.\n  refine (lower_is_sorted (@FMap.FMap _ _ a _) _).\n  apply H0.\nQed.\n\nLemma ord_ext : forall {n} m0 m1 {H1 H2}, m0 = m1 <-> Ordinal (n := S n) (m := m0) H1 = Ordinal (n := S n) (m := m1) H2.\nProof.\n  intros.\n  rewrite <- (inord_val (Ordinal H1)).\n  rewrite <- (inord_val (Ordinal H2)).\n  split. intros. subst. reflexivity.\n  intros. cbn in H.\n  unfold inord in H.\n  unfold eqtype.insubd in H.\n  unfold eqtype.insub in H.\n  destruct ssrbool.idP in H.\n  destruct ssrbool.idP in H.\n  cbn in H.\n  inversion H.\n  reflexivity.\n  contradiction.\n  contradiction.\nQed.\n\nLemma lower_fval_ext : forall  {A : choice_type} {n} (a b : {fmap 'I_(S(S n)) -> A}) H1 H2, a = b <-> lower_fval a H1 = lower_fval b H2.\nProof.\n  intros.\n  split.\n  - intros.\n    subst.\n    destruct b.\n    simpl.\n    induction fmval.\n    + reflexivity.\n    + simpl.\n      destruct a, s.\n      rewrite !lower_fval_equation_2.\n      f_equal.\n      * f_equal.\n        apply ord_ext. 
reflexivity.\n      * apply IHfmval.\n        apply (path_sorted_tl i).\n  - intros.\n    apply eq_fmap.\n    intros i.\n\n    destruct a.\n    destruct b.\n    cbn in H.\n    cbn.\n\n    f_equal.\n\n    generalize dependent fmval0.\n    induction fmval as [ | p ] ; intros ; destruct fmval0 as [ | p0 ] ; try rewrite !lower_fval_equation_2 in H ; try rewrite !lower_fval_equation_1 in H ; try easy.\n\n    inversion H.\n    epose (H1 p (or_introl eq_refl)).\n    epose (H2 p0 (or_introl eq_refl)).\n\n    destruct p.\n    destruct p0.\n    cbn in H4.\n    subst.\n    destruct s.\n    destruct s1.\n    apply ord_ext in H3.\n    f_equal.\n    {\n      f_equal.\n      apply ord_ext.\n      destruct m, m0 ; try discriminate.\n      cbn in H3.\n      now rewrite H3.\n    }\n    {\n      eapply IHfmval.\n      apply H5.\n\n      Unshelve.\n      apply (path_sorted_tl i0).\n      apply (path_sorted_tl i1).\n    }\nQed.\n\n\nLemma lower_fval_ext_list : forall  {A : choice_type} {n} (a b : list ('I_(S(S n)) * (A))) (Ha : is_true (path.sorted Ord.lt (seq.unzip1 a))) (Hb : is_true (path.sorted Ord.lt (seq.unzip1 b))) H1 H2, a = b <-> lower_fval a H1 = lower_fval b H2.\nProof.\n  intros.\n  epose (lower_fval_ext (@FMap.FMap _ _ a Ha) (@FMap.FMap _ _ b Hb) H1 H2).\n  simpl in i.\n  rewrite <- i.\n  split.\n  intros.\n  apply fmap.eq_fmap.\n  intros x.\n  subst.\n  reflexivity.\n  intros.\n  now inversion H.\nQed.\n\n\nLemma gt_smallest_sorted : forall {A} {n} {p : 'I_n * A} {fmval}, is_true (path.sorted Ord.lt (seq.unzip1 (p :: fmval))) -> (forall x, In x fmval -> is_true (fst p < fst x)%ord).\n  intros.\n  induction fmval.\n  - contradiction.\n  - cbn in H.\n    rewrite LocationUtility.is_true_split_and in H.\n    destruct H.\n    destruct H0.\n    + subst.\n      apply H.\n    + apply IHfmval.\n      cbn.\n      eapply path.path_le.\n      apply (@Ord.lt_trans _).\n      apply H.\n      apply H1.\n      apply H0.\nQed.\n\nCorollary tl_gt_0_sorted : forall {A} {n} {p : 'I_(S n) * 
A} {fmval}, is_true (path.sorted Ord.lt (seq.unzip1 (p :: fmval))) -> (forall x, In x fmval -> is_true (ord0 < fst x)%ord).\n  intros.\n  induction fmval ; [ easy | ].\n  pose proof H.\n  simpl in H1.\n  rewrite LocationUtility.is_true_split_and in H1.\n  destruct H1.\n  destruct H0.\n  - subst.\n    destruct p, o.\n    destruct m.\n    + apply H1.\n    + eapply Ord.lt_trans. 2: apply (gt_smallest_sorted H) ; now left.\n      easy.\n  - refine (IHfmval _ H0).\n    cbn.\n    eapply path.path_le.\n    apply Ord.lt_trans.\n    apply H1.\n    apply H2.\nQed.\n\nLemma in_nseq_tl_gt_zero {A} {n} {m'} {i3} {k} fmval (i :\n  is_true (path.path Ord.lt (fst (@Ordinal _ (S m') i3, k)) (seq.unzip1 fmval))) :\n  (forall x : 'I_(S (S n)) * A,\n   In x ((@Ordinal _ (S m') i3, k) :: fmval) ->\n   is_true (ord0 < fst x)%ord).\nProof.\n  intros.\n  destruct H.\n  - subst. reflexivity.\n  - eapply tl_gt_0_sorted.\n    apply i.\n    apply H.\nQed.\n\nEquations tl_fmap {A : choice_type} {n} (a : {fmap 'I_(S(S n)) -> A}) : {fmap 'I_(S n) -> A} :=\n  tl_fmap (@FMap.FMap _ _ [] i) := emptym ;\n  tl_fmap (@FMap.FMap _ _ ((@Ordinal _ 0 i3, k) :: fmval) i) :=\n    @FMap.FMap _ _ (lower_fval fmval (gt_smallest_sorted i)) (lower_list_is_sorted _ _ (path_path_tl i)) ;\n  tl_fmap (@FMap.FMap _ _ ((@Ordinal _ (S m') i3, k) :: fmval) i) :=\n    @FMap.FMap _ _ (lower_fval ((Ordinal (n:=S (S n)) (m:=S m') i3, k) :: fmval) (in_nseq_tl_gt_zero fmval i)) (lower_list_is_sorted _ _ i).\nFail Next Obligation.\n\nDefinition nseq_hd {A : choice_type} {n} (a : (nseq_ A (S n))) : A :=\n  match a with\n  | @FMap.FMap _ _ [] _ => (chCanonical A)\n  | @FMap.FMap _ _ (p :: _) _ =>\n      match nat_of_ord (fst p) with\n      | O => snd p\n      | S _ => (chCanonical A)\n      end\n  end.\n\nDefinition nseq_hd_option {A : choice_type} {n} (a : (nseq_ A (S n))) : chOption A :=\n  match a with\n  | @FMap.FMap _ _ [] _ => None\n  | @FMap.FMap _ _ (p :: _) _ =>\n      match nat_of_ord (fst p) with\n      | O => Some 
(snd p)\n      | S _ => None\n      end\n  end.\n\nDefinition nseq_tl {A : choice_type} {n} (a : (nseq_ A (S n))) : (nseq_ A n).\nProof. destruct n ; [exact tt | apply (tl_fmap a) ]. Defined.\n\nDefinition split_nseq_ {A : choice_type} {n} (a : (nseq_ A (S n))) : A * (nseq_ A n) := (nseq_hd a, nseq_tl a).\n\n\nLemma lower_fval_smaller_length {A : choice_type} {n} (a : {fmap 'I_(S(S n)) -> A}) : (length (FMap.fmval a) <= S (length (FMap.fmval (tl_fmap a))))%nat.\nProof.\n  destruct a.\n  induction fmval.\n  - cbn ; lia.\n  - simpl.\n    simpl in IHfmval.\n    destruct a, s. destruct m.\n    + apply Nat.eq_le_incl.\n      f_equal.\n      rewrite tl_fmap_equation_2.\n      (* rewrite mkfmapK ; [ | apply (lower_is_sorted (@FMap.FMap _ _ fmval (path_sorted_tl i)))]. *)\n      epose (lower_keeps_value (FMap.FMap (T:=fintype_ordinal__canonical__Ord_Ord (S (S n))) (fmval:=fmval) (path_sorted_tl i))).\n      simpl in e.\n      rewrite <- (map_length snd).\n      rewrite <- (map_length snd).\n      assert (forall {A B} (f : A -> B) (l : list A), seq.map f l = map f l).\n      {\n        clear ; intros.\n        induction l.\n        - reflexivity.\n        - cbn.\n          f_equal.\n      }\n      setoid_rewrite <- H.\n      erewrite e.\n      reflexivity.\n    + rewrite tl_fmap_equation_3.\n      apply le_n_S.\n      eapply le_trans ; [ apply (IHfmval (path_sorted_tl i)) | ].\n      apply Nat.eq_le_incl.\n      (* rewrite mkfmapK ; [ | apply (lower_is_sorted (@FMap.FMap _ _ ((Ordinal (n:=S (S n)) (m:=S m) i0, s0) :: fmval) i)) ]. 
*)\n      simpl.\n      f_equal.\n      f_equal.\n      clear.\n\n      induction fmval.\n      * reflexivity.\n      * destruct a, s.\n        destruct m0 ; [ discriminate | ].\n        rewrite tl_fmap_equation_3.\n        simpl.\n        erewrite (proj1 (lower_fval_ext (@FMap.FMap _ _ ((Ordinal (n:=S (S n)) (m:=S m0) i1, s1) :: fmval) (path_sorted_tl i)) _ _ _) eq_refl).\n        reflexivity.\nQed.\n\n\nLemma ord_gt : (forall {A : ordType} {x y : A}, ((x < y)%ord = false) -> eqtype.eq_op x y = false -> is_true (y < x)%ord).\nProof.\n  clear ; intros.\n  rewrite Ord.ltNge in H.\n  apply ssrbool.negbFE in H.\n  rewrite Ord.leq_eqVlt in H.\n  rewrite LocationUtility.is_true_split_or in H.\n\n  rewrite eqtype.eq_sym in H0.\n  cbn in H.\n  cbn in H0.\n  rewrite H0 in H.\n  destruct H ; [ discriminate | ].\n  apply H.\nQed.\n\nLemma path_path_setm_move_lowerbound :\n  forall {A : ordType} B v (y z : A * B) (l : list (A * B)),\n  is_true (fst y < fst z)%ord ->\n  is_true\n    (path.sorted Ord.lt (seq.unzip1 (y :: l))) ->\n  is_true\n    (path.sorted Ord.lt (seq.unzip1 ((setm_def l (fst z) v)))) ->\n  is_true\n    (path.sorted Ord.lt (seq.unzip1 (y :: (setm_def l (fst z) v)))).\nProof.\n  intros.\n  generalize dependent y.\n  destruct l ; intros.\n  - cbn.\n    now rewrite H.\n  - cbn.\n    cbn in H1.\n    pose proof (path_sorted_tl H1).\n    cbn in H1.\n    set (fst z < fst p)%ord in *.\n    destruct b eqn:b_lt ; subst b ; cbn in H1.\n    + cbn.\n      rewrite H.\n      rewrite b_lt.\n      cbn.\n      rewrite H2.\n      reflexivity.\n    + destruct eqtype.eq_op eqn:b_eq ; cbn in H1.\n      * cbn.\n        rewrite H.\n        cbn.\n        rewrite H1.\n        reflexivity.\n      * pose proof (ord_gt b_lt b_eq).\n        clear b_lt b_eq.\n        cbn.\n        rewrite H1.\n\n        cbn in H0.\n        rewrite LocationUtility.is_true_split_and in H0.\n        destruct H0.\n        rewrite H0.\n        reflexivity.\nQed.\n\nLemma setm_def_cons :\n  forall (A : ordType) B 
(a : A * B) s (k : A) v,\n  setm_def (a :: s) k v = ((if (fst a < k)%ord\n                           then a\n                           else (k, v)\n   ) :: if (k < fst a)%ord\n       then a :: s\n       else\n         if eqtype.eq_op k (fst a)\n         then s\n         else setm_def (T:=A) s k v).\nProof.\n  intros.\n  cbn.\n  destruct (k < fst a)%ord eqn:k_lt_a.\n  - unfold Ord.lt in k_lt_a.\n    apply (ssrbool.rwP ssrbool.andP) in k_lt_a.\n    destruct k_lt_a.\n    rewrite Ord.leqNgt in H.\n    apply ssrbool.negbTE in H.\n    rewrite H.\n    reflexivity.\n  - destruct eqtype.eq_op eqn:k_eq_a.\n    + unfold Ord.lt.\n      rewrite eqtype.eq_sym in k_eq_a.\n      rewrite k_eq_a.\n      cbn.\n      rewrite Bool.andb_false_r.\n      reflexivity.\n    + rewrite Ord.ltNge in k_lt_a.\n      apply ssrbool.negbFE in k_lt_a.\n      unfold Ord.lt.\n      rewrite k_lt_a.\n      rewrite eqtype.eq_sym in k_eq_a.\n      rewrite k_eq_a.\n      reflexivity.\nQed.\n\nLemma setm_cons :\n  forall (A : ordType) B (a : A * B) s (k : A) v H,\n    setm (FMap.FMap (fmval:=(a :: s)) H) k v =\n      setm (setm (FMap.FMap (fmval:=s) (path_sorted_tl H)) (fst a) (snd a)) k v.\nProof.\n  intros.\n  apply eq_fmap.\n  intros t.\n  rewrite !setmE.\n  reflexivity.\nQed.\n\nLemma array_is_max_length {A : choice_type} {n} (a : (nseq_ A (S n))) : (length (FMap.fmval a) <= S n)%nat.\nProof.\n  induction n.\n  - destruct a.\n    cbn.\n    destruct fmval.\n    + cbn. lia.\n    + destruct fmval.\n      * cbn. 
lia.\n      * cbn in i.\n        destruct p , p0.\n        destruct s , s1.\n        cbn in i.\n        destruct m , m0 ; discriminate.\n  - cbn in *.\n    specialize (IHn (tl_fmap a)).\n    apply le_n_S in IHn.\n    refine (le_trans (length (FMap.fmval a)) _ (S (S n)) _ IHn).\n    apply lower_fval_smaller_length.\nQed.\n\n\nDefinition nth_nseq_ {A : choice_type} {n} (a : (nseq_ A (S n))) (i : nat) (H : (i <= n)%nat) : A.\nProof.\n  generalize dependent i.\n  induction n ; intros.\n  - apply (nseq_hd a).\n  - destruct i.\n    + apply (nseq_hd a).\n    + apply (IHn (nseq_tl a) i).\n      apply le_S_n.\n      apply H.\nDefined.\n\nEquations array_to_list {A : choice_type} {n} (f : (nseq_ A n)) : list (A) :=\n  array_to_list (n:=O%nat) f := [] ;\n  array_to_list (n:=S _%nat) f := nseq_hd f :: array_to_list (nseq_tl f).\nFail Next Obligation.\n\nTheorem array_to_length_list_is_len : forall (A : choice_type) len (x : nseq_ A len), List.length (array_to_list x) = len.\nProof.\n  intros.\n  induction len.\n  - reflexivity.\n  - rewrite array_to_list_equation_2.\n    simpl.\n    rewrite IHlen.\n    reflexivity.\nDefined.\n\nEquations array_to_option_list {A : choice_type} {n} (f : (nseq_ A n)) : list (chOption A) :=\n  array_to_option_list (n:=O%nat) f := [] ;\n  array_to_option_list (n:=S _%nat) f := nseq_hd_option f :: array_to_option_list (nseq_tl f).\nFail Next Obligation.\n\nTheorem array_to_length_option_list_is_len : forall (A : choice_type) len (x : nseq_ A len), List.length (array_to_option_list x) = len.\nProof.\n  intros.\n  induction len.\n  - reflexivity.\n  - rewrite array_to_option_list_equation_2.\n    simpl.\n    rewrite IHlen.\n    reflexivity.\nDefined.\n\nLemma nseq_hd_ord0 :\n  forall A n (a : (nseq_ A (S n))) (x : A),\n    @nseq_hd A (n) (setm a ord0 x) = x.\nProof.\n  intros.\n  cbn.\n  destruct a.\n  destruct fmval.\n  + reflexivity.\n  + cbn.\n    destruct negb eqn:O_p.\n    * reflexivity.\n    * apply ssrbool.negbFE in O_p.\n      rewrite O_p.\n   
   reflexivity.\nQed.\n\nLemma nseq_tl_ord0 :\n  forall A n (a : (nseq_ A (S n))) (x : A),\n    @nseq_tl A n (setm a ord0 x) = nseq_tl a.\nProof.\n  intros.\n  destruct n.\n  + reflexivity.\n  + destruct a.\n    induction fmval as [ | p ].\n    * apply eq_fmap. intros ?.\n      reflexivity.\n    * destruct p, s.\n      unfold setm.\n      unfold fmap.\n      unfold ord0.\n      cbn.\n      destruct m.\n\n      -- cbn.\n         rewrite !tl_fmap_equation_2.\n         apply eq_fmap. intros ?.\n         cbn.\n         f_equal.\n         now erewrite (proj1 (lower_fval_ext (@FMap.FMap _ _ fmval (path_sorted_tl i)) _ _ _) eq_refl).\n      -- cbn.\n         rewrite tl_fmap_equation_2.\n         rewrite tl_fmap_equation_3.\n         apply eq_fmap. intros ?.\n         cbn.\n         f_equal.\n         now erewrite (proj1 (lower_fval_ext (@FMap.FMap _ _ ((Ordinal (n:=S (S n)) (m:=S m) i0, s0) :: fmval) i) _ _ _) eq_refl).\nQed.\n\nLemma array_to_list_ord0 :\n  forall A n (a : (nseq_ A (S n))) (x : A),\n    @array_to_list A (S n) (setm a ord0 x) = x :: array_to_list (nseq_tl a).\nProof.\n  intros.\n  rewrite array_to_list_equation_2.\n  f_equal.\n  - apply nseq_hd_ord0.\n  - f_equal.\n    apply nseq_tl_ord0.\nQed.\n\nLemma split_nseq_correct {A : choice_type} {n} (a : (nseq_ A (S n))) : nseq_hd a :: array_to_list (nseq_tl a) = array_to_list a.\nProof.\n  reflexivity.\nQed.\n\nDefinition array_to_seq {A : choice_type} {n} (f : (nseq_ A n)) : (seq A) :=\n  seq_from_list _ (array_to_list f).\n\nDefinition positive_slice {A : choice_type} {n} `{H: Positive n} (l : (nseq_ A n)) (i j : nat) `{H1: (i < j)%nat} `{(j - i < length (array_to_list l) - i)%nat} : Positive (length (slice (array_to_list l) i j)).\nProof.\n  unfold slice.\n  rewrite (proj2 (Nat.leb_gt j i) H1).\n  rewrite firstn_length_le.\n  - unfold Positive.\n    apply (ssrbool.introT ssrnat.ltP).\n    lia.\n  - rewrite skipn_length.\n    apply lt_n_Sm_le.\n    lia.\nDefined.\n\nTheorem slice_length :\n  forall A (l : 
list A) (i j : nat),\n    length (slice l i j) =\n      if (j <=? i)%nat then @length A ([]) else length (firstn (j - i + 1) (skipn i l)).\nProof.\n  intros.\n  unfold slice.\n  destruct (j <=? i)%nat.\n  - reflexivity.\n  - reflexivity.\nQed.\n\nDefinition lseq_slice {A : choice_type} {n} (l : (nseq_ A n)) (i j : nat) :\n  (@nseq_ A (length (slice (array_to_list l) (i) (j)))) :=\n  array_from_list (slice (array_to_list l) (i) (j)).\n\nDefinition seq_sub {A : choice_type} (s : seq A) (start n : nat) :=\n  lseq_slice (array_from_seq (from_uint_size (seq_len s)) s) start (start + n)%nat.\n\nDefinition array_update_slice\n           {A : choice_type}\n           {l : nat}\n           (out: ((nseq_ A l)))\n           (start_out: uint_size)\n           (input: seq A)\n           (start_in: uint_size)\n           (len: nat)\n  : nseq_ A l :=\n  update_sub out (from_uint_size start_out) (len) (seq_sub input (from_uint_size start_in) len).\n\nDefinition array_from_slice\n           {A: choice_type}\n           (default_value: A)\n           (out_len: nat)\n           (input: (seq A))\n           (start: nat)\n           (slice_len: nat)\n  : (nseq_ A out_len) :=\n  let out := array_new_ default_value out_len in\n  array_from_seq out_len input.\n\nDefinition array_slice\n           {A: choice_type}\n           (input: (seq A))\n           (start: nat)\n           (slice_len: nat)\n  : (nseq_ A slice_len) :=\n  array_from_slice (chCanonical A) (slice_len) input (slice_len) (slice_len).\n\nDefinition array_from_slice_range\n           {a: choice_type}\n           (default_value: a)\n           (out_len: nat)\n           (input: (seq a))\n           (start_fin: (uint_size * uint_size))\n  : (nseq_ a out_len).\nProof.\n  pose (out := array_new_ default_value (out_len)).\n  destruct start_fin as [start fin].\n  refine (update_sub out 0 ((from_uint_size fin) - (from_uint_size start)) _).\n\n  apply (@lseq_slice a ((from_uint_size fin) - (from_uint_size start)) (array_from_seq 
((from_uint_size fin) - (from_uint_size start)) input) (from_uint_size start) (from_uint_size fin)).\nDefined.\n\nDefinition array_slice_range\n           {a: choice_type}\n           {len : nat}\n           (input: (nseq_ a len))\n           (start_fin:(uint_size * uint_size))\n  : (seq a) :=\n  array_to_seq (lseq_slice input (from_uint_size (fst start_fin)) (from_uint_size (snd start_fin))).\n\nDefinition array_update\n           {a: choice_type}\n           {len: nat}\n           (s: (nseq_ a len))\n           (start : uint_size)\n           (start_s: (seq a))\n  : (nseq_ a len) :=\n  update_sub s (from_uint_size start) (from_uint_size (seq_len start_s)) (array_from_seq (from_uint_size (seq_len start_s)) (start_s)).\n\nDefinition array_update_start\n           {a: choice_type}\n           {len: nat}\n           (s: (nseq_ a len))\n           (start_s: (seq a))\n  : (nseq_ a len) :=\n  update_sub s 0 (from_uint_size (seq_len start_s)) (array_from_seq (from_uint_size (seq_len start_s)) start_s).\n\n\nDefinition array_len  {a: choice_type} {len: nat} (s: (nseq_ a len)) : uint_size := usize len.\n(* May also come up as 'length' instead of 'len' *)\nDefinition array_length  {a: choice_type} {len: nat} (s: (nseq_ a len)) : uint_size := usize len.\n\n(**** Seq manipulation *)\n\nDefinition seq_slice\n           {a: choice_type}\n           (s: ((seq a)))\n           (start: (uint_size))\n           (len: (uint_size))\n  : (seq a) :=\n  array_to_seq (lseq_slice (array_from_seq (from_uint_size (seq_len s)) s) (from_uint_size start) ((from_uint_size start) + (from_uint_size len))).\n\nDefinition seq_slice_range\n           {a: choice_type}\n           (input: ((seq a)))\n           (start_fin:(((uint_size)) * ((uint_size))))\n  : ((seq a)) :=\n  seq_slice input (fst start_fin) (snd start_fin).\n\n\n\nEquations seq_update_sub {A : choice_type} (v : (seq A)) (i : nat) (n : nat) (sub : (seq A)) : (seq A) :=\n  seq_update_sub v i 0 sub := v ;\n  seq_update_sub v i (S n) sub 
:=\n      seq_update_sub (setm v (i+n)%nat match getm sub n with\n                                           | Some y => y\n                                           | None => (chCanonical A)\n                                           end) i n sub.\n\n(* updating a subsequence in a sequence *)\nDefinition seq_update\n           {a: choice_type}\n           (s: ((seq a)))\n           (start: uint_size)\n           (input: ((seq a)))\n  : ((seq a)) :=\n  seq_update_sub s (from_uint_size start) (from_uint_size (seq_len input)) input.\n\nDefinition old_seq_update\n  {a: choice_type}\n           (s: ((seq a)))\n           (start: uint_size)\n           (input: ((seq a)))\n  : ((seq a)) :=\n  array_to_seq (update_sub (array_from_seq (from_uint_size (seq_len s)) s) (from_uint_size start) (from_uint_size (seq_len input)) (array_from_seq (from_uint_size (seq_len input)) input)).\n\n(* updating only a single value in a sequence*)\nDefinition seq_upd\n           {a: choice_type}\n           (s: ((seq a)))\n           (start: uint_size)\n           (v: ((a)))\n  : ((seq a)) :=\n  seq_update s start (setm emptym 0%nat v).\n\nDefinition seq_update_start\n           {a: choice_type}\n           (s: ((seq a)))\n           (start_s: ((seq a)))\n  : ((seq a)) :=\n  array_to_seq (update_sub (array_from_seq (from_uint_size (seq_len s)) s) 0 (from_uint_size (seq_len start_s)) (array_from_seq (from_uint_size (seq_len start_s)) start_s)).\n\nDefinition seq_update_slice\n           {A : choice_type}\n           (out: seq A)\n           (start_out: nat)\n           (input: seq A)\n           (start_in: nat)\n           (len: nat)\n  : ((seq A))\n  :=\n  array_to_seq (update_sub (array_from_seq (from_uint_size (seq_len out)) out) start_out len (seq_sub input start_in len)).\n\nDefinition seq_concat\n           {A : choice_type}\n           (s1 :seq A)\n           (s2: seq A)\n  : ((seq A)) :=\n  seq_from_list _ (seq_to_list _ s1 ++ seq_to_list _ s2).\n\nDefinition seq_concat_owned\n       
    {A : choice_type}\n           (s1 :seq A)\n           (s2: seq A)\n  : ((seq A)) := seq_concat s1 s2.\n\nDefinition seq_push\n           {A : choice_type}\n           (s1 :seq A)\n           (s2: ((A)))\n  : ((seq A)) :=\n  setm s1 (seq_len_nat s1) s2.\n\nTheorem seq_push_list_app : forall {A : choice_type} (t : (seq A)) (s : A),\n    (seq_to_list A (Hacspec_Lib_Pre.seq_push t s) = seq_to_list A t ++ [s]).\nProof.\n  intros.\n\n  unfold seq_push.\n  rewrite seq_to_list_setm.\n  reflexivity.\nQed.\n\nDefinition seq_push_owned\n           {a : choice_type}\n           (s1 :((seq a)))\n           (s2: ((a)))\n  : ((seq a)) := seq_push s1 s2.\n\nDefinition seq_from_slice\n           {A: choice_type}\n           (input: ((seq A)))\n           (start_fin: (((uint_size)) * ((uint_size))))\n  : ((seq A)) :=\n  let out := array_new_ ((chCanonical A)) (from_uint_size (seq_len input)) in\n  let (start, fin) := start_fin in\n  array_to_seq (update_sub out 0 ((from_uint_size fin) - (from_uint_size start)) ((lseq_slice (array_from_seq (from_uint_size (seq_len input)) input) (from_uint_size start) (from_uint_size fin)))).\n\nDefinition seq_from_slice_range\n           {A: choice_type}\n           (input: ((seq A)))\n           (start_fin: (((uint_size)) * ((uint_size))))\n  : ((seq A)) :=\n  let out := array_new_ (chCanonical A) (from_uint_size (seq_len input)) in\n  let (start, fin) := start_fin in\n  array_to_seq (update_sub out 0 ((from_uint_size fin) - (from_uint_size start)) ((lseq_slice (array_from_seq (from_uint_size (seq_len input)) input) (from_uint_size start) (from_uint_size fin)))).\n\nDefinition seq_from_seq {A} (l : (seq A)) : (seq A) := l.\n\n(**** Chunking *)\n\nDefinition seq_num_chunks {a: choice_type} (s: ((seq a))) (chunk_len: uint_size) : uint_size :=\n  ((seq_len s .+ chunk_len .- one) ./ chunk_len)%nat.\n\nDefinition seq_chunk_len\n           {a: choice_type}\n           (s: ((seq a)))\n           (chunk_len: nat)\n           (chunk_num: nat)\n  : 'nat 
:=\n  let idx_start := (chunk_len * chunk_num)%nat in\n  if ((from_uint_size (seq_len s)) <.? (idx_start + chunk_len))%nat then\n    ((from_uint_size (seq_len s)) - idx_start)%nat\n  else\n    chunk_len.\n\nDefinition seq_get_chunk\n           {a: choice_type}\n           (s: ((seq a)))\n           (chunk_len: uint_size)\n           (chunk_num: uint_size)\n  : (((uint_size × seq a)))\n  :=\n  let idx_start := (from_uint_size chunk_len * from_uint_size chunk_num)%nat in\n  let out_len := seq_chunk_len s (from_uint_size chunk_len) (from_uint_size chunk_num) in\n  (usize out_len, array_to_seq (lseq_slice (array_from_seq (from_uint_size (seq_len s)) s) idx_start (idx_start + seq_chunk_len s (from_uint_size chunk_len) (from_uint_size chunk_num)))).\n\nDefinition seq_set_chunk\n           {a: choice_type}\n           (s: ((seq a)))\n           (chunk_len: uint_size)\n           (chunk_num: uint_size)\n           (chunk: ((seq a)) ) : ((seq a)) :=\n  let idx_start := (from_uint_size chunk_len * from_uint_size chunk_num)%nat in\n  let out_len := seq_chunk_len s (from_uint_size chunk_len) (from_uint_size chunk_num) in\n  array_to_seq (update_sub (array_from_seq (from_uint_size (seq_len s)) s) idx_start out_len (array_from_seq (from_uint_size (seq_len chunk)) chunk)).\n\n\nDefinition seq_num_exact_chunks {a} (l : ((seq a))) (chunk_size : ((uint_size))) : ((uint_size)) :=\n  (repr _ (Z.of_nat (length l))) ./ chunk_size.\n\nDefinition seq_get_exact_chunk {a : choice_type} (l : ((seq a))) (chunk_size chunk_num: ((uint_size))) : ((seq a)) :=\n  let '(len, chunk) := seq_get_chunk l chunk_size chunk_num in\n  if eqtype.eq_op len chunk_size then emptym else chunk.\n\nDefinition seq_set_exact_chunk {A : choice_type} :=\n  @seq_set_chunk A.\n\nDefinition seq_get_remainder_chunk {a : choice_type} (l : (seq a)) (chunk_size : uint_size) : (seq a) :=\n  let chunks := seq_num_chunks l chunk_size in\n  let last_chunk := if (zero <.? 
chunks)\n                    then (chunks .- one)%nat\n                    else zero in\n  let (len, chunk) := seq_get_chunk l chunk_size last_chunk in\n  if eqtype.eq_op len chunk_size\n  then emptym\n  else chunk.\n\nFixpoint list_xor_ {WS} (x y : list ((@int WS))) : list ((@int WS)) :=\n  match x, y with\n  | (x :: xs), (y :: ys) => (int_xor x y) :: (list_xor_ xs ys)\n  | [] , _ => y\n  | _, [] => x\n  end.\n\nDefinition seq_xor_ {WS} (x y : (seq (@int WS))) : (seq (@int WS)) :=\n  seq_from_list _ (list_xor_ (seq_to_list _ x) (seq_to_list _ y)).\nInfix \"seq_xor\" := seq_xor_ (at level 33) : hacspec_scope.\n\nFixpoint list_truncate {a} (x : list a) (n : nat) : list a :=\n  match x, n with\n  | _, O => []\n  | [], _ => []\n  | (x :: xs), S n' => x :: (list_truncate xs n')\n  end.\nDefinition seq_truncate {a : choice_type} (x : (seq a)) (n : nat) : (seq a) :=\n  seq_from_list _ (list_truncate (seq_to_list _ x) n).\n\n(**** Numeric operations *)\n\n(* takes two nseq's and joins them using a function op : a -> a -> a *)\nDefinition array_join_map\n           {a: choice_type}\n           {len: nat}\n           (op: ((a)) -> ((a)) -> ((a)))\n           (s1: ((nseq_ a len)))\n           (s2 : ((nseq_ a len))) :=\n  let out := s1 in\n  foldi (usize 0%nat) (usize len) (fun i out =>\n                                       array_upd out i (op (array_index s1 i) (array_index s2 i))\n                                    ) out.\n\nInfix \"array_xor\" := (array_join_map (a := int _) int_xor) (at level 33) : hacspec_scope.\nInfix \"array_add\" := (array_join_map (a := int _) int_add) (at level 33) : hacspec_scope.\nInfix \"array_minus\" := (array_join_map (a := int _) int_sub) (at level 33) : hacspec_scope.\n\nInfix \"array_mul\" := (array_join_map (a := int _) int_mul) (at level 33) : hacspec_scope.\nInfix \"array_div\" := (array_join_map (a := int _) int_div) (at level 33) : hacspec_scope.\nInfix \"array_or\" := (array_join_map (a := int _) int_or) (at level 33) : 
hacspec_scope.\nInfix \"array_and\" := (array_join_map (a := int _) int_and) (at level 33) : hacspec_scope.\n\nFixpoint array_eq_\n         {a: choice_type}\n         {len: nat}\n         (eq: ((a)) -> ((a)) -> bool)\n         (s1: ((nseq_ a len)))\n         (s2 : ((nseq_ a len)))\n         {struct len}\n  : bool.\nProof.\n  destruct len ; cbn in *.\n  - exact  true.\n  - destruct (getm s1 (fintype.Ordinal (m := len) (ssrnat.ltnSn _))) as [s | ].\n    + destruct (getm s2 (fintype.Ordinal (m := len) (ssrnat.ltnSn _))) as [s0 | ].\n      * exact (eq s s0).\n      * exact false.\n    + exact false.\nDefined.\n\nInfix \"array_eq\" := (array_eq_ eq) (at level 33) : hacspec_scope.\nInfix \"array_neq\" := (fun s1 s2 => negb (array_eq_ eq s1 s2)) (at level 33) : hacspec_scope.\n\n\n\n\n(*** Nats *)\n\n\nDefinition nat_mod (p : Z) : choice_type := 'fin (S (Init.Nat.pred (Z.to_nat p))).\n(* Definition nat_mod_type {p : Z} : Type := 'I_(S (Init.Nat.pred (Z.to_nat p))). *)\nDefinition mk_natmod {p} (z : Z) : (nat_mod p) := @zmodp.inZp (Init.Nat.pred (Z.to_nat p)) (Z.to_nat z).\n\nDefinition nat_mod_equal {p} (a b : (nat_mod p)) : bool :=\n  @eqtype.eq_op (fintype_ordinal__canonical__eqtype_Equality (S (Init.Nat.pred (Z.to_nat p)))) a b.\n\nDefinition nat_mod_equal_reflect {p} {a b} : Bool.reflect (a = b) (@nat_mod_equal p a b) :=\n  @eqtype.eqP (fintype_ordinal__canonical__eqtype_Equality (S (Init.Nat.pred (Z.to_nat p)))) a b.\n\nDefinition nat_mod_zero {p} : (nat_mod p) := zmodp.Zp0.\nDefinition nat_mod_one {p} : (nat_mod p) := zmodp.Zp1.\nDefinition nat_mod_two {p} : (nat_mod p) := zmodp.inZp 2.\n\nDefinition nat_mod_add {n : Z} (a : (nat_mod n)) (b : (nat_mod n)) : (nat_mod n) := zmodp.Zp_add a b.\n\nInfix \"+%\" := nat_mod_add (at level 33) : hacspec_scope.\n\nDefinition nat_mod_mul {n : Z} (a:(nat_mod n)) (b:(nat_mod n)) : (nat_mod n) := zmodp.Zp_mul a b.\nInfix \"*%\" := nat_mod_mul (at level 33) : hacspec_scope.\n\nDefinition nat_mod_sub {n : Z} (a:(nat_mod n)) 
(b:(nat_mod n)) : (nat_mod n) := zmodp.Zp_add a (zmodp.Zp_opp b).\nInfix \"-%\" := nat_mod_sub (at level 33) : hacspec_scope.\n\nDefinition nat_mod_div {n : Z} (a:(nat_mod n)) (b:(nat_mod n)) : (nat_mod n) := zmodp.Zp_mul a (zmodp.Zp_inv b).\nInfix \"/%\" := nat_mod_div (at level 33) : hacspec_scope.\n\nDefinition nat_mod_neg {n : Z} (a:(nat_mod n)) : (nat_mod n) := zmodp.Zp_opp a.\n\nDefinition nat_mod_inv {n : Z} (a:(nat_mod n)) : (nat_mod n) := zmodp.Zp_inv a.\n\nDefinition nat_mod_exp_def {p : Z} (a:(nat_mod p)) (n : nat) : (nat_mod p) :=\n  let fix exp_ (e : (nat_mod p)) (n : nat) :=\n    match n with\n    | 0%nat => nat_mod_one\n    | S n => nat_mod_mul a (exp_ a n)\n    end in\n  exp_ a n.\n\nDefinition nat_mod_exp {WS} {p} a n := @nat_mod_exp_def p a (Z.to_nat (@unsigned WS n)).\nDefinition nat_mod_pow {WS} {p} a n := @nat_mod_exp_def p a (Z.to_nat (@unsigned WS n)).\nDefinition nat_mod_pow_felem {p} a n := @nat_mod_exp_def p a (Z.to_nat (from_uint_size n)).\nDefinition nat_mod_pow_self {p} a n := @nat_mod_pow_felem p a n.\n\nClose Scope nat_scope.\n\nDefinition nat_mod_from_secret_literal {m : Z} (x:int128) : (nat_mod m) := @zmodp.inZp (Init.Nat.pred (Z.to_nat m)) (Z.to_nat (unsigned x)).\n\nDefinition nat_mod_from_literal (m : Z) (x:int128) : (nat_mod m) := nat_mod_from_secret_literal x.\n\nAxiom nat_mod_to_byte_seq_le : forall {n : Z}, (nat_mod n) -> (seq int8).\nAxiom nat_mod_to_byte_seq_be : forall {n : Z}, (nat_mod n) -> (seq int8).\nAxiom nat_mod_to_public_byte_seq_le : forall (n : Z), (nat_mod n) -> (seq int8).\nAxiom nat_mod_to_public_byte_seq_be : forall (n : Z), (nat_mod n) -> (seq int8).\n\nDefinition nat_mod_val (p : Z) (a : (nat_mod p)) : Z := Z.of_nat (nat_of_ord a).\n\nDefinition nat_mod_bit {n : Z} (a : (nat_mod n)) (i : uint_size) : 'bool :=\n  Z.testbit (nat_mod_val _ a) (from_uint_size i).\n\n(* Alias for nat_mod_bit *)\nDefinition nat_get_mod_bit {p} (a : (nat_mod p)) := nat_mod_bit a.\nDefinition nat_mod_get_bit {p} (a : (nat_mod p)) n 
:=\n  if (nat_mod_bit a n)\n  then @nat_mod_one p\n  else @nat_mod_zero p.\n\nAxiom array_declassify_eq : forall  {A l}, (nseq_ A l) -> (nseq_ A l) -> 'bool.\nAxiom array_to_le_uint32s : forall {A l}, (nseq_ A l) -> (seq uint32).\nAxiom array_to_be_uint32s : forall {l}, (nseq_ uint8 l) -> (seq uint32).\nAxiom array_to_le_uint64s : forall {A l}, (nseq_ A l) -> (seq uint64).\nAxiom array_to_be_uint64s : forall {l}, (nseq_ uint8 l) -> (seq uint64).\nAxiom array_to_le_uint128s : forall {A l}, (nseq_ A l) -> (seq uint128).\nAxiom array_to_be_uint128s : forall {l}, (nseq_ uint8 l) -> (seq uint128).\nAxiom array_to_le_bytes : forall {A l}, (nseq_ A l) -> (seq uint8).\nAxiom array_to_be_bytes : forall {A l}, (nseq_ A l) -> (seq uint8).\nAxiom nat_mod_from_byte_seq_le : forall  {A n}, (seq A) -> (nat_mod n).\nAxiom most_significant_bit : forall {m}, (nat_mod m) -> uint_size -> uint_size.\n\n\n(* We assume 2^x < m *)\nDefinition nat_mod_pow2 (m : Z) (x : N) : (nat_mod m) := mk_natmod (Z.pow 2 (Z.of_N x)).\n\n\nSection Casting.\n\n  (* Type casts, as defined in Section 4.5 in https://arxiv.org/pdf/1106.3448.pdf *)\n  Class Cast A B := cast : A -> B.\n\n  Arguments cast {_} _ {_}.\n\n  Notation \"' x\" := (cast _ x) (at level 20) : hacspec_scope.\n\n  (* Casting to self is always possible *)\n  Global Instance cast_self {A} : Cast A A := {\n      cast a := a\n    }.\n\n  Global Instance cast_transitive {A B C} `{Hab: Cast A B} `{Hbc: Cast B C} : Cast A C := {\n      cast a := Hbc (Hab a)\n    }.\n\n  Global Instance cast_prod {A B C D} `{Cast A B} `{Cast C D} : Cast (A * C) (B * D) := {\n      cast '(a, c) := (cast _ a, cast _ c)\n    }.\n\n  Global Instance cast_option {A B} `{Cast A B} : Cast (option A) (option B) := {\n      cast a := match a with Some a => Some (cast _ a) | None => None end\n    }.\n\n  Global Instance cast_option_b {A B} `{Cast A B} : Cast A (option B) := {\n      cast a := Some (cast _ a)\n    }.\n\n  (* Global Instances for common types *)\n\n  Global 
Instance cast_nat_to_N : Cast nat N := {\n      cast := N.of_nat\n    }.\n\n  Global Instance cast_N_to_Z : Cast N Z := {\n      cast := Z.of_N\n    }.\n\n  Global Instance cast_Z_to_int {WORDSIZE} : Cast Z ((@int WORDSIZE)) := {\n      cast n := repr _ n\n    }.\n\n  Global Instance cast_natmod_to_Z {p} : Cast ((nat_mod p)) Z := {\n      cast n := nat_mod_val _ n\n    }.\n\n  (* Note: should be aware of typeclass resolution with int/uint since they are just aliases of each other currently *)\n  Global Instance cast_int8_to_uint32 : Cast (int8) (uint32) := {\n      cast n := repr _ (unsigned n)\n    }.\n  Global Instance cast_int8_to_int32 : Cast (int8) (int32) := {\n      cast n := repr _ (signed n)\n    }.\n\n  Global Instance cast_uint8_to_uint32 : Cast (uint8) (uint32) := {\n      cast n := repr _ (unsigned n)\n    }.\n\n  Global Instance cast_int_to_nat `{WS : wsize} : Cast (int _) nat := {\n      cast n := Z.to_nat (@signed WS n)\n    }.\n\n  Close Scope hacspec_scope.\nEnd Casting.\n\n\nGlobal Arguments pair {_ _} & _ _.\n\nSection Coercions.\n  (* First, in order to have automatic coercions for tuples, we add bidirectionality hints: *)\n\n  Global Coercion N.to_nat : N >-> nat.\n  Global Coercion Z.of_N : N >-> Z.\n\n  Definition Z_to_int `{WS : wsize} (n : Z) : (int WS) := repr _ n.\n  Global Coercion  Z_to_int : Z >-> choice.Choice.sort.\n\n  Definition Z_to_uint_size (n : Z) : uint_size := repr _ n.\n  Global Coercion Z_to_uint_size : Z >-> choice.Choice.sort.\n  Definition Z_to_int_size (n : Z) : int_size := repr _ n.\n  Global Coercion Z_to_int_size : Z >-> choice.Choice.sort.\n\n  Definition N_to_int `{WS : wsize} (n : N) : (@int WS) := repr _ (Z.of_N n).\n  Global Coercion N.of_nat : nat >-> N.\n  Global Coercion N_to_int : N >-> choice.Choice.sort.\n  Definition N_to_uint_size (n : Z) : uint_size := repr _ n.\n  Global Coercion N_to_uint_size : Z >-> choice.Choice.sort.\n  Definition nat_to_int `{WS : wsize} (n : nat) : (@int WS) := repr _ (Z.of_nat 
n).\n  Global Coercion nat_to_int : nat >-> choice.Choice.sort.\n\n  Definition uint_size_to_nat (n : uint_size) : nat := from_uint_size n.\n  Global Coercion uint_size_to_nat : choice.Choice.sort >-> nat.\n\n  Definition uint_size_to_Z (n : uint_size) : Z := from_uint_size n.\n  Global Coercion uint_size_to_Z : choice.Choice.sort >-> Z.\n\n  Definition uint32_to_nat (n : uint32) : nat := Z.to_nat (unsigned n).\n  Global Coercion uint32_to_nat : choice.Choice.sort >-> nat.\n\n  Definition int8_to_nat (n : int8) : nat := Z.to_nat (unsigned n).\n  Global Coercion int8_to_nat : choice.Choice.sort >-> nat.\n  Definition int16_to_nat (n : int16) : nat := Z.to_nat (unsigned n).\n  Global Coercion int16_to_nat : choice.Choice.sort >-> nat.\n  Definition int32_to_nat (n : int32) : nat := Z.to_nat (unsigned n).\n  Global Coercion int32_to_nat : choice.Choice.sort >-> nat.\n  Definition int64_to_nat (n : int64) : nat := Z.to_nat (unsigned n).\n  Global Coercion int64_to_nat : choice.Choice.sort >-> nat.\n  Definition int128_to_nat (n : int128) : nat := Z.to_nat (unsigned n).\n  Global Coercion int128_to_nat : choice.Choice.sort >-> nat.\n\n  Definition int8_to_int16 (n : int8) : int16 := (repr _ (unsigned n)).\n  Global Coercion int8_to_int16 : choice.Choice.sort >-> choice.Choice.sort.\n\n  Definition int8_to_int32 (n : int8) : int32 := repr _ (unsigned n).\n  Global Coercion int8_to_int32 : choice.Choice.sort >-> choice.Choice.sort.\n\n  Definition int16_to_int32 (n : int16) : int32 := repr _ (unsigned n).\n  Global Coercion int16_to_int32 : choice.Choice.sort >-> choice.Choice.sort.\n\n  Definition int32_to_int64 (n : int32) : int64 := repr _ (unsigned n).\n  Global Coercion int32_to_int64 : choice.Choice.sort >-> choice.Choice.sort.\n\n  Definition int64_to_int128 (n : int64) : int128 := repr _ (unsigned n).\n  Global Coercion int64_to_int128 : choice.Choice.sort >-> choice.Choice.sort.\n\n  Definition int32_to_int128 (n : int32) : int128 := repr _ (unsigned n).\n  
Global Coercion int32_to_int128 : choice.Choice.sort >-> choice.Choice.sort.\n\n  Definition uint_size_to_int64 (n : uint_size) : int64 := repr _ (unsigned n).\n  Global Coercion uint_size_to_int64 : choice.Choice.sort >-> choice.Choice.sort.\n\n  Definition Z_in_nat_mod {m : Z} (x:Z) : (@nat_mod m) := @mk_natmod m x.\n\n  Definition int_in_nat_mod {m : Z} `{WS : wsize} (x:(@int WS)) : (@nat_mod m) := mk_natmod (unsigned x).\n  Global Coercion int_in_nat_mod : choice.Choice.sort >-> choice.Choice.sort.\n\n  Definition nat_mod_in_int {m : Z} `{WS : wsize} (x:(@nat_mod m)) : (@int WS) := (repr _ (nat_mod_val _ x)).\n  Global Coercion nat_mod_in_int : choice.Choice.sort >-> choice.Choice.sort.\n\n  Definition nat_mod_in_Z {m : Z} `{WS : wsize} (x:(@nat_mod m)) : Z := (nat_mod_val _ x).\n  Global Coercion nat_mod_in_Z : choice.Choice.sort >-> Z.\n\n  Definition uint_size_in_nat_mod (n : uint_size) : (@nat_mod 16) := (int_in_nat_mod n).\n  Global Coercion uint_size_in_nat_mod : choice.Choice.sort >-> choice.Choice.sort.\n\nEnd Coercions.\n\n\n(*** Casting *)\n\nDefinition uint128_from_usize (n : uint_size) : int128 := repr _ (unsigned n).\nDefinition uint64_from_usize (n : uint_size) : int64 := repr _ (unsigned n).\nDefinition uint32_from_usize (n : uint_size) : int32 := repr _ (unsigned n).\nDefinition uint16_from_usize (n : uint_size) : int16 := repr _ (unsigned n).\nDefinition uint8_from_usize (n : uint_size) : int8 := repr _ (unsigned n).\n\nDefinition uint128_from_uint8 (n : int8) : int128 := repr _ (unsigned n).\nDefinition uint64_from_uint8 (n : int8) : int64 := repr _ (unsigned n).\nDefinition uint32_from_uint8 (n : int8) : int32 := repr _ (unsigned n).\nDefinition uint16_from_uint8 (n : int8) : int16 := repr _ (unsigned n).\nDefinition usize_from_uint8 (n : int8) : uint_size := repr _ (unsigned n).\n\nDefinition uint128_from_uint16 (n : int16) : int128 := repr _ (unsigned n).\nDefinition uint64_from_uint16 (n : int16) : int64 := repr _ (unsigned n).\nDefinition 
uint32_from_uint16 (n : int16) : int32 := repr _ (unsigned n).\nDefinition uint8_from_uint16 (n : int16) : int8 := repr _ (unsigned n).\nDefinition usize_from_uint16 (n : int16) : uint_size := repr _ (unsigned n).\n\nDefinition uint128_from_uint32 (n : int32) : int128 := repr _ (unsigned n).\nDefinition uint64_from_uint32 (n : int32) : int64 := repr _ (unsigned n).\nDefinition uint16_from_uint32 (n : int32) : int16 := repr _ (unsigned n).\nDefinition uint8_from_uint32 (n : int32) : int8 := repr _ (unsigned n).\nDefinition usize_from_uint32 (n : int32) : uint_size := repr _ (unsigned n).\n\nDefinition uint128_from_uint64 (n : int64) : int128 := repr _ (unsigned n).\nDefinition uint32_from_uint64 (n : int64) : int32 := repr _ (unsigned n).\nDefinition uint16_from_uint64 (n : int64) : int16 := repr _ (unsigned n).\nDefinition uint8_from_uint64 (n : int64) : int8 := repr _ (unsigned n).\nDefinition usize_from_uint64 (n : int64) : uint_size := repr _ (unsigned n).\n\nDefinition uint64_from_uint128 (n : int128) : int64 := repr _ (unsigned n).\nDefinition uint32_from_uint128 (n : int128) : int32 := repr _ (unsigned n).\nDefinition uint16_from_uint128 (n : int128) : int16 := repr _ (unsigned n).\nDefinition uint8_from_uint128 (n : int128) : int8 := repr _ (unsigned n).\nDefinition usize_from_uint128 (n : int128) : uint_size := repr _ (unsigned n).\n\n\nDefinition uint8_equal : int8 -> int8 -> bool := eqb.\n\nTheorem nat_mod_eqb_spec : forall {p} (a b : (nat_mod p)), nat_mod_equal a b = true <-> a = b.\nProof.\n  symmetry ; exact (ssrbool.rwP nat_mod_equal_reflect).\nQed.\n\nGlobal Instance nat_mod_eqdec {p} : EqDec ((nat_mod p)) := {\n    eqb := nat_mod_equal ;\n    eqb_leibniz := nat_mod_eqb_spec;\n  }.\n\nGlobal Instance nat_mod_comparable `{p : Z} : Comparable ((nat_mod p)) := {\n    ltb a b := Z.ltb (nat_mod_val p a) (nat_mod_val p b);\n    leb a b := if Zeq_bool (nat_mod_val p a) (nat_mod_val p b) then true else Z.ltb (nat_mod_val p a) (nat_mod_val p b) ;\n    gtb a b 
:= Z.ltb (nat_mod_val p b) (nat_mod_val p a);\n    geb a b := if Zeq_bool (nat_mod_val p b) (nat_mod_val p a) then true else Z.ltb (nat_mod_val p b) (nat_mod_val p a) ;\n  }.\n\nFixpoint nat_mod_rem_aux {n : Z} (a:(nat_mod n)) (b:(nat_mod n)) (f : nat) {struct f} : (nat_mod n) :=\n  match f with\n  | O => a\n  | S f' =>\n      if geb a b\n      then nat_mod_rem_aux (nat_mod_sub a b) b f'\n      else a\n  end.\n\nDefinition nat_mod_rem {n : Z} (a:(nat_mod n)) (b:(nat_mod n)) : (nat_mod n) :=\n  if nat_mod_equal b nat_mod_zero\n  then nat_mod_one\n  else nat_mod_rem_aux a b (S (Z.to_nat (nat_mod_val n (nat_mod_div a b)))).\n\nInfix \"rem\" := nat_mod_rem (at level 33) : hacspec_scope.\n\nGlobal Instance bool_eqdec : EqDec bool := {\n    eqb := Bool.eqb;\n    eqb_leibniz := Bool.eqb_true_iff;\n  }.\n\nGlobal Instance string_eqdec : EqDec String.string := {\n    eqb := String.eqb;\n    eqb_leibniz := String.eqb_eq ;\n  }.\n\nFixpoint list_eqdec {A} `{EqDec A} (l1 l2 : list A) : bool :=\n  match l1, l2 with\n  | x::xs, y::ys => if eqb x y then list_eqdec xs ys else false\n  | [], [] => true\n  | _,_ => false\n  end.\n\nLemma list_eqdec_refl : forall {A} `{EqDec A} (l1 : list A), list_eqdec l1 l1 = true.\nProof.\n  intros ; induction l1 ; cbn ; try rewrite eqb_refl ; easy.\nQed.\n\nLemma list_eqdec_sound : forall {A} `{EqDec A} (l1 l2 : list A), list_eqdec l1 l2 = true <-> l1 = l2.\nProof.\n  intros A H l1.\n  induction l1 ; induction l2 ; split ; intros ; simpl in * ; try easy ; try inversion H0.\n  - (* inductive case *)\n    apply Field_theory.if_true in H0; destruct H0.\n    f_equal.\n    (* show heads are equal *)\n    + apply (proj1 (eqb_leibniz a a0) H0).\n    (* show tails are equal using induction hypothesis *)\n    + apply IHl1. 
assumption.\n  - rewrite eqb_refl.\n    apply list_eqdec_refl.\nQed.\n\nGlobal Instance List_eqdec {A} `{EqDec A} : EqDec (list A) := {\n    eqb := list_eqdec;\n    eqb_leibniz := list_eqdec_sound;\n  }.\n\nGlobal Program Instance Dec_eq_prod (A B : Type) `{EqDec A} `{EqDec B} : EqDec (A * B) := {\n    eqb '(a0, b0) '(a1, b1) := andb (eqb a0 a1) (eqb b0 b1)\n  }.\nNext Obligation.\n  split ; intros ; destruct x ; destruct y.\n  - rewrite LocationUtility.is_true_split_and in H1. destruct H1.\n    rewrite (eqb_leibniz) in H1.\n    rewrite (eqb_leibniz) in H2.\n    rewrite H1. rewrite H2. reflexivity.\n  - inversion_clear H1. now do 2 rewrite eqb_refl.\nDefined.\n\n(*** Be Bytes *)\n\n\nFixpoint nat_be_range_at_position (k : nat) (z : Z) (n : Z) : list bool :=\n  match k with\n  | O => []\n  | S k' => Z.testbit z (n + k') :: nat_be_range_at_position k' z n\n  end.\n\nFixpoint nat_be_range_to_position_ (z : list bool) (val : Z) : Z :=\n  match z with\n  | [] => val\n  | x :: xs => nat_be_range_to_position_ xs ((if x then 2 ^ List.length xs else 0) + val)\n  end.\n\nDefinition nat_be_range_to_position (k : nat) (z : list bool) (n : Z) : Z :=\n  (nat_be_range_to_position_ z 0 * 2^(k * n)).\n\nDefinition nat_be_range' (k : nat) (z : Z) (n : nat) : Z :=\n  nat_be_range_to_position_ (nat_be_range_at_position k z (n * k)) 0.\n\nDefinition nat_be_range (k : nat) (z : Z) (n : nat) :=\n  ((z / 2 ^ (n * k)%Z) mod 2 ^ k)%Z.\n\nDefinition to_be_bytes' {WS} : Z -> list Z :=\n  (fun (k : Z) =>\n     (map\n        (fun i : nat => nat_be_range 8 k i)\n        (seq.iota 0 (nat_of_wsize WS / 8)))).\n\nDefinition to_be_bytes'' {WS} : Z -> list Z :=\n  (fun (k : Z) =>\n     (map\n        (fun i : nat => nat_be_range' 8 k i)\n        (seq.iota 0 (nat_of_wsize WS / 8)))).\n\nDefinition to_be_bytes {WS} : (@int WS) -> (nseq_ int8 (WS / 8)) :=\n  (fun (k : int _) =>\n     eq_rect\n       (seq.size (seq.iota 0 (nat_of_wsize WS / 8)))\n       (fun n : nat => (nseq_ uint8 n))\n       (eq_rect _ 
(fun n : nat => (nseq_ uint8 n))\n                (array_from_list\n                                 (map\n                                    (fun i : nat => repr _ (nat_be_range 8 (toword k) i) : int _)\n                                    (seq.iota 0 (nat_of_wsize WS / 8))))\n                (length (seq.iota 0 (nat_of_wsize WS / 8)))\n                (map_length\n                   (fun i : nat =>\n                      repr _ (nat_be_range 8 (toword k) i))\n                   (seq.iota 0 (nat_of_wsize WS / 8))))\n       (nat_of_wsize WS / 8)%nat\n       (seq.size_iota 0 (nat_of_wsize WS / 8))).\n\nDefinition from_be_bytes_fold_fun {WS} (i : int8) (s : ('nat × @int WS)) : ('nat × @int WS) :=\n  let (n,v) := s in\n  (S n, v .+ (repr WS (int8_to_nat i * (2 ^ (8 * Z.of_nat n)))%Z)).\n\nDefinition from_be_bytes {WS : wsize} : (nseq_ int8 (WS / 8)) -> (@int WS) :=\n   (fun v => snd (List.fold_right from_be_bytes_fold_fun (0%nat, @repr WS 0%Z) (array_to_list v))).\n\nDefinition to_le_bytes' {WS} : Z -> list Z :=\n  (fun (k : Z) =>\n     (map\n        (fun i : nat => nat_be_range 8 k i)\n        (rev (seq.iota 0 (nat_of_wsize WS / 8))))).\n\nDefinition to_le_bytes'' {WS} : Z -> list Z :=\n  (fun (k : Z) =>\n     (map\n        (fun i : nat => nat_be_range' 8 k i)\n        (rev (seq.iota 0 (nat_of_wsize WS / 8))))).\n\nDefinition to_le_bytes {WS} : (@int WS) -> (nseq_ int8 (WS / 8)) :=\n  fun (k : int _) =>\n   eq_rect (seq.size (seq.iota 0 (nat_of_wsize WS / 8))) (fun n : nat => (nseq_ uint8 n))\n     (eq_rect (length (rev (seq.iota 0 (nat_of_wsize WS / 8))))\n     (fun n : nat => (nseq_ uint8 n)) (eq_rect\n     (length\n        (map\n           (fun i : nat =>\n            repr _ (nat_be_range 8 (toword k) i))\n           (rev (seq.iota 0 (nat_of_wsize WS / 8)))))\n     (fun n : nat => (nseq_ uint8 n)) (array_from_list\n     (map\n        (fun i : nat =>\n         repr _ (nat_be_range 8 (toword k) i))\n        (rev (seq.iota 0 (nat_of_wsize WS / 8)))))\n     (length 
(rev (seq.iota 0 (nat_of_wsize WS / 8))))\n     (map_length\n        (fun i : nat =>\n         repr _ (nat_be_range 8 (toword k) i))\n        (rev (seq.iota 0 (nat_of_wsize WS / 8))))) (length (seq.iota 0 (nat_of_wsize WS / 8)))\n     (rev_length (seq.iota 0 (nat_of_wsize WS / 8)))) (nat_of_wsize WS / 8)%nat (seq.size_iota 0 (nat_of_wsize WS / 8)).\n\nDefinition from_le_bytes_fold_fun {WS} (i : int8) (s : ('nat × @int WS)) : ('nat × @int WS) :=\n  let (n,v) := s in\n  (Nat.pred n, v .+ (@repr WS ((int8_to_nat i) * 2 ^ (8 * Z.of_nat n))%Z)).\n\nDefinition from_le_bytes {WS : wsize} : (nseq_ int8 (WS / 8)) -> (@int WS) :=\n   (fun v => snd (List.fold_right from_be_bytes_fold_fun (((WS / 8) - 1)%nat, @repr WS 0%Z) (array_to_list v))).\n\n(**** Integers to arrays *)\nDefinition uint16_to_le_bytes : int16 -> (nseq_ int8 2) := @to_le_bytes U16.\nDefinition uint16_to_be_bytes : int16 -> (nseq_ int8 2) := @to_be_bytes U16.\nDefinition uint16_from_le_bytes : (nseq_ int8 2) -> int16 := @from_le_bytes U16.\nDefinition uint16_from_be_bytes : (nseq_ int8 2) -> int16 := @from_be_bytes U16.\n\nDefinition uint32_to_le_bytes : int32 -> (nseq_ int8 4) := @to_le_bytes U32.\nDefinition uint32_to_be_bytes : int32 -> (nseq_ int8 4) := @to_be_bytes U32.\nDefinition uint32_from_le_bytes : (nseq_ int8 4) -> int32 := @from_le_bytes U32.\nDefinition uint32_from_be_bytes : (nseq_ int8 4) -> int32 := @from_be_bytes U32.\n\nDefinition uint64_to_le_bytes : int64 -> (nseq_ int8 8) := @to_le_bytes U64.\nDefinition uint64_to_be_bytes : int64 -> (nseq_ int8 8) := @to_be_bytes U64.\nDefinition uint64_from_le_bytes : (nseq_ int8 8) -> int64 := @from_le_bytes U64.\nDefinition uint64_from_be_bytes : (nseq_ int8 8) -> int64 := @from_be_bytes U64.\n\nDefinition uint128_to_le_bytes : int128 -> (nseq_ int8 16) := @to_le_bytes U128.\nDefinition uint128_to_be_bytes : int128 -> (nseq_ int8 16) := @to_be_bytes U128.\nDefinition uint128_from_le_bytes : (nseq_ int8 16) -> int128 := @from_le_bytes 
U128.\nDefinition uint128_from_be_bytes : (nseq_ int8 16) -> int128 := @from_be_bytes U128.\n\nDefinition u16_to_be_bytes : int16 -> (nseq_ int8 2) := @to_be_bytes U16.\nDefinition u16_from_be_bytes : (nseq_ int8 2) -> int16 := @from_be_bytes U16.\nDefinition u16_to_le_bytes : int16 -> (nseq_ int8 2) := @to_le_bytes U16.\nDefinition u16_from_le_bytes : (nseq_ int8 2) -> int16 := @from_le_bytes U16.\n\nDefinition u32_to_be_bytes : int32 -> (nseq_ int8 4) := @to_be_bytes U32.\nDefinition u32_from_be_bytes : (nseq_ int8 4) -> int32 := @from_be_bytes U32.\nDefinition u32_to_le_bytes : int32 -> (nseq_ int8 4) := @to_le_bytes U32.\nDefinition u32_from_le_bytes : (nseq_ int8 4) -> int32 := @from_le_bytes U32.\n\nDefinition u64_to_be_bytes : int64 -> (nseq_ int8 8) := @to_be_bytes U64.\nDefinition u64_from_be_bytes : (nseq_ int8 8) -> int64 := @from_be_bytes U64.\nDefinition u64_to_le_bytes : int64 -> (nseq_ int8 8) := @to_le_bytes U64.\nDefinition u64_from_le_bytes : (nseq_ int8 8) -> int64 := @from_le_bytes U64.\n\nDefinition u128_to_be_bytes : int128 -> (nseq_ int8 16) := @to_be_bytes U128.\nDefinition u128_from_be_bytes : (nseq_ int8 16) -> int128 := @from_be_bytes U128.\nDefinition u128_to_le_bytes : int128 -> (nseq_ int8 16) := @to_le_bytes U128.\nDefinition u128_from_le_bytes : (nseq_ int8 16) -> int128 := @from_le_bytes U128.\n\n(*** Result *)\n\nDefinition result (b a : choice_type) := chSum a b.\n(* #[global] #[refine] Instance result (b a : choice_type) : choice_type := *)\n(*   {| ct := chSum a b ; := (a + b)%type |}. *)\n(* Proof. *)\n(*   intros. *)\n(*   cbn. *)\n(*   do 2 rewrite ChoiceEq. *)\n(*   reflexivity. *)\n(* Defined. 
*)\n\nDefinition Ok {a b : choice_type} : a -> (result b a) := @inl (a) (b).\nDefinition Err {a b : choice_type} : b -> (result b a) := @inr (a) (b).\n\nArguments Ok {_ _}.\nArguments Err {_ _}.\n\nDefinition result_unwrap_safe {a b} (x : (result b a)) `{match x with inl _ => True | inr _ => False end} : a.\n  destruct x.\n  apply s.\n  contradiction.\nDefined.\nAxiom falso : False. Ltac admit_falso := destruct falso.\nDefinition result_unwrap {a b} (x : (result b a)) : a :=\n  result_unwrap_safe x (H := ltac:(admit_falso)).\n\nDefinition option := chOption.\n(* Program Definition option_choice_type (a : choice_type) := *)\n(*   {| ct := chOption a ; := option a ; |}. *)\n(* Next Obligation. *)\n(*   intros. *)\n(*   rewrite ChoiceEq. *)\n(*   reflexivity. *)\n(* Qed. *)\n\n(*** Monad / Bind *)\n\nModule choice_typeMonad.\n  Class CEMonad : Type :=\n    {\n      M :> choice_type -> choice_type ;\n      bind {A B : choice_type} (x : (M A)) (f : A -> (M B)) : (M B) ;\n      ret {A : choice_type} (x : A) : (M A) ;\n      monad_law1 : forall {A B : choice_type} a (f : A -> M B),\n        bind (ret a) f = f a ;\n      monad_law2 : forall {A : choice_type} c, bind c (@ret A) = c ;\n      monad_law3 : forall {A B C : choice_type} c (f : A -> M B) (g : B -> M C),\n          bind (bind c f) g\n          = bind c (fun a => bind (f a) g)\n    }.\n\n  (* Class CEMonad2 (M : choice_type -> choice_type) : Type := *)\n  (*   { *)\n  (*     unit {A : choice_type} (x : A) : (M A) ; *)\n  (*     fmap {A B : choice_type} (f : A -> B) (x : (M A)) : (M B) ; *)\n  (*     join {A : choice_type} (x : (M (M A))) : (M A) ; *)\n  (*   }. *)\n\n  (* #[global] Instance CEMonadToCEMonad2 `{CEMonad} : CEMonad2 M := *)\n  (*   {| *)\n  (*     unit A := @ret M _ A ; *)\n  (*     fmap A B f x := bind x (fun y => ret (f y)) ; *)\n  (*     join A x := bind x id *)\n  (*   |}. 
*)\n\n  (* #[global] Instance CEMonad2ToCEMonad `{CEMonad2} : CEMonad M := *)\n  (*   {| *)\n  (*     ret A := @unit M _ A ; *)\n  (*     bind A B x f := join (fmap f x) *)\n  (*   |}. *)\n\n  (* Class CEMonad_prod (M M0 : choice_type -> choice_type) := *)\n  (*   { prod : forall A, (M0 (M (M0 A))) -> (M (M0 A)) }. *)\n\n  (* #[global] Program Instance ComposeProd2 `{CEMonad2} `{CEMonad2} `{@CEMonad_prod M M0} : CEMonad2 (fun x => M (M0 x)) := *)\n  (*   {| *)\n  (*     unit A x := unit (A := M0 A) (unit x) ; *)\n  (*     fmap A B f x := fmap (A := M0 A) (B := M0 B) (fmap f) x ; *)\n  (*     join A x := join (A := M0 A) (fmap (@prod M M0 _ A) x) *)\n  (*   |}. *)\n\n  (* #[global] Instance ComposeProd `{CEMonad} `{CEMonad} `(@CEMonad_prod M M0) : CEMonad (fun x => M (M0 x)) := (@CEMonad2ToCEMonad _ ComposeProd2). *)\n\n  (* Definition bind_prod `{CEMonad} `{CEMonad} `{@CEMonad_prod M M0} *)\n  (*            {A B} (x : (M (M0 A))) (f : A -> (M (M0 B))) *)\n  (*   : (M (M0 B)) := *)\n  (*   (@bind (fun x => M (M0 x)) (ComposeProd _) A B x f). *)\n\n\n  (* Class CEMonad_swap (M M0 : choice_type -> choice_type) := *)\n  (*   { swap : forall A, (M0 (M A)) -> (M (M0 A)) }. *)\n\n  (* #[global] Program Instance ComposeSwap2 `{CEMonad2 } `{CEMonad2} `{@CEMonad_swap M M0} : CEMonad2 (fun x => M (M0 x)) := *)\n  (*   {| *)\n  (*     unit A x := unit (A := M0 A) (unit x) ; *)\n  (*     fmap A B f x := fmap (A := M0 A) (B := M0 B) (fmap f) x ; *)\n  (*     join A x := fmap (join (M := M0)) (join (fmap (@swap M M0 _ (M0 A)) x)) *)\n  (*   |}. *)\n\n  (* #[global] Instance ComposeSwap `{CEMonad} `{CEMonad} `(@CEMonad_swap M M0) : CEMonad (fun x => M (M0 x)) := (@CEMonad2ToCEMonad _ ComposeSwap2). *)\n\n  (* Definition bind_swap `{CEMonad} `{CEMonad} `{@CEMonad_swap M M0} *)\n  (*            A B (x : (M (M0 A))) (f : A -> (M (M0 B))) : (M (M0 B)) := *)\n  (*   (@bind _ (@ComposeSwap M _ M0 _ _) A B x f). 
*)\n\n\n  Section ResultMonad.\n    Definition result_bind {C A B} (r : (result C A)) (f : A -> (result C B)) : (result C B) :=\n      match r with\n      | inl a => f a\n      | inr e => (@Err B C e)\n      end.\n\n    Definition result_ret {C A : choice_type} (a : A) : (result C A) := Ok a.\n\n    Global Program Instance result_monad {C : choice_type} : CEMonad :=\n      {|\n        M := result C ;\n        bind := @result_bind C ;\n        ret := @result_ret C ;\n      |}.\n    Solve All Obligations with now destruct c.\n    Arguments result_monad {_} &.\n\n  End ResultMonad.\n\n  Definition option_bind {A B} (r : (option A)) (f : A -> (option B)) : (option B) :=\n    match r with\n      Some (a) => f a\n    | None => None\n    end.\n\n  Definition option_ret {A : choice_type} (a : A) : (option A) := Some a.\n\n  Global Program Instance option_monad : CEMonad :=\n    Build_CEMonad option (@option_bind) (@option_ret) _ _ _.\n  Solve All Obligations with now destruct c.\n\n  Definition option_is_none {A} (x : (option A)) : bool :=\n    match x with\n    | None => true\n    | _ => false\n    end.\n\nEnd choice_typeMonad.\n\n(* #[global] Notation \"x 'm(' v ')' ⇠ c1 ;; c2\" := *)\n(*   (choice_typeMonad.bind (M := v) c1 (fun x => c2)) *)\n(*     (at level 100, c1 at next level, right associativity, *)\n(*       format \"x  'm(' v ')'  ⇠  c1  ;;  '//' c2\") *)\n(*     : hacspec_scope. *)\n\n(* #[global] Notation \" ' x 'm(' v ')' ⇠ c1 ;; c2\" := *)\n(*   (choice_typeMonad.bind (M := v) c1 (fun x => c2)) *)\n(*     (at level 100, c1 at next level, x pattern, right associativity, *)\n(*       format \" ' x  'm(' v ')'  ⇠  c1  ;;  '//' c2\") *)\n(*     : hacspec_scope. 
*)\n\nDefinition foldi_bind {A : choice_type} `{mnd : choice_typeMonad.CEMonad} (a : uint_size) (b : uint_size) (f : uint_size -> A -> (choice_typeMonad.M A)) (init : (choice_typeMonad.M A)) : (choice_typeMonad.M A) :=\n  @foldi ((choice_typeMonad.M A)) a b (fun x y => choice_typeMonad.bind y (f x)) init.\n\n(*** Notation *)\n\nNotation \"'ifbnd' b 'then' x 'else' y '>>' f\" := (if b then f x else f y) (at level 200) : hacspec_scope.\nNotation \"'ifbnd' b 'thenbnd' x 'else' y '>>' f\" := (if b then (choice_typeMonad.bind x) f else f y) (at level 200) : hacspec_scope.\nNotation \"'ifbnd' b 'then' x 'elsebnd' y '>>' f\" := (if b then f x else (choice_typeMonad.bind y) f) (at level 200) : hacspec_scope.\nNotation \"'ifbnd' b 'thenbnd' x 'elsebnd' y '>>' f\" := (if b then choice_typeMonad.bind x f else choice_typeMonad.bind y f) (at level 200).\n\nNotation \"'foldibnd' s 'to' e 'M(' v ')' 'for' z '>>' f\" :=\n  (Hacspec_Lib_Pre.foldi s e (choice_typeMonad.ret z) (fun x y => choice_typeMonad.bind y (f x))) (at level 50) : hacspec_scope.\n\nAxiom nat_mod_from_byte_seq_be : forall  {A n}, (seq A) -> (nat_mod n).\n\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_Seq.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\nRequire Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nFrom mathcomp Require Import ssrZ word.\n(* From Jasmin Require Import word. *)\nFrom Crypt Require Import jasmin_word.\n\nFrom Coq Require Import ZArith List.\nImport List.ListNotations.\n\nImport choice.Choice.Exports.\n\n(********************************************************)\n(*   Implementation of all Hacspec library functions    *)\n(* for Both types.                                      *)\n(********************************************************)\n\nDeclare Scope hacspec_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\n\nOpen Scope bool_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\nOpen Scope list_scope.\n\n(*** Seq *)\n\n(* Section Seqs. *)\n\n(**** Unsafe functions *)\n\nNotation seq_new_ := (lift2_both seq_new_).\nNotation seq_new := (lift1_both seq_new).\nEquations seq_len {A : choice_type} (x : both (seq A)) : both (uint_size) :=\n  seq_len := (lift1_both Hacspec_Lib_Pre.seq_len).\nFail Next Obligation.\nNotation seq_index := (lift2_both seq_index).\n\n(**** Seq manipulation *)\n\n(* Notation seq_slice := (lift3_both seq_slice). 
*)\n\nNotation seq_slice_range :=\n  (lift2_both seq_slice_range).\n\n(* updating a subsequence in a sequence *)\nDefinition seq_update\n  {a: choice_type}\n  (s: ((seq a)))\n  (start: uint_size)\n  (input: ((seq a)))\n  : both ((seq a)) :=\n  ret_both (seq_update s start input).\n\n(* updating only a single value in a sequence*)\nDefinition seq_upd\n  {a: choice_type}\n\n  (s: ((seq a)))\n  (start: uint_size)\n  (v: ((a)))\n  : both ((seq a)) :=\n  ret_both (seq_upd s start v).\n\nDefinition seq_update_start\n  {a: choice_type}\n\n  (s: ( (seq a)))\n  (start_s: ( (seq a)))\n  : both ((seq a)) :=\n  ret_both (seq_update_start s start_s).\n\nDefinition seq_update_slice\n  {A : choice_type}\n  (out: ( (seq A)))\n  (start_out: nat)\n  (input: ( (seq A)))\n  (start_in: nat)\n  (len: nat)\n  : both ((seq A)) :=\n  ret_both (seq_update_slice out start_out input start_in len).\n\nDefinition seq_concat\n  {a : choice_type}\n\n  (s1 :( (seq a)))\n  (s2: ( (seq a)))\n  : both ((seq a)) :=\n  ret_both (seq_concat s1 s2).\n\nNotation seq_push := (lift2_both seq_push).\n\nDefinition seq_from_slice\n  {a: choice_type}\n\n  (input: ( (seq a)))\n  (start_fin: uint_size × uint_size)\n  : both ((seq a)) :=\n  ret_both (seq_from_slice input start_fin).\n\nDefinition seq_from_slice_range\n  {a: choice_type}\n\n  (input: ( (seq a)))\n  (start_fin: uint_size × uint_size)\n  : both ((seq a)) :=\n  ret_both (seq_from_slice_range input start_fin).\n\nDefinition seq_from_seq {A} (l : (seq A)) : both (seq A) :=\n  ret_both (seq_from_seq l).\n\n(**** Chunking *)\n\nDefinition seq_num_chunks {a: choice_type} (s: ( (seq a))) (chunk_len: uint_size) : both (uint_size) :=\n  ret_both (seq_num_chunks s chunk_len).\n\nDefinition seq_chunk_len\n  {a: choice_type}\n  (s: ( (seq a)))\n  (chunk_len: nat)\n  (chunk_num: nat)\n  : both (('nat)) :=\n  ret_both (seq_chunk_len s chunk_len chunk_num).\n\nDefinition seq_get_chunk\n  {a: choice_type}\n\n  (s: ( (seq a)))\n  (chunk_len: uint_size)\n  (chunk_num: 
uint_size)\n  : both (((uint_size × seq a))) :=\n  ret_both (seq_get_chunk s chunk_len chunk_num).\n\nDefinition seq_set_chunk\n  {a: choice_type}\n\n  (s: ( (seq a)))\n  (chunk_len: uint_size)\n  (chunk_num: uint_size)\n  (chunk: ( (seq a)) ) : both ((seq a)) :=\n  ret_both (seq_set_chunk s chunk_len chunk_num chunk).\n\n\nDefinition seq_num_exact_chunks {a} (l : ( (seq a))) (chunk_size : ( (uint_size))) : (both uint_size) :=\n  ret_both (seq_num_exact_chunks l chunk_size).\n\nDefinition seq_get_exact_chunk {a : choice_type}  (l : ( (seq a))) (chunk_size chunk_num: ( (uint_size))) :\n  both ((seq a)) :=\n  ret_both (seq_get_exact_chunk l chunk_size chunk_num).\n\nDefinition seq_set_exact_chunk {a : choice_type} :=\n  @seq_set_chunk a.\n\nDefinition seq_get_remainder_chunk {a : choice_type}  (l : (seq a)) (chunk_size : (uint_size)) : both ((seq a)) :=\n  ret_both (seq_get_remainder_chunk l chunk_size).\n\nDefinition seq_xor_ {WS} (x y : seq (@int WS)) : both (seq (@int WS)) :=\n  ret_both (seq_xor_ x y).\n\nDefinition seq_truncate {a : choice_type}  (x : seq a) (n : nat) : both (seq a) :=\n  ret_both (seq_truncate x n).\n\n(* End Seqs. *)\nInfix \"seq_xor\" := seq_xor_ (at level 33) : hacspec_scope.\n\n(* Section Arrays. 
*)\n(**** types *)\n\n(***** prelude.rs *)\nDefinition uint128_word_t : choice_type := nseq_ uint8 16.\nDefinition uint64_word_t : choice_type := nseq_ uint8 8.\nDefinition uint32_word_t : choice_type := nseq_ uint8 4.\nDefinition uint16_word_t : choice_type := nseq_ uint8 2.\n\n(**** Array manipulation *)\nEquations array_new_ {A: choice_type} (init: both A) `(len: uint_size) : both (nseq A len) :=\n  array_new_ init len := lift1_both (fun x => Hacspec_Lib_Pre.array_new_ x (from_uint_size len)) init.\n\nEquations array_index\n  {A: choice_type} {len : nat} (x : both (nseq_ A len)) {WS} (y : both (int WS)) : both A :=\n  array_index x (WS := WS) y := lift2_both (fun x y => Hacspec_Lib_Pre.array_index x y) x y.\nFail Next Obligation.\n\nEquations array_upd {A : choice_type} {len} (s: both (nseq_ A len)) (i: both (@int U32)) (new_v: both A) : both (nseq_ A len) :=\n  array_upd s i new_v :=\n    (lift3_both (fun (s : nseq_ A len) i new_v => Hacspec_Lib_Pre.array_upd s i new_v) s i new_v).\n\n(* substitutes a sequence (seq) into an array (nseq), given index interval  *)\nDefinition update_sub {A : choice_type} {len slen}  (v : (nseq_ A len)) (i : nat) (n : nat) (sub : (nseq_ A slen)) : both ((nseq_ A len)) :=\n  ret_both (update_sub v i n sub).\n\nEquations array_from_list_helper {A: choice_type} (x : both A) (xs: list (both A)) (k : nat) : both (nseq_ A (S k)) :=\n  array_from_list_helper x [] k :=\n    lift1_both (fun x => setm emptym (Ordinal (ssrbool.introT ssrnat.ltP (lt_succ_diag_r_sub k O))) x : nseq_ A (S k)) x ;\n  array_from_list_helper x (y :: ys) k :=\n    bind_both x (fun temp_x =>\n    bind_both (array_from_list_helper y ys k) (fun temp_y =>\n    lift_both (ret_both (setm (temp_y : nseq_ A (S k)) (Ordinal (ssrbool.introT ssrnat.ltP (lt_succ_diag_r_sub k (length (y :: ys))))) temp_x : nseq_ A (S k))))).\nFail Next Obligation.\n\nEquations array_from_list {A: choice_type} (l: list (both A))\n  : both (nseq_ A (length l)) :=\n  array_from_list l :=\n    
match l as k return both (nseq_ A (length k)) with\n      [] => solve_lift (ret_both (tt : nseq_ A 0))\n    | (x :: xs) => array_from_list_helper x xs (length xs)\n    end.\nSolve All Obligations with (intros ; (fset_equality || solve_in_fset)).\nFail Next Obligation.\n\nProgram Definition array_from_seq {A: choice_type} (out_len: nat) (input: both (seq A)) : both (nseq_ A out_len) :=\n  lift1_both  (* (H_loc_incl_x := fsubsetxx _) (H_opsig_incl_x := fsubsetxx _) *) (array_from_seq out_len) input.\n\nEquations array_to_seq\n  {A : choice_type} {n} (f : both (nseq_ A n))\n  (* `{H_loc_incl_x : is_true (fsubset L1 L2)} `{H_opsig_incl_x : is_true (fsubset I1 I2)} *) : both (seq A) :=\n  array_to_seq := (lift1_both Hacspec_Lib_Pre.array_to_seq).\nFail Next Obligation.\n\nDefinition array_from_slice\n  {a: choice_type}\n\n  (default_value: ( a))\n  (out_len: nat)\n  (input: (seq a))\n  (start: uint_size)\n  (slice_len: uint_size)  : both ((nseq_ a out_len)) :=\n  ret_both (array_from_slice default_value out_len input (from_uint_size start) (from_uint_size slice_len)).\n\nDefinition array_slice\n  {a: choice_type}\n\n  (input: (seq a))\n  (start: nat)\n  (slice_len: nat)\n  : both ((nseq_ a slice_len)) :=\n  ret_both (array_slice input start slice_len).\n\nDefinition array_from_slice_range\n  {a: choice_type}\n\n  (default_value: a)\n  (out_len: nat)\n  (input: (seq a))\n  (start_fin: (uint_size × uint_size))\n  : both ((nseq_ a out_len)) :=\n  ret_both (array_from_slice_range default_value out_len input start_fin).\n\nDefinition array_slice_range\n  {a: choice_type}\n\n  {len : nat}\n  (input: (nseq_ a len))\n  (start_fin:(uint_size × uint_size))\n  : both ((seq a)) :=\n  ret_both (array_slice_range input start_fin).\n\nDefinition array_update\n  {a: choice_type}\n\n  {len: nat}\n  (s: (nseq_ a len))\n             (start : uint_size)\n             (start_s: (seq a))\n    : both ((nseq_ a len)) :=\n    ret_both (array_update s start start_s).\n\n  Definition 
array_update_start\n             {a: choice_type}\n\n             {len: nat}\n             (s: (nseq_ a len))\n             (start_s: (seq a))\n    : both ((nseq_ a len)) :=\n    ret_both (array_update_start s start_s).\n\n  Definition array_len  {a: choice_type} {len: nat} (s: (nseq_ a len)) : both (uint_size) := ret_both (array_len s).\n  (* May also come up as 'length' instead of 'len' *)\n  Definition array_length  {a: choice_type} {len: nat} (s: (nseq_ a len)) : both (uint_size) := ret_both (array_length s).\n\n  Definition array_update_slice\n             {a : choice_type}\n\n             {l : nat}\n             (out: ( (nseq_ a l)))\n             (start_out: uint_size)\n             (input: ( (seq a)))\n             (start_in: uint_size)\n             (len: uint_size)\n    : both ((nseq_ a _)) :=\n    ret_both (array_update_slice (l := l) out start_out input start_in (from_uint_size len)).\n\n  (**** Numeric operations *)\n\n(* End Arrays. *)\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/Hacspec_Lib_TODO.v",
    "content": "Global Set Warnings \"-ambiguous-paths\".\nGlobal Set Warnings \"-uniform-inheritance\".\nGlobal Set Warnings \"-auto-template\".\nGlobal Set Warnings \"-disj-pattern-notation\".\nGlobal Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\nRequire Import Lia.\nRequire Import Coq.Logic.FunctionalExtensionality.\nRequire Import Sumbool.\n\nFrom mathcomp Require Import fintype.\n\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset fmap.\n\nFrom mathcomp Require Import ssrZ word.\n(* From Jasmin Require Import word. *)\nFrom Crypt Require Import jasmin_word.\n\nFrom Coq Require Import ZArith List.\nImport List.ListNotations.\n\nImport choice.Choice.Exports.\n\n(********************************************************)\n(*   Implementation of all Hacspec library functions    *)\n(* for Both types.                                      *)\n(********************************************************)\n\nDeclare Scope hacspec_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\n\nOpen Scope bool_scope.\nOpen Scope hacspec_scope.\nOpen Scope nat_scope.\nOpen Scope list_scope.\n\nFrom Hacspec Require Import Hacspec_Lib_Integers.\nFrom Hacspec Require Import Hacspec_Lib_Seq.\nFrom Hacspec Require Import Hacspec_Lib_Natmod.\nFrom Hacspec Require Import Hacspec_Lib_Monad.\nFrom Hacspec Require Import Hacspec_Lib_Ltac.\n\n(*** Result *)\n\nDefinition Ok {a b : choice_type} : both a -> both (result b a) := lift1_both Ok.\nDefinition Err {a b : choice_type} : both b -> both (result b a) := lift1_both Err.\n\nInfix \"&&\" := andb : bool_scope.\nInfix \"||\" := orb : bool_scope.\n\nDefinition u32_word_t := nseq_ uint8 4.\nDefinition u128_word_t := nseq_ uint8 16.\n\n(*** Hacspec-v2 specific fixes *)\n\nImport choice.Choice.Exports.\nObligation Tactic 
:= (* try timeout 8 *) solve_ssprove_obligations.\n\n(** Should be moved to Hacspec_Lib.v **)\nProgram Definition int_xI {WS : wsize} (a : (@int WS)) : (@int WS) :=\n  Hacspec_Lib_Pre.int_add (Hacspec_Lib_Pre.int_mul a (@repr WS 2)) (@one WS).\n\nProgram Definition int_xO {WS : wsize} (a : int WS) : int WS :=\n  Hacspec_Lib_Pre.int_mul a (@repr WS 2).\n\nDefinition both_int_one {WS : wsize} : both (@int WS) := ret_both (one).\n\nOpen Scope hacspec_scope.\nDefinition int_num {WS : wsize} := int WS.\nNumber Notation int_num Pos.of_num_int Pos.to_num_int (via positive mapping [[int_xI] => xI, [int_xO] => xO , [one] => xH]) : hacspec_scope.\n\nNotation \"0\" := (repr _ 0%Z) : hacspec_scope.\n\n(** Ops *)\n\nClass Addition (A : choice_type) :=\n  add : both A -> both A -> both A.\nNotation \"a .+ b\" := (add a b).\nInstance int_add_inst {ws : wsize} : Addition (@int ws) := { add a b := int_add a b }.\n\nClass Subtraction (A : choice_type):=\n  sub : both A -> both A -> both A.\nNotation \"a .- b\" := (sub a b (Subtraction := _)).\nInstance int_sub_inst {ws : wsize} : Subtraction (@int ws) := { sub a b := int_sub a b }.\n\nClass Multiplication A := mul : both A -> both A -> both A.\nNotation \"a .* b\" := (mul a b).\nProgram Instance int_mul_inst {ws : wsize} : Multiplication (@int ws) := { mul a b := int_mul a b }.\nFail Next Obligation.\n\nClass Xor A := xor : both A -> both A -> both A.\nNotation \"a .^ b\" := (xor a b).\n\nProgram Instance int_xor_inst {ws : wsize} : Xor (@int ws) := { xor a b := int_xor a b }.\nFail Next Obligation.\n\n(** Iter *)\n\nStructure array_or_seq A (len : nat) :=\n  { as_nseq :> both (nseq_ A len) ;\n    as_seq :> both (seq A) ;\n    as_list :> both (chList A)\n  }.\n\nArguments as_seq {_} {_}. (* array_or_seq. *)\nArguments as_nseq {_} {_}. (* array_or_seq. *)\nArguments as_list {_} {_}. (* array_or_seq. 
*)\n\nDefinition array_to_list {A n} := lift1_both (fun x => (@array_to_list A n x) : chList _).\n\nDefinition seq_to_list {A} := lift1_both (fun x => (@seq_to_list A x) : chList _).\n\nDefinition seq_from_list {A} := lift1_both (fun (x : chList _) => seq_from_list A (x : list _)).\n\nDefinition array_from_list' {A} {n : nat} := lift1_both (fun (x : chList A) => @array_from_list' A x n : nseq_ _ _).\n\nEquations nseq_array_or_seq {A len} (val : both (nseq_ A len)) : array_or_seq A len :=\n  nseq_array_or_seq val := {| as_seq := array_to_seq val ; as_nseq := val ; as_list := array_to_list val |}.\nSolve All Obligations with intros ; exact fset0.\nFail Next Obligation.\n\nArguments nseq_array_or_seq {A} {len}.\nCoercion nseq_array_or_seq : both >-> array_or_seq.\nCanonical Structure nseq_array_or_seq.\n\nDefinition n_seq_array_or_seq {A} {B} (x : both B)\n           `(contra : match B with\n                      | chUnit => True\n                      | chMap (chFin (@mkpos (S n) _)) (C) => C = A\n                      | chMap 'nat (C) => C = A\n                      | chList C => C = A\n                      | _ => False\n                      end) :\n  let len := (match B as K return\n                    match K with\n                    | chUnit => True\n                    | chMap (chFin (@mkpos (S n) _)) (C) => C = A\n                    | chMap 'nat (C) => C = A\n                    | chList C => C = A\n                    | _ => False\n                    end -> nat\n              with\n              | chUnit => fun _ => 0%nat\n              | chMap (chFin (@mkpos p _)) C =>\n                  fun m_contra =>\n                    match p as p_ return match p_ with\n                                         | O => False\n                                         | _ => C = A\n                                         end -> nat\n                          with\n                  | O => fun m_contra => False_rect nat m_contra\n                  | S n => fun _ => S 
n\n                  end m_contra\n              | chMap 'nat C =>\n                  fun m_contra => 3%nat\n              | chList C => fun m_contra => 4%nat\n              | _ => fun m_contra => False_rect nat m_contra\n              end contra) in\n  array_or_seq A len.\nProof.\n  intros.\n  destruct B ; try contradiction contra.\n  - change 'unit with (nseq_ A len) in x.\n    exact {| as_seq := array_to_seq x ; as_nseq := x; as_list := array_to_list x |}.\n  - destruct B1 ; try contradiction contra ; simpl in *.\n    + subst.\n      change (chMap 'nat A) with (seq A) in x.\n      exact ({| as_seq := x ; as_nseq := array_from_seq _ x ; as_list := seq_to_list x |}).\n    + destruct n.\n      destruct pos.\n      * contradiction.\n      * subst.\n        replace (chMap (chFin _) A) with (nseq_ A len) in x.\n        2:{\n          simpl.\n          f_equal.\n          f_equal.\n          apply (ssrbool.elimT (positive_eqP _ _)).\n          unfold positive_eq.\n          apply eqtype.eq_refl.\n        }\n        exact {| as_seq := array_to_seq x ; as_nseq := x; as_list := array_to_list x |}.\n  - subst.\n    exact {| as_seq := seq_from_list x ; as_nseq := array_from_list' x ; as_list := x |}.\nDefined.\n\nNotation \" x '.a[' a ']'\" := (array_index (n_seq_array_or_seq x _) a) (at level 40).\n\nFail Next Obligation.\nNotation \" x '.a[' i ']<-' a\" := (array_upd x i a) (at level 40).\n\nNotation update_at := array_upd.\nNotation update_at_usize := array_upd.\n\nNotation t_Seq := seq.\nNotation num_exact_chunks := seq_num_exact_chunks.\nNotation get_exact_chunk := seq_get_exact_chunk.\n\nNotation get_remainder_chunk := seq_get_remainder_chunk.\nNotation \"a <> b\" := (negb (eqb a b)).\n\nNotation from_secret_literal := nat_mod_from_secret_literal.\n\nNotation zero := nat_mod_zero.\nNotation to_byte_seq_le := nat_mod_to_byte_seq_le.\nNotation U128_to_le_bytes := u128_to_le_bytes.\nNotation U64_to_le_bytes := u64_to_le_bytes.\n     Notation from_byte_seq_le := 
nat_mod_from_byte_seq_le.\nDefinition from_literal {m} := nat_mod_from_literal m.\nNotation inv := nat_mod_inv.\nNotation update_start := array_update_start.\nNotation pow := nat_mod_pow_self.\nNotation bit := nat_mod_bit.\n\nNotation Build_secret := secret.\nNotation \"a -× b\" :=\n(prod a b) (at level 80, right associativity) : hacspec_scope.\nNotation Result_t := result.\nAxiom TODO_name : Type.\nNotation ONE := nat_mod_one.\nNotation exp := nat_mod_exp.\n\nNotation TWO := nat_mod_two.\nNotation ne := (fun x y => negb (eqb x y)).\nNotation eq := (eqb).\nNotation rotate_right := (ror).\nNotation to_be_U32s := array_to_be_uint32s.\nNotation get_chunk := seq_get_chunk.\nNotation num_chunks := seq_num_chunks.\nNotation U64_to_be_bytes := uint64_to_be_bytes.\nNotation to_be_bytes := array_to_be_bytes.\nNotation U8_from_usize := uint8_from_usize.\nNotation concat := seq_concat.\nNotation declassify := id.\nNotation U128_from_be_bytes := uint128_from_be_bytes.\nNotation U128_to_be_bytes := uint128_to_be_bytes.\nNotation slice_range := array_slice_range.\nNotation truncate := seq_truncate.\n\nNotation to_be_U64s := array_to_be_uint64s.\nNotation classify := id.\nNotation U64_from_U8 := uint64_from_uint8.\n\nDefinition Build_t_Range {WS} {f_start : both (int WS)} {f_end : both (int WS)} := prod_b (f_start,f_end).\nNotation Build_Range  := Build_t_Range.\n\nNotation declassify_eq := eq.\nNotation String_t := String.string.\n\nNotation \"'i8(' v ')'\" := (ret_both (v : int8) : both _).\nNotation \"'i16(' v ')'\" := (ret_both (v : int16) : both _).\nNotation \"'i32(' v ')'\" := (ret_both (v : int32) : both _).\nNotation \"'i64(' v ')'\" := (ret_both (v : int64) : both _).\nNotation \"'i128(' v ')'\" := (ret_both (v : int128) : both _).\n\nDefinition len {A ws} := lift1_both  (fun (x : chList A) => repr ws (List.length x)).\n\nDefinition orb (x : both 'bool) (y : both 'bool) : both 'bool := lift2_both (fun (x y : 'bool) => Datatypes.orb x y : 'bool) x y.\nDefinition andb (x 
: both 'bool) (y : both 'bool) : both 'bool := lift2_both (fun (x y : 'bool) => Datatypes.andb x y : 'bool) x y.\nDefinition negb (x : both 'bool) : both 'bool := lift1_both (fun (x : 'bool) => Datatypes.negb x : 'bool) x.\nNotation \"a <> b\" := (negb (eqb a b)).\nNotation \"'not'\" := (negb).\nNotation \"x ':of:' y\" := (x : both _ _ y) (at level 100).\nNotation \"x ':of0:' y\" := (x : both y) (at level 100).\n\n(** Trait impls *)\n\nClass t_Serialize (Self : choice_type).\nClass t_Deserial (Self : choice_type).\nClass t_Serial (Self : choice_type).\nNotation \"'t_Eq'\" := (EqDec).\n(** end of: Should be moved to Hacspec_Lib.v **)\n\nDefinition t_Result A B := result B A.\n\n(** Should be part of core.V **)\n\nClass t_Sized (A : choice_type) := Sized : A -> A.\nClass t_TryFrom (A : choice_type) := TryFrom : A -> A.\nClass t_Into (A : choice_type) := Into : A -> A.\nClass t_PartialEq (A : choice_type) (B : choice_type) := PartialEq : A -> B -> bool.\nClass t_Copy (A : choice_type) := Copy : A -> A.\nClass t_Clone (A : choice_type) := Clone : A -> A.\nDefinition t_Option : choice_type -> choice_type := chOption.\nInductive vec_typ :=\n| t_Global.\nDefinition t_Vec : choice_type -> vec_typ -> choice_type := fun A _ => chList A.\n\nNotation t_Default := Default.\n\n#[global] Instance bool_copy : t_Copy 'bool := {Copy x := x}.\n#[global] Instance bool_clone : t_Clone 'bool := {Clone x := x}.\n#[global] Instance bool_sized : t_Sized 'bool := {Sized x := x}.\n\nDefinition ilog2 {WS} (x : both (int WS)) : both (int WS) := x. 
(* TODO *)\n\nDefinition collect {A} (x : both (chList A)) : both (t_Vec A t_Global) := x.\n\n\nEquations swap_both_list {A} (x : list (both A)) : both (chList A) :=\n  swap_both_list x :=\n  (List.fold_left (fun (x : both (chList A)) y =>\n   bind_both x (fun x' =>\n   bind_both y (fun y' =>\n   solve_lift (ret_both ((y' :: x') : chList A))))) x (solve_lift (ret_both ([] : chList A)))).\nSolve All Obligations with solve_ssprove_obligations.\nFail Next Obligation.\n\nEquations match_list {A B : choice_type} (x : both (chList A)) (f : list A -> B) : both B :=\n  match_list x f :=\n  bind_both x (fun x' => solve_lift (ret_both (f x'))).\nSolve All Obligations with solve_ssprove_obligations.\nFail Next Obligation.\n\nEquations map {A B} (x : both (chList A))  (f : both A -> both B) : both (chList B) :=\n  map x f :=\n  bind_both x (fun x' => swap_both_list (List.map (fun y => f (solve_lift (ret_both y))) x')).\nSolve All Obligations with solve_ssprove_obligations.\nFail Next Obligation.\n\nDefinition cloned {A} (x : both (chList A)) : both (chList A) := x.\n\nEquations iter {A} (x : both (seq A)) : both (chList A) :=\n  iter x :=\n  bind_both x (fun x' => solve_lift (ret_both (Hacspec_Lib_Pre.seq_to_list _ x' : chList A))).\nSolve All Obligations with solve_ssprove_obligations.\nFail Next Obligation.\n\nDefinition dedup {A} (x : both (t_Vec A t_Global)) : both (t_Vec A t_Global) := x.\n\nDefinition t_String := Coq.Strings.String.string.\nEquations new {A} : both (t_Vec A t_Global) :=\n  new := solve_lift (ret_both ([] : chList A)).\nSolve All Obligations with solve_ssprove_obligations.\nFail Next Obligation.\n\nDefinition enumerate {A} (x : both (t_Vec A t_Global)) : both (t_Vec A t_Global) := x.\n\n(*** More functions *)\nDefinition t_Drain : choice_type -> vec_typ -> choice_type := t_Vec.\nInductive t_Range := RangeFull.\nEquations drain : forall {A}, both (t_Vec A t_Global) -> t_Range -> both (t_Drain A t_Global × t_Vec A t_Global) :=\n  drain x _ :=\n    bind_both 
x (fun x' => solve_lift (ret_both ((x', []) : (t_Drain A t_Global × t_Vec A t_Global)))).\nSolve All Obligations with solve_ssprove_obligations.\nFail Next Obligation.\nNotation t_Rev := id.\nEquations rev {A} (x : both (chList A)) : both (chList A) := rev x := bind_both x (fun x => solve_lift (ret_both (List.rev x : chList _))).\nSolve All Obligations with solve_ssprove_obligations.\nFail Next Obligation.\n\nDefinition pop {A} : both (chList A) -> both (chOption A × t_Vec A (t_Global)) :=\n  lift1_both (fun (x : chList A) => (List.hd_error x , List.tl x) : (chOption A × t_Vec A (t_Global))).\n\nDefinition push {A} : both (t_Vec A t_Global) -> both A -> both (t_Vec A (t_Global)) :=\n  lift2_both (fun  (x : chList A) y => y :: x : chList A).\n\nNotation Option_Some := Some.\nDefinition append {A : choice_type} (l : both (chList A)) (x : both (chList A)) : both (chList A × chList A) :=\n  lift2_both (fun (x : chList A) (y : chList A) => (app y x, []) : chList A × chList A) x l.\n\nNotation f_clone := id.\nDefinition seq_unzip {A B} (s : chList (A × B)) : chList A × chList B := (seq.unzip1 s, seq.unzip2 s).\nDefinition unzip {A B} : both (chList (A × B)) -> both (chList A × chList B) := lift1_both seq_unzip.\nEquations deref {A} : both (t_Vec A t_Global) -> both (seq A) :=\n  deref X := bind_both X (fun x : t_Vec A t_Global => solve_lift (ret_both (Hacspec_Lib_Pre.seq_from_list A x))).\nSolve All Obligations with solve_ssprove_obligations.\nFail Next Obligation.\nDefinition t_Never : choice_type := 'unit.\nDefinition abort : both t_Never := ret_both (tt : 'unit).\n\nNotation Result_Err := Err.\nNotation Result_Ok := Ok.\n\nNotation \"'ret_both' 'tt'\" := (ret_both (tt : 'unit)).\n\n(** Should be part of concordium.v **)\nClass HasInitContext (Self : choice_type).\nClass t_HasInitContext (Self : choice_type) (something : choice_type).\nClass t_HasActions (Self : choice_type) := {f_accept : both Self}.\nClass HasReceiveContext (Self : choice_type).\nDefinition 
t_ParamType := 'unit.\nDefinition t_ParseError := 'unit.\nClass t_HasReceiveContext (Self : choice_type) (something : choice_type) := { f_get : forall (Ctx : Self), both (t_ParamType × t_Result Self something) }.\n(* Arguments f_get {Self} {something} (t_HasReceiveContext) {Ctx}. *)\n\nDefinition f_parameter_cursor {T : _} (ctx : both (T)) : T := is_pure ctx.\n\nNotation ControlFlow_Continue := Result_Ok.\nNotation v_Break := Result_Err.\nNotation never_to_any := id.\nEquations run {A} (x : both (choice_typeMonad.M (CEMonad := (@choice_typeMonad.mnd (choice_typeMonad.result_bind_code A))) A)) : both A :=\n  run x :=\n  bind_both x (fun y => match y with\n                             | inl r | inr r => solve_lift ret_both r\n                             end).\nFail Next Obligation.\n\n\nNotation \"'matchb' x 'with' '|' a '=>' b 'end'\" :=\n  (bind_both x (fun y => match y with\n                      | a => b end)) (at level 100, a pattern).\n\nNotation \"'matchb' x 'with' '|' a '=>' b '|' c '=>' d  'end'\" :=\n  (bind_both x (fun y => match y with\n                      | a => b\n                      | c => d end)) (at level 100, a pattern, c pattern).\n\nNotation \"'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f  'end'\" :=\n  (bind_both x (fun y => match y with\n                      | a => b\n                      | c => d\n                      | e => f end)) (at level 100, a pattern, c pattern, e pattern).\n\nNotation \"'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h 'end'\" :=\n  (bind_both x (fun y => match y with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h end)) (at level 100, a pattern, c pattern, e pattern, g pattern).\n\nNotation \"'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j 'end'\" :=\n  (bind_both x (fun y => match y with\n                      | a => b\n                      | c => d\n                
      | e => f\n                      | g => h\n                      | i => j end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern).\n\nNotation \"'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l 'end'\" :=\n  (bind_both x (fun y => match y with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern).\n\nNotation \"'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n 'end'\" :=\n  (bind_both x (fun y => match y with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern).\n\nNotation \"'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p 'end'\" :=\n  (bind_both x (fun y => match y with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern).\n\nNotation \"'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r 'end'\" :=\n  (bind_both x (fun y => match y with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i 
=> j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern).\n\nNotation \"'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t 'end'\" :=\n  (bind_both x (fun y => match y with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern).\n\nNotation \"'matchb' x 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v 'end'\" :=\n  (bind_both x (fun y => match y with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x 'end'\" :=\n  (bind_both x_val (fun y => match y with\n                      | a => b\n                      | c => d\n                      | e => f\n                   
   | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z end)) (at level 100, a pattern, c pattern, e 
pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => d1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 
pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => d1\n                      | e1 => f1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => d1\n                      | e1 => f1\n                      | g1 => h1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k 
pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => d1\n                      | e1 => f1\n                      | g1 => h1\n                      | i1 => j1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n  
                    | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => d1\n                      | e1 => f1\n                      | g1 => h1\n                      | i1 => j1\n                      | k1 => l1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => d1\n                      | e1 => f1\n                      | g1 => h1\n                      | i1 => j1\n                      | k1 => l1\n                      | m1 => n1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 
'|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => d1\n                      | e1 => f1\n                      | g1 => h1\n                      | i1 => j1\n                      | k1 => l1\n                      | m1 => n1\n                      | o1 => p1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => 
d1\n                      | e1 => f1\n                      | g1 => h1\n                      | i1 => j1\n                      | k1 => l1\n                      | m1 => n1\n                      | o1 => p1\n                      | q1 => r1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 '|' s1 '=>' t1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => d1\n                      | e1 => f1\n                      | g1 => h1\n                      | i1 => j1\n                      | k1 => l1\n                      | m1 => n1\n                      | o1 => p1\n                      | q1 => r1\n                      | s1 => t1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern, s1 pattern).\n\nNotation \"'matchb' 
x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 '|' s1 '=>' t1 '|' u1 '=>' v1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => d1\n                      | e1 => f1\n                      | g1 => h1\n                      | i1 => j1\n                      | k1 => l1\n                      | m1 => n1\n                      | o1 => p1\n                      | q1 => r1\n                      | s1 => t1\n                      | u1 => v1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern, s1 pattern, u1 pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 '|' s1 '=>' t1 '|' u1 '=>' v1 '|' w1 '=>' x1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | 
c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => d1\n                      | e1 => f1\n                      | g1 => h1\n                      | i1 => j1\n                      | k1 => l1\n                      | m1 => n1\n                      | o1 => p1\n                      | q1 => r1\n                      | s1 => t1\n                      | u1 => v1\n                      | w1 => x1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern, s1 pattern, u1 pattern, w1 pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 '|' s1 '=>' t1 '|' u1 '=>' v1 '|' w1 '=>' x1 '|' y1 '=>' z1 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n           
           | c1 => d1\n                      | e1 => f1\n                      | g1 => h1\n                      | i1 => j1\n                      | k1 => l1\n                      | m1 => n1\n                      | o1 => p1\n                      | q1 => r1\n                      | s1 => t1\n                      | u1 => v1\n                      | w1 => x1\n                      | y1 => z1 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern, s1 pattern, u1 pattern, w1 pattern, y1 pattern).\n\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 '|' s1 '=>' t1 '|' u1 '=>' v1 '|' w1 '=>' x1 '|' y1 '=>' z1 '|' a2 '=>' b2 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => d1\n                      | e1 => f1\n                      | g1 => h1\n                      | i1 => j1\n                      | k1 => l1\n                      | m1 => n1\n                      | o1 => p1\n                      | q1 => r1\n                      | s1 => t1\n                      | u1 => v1\n               
       | w1 => x1\n                      | y1 => z1\n                      | a2 => b2 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern, s1 pattern, u1 pattern, w1 pattern, y1 pattern, a2 pattern).\nNotation \"'matchb' x_val 'with' '|' a '=>' b '|' c '=>' d '|' e '=>' f '|' g '=>' h '|' i '=>' j '|' k '=>' l '|' m '=>' n '|' o '=>' p '|' q '=>' r '|' s '=>' t '|' u '=>' v '|' w '=>' x '|' y '=>' z '|' a1 '=>' b1 '|' c1 '=>' d1 '|' e1 '=>' f1 '|' g1 '=>' h1 '|' i1 '=>' j1 '|' k1 '=>' l1 '|' m1 '=>' n1 '|' o1 '=>' p1 '|' q1 '=>' r1 '|' s1 '=>' t1 '|' u1 '=>' v1 '|' w1 '=>' x1 '|' y1 '=>' z1 '|' a2 '=>' b2 '|' c2 '=>' d2 'end'\" :=\n  (bind_both x_val (fun y_val => match y_val with\n                      | a => b\n                      | c => d\n                      | e => f\n                      | g => h\n                      | i => j\n                      | k => l\n                      | m => n\n                      | o => p\n                      | q => r\n                      | s => t\n                      | u => v\n                      | w => x\n                      | y => z\n                      | a1 => b1\n                      | c1 => d1\n                      | e1 => f1\n                      | g1 => h1\n                      | i1 => j1\n                      | k1 => l1\n                      | m1 => n1\n                      | o1 => p1\n                      | q1 => r1\n                      | s1 => t1\n                      | u1 => v1\n                      | w1 => x1\n                      | y1 => z1\n                      | a2 => b2\n                      | c2 => d2 end)) (at level 100, a pattern, c pattern, e pattern, g pattern, i pattern, k pattern, m pattern, o pattern, q pattern, s pattern, u pattern, w pattern, y pattern, 
a1 pattern, c1 pattern, e1 pattern, g1 pattern, i1 pattern, k1 pattern, m1 pattern, o1 pattern, q1 pattern, s1 pattern, u1 pattern, w1 pattern, y1 pattern, a2 pattern, c2 pattern).\n\nNotation f_branch := id.\nNotation ControlFlow_Break_case := inr.\nNotation ControlFlow_Continue_case := inl.\n\nNotation f_from_residual := Result_Err.\n\nLtac remove_duplicate_pair :=\n  normalize_fset ;\n  repeat match goal with\n  | |- context G [(?a :|: (?a :|: ?c))] =>\n      replace (a :|: (a :|: c)) with (a :|: a :|: c) by (now rewrite <- fsetUA) ; rewrite fsetUid\n  end.\n\n\nAxiom t_Reject : choice_type.\nEquations repeat {A} (e : both A) (n : both uint_size) : both (nseq A (is_pure n)) :=\n  repeat e n :=\n (eq_rect\n       (Datatypes.length (List.repeat (solve_lift e) (Z.to_nat (unsigned (is_pure n)))))\n       (fun n0 : nat => both (nseq_ A n0)) (bind_both e\n       (fun _ : A =>\n        array_from_list (List.repeat (solve_lift e) (Z.to_nat (unsigned (is_pure n)))))\n)\n       (Z.to_nat (unsigned (is_pure n)))\n       (List.repeat_length (solve_lift e) (Z.to_nat (unsigned (is_pure n))))).\nFail Next Obligation.\n\nClass iterable (A B : choice_type) := {f_into_iter : both A -> both (chList B)}.\nInstance nseq_iterable_seq {A n} : iterable (nseq A n) A := {| f_into_iter := array_to_list |}.\nProgram Instance range_iterable {WS} : iterable ((int WS) × (int WS)) (int WS) :=\n  {| f_into_iter :=\n    fun x =>\n      bind_both x (fun '((a, b) : int WS × int WS) => solve_lift (ret_both (List.map (fun x => repr WS (Z.of_nat x)) (List.seq (Z.to_nat (unsigned a)) (Z.to_nat (unsigned (b))-Z.to_nat (unsigned a))) : chList (int WS) )))\n  |}.\nFail Next Obligation.\nNotation t_IntoIter := (chList _).\nInstance nseq_iterable_vec {A n} : iterable (t_Vec A n) A := {| f_into_iter := fun x => x |}.\n\nDefinition t_Amount := int64.\n\nDefinition impl_20__contains_key := int64.\nDefinition f_micro_ccd := int64.\nEquations Build_t_Amount {f_micro_ccd : both int64} : both (t_Amount) :=\n  
Build_t_Amount  :=\n    bind_both f_micro_ccd (fun f_micro_ccd =>\n                             solve_lift (ret_both ((f_micro_ccd) : (t_Amount)))) : both (t_Amount).\nFail Next Obligation.\nDefinition t_Timestamp := int32.\nDefinition t_BTreeMap (A B : Type) (C : vec_typ) := int32.\nDefinition f_slot_time := int64.\nDefinition f_metadata := int64.\nDefinition t_AccountAddress : choice_type := int64 ∐ int64.\nDefinition Address_Contract_case (addr : int64) : t_AccountAddress := inl addr.\nDefinition Address_Account_case (addr : int64) : t_AccountAddress := inr addr.\nDefinition f_sender : t_AccountAddress :=\n  Address_Account_case 0.\n\nNotation f_into_iter_loc := fset0.\nNotation f_end_loc := fset0.\nNotation f_start_loc := fset0.\nNotation f_eq_loc := fset0.\nEquations impl__into_vec {A n} : both (nseq_ A n) -> both (t_Vec A t_Global) :=\n  impl__into_vec X := bind_both X (fun x : nseq_ A n => solve_lift (ret_both (Hacspec_Lib_Pre.array_to_list x : chList _))).\nFail Next Obligation.\n\nDefinition unsize {A} := @id A.\nDefinition box_new {A} := @id A.\n\nNotation f_get_loc := fset0.\nNotation f_clone_loc := fset0.\nNotation f_accept_loc := fset0.\nNotation f_parameter_cursor_loc := fset0.\n\nNotation Result_Ok_case := inl.\nNotation Result_Err_case := inr.\n\nDefinition impl__map_err {A B C : choice_type} (r : both (t_Result A B)) (f : B -> C) : both (t_Result A C) :=\n  matchb r with\n  | inl a => ret_both (inl a : t_Result A C)\n  | inr b => ret_both (inr (f b) : t_Result A C)\nend.\nDefinition f_from {A B : choice_type} : A -> (Result_t A B) :=\n  inr.\n\nFrom mathcomp Require Import eqtype.\nFrom mathcomp Require Import ssrbool.\n\nInstance copy {C : _} : t_Copy C := fun x => x.\nInstance partial_eq {C : _} : t_PartialEq C C := fun x y => x == y.\nInstance serialize {C : _} : t_Serialize C. 
Defined.\nInstance clone {C : _} : t_Clone C := fun x => x.\nInstance is_eq {C : choice_type} : t_Eq C :=\n  {|\n    Hacspec_Lib_Comparable.eqb := _ ;\n    eqb_leibniz := fun x y : C => RelationClasses.symmetry (rwP eqP)\n  |}.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/LocationUtility.v",
    "content": "From mathcomp Require Import all_ssreflect all_algebra.\n\nFrom Coq Require Import ZArith List.\nFrom Crypt Require Import choice_type Package.\nImport PackageNotation.\nFrom Crypt Require Import pkg_interpreter.\nFrom extructures Require Import ord fset fmap.\nRequire Import Hacspec_Lib_Comparable.\n\nRequire Import Coq.Logic.FunctionalExtensionality.\nImport List.ListNotations.\n\n(*****************************************************)\n(*   This file defines a utility functions to reason *)\n(* about equivalence of Locations and Signatures     *)\n(*****************************************************)\n\n(*** Location *)\n\nFrom HB Require Import structures.\nHB.instance Definition _ := hasDecEq.Build Location (fun x y => @tag_eqP _ _ x y).\n(* Variables (I : eqType) (T_ : I -> eqType). *)\n(* Implicit Types u v : {i : I & T_ i}. *)\n(* HB.instance Definition _ := hasDecEq.Build {x : _ & _} (fun x y => @tag_eqP _ _ x y). *)\nDefinition loc_eqType := pkg_core_definition_Location__canonical__eqtype_Equality.\n\nDefinition location_eqb (ℓ ℓ' : Location) :=\n  andb (@eqtype.eq_op Datatypes_nat__canonical__eqtype_Equality (projT2 ℓ) (projT2 ℓ'))\n       (@eqtype.eq_op _ (projT1 ℓ) (projT1 ℓ')).\n\nDefinition location_eqbP : forall (l1 l2 : Location),\n    @location_eqb (l1) (l2)\n    = (@eqtype.eq_op\n         _ (* (@eqtype.tag_eqType choice_type_eqType *)\n           (*                   (fun _ : choice_type => ssrnat.nat_eqType)) *) l1 l2).\nProof.\n  intros.\n\n  unfold location_eqb.\n  unfold eqtype.eq_op.\n\n  cbn.\n  rewrite ssrnat.eqnE.\n  unfold eqtype.tag_eq.\n  unfold eqtype.tagged_as.\n  unfold ssrfun.tag.\n  unfold ssrfun.tagged.\n\n  rewrite Bool.andb_comm.\n\n  unfold eq_rect_r, eq_rect.\n\n  set (eqtype.eq_op _ _) at 2.\n  replace (choice_type_eq _ _) with b by reflexivity.\n\n  destruct b eqn:b_eq ; subst b.\n  - f_equal.\n    case eqtype.eqP ; intros.\n    + rewrite e in b_eq.\n      rewrite <- e.\n      simpl.\n      reflexivity.\n  
  + exfalso.\n      apply (ssrbool.elimT eqtype.eqP) in b_eq.\n      apply n.\n      eapply b_eq.\n  - reflexivity.\nQed.\n\nTheorem is_true_split_or : forall a b, is_true (a || b)%bool = (is_true a \\/ is_true b).\nProof.\n  intros.\n  rewrite boolp.propeqE.\n  symmetry.\n  apply (ssrbool.rwP ssrbool.orP).\nQed.\nTheorem is_true_split_and : forall a b, is_true (a && b)%bool = (is_true a /\\ is_true b).\nProof.\n  intros.\n  rewrite boolp.propeqE.\n  symmetry.\n  apply (ssrbool.rwP ssrbool.andP).\nQed.\n\nTheorem is_true_split_or_ : forall a b, ((a || b)%bool = true) = (a = true \\/ b = true).\nProof.\n  intros.\n  rewrite boolp.propeqE.\n  symmetry.\n  apply (ssrbool.rwP ssrbool.orP).\nQed.\nTheorem is_true_split_and_ : forall a b, ((a && b)%bool = true) = (a = true /\\ b = true).\nProof.\n  intros.\n  rewrite boolp.propeqE.\n  symmetry.\n  apply (ssrbool.rwP ssrbool.andP).\nQed.\n\n(* Theorem LocsSubset : (forall {A} (L1 L2 : list A) (a : A), *)\n(*                          List.incl L1 L2 -> *)\n(*                          List.In a L1 -> *)\n(*                          List.In a L2). *)\n(*   intros. *)\n(*   induction L1 as [ | a0 L ] ; cbn in *. *)\n(*   - contradiction. *)\n(*   - destruct (List.incl_cons_inv H). *)\n(*     destruct H0. *)\n(*     + subst. *)\n(*       assumption. *)\n(*     + apply IHL ; assumption. *)\n(* Qed. *)\n\nLemma location_eqb_sound : forall ℓ ℓ' : Location, is_true (location_eqb ℓ ℓ') <-> ℓ = ℓ'.\nProof.\n  intros.\n  rewrite location_eqbP.\n  pose (@eqtype.eqP loc_eqType).\n  (* unfold eqtype.Equality.axiom in a. *)\n  pose (ssrbool.elimT).\n  pose (@eqtype.tag_eqP ).\n\n  split.\n\n  apply (Couplings.reflection_nonsense _ ℓ ℓ').\n  intros. subst.\n  apply eqtype.eq_refl.\nQed.\n\nGlobal Program Instance location_eqdec: EqDec (Location) := {\n    eqb := location_eqb;\n    eqb_leibniz := location_eqb_sound;\n  }.\n\nDefinition location_ltb : Location -> Location -> bool :=\n  (tag_leq (I:=choice_type_choice_type__canonical__Ord_Ord) (T_:=fun _ : choice_type => Datatypes_nat__canonical__Ord_Ord)).\n\nDefinition location_ltb_simple : Location -> Location -> bool :=\n  fun x y => ltb (projT2 x) (projT2 y).\n\nGlobal Instance location_comparable : Comparable (Location) :=\n  eq_dec_lt_Comparable location_ltb.\n\nDefinition le_is_ord_leq : forall s s0 : Datatypes_nat__canonical__Ord_Ord,\n    eqtype.eq_op s s0 = false -> ltb s s0 = (s <= s0)%ord.\nProof.\n  intros s s0.\n  unfold ltb , nat_comparable , Nat.ltb.\n  intros e.\n\n  generalize dependent s.\n  induction s0 ; intros.\n  * destruct s ; easy.\n  * destruct s. reflexivity.\n    cbn.\n    cbn in IHs0.\n    rewrite IHs0.\n    reflexivity.\n    assumption.\nQed.\n\nDefinition opsig_eqb (ℓ ℓ' : opsig) : bool :=\n  andb (@eqtype.eq_op Datatypes_nat__canonical__eqtype_Equality (fst ℓ) (fst ℓ'))\n       (andb (@eqtype.eq_op _ (fst (snd ℓ)) (fst (snd ℓ')))\n             (@eqtype.eq_op _ (snd (snd ℓ)) (snd (snd ℓ')))).\n\nLemma opsig_eqb_sound : forall ℓ ℓ' : opsig, is_true (opsig_eqb ℓ ℓ') <-> ℓ = ℓ'.\nProof.\n  intros.\n\n  destruct ℓ as [? []] , ℓ' as [? []].\n  setoid_rewrite is_true_split_and.\n  rewrite is_true_split_and.\n  unfold fst, snd in *.\n\n  transitivity (i = i0 /\\ c = c1 /\\ c0 = c2).\n  {\n    apply ZifyClasses.and_morph.\n    symmetry.\n    apply (ssrbool.rwP (@eqtype.eqP Datatypes_nat__canonical__eqtype_Equality i i0)).\n    apply ZifyClasses.and_morph.\n    symmetry.\n    apply (ssrbool.rwP (@eqtype.eqP _ c c1)).\n    symmetry.\n    apply (ssrbool.rwP (@eqtype.eqP _ c0 c2)).\n  }\n\n  split ; [ intros [? []] | intros H ; inversion H ] ; subst ; easy.\nQed.\n\nGlobal Program Instance opsig_eqdec: EqDec (opsig) := {\n    eqb := opsig_eqb;\n    eqb_leibniz := opsig_eqb_sound;\n  }.\n\n(* Theorem fset_compute : forall {T : ordType}, forall l : T, forall n : list T, List.In l n <-> is_true (ssrbool.in_mem l (@ssrbool.mem _ (seq.seq_predType (Ord.eqType T)) n)). *)\n(*   intros. *)\n(*   apply (ssrbool.rwP (xseq.InP _ _)). *)\n(* Qed. *)\n\nDefinition opsig_ordType := (Datatypes_prod__canonical__Ord_Ord Datatypes_nat__canonical__Ord_Ord (Datatypes_prod__canonical__Ord_Ord choice_type_choice_type__canonical__Ord_Ord choice_type_choice_type__canonical__Ord_Ord)).\n\nDefinition loc_ordType : ordType := @Specif_sigT__canonical__Ord_Ord choice_type_choice_type__canonical__Ord_Ord (fun _ : choice_type => Datatypes_nat__canonical__Ord_Ord).\n\nFixpoint incl_expand A `{EqDec A} (l1 l2 : list A) : Prop :=\n  match l1 with\n  | nil => True\n  | (x :: xs) => In x l2 /\\ incl_expand A xs l2\n  end.\n\n(* Theorem in_remove_fset : forall {T : ordType} a (l : list T), List.In a l <-> List.In a (fset l). *)\n(* Proof. *)\n(*   intros. *)\n(*   do 2 rewrite fset_compute. *)\n(*   now rewrite <- in_fset. *)\n(* Qed. *)\n\n\n\n(* Theorem in_split_cat : forall a (l0 l1 : list Location), List.In a (seq.cat l0 l1) <-> List.In a l0 \\/ List.In a l1. *)\n(* Proof. *)\n(*   split ; intros. *)\n(*   - induction l0. *)\n(*     + right. apply H. *)\n(*     + destruct H. *)\n(*       * left. left. assumption. *)\n(*       * destruct (IHl0 H). *)\n(*         -- left. right. assumption. *)\n(*         -- right. assumption. *)\n(*   - destruct H. *)\n(*     + induction l0. *)\n(*       * contradiction. *)\n(*       * destruct H. *)\n(*         -- left. assumption. *)\n(*         -- right. *)\n(*            apply IHl0. *)\n(*            assumption. *)\n(*     + induction l0. *)\n(*       * assumption. *)\n(*       * right. *)\n(*         assumption. *)\n(* Qed. *)\n\n(* Theorem in_split_fset_cat : forall a (l0 l1 : {fset tag_ordType (I:=choice_type_ordType) (fun _ : choice_type => nat_ordType)}), List.In a (l0 :|: l1) <-> List.In a l0 \\/ List.In a l1. *)\n(* Proof. *)\n(*   intros. *)\n(*   transitivity (In a (seq.cat (eqtype.val l0) (eqtype.val l1))). *)\n(*   symmetry. *)\n(*   apply in_remove_fset. *)\n(*   apply in_split_cat. *)\n(* Qed. *)\n\n(* Theorem loc_list_incl_fsubset : forall (l0 l1 : {fset tag_ordType (I:=choice_type_ordType) (fun _ : choice_type => nat_ordType)}), is_true (fsubset l0 l1) <-> List.incl l0 l1. *)\n(* Proof. *)\n(*   intros. *)\n(*   rewrite <- (ssrbool.rwP (@fsubsetP _ l0 l1)). *)\n\n(*   unfold ssrbool.sub_mem. *)\n(*   unfold incl. *)\n\n(*   assert (forall {A} (P Q : A -> Prop), (forall x, P x <-> Q x) -> (forall x, P x) <-> (forall x, Q x)). *)\n(*   { split ; intros ; apply H ; apply H0. } *)\n(*   apply H. clear H. *)\n(*   intros x. cbn in *. *)\n\n(*   rewrite fset_compute. *)\n(*   rewrite fset_compute. *)\n\n(*   reflexivity. *)\n(* Qed. *)\n\n(* Theorem opsig_list_incl_fsubset : forall (l0 l1 : _), is_true (fsubset (T:=opsig_ordType) l0 l1) <-> List.incl l0 l1. *)\n(* Proof. *)\n(*   intros. *)\n(*   rewrite <- (ssrbool.rwP (@fsubsetP _ l0 l1)). *)\n\n(*   unfold ssrbool.sub_mem. *)\n(*   unfold incl. *)\n\n(*   assert (forall {A} (P Q : A -> Prop), (forall x, P x <-> Q x) -> (forall x, P x) <-> (forall x, Q x)). *)\n(*   { split ; intros ; apply H ; apply H0. } *)\n(*   apply H. clear H. *)\n(*   intros x. cbn in *. *)\n\n(*   rewrite fset_compute. *)\n(*   rewrite fset_compute. *)\n\n(*   reflexivity. *)\n(* Qed. *)\n\n\n(* Lemma valid_injectLocations_b : *)\n(*   forall (import : Interface) (A : choice.Choice.type) *)\n(*          (L1 L2 : {fset tag_ordType (I:=choice_type_ordType) (fun _ : choice_type => nat_ordType)}) *)\n(*          (v : raw_code A), *)\n(*     List.incl L1 L2 -> ValidCode L1 import v -> ValidCode L2 import v. *)\n(* Proof. *)\n(*   intros I A L1 L2 v incl. *)\n(*   apply valid_injectLocations. *)\n(*   apply loc_list_incl_fsubset. *)\n(*   apply incl. *)\n(* Qed. *)\n\n(* Lemma valid_injectOpsig_b : *)\n(*   forall (I1 I2 : Interface) (A : choice.Choice.type) *)\n(*          (L : {fset tag_ordType (I:=choice_type_ordType) (fun _ : choice_type => nat_ordType)}) *)\n(*          (v : raw_code A), *)\n(*     List.incl I1 I2 -> ValidCode L I1 v -> ValidCode L I2 v. *)\n(* Proof. *)\n(*   intros I1 I2 A L v incl. *)\n(*   apply valid_injectMap. *)\n(*   apply opsig_list_incl_fsubset. *)\n(*   apply incl. *)\n(* Qed. *)\n\n(* Theorem loc_list_incl_remove_fset {A} `{EqDec A} : forall (l1 l2 : list Location), List.incl l1 l2 <-> List.incl (fset l1) (fset l2). *)\n(* Proof. *)\n(*   intros. *)\n\n(*   cbn in *. *)\n\n(*   induction l1. *)\n(*   - rewrite <- fset0E. easy. *)\n(*   - cbn. *)\n(*     unfold incl. *)\n(*     cbn. *)\n(*     split. *)\n(*     + intros. *)\n(*       rewrite <- in_remove_fset. *)\n(*       rewrite <- in_remove_fset in H1. *)\n(*       apply H0. *)\n(*       apply H1. *)\n(*     + intros. *)\n(*       pose (@in_remove_fset). *)\n(*       rewrite -> (in_remove_fset (T:=loc_ordType)). *)\n(*       apply H0. *)\n(*       rewrite <- (in_remove_fset (T:=loc_ordType)). *)\n(*       apply H1. *)\n(* Qed. *)\n\n\n(* Theorem opsig_list_incl_remove_fset {A} `{EqDec A} : forall (l1 l2 : list opsig), List.incl l1 l2 <-> List.incl (fset l1) (fset l2). *)\n(* Proof. *)\n(*   intros. *)\n\n(*   cbn in *. *)\n\n(*   induction l1. *)\n(*   - rewrite <- fset0E. easy. *)\n(*   - cbn. *)\n(*     unfold incl. *)\n(*     cbn. *)\n(*     split. *)\n(*     + intros. *)\n(*       rewrite <- in_remove_fset in H1 |- *. *)\n(*       apply H0. *)\n(*       apply H1. *)\n(*     + intros. *)\n(*       rewrite -> (in_remove_fset (T:=opsig_ordType)). *)\n(*       apply H0. *)\n(*       rewrite <- (in_remove_fset (T:=opsig_ordType)). *)\n(*       apply H1. *)\n(* Qed. *)\n\n(* Theorem list_incl_cons_iff : (forall A (a : A) l1 l2, List.incl (a :: l1) l2 <-> (List.In a l2 /\\ List.incl l1 l2)). *)\n(* Proof. *)\n(*   split. *)\n(*   - pose List.incl_cons_inv. *)\n(*     apply List.incl_cons_inv. *)\n(*   - intros []. *)\n(*     apply List.incl_cons ; assumption. *)\n(* Qed. *)\n\n(* Theorem loc_list_incl_expand {A} `{EqDec A} : forall (l1 l2 : list Location), *)\n(*     List.incl l1 l2 <-> incl_expand _ l1 l2. *)\n(* Proof. *)\n(*   induction l1. *)\n(*   - split ; intros. *)\n(*     reflexivity. *)\n(*     apply incl_nil_l. *)\n(*   - intros. *)\n(*     rewrite list_incl_cons_iff. *)\n(*     cbn. *)\n(*     apply and_iff_compat_l. *)\n(*     apply IHl1. *)\n(* Qed. *)\n\n(* Theorem opsig_list_incl_expand {A} `{EqDec A} : forall (l1 l2 : list opsig), *)\n(*     List.incl l1 l2 <-> incl_expand _ l1 l2. *)\n(* Proof. *)\n(*   induction l1. *)\n(*   - split ; intros. *)\n(*     reflexivity. *)\n(*     apply incl_nil_l. *)\n(*   - intros. *)\n(*     rewrite list_incl_cons_iff. *)\n(*     cbn. *)\n(*     apply and_iff_compat_l. *)\n(*     apply IHl1. *)\n(* Qed. *)\n\nDefinition location_lebP : (tag_leq (I:=choice_type_choice_type__canonical__Ord_Ord) (T_:=fun _ : choice_type => Datatypes_nat__canonical__eqtype_Equality)) = leb.\nProof.\n  intros.\n  do 2 (apply (@functional_extensionality Location) ; intros []).\n  cbn.\n\n  unfold tag_leq.\n  unfold eqtype.tag_eq.\n\n  unfold location_ltb.\n  unfold tag_leq.\n\n  unfold location_eqb.\n\n  unfold ssrfun.tag , ssrfun.tagged , projT1 , projT2 in *.\n\n  rewrite (Bool.andb_comm _ (eqtype.eq_op _ _)).\n\n  destruct (eqtype.eq_op x _) eqn:x_eq_x0.\n  2: reflexivity.\n  apply Couplings.reflection_nonsense in x_eq_x0.\n  subst.\n  rewrite eqtype.eq_refl.\n  rewrite Bool.andb_true_l.\n  rewrite Bool.andb_true_l.\n  rewrite Ord.ltxx.\n  rewrite Bool.orb_false_l.\n\n  destruct (eqtype.eq_op _ _) eqn:n_eq_n0.\n  2: reflexivity.\n\n  unfold eqtype.tagged_as in *.\n  unfold ssrfun.tagged ,  projT2 in *.\n  unfold eq_rect_r , eq_rect in *.\n\n  destruct eqtype.eqP in *.\n  2: contradiction.\n  cbn in n_eq_n0.\n  rewrite <- e.\n  rewrite ssrnat.eqnE in n_eq_n0.\n  apply Couplings.reflection_nonsense in n_eq_n0.\n  apply Ord.eq_leq. assumption.\nQed.\n\nLemma iff_extensionality : forall {A} (P Q : A -> Prop), (forall a, P a <-> Q a) -> ((forall a, P a) <-> (forall a, Q a)).\nProof.\n  intros. split ; intuition.\nQed.\n\nLemma iff_eq_sym : forall {A} (x y : A), (x = y) <-> (y = x).\nProof.\n  intros. split ; intuition.\nQed.\n\nDefinition loc_seq_has (a : Location) := seq.has (ssrbool.fun_of_rel (@eqtype.eq_op loc_eqType) a).\n\nTheorem loc_seq_has_remove_sort {A} `{EqDec A} : forall (l : list Location) (a : Location) leb,\n    is_true (loc_seq_has a l) <->\n    is_true (loc_seq_has a (path.sort leb l)).\nProof.\n  intros.\n  rewrite <- (Bool.negb_involutive (loc_seq_has a (path.sort leb l))).\n\n  unfold loc_seq_has.\n\n  rewrite <- seq.all_predC.\n  rewrite path.all_sort.\n  rewrite seq.all_predC.\n\n  rewrite Bool.negb_involutive.\n\n  reflexivity.\nQed.\n\n(* Theorem list_in_iff_seq_has {A} `{EqDec A} : forall (l : list Location) (a : Location), *)\n(*     is_true (loc_seq_has a l) <-> List.In a l. *)\n(* Proof. *)\n(*   induction l ; intros. *)\n(*   - split ; intros ; easy. *)\n(*   - cbn. *)\n(*     rewrite is_true_split_or. *)\n(*     apply ZifyClasses.or_morph. *)\n(*     + rewrite <- (ssrbool.rwP (@eqtype.eqP loc_eqType a0 a)). *)\n(*       apply iff_eq_sym. *)\n(*     + apply IHl. *)\n(* Qed. *)\n\n(* Theorem list_in_iff_list_in_sort {A} `{EqDec A} : forall (l : list Location) (a : Location) leb, *)\n(*     List.In a l <-> List.In a (path.sort leb l). *)\n(* Proof. *)\n(*   intros. *)\n(*   rewrite <- (list_in_iff_seq_has (path.sort leb l)). *)\n(*   rewrite <- loc_seq_has_remove_sort. *)\n(*   rewrite list_in_iff_seq_has. *)\n(*   reflexivity. *)\n(* Qed. *)\n\n(* Theorem list_in_sort_order_ignorant_compute {A} `{EqDec A} : forall (l : list Location) leb1 leb2 a, *)\n(*     (List.In a (path.sort leb1 l)) <-> List.In a (path.sort leb2 l). *)\n(* Proof. *)\n(*   intros. *)\n(*   rewrite <- list_in_iff_list_in_sort. *)\n(*   rewrite <- list_in_iff_list_in_sort. *)\n(*   reflexivity. *)\n(* Qed. *)\n\n(* Theorem list_incl_sort_order_ignorant_compute {A} `{EqDec A} : forall (l1 l2 : list Location) leb1 leb2, *)\n(*     List.incl (path.sort leb1 l1) (path.sort leb1 l2) <-> List.incl (path.sort leb2 l1) (path.sort leb2 l2). *)\n(* Proof. *)\n(*   intros. *)\n(*   apply iff_extensionality. *)\n(*   intros a. *)\n\n(*   rewrite list_in_sort_order_ignorant_compute with (leb1 := leb1) (leb2 := leb2). *)\n(*   rewrite list_in_sort_order_ignorant_compute with (leb1 := leb1) (leb2 := leb2). *)\n(*   reflexivity. *)\n(* Qed. *)\n\n(* Theorem list_incl_sort {A} `{EqDec A} : forall (l1 l2 : list Location) leb, *)\n(*     List.incl l1 l2 <-> List.incl (path.sort leb l1) (path.sort leb l2). *)\n(* Proof. *)\n(*   intros. *)\n(*   apply iff_extensionality. *)\n(*   intros a. *)\n(*   rewrite <- list_in_iff_list_in_sort. *)\n(*   rewrite <- list_in_iff_list_in_sort. *)\n(*   reflexivity. *)\n(* Qed. *)\n\nTheorem choice_type_test_refl : forall x , is_true (choice_type_test x x).\nProof.\n  intros.\n  replace (choice_type_test _ _) with (eqtype.eq_op x x) by reflexivity.\n  apply eqtype.eq_refl.\nQed.\n\n(* Theorem fset_eqEincl: forall a b : list Location, fset a = fset b <-> List.incl a b /\\ List.incl b a. *)\n(* Proof. *)\n(*   intros. *)\n(*   rewrite (ssrbool.rwP (@eqtype.eqP _ (fset a) (fset b))). *)\n(*   rewrite (@eqEfsubset _ (fset a) (fset b)). *)\n(*   rewrite is_true_split_and. *)\n\n(*   apply ZifyClasses.and_morph ; rewrite loc_list_incl_fsubset ; rewrite <- loc_list_incl_remove_fset ; reflexivity. *)\n(* Qed. *)\n\n\nLemma path_sorted_tl :\n  forall {T : ordType} {A} {e} {fmval : list A},\n  is_true (path.sorted e fmval) ->\n  is_true (path.sorted e (tl fmval)).\nProof.\n  intros.\n  destruct fmval.\n  - easy.\n  - cbn.\n    cbn in H.\n    destruct (fmval).\n    + reflexivity.\n    + cbn in H.\n      now rewrite LocationUtility.is_true_split_and in H.\nQed.\n\nFixpoint eqb_fset_helper {T : ordType} `{EqDec T} (x : list T) (i : is_true (path.sorted Ord.lt x)) (y : list T) (j : is_true (path.sorted Ord.lt y)) : bool :=\n  match x, y return\n        is_true (path.sorted Ord.lt x) ->\n        is_true (path.sorted Ord.lt y) ->\n        bool\n  with\n  | [], [] => fun _ _ => true\n  | a :: xs , b :: ys =>\n      fun i j =>\n        andb\n          (eqb a b)\n          (eqb_fset_helper xs (path_sorted_tl (T := T) i) ys (path_sorted_tl (T := T) j))\n  | _, _ => fun _ _ => false\n  end i j.\nTransparent eqb_fset_helper.\n\nDefinition eqb_fset {T : ordType} `{EqDec T} (x y : {fset T}) : bool :=\n  match x , y with\n  | @FSet.FSet _ fsval i, @FSet.FSet _ fsval0 i0 =>\n      eqb_fset_helper fsval i fsval0 i0\n  end.\nTransparent eqb_fset.\n\nTheorem eqb_leibniz_fset {T : ordType} `{EqDec T} : forall  (x y : {fset T}),\n    is_true (eqb_fset x y) <-> x = y.\nProof.\n  intros.\n  split.\n  - intros.\n    destruct x , y.\n    unfold eqb_fset in H0.\n\n    apply pkg_composition.fsval_eq.\n    simpl.\n\n    generalize dependent fsval0.\n    induction fsval ; intros.\n    + destruct fsval0.\n      * reflexivity.\n      * discriminate H0.\n    + destruct fsval0.\n      * discriminate H0.\n      * cbn in H0.\n\n        rewrite is_true_split_and in H0 ; destruct H0.\n\n        apply (eqb_leibniz a s) in H0.\n        subst.\n        f_equal.\n\n        eapply IHfsval.\n        apply H1.\n  - intros.\n    subst.\n    destruct y.\n    simpl.\n    induction fsval.\n    + reflexivity.\n    + simpl.\n      rewrite IHfsval.\n      now rewrite eqb_refl.\nQed.\n\nInstance fset_EqDec {T : ordType} `{EqDec T} : EqDec {fset T} :=\n  {| eqb := eqb_fset  ; eqb_leibniz := eqb_leibniz_fset |}.\n"
  },
  {
    "path": "hax-lib/proof-libs/coq/ssprove/src/dune",
    "content": "(coq.theory\n (name Hacspec) ; -R flag\n (package coq-hacspec-ssprove)\n (flags -w all)\n (theories\n   mathcomp elpi HB deriving ; Mathcomp\n   extructures\n   Equations\n   ConCert stdpp MetaCoq Ltac2 ; ConCert\n   ; Jasmin\n   Crypt Mon Relational ; SSProve\n   )\n ; (libraries <ocaml_libraries>)\n )\n; (include_subdirs qualified)"
  },
  {
    "path": "hax-lib/proof-libs/fstar/.envrc",
    "content": "use flake .#examples\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/Makefile.copy",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect FSTAR_HOME to be set to your FSTAR repo/install directory\n# We expect HAX_LIBS_HOME to be set to the folder containing core, rust_primitives etc.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHAX_LIBS_HOME ?= $(shell git rev-parse --show-toplevel)/proof-libs/fstar\nFSTAR_HOME    ?= $(HAX_LIBS_HOME)/../../../FStar\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= $(HAX_LIBS_HOME)/.cache\nHINT_DIR      ?= $(HAX_LIBS_HOME)/.hints\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\n# By default, we process all the files in the current directory. Here, we\n# *extend* the set of relevant files with the tests.\nROOTS = $(wildcard *.fst)\n\nFSTAR_INCLUDE_DIRS = $(HAX_LIBS_HOME)/rust_primitives $(HAX_LIBS_HOME)/core $(HAX_LIBS_HOME)/hax_lib\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS)\n\n\n.depend: $(HINT_DIR) $(CACHE_DIR)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nSHELL=/usr/bin/env bash\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/README.md",
    "content": "## Libraries for Hax\n\nThe goal of this directory is to serve as a snapshot of the current F*\nsupporting libraries for Hax.\n\nThe dependency chain is:\n\n`rust_primitives` <- `core` <- `hax_lib`\n\n# Rust Primitives\n\nThe `/rust_primitives` directory contains hand-written models for Rust\nbuilt-in features like machine integers and arrays. In particular, the\ncode in this directory reconciles any type or semantic differences\nbetween Rust and F*. A number of files in this directory use the \n[HACL Library](https://github.com/hacl-star/hacl-star/tree/main/lib).\n\n# Core & Alloc\n\nThe `/core` directory contains hand-written models for some parts of\nthe Core and Alloc libraries of Rust.\n\nAs a first goal, we would like to typecheck the code in this directory\nagainst interfaces generated from Rust Core and Alloc.\n\nAs a second goal, we would like to generate the code in this directory\nfrom an annotated version of Rust Core and Alloc.\n\n# Hax Library\n\nThe `/hax_lib` directory contains hand-written and generated code\nfor the Hax library which adds new features and functionality to Rust\nto help programmers. For example, this library includes bounded indexes\nfor arrays, unbounded integers etc.\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Alloc.Alloc.fst",
    "content": "module Alloc.Alloc\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_Global = | Global : t_Global\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Alloc.Borrow.fst",
    "content": "module Alloc.Borrow\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_Cow (v_T: Type0) = | Cow : v_T -> t_Cow v_T\n\nclass t_ToOwned (v_Self: Type0) = {\n  f_to_owned_pre:v_Self -> Type0;\n  f_to_owned_post:v_Self -> v_Self -> Type0;\n  f_to_owned:x0: v_Self\n    -> Prims.Pure v_Self (f_to_owned_pre x0) (fun result -> f_to_owned_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl (#v_T: Type0) : t_ToOwned v_T =\n  {\n    f_to_owned_pre = (fun (self: v_T) -> true);\n    f_to_owned_post = (fun (self: v_T) (out: v_T) -> true);\n    f_to_owned = fun (self: v_T) -> self\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Alloc.Boxed.fst",
    "content": "module Alloc.Boxed\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_Box (v_T: Type0) = | Box : v_T -> t_Box v_T\n\nlet impl__new (#v_T: Type0) (v: v_T) : v_T = v\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Alloc.Collections.Binary_heap.fst",
    "content": "module Alloc.Collections.Binary_heap\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nopen Rust_primitives.Notations\n\ntype t_BinaryHeap (v_T: Type0) (v_A: Type0) =\n  | BinaryHeap : Alloc.Vec.t_Vec v_T v_A -> t_BinaryHeap v_T v_A\n\nlet impl_10__new\n      (#v_T: Type0)\n      (#v_A: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Cmp.t_Ord v_T)\n      (_: Prims.unit)\n    : t_BinaryHeap v_T v_A =\n  BinaryHeap\n  (Alloc.Vec.Vec (Rust_primitives.Sequence.seq_empty #v_T ())\n      (Core_models.Marker.PhantomData <: Core_models.Marker.t_PhantomData v_A)\n    <:\n    Alloc.Vec.t_Vec v_T v_A)\n  <:\n  t_BinaryHeap v_T v_A\n\nlet impl_11__len\n      (#v_T #v_A: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Cmp.t_Ord v_T)\n      (self: t_BinaryHeap v_T v_A)\n    : usize = Alloc.Vec.impl_1__len #v_T #v_A self._0\n\nlet impl_10__push\n      (#v_T #v_A: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Cmp.t_Ord v_T)\n      (self: t_BinaryHeap v_T v_A)\n      (v: v_T)\n    : Prims.Pure (t_BinaryHeap v_T v_A)\n      (requires (impl_11__len #v_T #v_A self <: usize) <. Core_models.Num.impl_usize__MAX)\n      (fun _ -> Prims.l_True) =\n  let self:t_BinaryHeap v_T v_A =\n    { self with _0 = Alloc.Vec.impl_1__push #v_T #v_A self._0 v } <: t_BinaryHeap v_T v_A\n  in\n  self\n\nlet impl_10__pop\n      (#v_T #v_A: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Cmp.t_Ord v_T)\n      (self: t_BinaryHeap v_T v_A)\n    : Prims.Pure (t_BinaryHeap v_T v_A & Core_models.Option.t_Option v_T)\n      Prims.l_True\n      (ensures\n        fun temp_0_ ->\n          let (self_e_future: t_BinaryHeap v_T v_A), (res: Core_models.Option.t_Option v_T) =\n            temp_0_\n          in\n          ((impl_11__len #v_T #v_A self <: usize) >. mk_usize 0 <: bool) =.\n          (Core_models.Option.impl__is_some #v_T res <: bool)) =\n  let (max: Core_models.Option.t_Option v_T):Core_models.Option.t_Option v_T =\n    Core_models.Option.Option_None <: Core_models.Option.t_Option v_T\n  in\n  let index:usize = mk_usize 0 in\n  let (index: usize), (max: Core_models.Option.t_Option v_T) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (impl_11__len #v_T #v_A self <: usize)\n      (fun temp_0_ i ->\n          let (index: usize), (max: Core_models.Option.t_Option v_T) = temp_0_ in\n          let i:usize = i in\n          (i >. mk_usize 0 <: bool) =. (Core_models.Option.impl__is_some #v_T max <: bool) <: bool)\n      (index, max <: (usize & Core_models.Option.t_Option v_T))\n      (fun temp_0_ i ->\n          let (index: usize), (max: Core_models.Option.t_Option v_T) = temp_0_ in\n          let i:usize = i in\n          if\n            Core_models.Option.impl__is_none_or #v_T\n              #(v_T -> bool)\n              max\n              (fun max ->\n                  let max:v_T = max in\n                  Core_models.Cmp.f_gt #v_T\n                    #v_T\n                    #FStar.Tactics.Typeclasses.solve\n                    (self._0.[ i ] <: v_T)\n                    max\n                  <:\n                  bool)\n            <:\n            bool\n          then\n            let max:Core_models.Option.t_Option v_T =\n              Core_models.Option.Option_Some self._0.[ i ] <: Core_models.Option.t_Option v_T\n            in\n            let index:usize = i in\n            index, max <: (usize & Core_models.Option.t_Option v_T)\n          else index, max <: (usize & Core_models.Option.t_Option v_T))\n  in\n  let (self: t_BinaryHeap v_T v_A), (hax_temp_output: Core_models.Option.t_Option v_T) =\n    if Core_models.Option.impl__is_some #v_T max\n    then\n      let (tmp0: Alloc.Vec.t_Vec v_T v_A), (out: v_T) =\n        Alloc.Vec.impl_1__remove #v_T #v_A self._0 index\n      in\n      let self:t_BinaryHeap v_T v_A = { self with _0 = tmp0 } <: t_BinaryHeap v_T v_A in\n      self, (Core_models.Option.Option_Some out <: Core_models.Option.t_Option v_T)\n      <:\n      (t_BinaryHeap v_T v_A & Core_models.Option.t_Option v_T)\n    else\n      self, (Core_models.Option.Option_None <: Core_models.Option.t_Option v_T)\n      <:\n      (t_BinaryHeap v_T v_A & Core_models.Option.t_Option v_T)\n  in\n  self, hax_temp_output <: (t_BinaryHeap v_T v_A & Core_models.Option.t_Option v_T)\n\nlet impl_11__peek\n      (#v_T #v_A: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Cmp.t_Ord v_T)\n      (self: t_BinaryHeap v_T v_A)\n    : Prims.Pure (Core_models.Option.t_Option v_T)\n      Prims.l_True\n      (ensures\n        fun res ->\n          let res:Core_models.Option.t_Option v_T = res in\n          ((impl_11__len #v_T #v_A self <: usize) >. mk_usize 0 <: bool) =.\n          (Core_models.Option.impl__is_some #v_T res <: bool)) =\n  let (max: Core_models.Option.t_Option v_T):Core_models.Option.t_Option v_T =\n    Core_models.Option.Option_None <: Core_models.Option.t_Option v_T\n  in\n  let max:Core_models.Option.t_Option v_T =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (impl_11__len #v_T #v_A self <: usize)\n      (fun max i ->\n          let max:Core_models.Option.t_Option v_T = max in\n          let i:usize = i in\n          (i >. mk_usize 0 <: bool) =. (Core_models.Option.impl__is_some #v_T max <: bool) <: bool)\n      max\n      (fun max i ->\n          let max:Core_models.Option.t_Option v_T = max in\n          let i:usize = i in\n          if\n            Core_models.Option.impl__is_none_or #v_T\n              #(v_T -> bool)\n              max\n              (fun max ->\n                  let max:v_T = max in\n                  Core_models.Cmp.f_gt #v_T\n                    #v_T\n                    #FStar.Tactics.Typeclasses.solve\n                    (self._0.[ i ] <: v_T)\n                    max\n                  <:\n                  bool)\n            <:\n            bool\n          then\n            let max:Core_models.Option.t_Option v_T =\n              Core_models.Option.Option_Some self._0.[ i ] <: Core_models.Option.t_Option v_T\n            in\n            max\n          else max)\n  in\n  max\n\nassume val lemma_peek_pop: #t:Type -> (#a: Type) -> (#i: Core_models.Cmp.t_Ord t) -> h: t_BinaryHeap t a\n  -> Lemma (impl_11__peek h == snd (impl_10__pop h))\n          [SMTPat (impl_11__peek #t #a h)]\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Alloc.Collections.Btree.Set.fsti",
    "content": "module Alloc.Collections.Btree.Set\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nval t_BTreeSet (v_T v_U: Type0) : eqtype\n\nval impl_11__new: #v_T: Type0 -> #v_U: Type0 -> Prims.unit\n  -> Prims.Pure (t_BTreeSet v_T v_U) Prims.l_True (fun _ -> Prims.l_True)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Alloc.Collections.Vec_deque.fsti",
    "content": "module Alloc.Collections.Vec_deque\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_VecDeque (v_T: Type0) (v_A: Type0) =\n  | VecDeque : Rust_primitives.Sequence.t_Seq v_T -> Core_models.Marker.t_PhantomData v_A\n    -> t_VecDeque v_T v_A\n\nval impl_5__push_back (#v_T #v_A: Type0) (self: t_VecDeque v_T v_A) (x: v_T)\n    : Prims.Pure (t_VecDeque v_T v_A) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_5__len (#v_T #v_A: Type0) (self: t_VecDeque v_T v_A)\n    : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_5__pop_front (#v_T #v_A: Type0) (self: t_VecDeque v_T v_A)\n    : Prims.Pure (t_VecDeque v_T v_A & Core_models.Option.t_Option v_T)\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_6 (#v_T #v_A: Type0) : Core_models.Ops.Index.t_Index (t_VecDeque v_T v_A) usize =\n  {\n    f_Output = v_T;\n    f_index_pre = (fun (self: t_VecDeque v_T v_A) (i: usize) -> true);\n    f_index_post = (fun (self: t_VecDeque v_T v_A) (i: usize) (out: v_T) -> true);\n    f_index\n    =\n    fun (self: t_VecDeque v_T v_A) (i: usize) -> Rust_primitives.Sequence.seq_index #v_T self._0 i\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Alloc.Fmt.fst",
    "content": "module Alloc.Fmt\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nassume\nval format': args: Core_models.Fmt.t_Arguments -> Alloc.String.t_String\n\nunfold\nlet format = format'\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Alloc.Slice.fst",
    "content": "module Alloc.Slice\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nlet impl__to_vec (#v_T: Type0) (s: t_Slice v_T) : Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global =\n  Alloc.Vec.Vec (Rust_primitives.Sequence.seq_from_slice #v_T s)\n    (Core_models.Marker.PhantomData <: Core_models.Marker.t_PhantomData Alloc.Alloc.t_Global)\n  <:\n  Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global\n\nlet impl__into_vec (#v_T #v_A: Type0) (s: t_Slice v_T) : Alloc.Vec.t_Vec v_T v_A =\n  Alloc.Vec.Vec (Rust_primitives.Sequence.seq_from_slice #v_T s)\n    (Core_models.Marker.PhantomData <: Core_models.Marker.t_PhantomData v_A)\n  <:\n  Alloc.Vec.t_Vec v_T v_A\n\nassume\nval impl__sort_by':\n    #v_T: Type0 ->\n    #v_F: Type0 ->\n    {| i0: Core_models.Ops.Function.t_Fn v_F (v_T & v_T) |} ->\n    s: t_Slice v_T ->\n    compare: v_F\n  -> t_Slice v_T\n\nunfold\nlet impl__sort_by\n      (#v_T #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_Fn v_F (v_T & v_T))\n     = impl__sort_by' #v_T #v_F #i0\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Alloc.String.fst",
    "content": "module Alloc.String\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_String = | String : string -> t_String\n\nlet impl_String__new (_: Prims.unit) : t_String = String \"\" <: t_String\n\nlet impl_String__push_str (self: t_String) (other: string) : t_String =\n  let self:t_String = String (Rust_primitives.String.str_concat self._0 other) <: t_String in\n  self\n\nlet impl_String__push (self: t_String) (c: FStar.Char.char) : t_String =\n  let self:t_String =\n    String\n    (Rust_primitives.String.str_concat self._0 (Rust_primitives.String.str_of_char c <: string))\n    <:\n    t_String\n  in\n  self\n\nlet impl_String__pop (self: t_String) : (t_String & Core_models.Option.t_Option FStar.Char.char) =\n  let l:usize = Core_models.Str.impl_str__len self._0 in\n  let (self: t_String), (hax_temp_output: Core_models.Option.t_Option FStar.Char.char) =\n    if l >. mk_usize 0\n    then\n      let self:t_String =\n        String (Rust_primitives.String.str_sub self._0 (mk_usize 0) (l -! mk_usize 1 <: usize))\n        <:\n        t_String\n      in\n      self,\n      (Core_models.Option.Option_Some\n        (Rust_primitives.String.str_index self._0 (l -! mk_usize 1 <: usize))\n        <:\n        Core_models.Option.t_Option FStar.Char.char)\n      <:\n      (t_String & Core_models.Option.t_Option FStar.Char.char)\n    else\n      self, (Core_models.Option.Option_None <: Core_models.Option.t_Option FStar.Char.char)\n      <:\n      (t_String & Core_models.Option.t_Option FStar.Char.char)\n  in\n  self, hax_temp_output <: (t_String & Core_models.Option.t_Option FStar.Char.char)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Alloc.Vec.Drain.fst",
    "content": "module Alloc.Vec.Drain\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_Drain (v_T: Type0) (v_A: Type0) =\n  | Drain : Rust_primitives.Sequence.t_Seq v_T -> Core_models.Marker.t_PhantomData v_A\n    -> t_Drain v_T v_A\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl (#v_T #v_A: Type0) : Core_models.Iter.Traits.Iterator.t_Iterator (t_Drain v_T v_A) =\n  {\n    f_Item = v_T;\n    f_next_pre = (fun (self: t_Drain v_T v_A) -> true);\n    f_next_post\n    =\n    (fun (self: t_Drain v_T v_A) (out: (t_Drain v_T v_A & Core_models.Option.t_Option v_T)) -> true);\n    f_next\n    =\n    fun (self: t_Drain v_T v_A) ->\n      let (self: t_Drain v_T v_A), (hax_temp_output: Core_models.Option.t_Option v_T) =\n        if (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) =. mk_usize 0\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option v_T)\n          <:\n          (t_Drain v_T v_A & Core_models.Option.t_Option v_T)\n        else\n          let res:v_T = Rust_primitives.Sequence.seq_first #v_T self._0 in\n          let self:t_Drain v_T v_A =\n            {\n              self with\n              _0\n              =\n              Rust_primitives.Sequence.seq_slice #v_T\n                self._0\n                (mk_usize 1)\n                (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize)\n            }\n            <:\n            t_Drain v_T v_A\n          in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option v_T)\n          <:\n          (t_Drain v_T v_A & Core_models.Option.t_Option v_T)\n      in\n      self, hax_temp_output <: (t_Drain v_T v_A & Core_models.Option.t_Option v_T)\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Alloc.Vec.Into_iter.fsti",
    "content": "module Alloc.Vec.Into_iter\n\nval t_IntoIter (t: Type0) (_: unit): Type0\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval into_iter_into_iterator (t: Type0): \n  Core_models.Iter.Traits.Collect.t_IntoIterator (t_IntoIter t Alloc.Alloc.t_Global)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Alloc.Vec.fst",
    "content": "module Alloc.Vec\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_Vec (v_T: Type0) (v_A: Type0) =\n  | Vec : Rust_primitives.Sequence.t_Seq v_T -> Core_models.Marker.t_PhantomData v_A\n    -> t_Vec v_T v_A\n\nlet from_elem\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Clone.t_Clone v_T)\n      (item: v_T)\n      (len: usize)\n    : t_Vec v_T Alloc.Alloc.t_Global =\n  Vec (Rust_primitives.Sequence.seq_create #v_T item len)\n    (Core_models.Marker.PhantomData <: Core_models.Marker.t_PhantomData Alloc.Alloc.t_Global)\n  <:\n  t_Vec v_T Alloc.Alloc.t_Global\n\nlet impl__new (#v_T: Type0) (_: Prims.unit) : t_Vec v_T Alloc.Alloc.t_Global =\n  Vec (Rust_primitives.Sequence.seq_empty #v_T ())\n    (Core_models.Marker.PhantomData <: Core_models.Marker.t_PhantomData Alloc.Alloc.t_Global)\n  <:\n  t_Vec v_T Alloc.Alloc.t_Global\n\nlet impl__with_capacity (#v_T: Type0) (e_c: usize) : t_Vec v_T Alloc.Alloc.t_Global =\n  impl__new #v_T ()\n\nlet impl_1__len (#v_T #v_A: Type0) (self: t_Vec v_T v_A) : usize =\n  Rust_primitives.Sequence.seq_len #v_T self._0\n\nlet impl_1__pop (#v_T #v_A: Type0) (self: t_Vec v_T v_A)\n    : (t_Vec v_T v_A & Core_models.Option.t_Option v_T) =\n  let (self: t_Vec v_T v_A), (hax_temp_output: Core_models.Option.t_Option v_T) =\n    if (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) >. mk_usize 0\n    then\n      let last:v_T = Rust_primitives.Sequence.seq_last #v_T self._0 in\n      let self:t_Vec v_T v_A =\n        {\n          self with\n          _0\n          =\n          Rust_primitives.Sequence.seq_slice #v_T\n            self._0\n            (mk_usize 0)\n            ((Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) -! 
mk_usize 1 <: usize)\n        }\n        <:\n        t_Vec v_T v_A\n      in\n      self, (Core_models.Option.Option_Some last <: Core_models.Option.t_Option v_T)\n      <:\n      (t_Vec v_T v_A & Core_models.Option.t_Option v_T)\n    else\n      self, (Core_models.Option.Option_None <: Core_models.Option.t_Option v_T)\n      <:\n      (t_Vec v_T v_A & Core_models.Option.t_Option v_T)\n  in\n  self, hax_temp_output <: (t_Vec v_T v_A & Core_models.Option.t_Option v_T)\n\nlet impl_1__is_empty (#v_T #v_A: Type0) (self: t_Vec v_T v_A) : bool =\n  (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) =. mk_usize 0\n\nlet impl_1__as_slice (#v_T #v_A: Type0) (self: t_Vec v_T v_A) : t_Slice v_T =\n  Rust_primitives.Sequence.seq_to_slice #v_T self._0\n\nassume\nval impl_1__truncate': #v_T: Type0 -> #v_A: Type0 -> self: t_Vec v_T v_A -> n: usize\n  -> t_Vec v_T v_A\n\nunfold\nlet impl_1__truncate (#v_T #v_A: Type0) = impl_1__truncate' #v_T #v_A\n\nassume\nval impl_1__swap_remove': #v_T: Type0 -> #v_A: Type0 -> self: t_Vec v_T v_A -> n: usize\n  -> (t_Vec v_T v_A & v_T)\n\nunfold\nlet impl_1__swap_remove (#v_T #v_A: Type0) = impl_1__swap_remove' #v_T #v_A\n\nassume\nval impl_1__remove': #v_T: Type0 -> #v_A: Type0 -> self: t_Vec v_T v_A -> index: usize\n  -> (t_Vec v_T v_A & v_T)\n\nunfold\nlet impl_1__remove (#v_T #v_A: Type0) = impl_1__remove' #v_T #v_A\n\nassume\nval impl_1__clear': #v_T: Type0 -> #v_A: Type0 -> self: t_Vec v_T v_A -> t_Vec v_T v_A\n\nunfold\nlet impl_1__clear (#v_T #v_A: Type0) = impl_1__clear' #v_T #v_A\n\nassume\nval impl_1__drain': #v_T: Type0 -> #v_A: Type0 -> #v_R: Type0 -> self: t_Vec v_T v_A -> e_range: v_R\n  -> (t_Vec v_T v_A & Alloc.Vec.Drain.t_Drain v_T v_A)\n\nunfold\nlet impl_1__drain (#v_T #v_A #v_R: Type0) = impl_1__drain' #v_T #v_A #v_R\n\nlet impl_1__push (#v_T #v_A: Type0) (self: t_Vec v_T v_A) (x: v_T)\n    : Prims.Pure (t_Vec v_T v_A)\n      (requires\n        (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) <. 
Core_models.Num.impl_usize__MAX)\n      (fun _ -> Prims.l_True) =\n  let self:t_Vec v_T v_A =\n    {\n      self with\n      _0\n      =\n      Rust_primitives.Sequence.seq_concat #v_T\n        self._0\n        (Rust_primitives.Sequence.seq_one #v_T x <: Rust_primitives.Sequence.t_Seq v_T)\n    }\n    <:\n    t_Vec v_T v_A\n  in\n  self\n\nlet impl_1__insert (#v_T #v_A: Type0) (self: t_Vec v_T v_A) (index: usize) (element: v_T)\n    : Prims.Pure (t_Vec v_T v_A)\n      (requires\n        index <=. (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) &&\n        (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) <. Core_models.Num.impl_usize__MAX)\n      (fun _ -> Prims.l_True) =\n  let left:Rust_primitives.Sequence.t_Seq v_T =\n    Rust_primitives.Sequence.seq_slice #v_T self._0 (mk_usize 0) index\n  in\n  let right:Rust_primitives.Sequence.t_Seq v_T =\n    Rust_primitives.Sequence.seq_slice #v_T\n      self._0\n      index\n      (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize)\n  in\n  let left:Rust_primitives.Sequence.t_Seq v_T =\n    Rust_primitives.Sequence.seq_concat #v_T\n      left\n      (Rust_primitives.Sequence.seq_one #v_T element <: Rust_primitives.Sequence.t_Seq v_T)\n  in\n  let left:Rust_primitives.Sequence.t_Seq v_T =\n    Rust_primitives.Sequence.seq_concat #v_T left right\n  in\n  let self:t_Vec v_T v_A = { self with _0 = left } <: t_Vec v_T v_A in\n  self\n\nassume\nval impl_1__resize':\n    #v_T: Type0 ->\n    #v_A: Type0 ->\n    self: t_Vec v_T v_A ->\n    new_size: usize ->\n    value: v_T\n  -> Prims.Pure (t_Vec v_T v_A)\n      Prims.l_True\n      (ensures\n        fun self_e_future ->\n          let self_e_future:t_Vec v_T v_A = self_e_future in\n          (impl_1__len #v_T #v_A self_e_future <: usize) =. 
new_size)\n\nunfold\nlet impl_1__resize (#v_T #v_A: Type0) = impl_1__resize' #v_T #v_A\n\nlet impl_1__append (#v_T #v_A: Type0) (self other: t_Vec v_T v_A)\n    : Prims.Pure (t_Vec v_T v_A & t_Vec v_T v_A)\n      (requires\n        ((Rust_primitives.Hax.Int.from_machine (impl_1__len #v_T #v_A self <: usize)\n            <:\n            Hax_lib.Int.t_Int) +\n          (Rust_primitives.Hax.Int.from_machine (impl_1__len #v_T #v_A other <: usize)\n            <:\n            Hax_lib.Int.t_Int)\n          <:\n          Hax_lib.Int.t_Int) <=\n        (Rust_primitives.Hax.Int.from_machine Core_models.Num.impl_usize__MAX <: Hax_lib.Int.t_Int))\n      (fun _ -> Prims.l_True) =\n  let self:t_Vec v_T v_A =\n    { self with _0 = Rust_primitives.Sequence.seq_concat #v_T self._0 other._0 } <: t_Vec v_T v_A\n  in\n  let other:t_Vec v_T v_A =\n    { other with _0 = Rust_primitives.Sequence.seq_empty #v_T () } <: t_Vec v_T v_A\n  in\n  self, other <: (t_Vec v_T v_A & t_Vec v_T v_A)\n\nlet impl_2__extend_from_slice (#v_T #v_A: Type0) (s: t_Vec v_T v_A) (other: t_Slice v_T)\n    : Prims.Pure (t_Vec v_T v_A)\n      (requires\n        ((Rust_primitives.Hax.Int.from_machine (Rust_primitives.Sequence.seq_len #v_T s._0 <: usize)\n            <:\n            Hax_lib.Int.t_Int) +\n          (Rust_primitives.Hax.Int.from_machine (Core_models.Slice.impl__len #v_T other <: usize)\n            <:\n            Hax_lib.Int.t_Int)\n          <:\n          Hax_lib.Int.t_Int) <=\n        (Rust_primitives.Hax.Int.from_machine Core_models.Num.impl_usize__MAX <: Hax_lib.Int.t_Int))\n      (fun _ -> Prims.l_True) =\n  let s:t_Vec v_T v_A =\n    {\n      s with\n      _0\n      =\n      Rust_primitives.Sequence.seq_concat #v_T\n        s._0\n        (Rust_primitives.Sequence.seq_from_slice #v_T other <: Rust_primitives.Sequence.t_Seq v_T)\n    }\n    <:\n    t_Vec v_T v_A\n  in\n  s\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_3 (#v_T #v_A: Type0) : Core_models.Ops.Index.t_Index (t_Vec v_T v_A) 
usize =\n  {\n    f_Output = v_T;\n    f_index_pre\n    =\n    (fun (self_: t_Vec v_T v_A) (i: usize) -> i <. (impl_1__len #v_T #v_A self_ <: usize));\n    f_index_post = (fun (self: t_Vec v_T v_A) (i: usize) (out: v_T) -> true);\n    f_index\n    =\n    fun (self: t_Vec v_T v_A) (i: usize) -> Rust_primitives.Sequence.seq_index #v_T self._0 i\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_4 (#v_T #v_A: Type0) : Core_models.Ops.Deref.t_Deref (t_Vec v_T v_A) =\n  {\n    f_Target = t_Slice v_T;\n    f_deref_pre = (fun (self: t_Vec v_T v_A) -> true);\n    f_deref_post = (fun (self: t_Vec v_T v_A) (out: t_Slice v_T) -> true);\n    f_deref = fun (self: t_Vec v_T v_A) -> impl_1__as_slice #v_T #v_A self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_5': #v_T: Type0\n  -> Core_models.Iter.Traits.Collect.t_FromIterator (t_Vec v_T Alloc.Alloc.t_Global) v_T\n\nunfold\nlet impl_5 (#v_T: Type0) = impl_5' #v_T\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Array.Iter.fst",
    "content": "module Core_models.Array.Iter\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_IntoIter (v_T: Type0) (v_N: usize) =\n  | IntoIter : Rust_primitives.Sequence.t_Seq v_T -> t_IntoIter v_T v_N\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl (#v_T: Type0) (v_N: usize)\n    : Core_models.Iter.Traits.Iterator.t_Iterator (t_IntoIter v_T v_N) =\n  {\n    f_Item = v_T;\n    f_next_pre = (fun (self: t_IntoIter v_T v_N) -> true);\n    f_next_post\n    =\n    (fun (self: t_IntoIter v_T v_N) (out: (t_IntoIter v_T v_N & Core_models.Option.t_Option v_T)) ->\n        true);\n    f_next\n    =\n    fun (self: t_IntoIter v_T v_N) ->\n      let (self: t_IntoIter v_T v_N), (hax_temp_output: Core_models.Option.t_Option v_T) =\n        if (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) =. mk_usize 0\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option v_T)\n          <:\n          (t_IntoIter v_T v_N & Core_models.Option.t_Option v_T)\n        else\n          let res:v_T = Rust_primitives.Sequence.seq_first #v_T self._0 in\n          let self:t_IntoIter v_T v_N =\n            {\n              self with\n              _0\n              =\n              Rust_primitives.Sequence.seq_slice #v_T\n                self._0\n                (mk_usize 1)\n                (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize)\n            }\n            <:\n            t_IntoIter v_T v_N\n          in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option v_T)\n          <:\n          (t_IntoIter v_T v_N & Core_models.Option.t_Option v_T)\n      in\n      self, hax_temp_output <: (t_IntoIter v_T v_N & Core_models.Option.t_Option v_T)\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Array.fst",
    "content": "module Core_models.Array\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_TryFromSliceError = | TryFromSliceError : t_TryFromSliceError\n\nlet impl_23__map\n      (#v_T: Type0)\n      (v_N: usize)\n      (#v_F #v_U: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U})\n      (s: t_Array v_T v_N)\n      (f: (v_T -> v_U))\n    : t_Array v_U v_N = Rust_primitives.Slice.array_map #v_T #v_U v_N #(v_T -> v_U) s f\n\nlet impl_23__as_slice (#v_T: Type0) (v_N: usize) (s: t_Array v_T v_N) : t_Slice v_T =\n  Rust_primitives.Slice.array_as_slice #v_T v_N s\n\nlet from_fn\n      (#v_T: Type0)\n      (v_N: usize)\n      (#v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F usize)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == v_T})\n      (f: (usize -> v_T))\n    : t_Array v_T v_N = Rust_primitives.Slice.array_from_fn #v_T v_N #(usize -> v_T) f\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_24 (#v_T: Type0) (v_N: usize)\n    : Core_models.Iter.Traits.Collect.t_IntoIterator (t_Array v_T v_N) =\n  {\n    f_IntoIter = Core_models.Array.Iter.t_IntoIter v_T v_N;\n    f_into_iter_pre = (fun (self: t_Array v_T v_N) -> true);\n    f_into_iter_post\n    =\n    (fun (self: t_Array v_T v_N) (out: Core_models.Array.Iter.t_IntoIter v_T v_N) -> true);\n    f_into_iter\n    =\n    fun (self: t_Array v_T v_N) ->\n      Core_models.Array.Iter.IntoIter (Rust_primitives.Sequence.seq_from_array #v_T v_N self)\n      <:\n      Core_models.Array.Iter.t_IntoIter v_T v_N\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_25 (#v_T: Type0) (v_N: usize) : Core_models.Ops.Index.t_Index (t_Array v_T v_N) usize =\n  {\n    f_Output = v_T;\n    f_index_pre\n    =\n    (fun (self_: t_Array v_T v_N) (i: usize) ->\n        i <. 
(Core_models.Slice.impl__len #v_T (self_ <: t_Slice v_T) <: usize));\n    f_index_post = (fun (self: t_Array v_T v_N) (i: usize) (out: v_T) -> true);\n    f_index\n    =\n    fun (self: t_Array v_T v_N) (i: usize) -> Rust_primitives.Slice.array_index #v_T v_N self i\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_26 (#v_T: Type0) (v_N: usize)\n    : Core_models.Ops.Index.t_Index (t_Array v_T v_N) (Core_models.Ops.Range.t_Range usize) =\n  {\n    f_Output = t_Slice v_T;\n    f_index_pre\n    =\n    (fun (self_: t_Array v_T v_N) (i: Core_models.Ops.Range.t_Range usize) ->\n        i.Core_models.Ops.Range.f_start <=. i.Core_models.Ops.Range.f_end &&\n        i.Core_models.Ops.Range.f_end <=.\n        (Core_models.Slice.impl__len #v_T (self_ <: t_Slice v_T) <: usize));\n    f_index_post\n    =\n    (fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_Range usize) (out: t_Slice v_T) -> true\n    );\n    f_index\n    =\n    fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_Range usize) ->\n      Rust_primitives.Slice.array_slice #v_T\n        v_N\n        self\n        i.Core_models.Ops.Range.f_start\n        i.Core_models.Ops.Range.f_end\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_27 (#v_T: Type0) (v_N: usize)\n    : Core_models.Ops.Index.t_Index (t_Array v_T v_N) (Core_models.Ops.Range.t_RangeTo usize) =\n  {\n    f_Output = t_Slice v_T;\n    f_index_pre\n    =\n    (fun (self_: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeTo usize) ->\n        i.Core_models.Ops.Range.f_end <=.\n        (Core_models.Slice.impl__len #v_T (self_ <: t_Slice v_T) <: usize));\n    f_index_post\n    =\n    (fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeTo usize) (out: t_Slice v_T) ->\n        true);\n    f_index\n    =\n    fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeTo usize) ->\n      Rust_primitives.Slice.array_slice #v_T v_N self (mk_usize 0) i.Core_models.Ops.Range.f_end\n  }\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nlet impl_28 (#v_T: Type0) (v_N: usize)\n    : Core_models.Ops.Index.t_Index (t_Array v_T v_N) (Core_models.Ops.Range.t_RangeFrom usize) =\n  {\n    f_Output = t_Slice v_T;\n    f_index_pre\n    =\n    (fun (self_: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeFrom usize) ->\n        i.Core_models.Ops.Range.f_start <=.\n        (Core_models.Slice.impl__len #v_T (self_ <: t_Slice v_T) <: usize));\n    f_index_post\n    =\n    (fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeFrom usize) (out: t_Slice v_T) ->\n        true);\n    f_index\n    =\n    fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeFrom usize) ->\n      Rust_primitives.Slice.array_slice #v_T v_N self i.Core_models.Ops.Range.f_start v_N\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_29 (#v_T: Type0) (v_N: usize)\n    : Core_models.Ops.Index.t_Index (t_Array v_T v_N) Core_models.Ops.Range.t_RangeFull =\n  {\n    f_Output = t_Slice v_T;\n    f_index_pre = (fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeFull) -> true);\n    f_index_post\n    =\n    (fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeFull) (out: t_Slice v_T) -> true);\n    f_index\n    =\n    fun (self: t_Array v_T v_N) (i: Core_models.Ops.Range.t_RangeFull) ->\n      Rust_primitives.Slice.array_slice #v_T v_N self (mk_usize 0) v_N\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Borrow.fsti",
    "content": "module Core_models.Borrow\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Borrow (v_Self: Type0) (v_Borrowed: Type0) = {\n  f_borrow_pre:v_Self -> Type0;\n  f_borrow_post:v_Self -> v_Borrowed -> Type0;\n  f_borrow:x0: v_Self\n    -> Prims.Pure v_Borrowed (f_borrow_pre x0) (fun result -> f_borrow_post x0 result)\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Bundle.fst",
    "content": "module Core_models.Bundle\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_Option (v_T: Type0) =\n  | Option_Some : v_T -> t_Option v_T\n  | Option_None : t_Option v_T\n\nlet impl__is_some_and\n      (#v_T #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == bool})\n      (self: t_Option v_T)\n      (f: v_F)\n    : bool =\n  match self <: t_Option v_T with\n  | Option_None  -> false\n  | Option_Some x ->\n    Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f x\n\nlet impl__is_none_or\n      (#v_T #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == bool})\n      (self: t_Option v_T)\n      (f: v_F)\n    : bool =\n  match self <: t_Option v_T with\n  | Option_None  -> true\n  | Option_Some x ->\n    Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f x\n\nlet impl__as_ref (#v_T: Type0) (self: t_Option v_T) : t_Option v_T =\n  match self <: t_Option v_T with\n  | Option_Some x -> Option_Some x <: t_Option v_T\n  | Option_None  -> Option_None <: t_Option v_T\n\nlet impl__unwrap_or (#v_T: Type0) (self: t_Option v_T) (v_default: v_T) : v_T =\n  match self <: t_Option v_T with\n  | Option_Some x -> x\n  | Option_None  -> v_default\n\nlet impl__unwrap_or_else\n      (#v_T #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Core_models.Ops.Function.t_FnOnce v_F Prims.unit)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == v_T})\n      (self: t_Option v_T)\n      (f: v_F)\n    : v_T =\n  match self <: t_Option v_T with\n  | Option_Some x -> x\n  | Option_None  ->\n    Core_models.Ops.Function.f_call_once #v_F\n      #Prims.unit\n      #FStar.Tactics.Typeclasses.solve\n      f\n     
 (() <: Prims.unit)\n\nlet impl__unwrap_or_default\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Default.t_Default v_T)\n      (self: t_Option v_T)\n    : v_T =\n  match self <: t_Option v_T with\n  | Option_Some x -> x\n  | Option_None  -> Core_models.Default.f_default #v_T #FStar.Tactics.Typeclasses.solve ()\n\nlet impl__map\n      (#v_T #v_U #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U})\n      (self: t_Option v_T)\n      (f: v_F)\n    : t_Option v_U =\n  match self <: t_Option v_T with\n  | Option_Some x ->\n    Option_Some\n    (Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f x)\n    <:\n    t_Option v_U\n  | Option_None  -> Option_None <: t_Option v_U\n\nlet impl__map_or\n      (#v_T #v_U #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U})\n      (self: t_Option v_T)\n      (v_default: v_U)\n      (f: v_F)\n    : v_U =\n  match self <: t_Option v_T with\n  | Option_Some t ->\n    Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f t\n  | Option_None  -> v_default\n\nlet impl__map_or_else\n      (#v_T #v_U #v_D #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_FnOnce v_D Prims.unit)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U})\n      (#_: unit{i1.Core_models.Ops.Function.f_Output == v_U})\n      (self: t_Option v_T)\n      (v_default: v_D)\n      (f: v_F)\n    : v_U =\n  match self <: t_Option v_T with\n  | Option_Some t ->\n    Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f t\n  | Option_None  ->\n    
Core_models.Ops.Function.f_call_once #v_D\n      #Prims.unit\n      #FStar.Tactics.Typeclasses.solve\n      v_default\n      (() <: Prims.unit)\n\nlet impl__map_or_default\n      (#v_T #v_U #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Default.t_Default v_U)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U})\n      (self: t_Option v_T)\n      (f: v_F)\n    : v_U =\n  match self <: t_Option v_T with\n  | Option_Some t ->\n    Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f t\n  | Option_None  -> Core_models.Default.f_default #v_U #FStar.Tactics.Typeclasses.solve ()\n\nlet impl__and_then\n      (#v_T #v_U #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == t_Option v_U})\n      (self: t_Option v_T)\n      (f: v_F)\n    : t_Option v_U =\n  match self <: t_Option v_T with\n  | Option_Some x ->\n    Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f x\n  | Option_None  -> Option_None <: t_Option v_U\n\nlet impl__take (#v_T: Type0) (self: t_Option v_T) : (t_Option v_T & t_Option v_T) =\n  (Option_None <: t_Option v_T), self <: (t_Option v_T & t_Option v_T)\n\nlet impl__is_some (#v_T: Type0) (self: t_Option v_T)\n    : Prims.Pure bool\n      Prims.l_True\n      (ensures\n        fun res ->\n          let res:bool = res in\n          b2t res ==> Option_Some? self) =\n  match self <: t_Option v_T with\n  | Option_Some _ -> true\n  | _ -> false\n\nlet impl__is_none (#v_T: Type0) (self: t_Option v_T) : bool =\n  (impl__is_some #v_T self <: bool) =. 
false\n\nlet impl__expect (#v_T: Type0) (self: t_Option v_T) (e_msg: string)\n    : Prims.Pure v_T (requires impl__is_some #v_T self) (fun _ -> Prims.l_True) =\n  match self <: t_Option v_T with\n  | Option_Some v_val -> v_val\n  | Option_None  -> Core_models.Panicking.Internal.panic #v_T ()\n\nlet impl__unwrap (#v_T: Type0) (self: t_Option v_T)\n    : Prims.Pure v_T (requires impl__is_some #v_T self) (fun _ -> Prims.l_True) =\n  match self <: t_Option v_T with\n  | Option_Some v_val -> v_val\n  | Option_None  -> Core_models.Panicking.Internal.panic #v_T ()\n\ntype t_Result (v_T: Type0) (v_E: Type0) =\n  | Result_Ok : v_T -> t_Result v_T v_E\n  | Result_Err : v_E -> t_Result v_T v_E\n\nlet impl__ok_or (#v_T #v_E: Type0) (self: t_Option v_T) (err: v_E) : t_Result v_T v_E =\n  match self <: t_Option v_T with\n  | Option_Some v -> Result_Ok v <: t_Result v_T v_E\n  | Option_None  -> Result_Err err <: t_Result v_T v_E\n\nlet impl__ok_or_else\n      (#v_T #v_E #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Core_models.Ops.Function.t_FnOnce v_F Prims.unit)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == v_E})\n      (self: t_Option v_T)\n      (err: v_F)\n    : t_Result v_T v_E =\n  match self <: t_Option v_T with\n  | Option_Some v -> Result_Ok v <: t_Result v_T v_E\n  | Option_None  ->\n    Result_Err\n    (Core_models.Ops.Function.f_call_once #v_F\n        #Prims.unit\n        #FStar.Tactics.Typeclasses.solve\n        err\n        (() <: Prims.unit))\n    <:\n    t_Result v_T v_E\n\nlet impl__unwrap_or__from__result (#v_T #v_E: Type0) (self: t_Result v_T v_E) (v_default: v_T) : v_T =\n  match self <: t_Result v_T v_E with\n  | Result_Ok t -> t\n  | Result_Err _ -> v_default\n\nlet impl__map__from__result\n      (#v_T #v_E #v_U #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U})\n      (self: 
t_Result v_T v_E)\n      (op: v_F)\n    : t_Result v_U v_E =\n  match self <: t_Result v_T v_E with\n  | Result_Ok t ->\n    Result_Ok (Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve op t)\n    <:\n    t_Result v_U v_E\n  | Result_Err e -> Result_Err e <: t_Result v_U v_E\n\nlet impl__map_or__from__result\n      (#v_T #v_E #v_U #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U})\n      (self: t_Result v_T v_E)\n      (v_default: v_U)\n      (f: v_F)\n    : v_U =\n  match self <: t_Result v_T v_E with\n  | Result_Ok t ->\n    Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f t\n  | Result_Err e_e -> v_default\n\nlet impl__map_or_else__from__result\n      (#v_T #v_E #v_U #v_D #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_D v_E)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == v_U})\n      (#_: unit{i1.Core_models.Ops.Function.f_Output == v_U})\n      (self: t_Result v_T v_E)\n      (v_default: v_D)\n      (f: v_F)\n    : v_U =\n  match self <: t_Result v_T v_E with\n  | Result_Ok t ->\n    Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve f t\n  | Result_Err e ->\n    Core_models.Ops.Function.f_call_once #v_D #v_E #FStar.Tactics.Typeclasses.solve v_default e\n\nlet impl__map_err\n      (#v_T #v_E #v_F #v_O: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_O v_E)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == v_F})\n      (self: t_Result v_T v_E)\n      (op: v_O)\n    : t_Result v_T v_F =\n  match self <: t_Result v_T v_E with\n  | Result_Ok t -> Result_Ok t <: t_Result v_T v_F\n  | Result_Err e ->\n    Result_Err\n    
(Core_models.Ops.Function.f_call_once #v_O #v_E #FStar.Tactics.Typeclasses.solve op e)\n    <:\n    t_Result v_T v_F\n\nlet impl__is_ok (#v_T #v_E: Type0) (self: t_Result v_T v_E) : bool =\n  match self <: t_Result v_T v_E with\n  | Result_Ok _ -> true\n  | _ -> false\n\nlet impl__and_then__from__result\n      (#v_T #v_E #v_U #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnOnce v_F v_T)\n      (#_: unit{i0.Core_models.Ops.Function.f_Output == t_Result v_U v_E})\n      (self: t_Result v_T v_E)\n      (op: v_F)\n    : t_Result v_U v_E =\n  match self <: t_Result v_T v_E with\n  | Result_Ok t ->\n    Core_models.Ops.Function.f_call_once #v_F #v_T #FStar.Tactics.Typeclasses.solve op t\n  | Result_Err e -> Result_Err e <: t_Result v_U v_E\n\nlet impl__ok (#v_T #v_E: Type0) (self: t_Result v_T v_E) : t_Option v_T =\n  match self <: t_Result v_T v_E with\n  | Result_Ok x -> Option_Some x <: t_Option v_T\n  | Result_Err _ -> Option_None <: t_Option v_T\n\nlet impl__unwrap__from__result (#v_T #v_E: Type0) (self: t_Result v_T v_E)\n    : Prims.Pure v_T (requires impl__is_ok #v_T #v_E self) (fun _ -> Prims.l_True) =\n  match self <: t_Result v_T v_E with\n  | Result_Ok t -> t\n  | Result_Err _ -> Core_models.Panicking.Internal.panic #v_T ()\n\nlet impl__expect__from__result (#v_T #v_E: Type0) (self: t_Result v_T v_E) (e_msg: string)\n    : Prims.Pure v_T (requires impl__is_ok #v_T #v_E self) (fun _ -> Prims.l_True) =\n  match self <: t_Result v_T v_E with\n  | Result_Ok t -> t\n  | Result_Err _ -> Core_models.Panicking.Internal.panic #v_T ()\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Clone.fst",
    "content": "module Core_models.Clone\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Clone self = {\n  f_clone_pre: self -> Type0;\n  f_clone_post: self -> self -> Type0;\n  f_clone: x:self -> r:self {x == r}\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl (#v_T: Type0) : t_Clone v_T =\n  {\n    f_clone_pre = (fun (self: v_T) -> true);\n    f_clone_post = (fun (self: v_T) (out: v_T) -> true);\n    f_clone = fun (self: v_T) -> self\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Cmp.fst",
    "content": "module Core_models.Cmp\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_PartialEq (v_Self: Type0) (v_Rhs: Type0) = {\n  f_eq_pre:self_: v_Self -> other: v_Rhs -> pred: Type0{true ==> pred};\n  f_eq_post:v_Self -> v_Rhs -> bool -> Type0;\n  f_eq:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure bool (f_eq_pre x0 x1) (fun result -> f_eq_post x0 x1 result)\n}\n\nclass t_Eq (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_PartialEq v_Self v_Self\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) {|i: t_Eq v_Self|} -> i._super_i0\n\ntype t_Ordering =\n  | Ordering_Less : t_Ordering\n  | Ordering_Equal : t_Ordering\n  | Ordering_Greater : t_Ordering\n\nlet anon_const_Ordering_Less__anon_const_0: isize = mk_isize (-1)\n\nlet anon_const_Ordering_Equal__anon_const_0: isize = mk_isize 0\n\nlet anon_const_Ordering_Greater__anon_const_0: isize = mk_isize 1\n\nlet t_Ordering_cast_to_repr (x: t_Ordering) : isize =\n  match x <: t_Ordering with\n  | Ordering_Less  -> anon_const_Ordering_Less__anon_const_0\n  | Ordering_Equal  -> anon_const_Ordering_Equal__anon_const_0\n  | Ordering_Greater  -> anon_const_Ordering_Greater__anon_const_0\n\nclass t_PartialOrd (v_Self: Type0) (v_Rhs: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_PartialEq v_Self v_Rhs;\n  f_partial_cmp_pre:self_: v_Self -> other: v_Rhs -> pred: Type0{true ==> pred};\n  f_partial_cmp_post:v_Self -> v_Rhs -> Core_models.Option.t_Option t_Ordering -> Type0;\n  f_partial_cmp:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure (Core_models.Option.t_Option t_Ordering)\n        (f_partial_cmp_pre x0 x1)\n        (fun result -> f_partial_cmp_post x0 x1 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) (v_Rhs:Type0) {|i: t_PartialOrd v_Self v_Rhs|} -> i._super_i0\n\nclass t_Neq (v_Self: Type0) (v_Rhs: Type0) = {\n  f_neq_pre:self_: v_Self -> y: v_Rhs -> pred: Type0{true 
==> pred};\n  f_neq_post:v_Self -> v_Rhs -> bool -> Type0;\n  f_neq:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure bool (f_neq_pre x0 x1) (fun result -> f_neq_post x0 x1 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_PartialEq v_T v_T)\n    : t_Neq v_T v_T =\n  {\n    f_neq_pre = (fun (self: v_T) (y: v_T) -> true);\n    f_neq_post = (fun (self: v_T) (y: v_T) (out: bool) -> true);\n    f_neq\n    =\n    fun (self: v_T) (y: v_T) ->\n      (f_eq #v_T #v_T #FStar.Tactics.Typeclasses.solve self y <: bool) =. false\n  }\n\nclass t_PartialOrdDefaults (v_Self: Type0) (v_Rhs: Type0) = {\n  f_lt_pre:{| i1: t_PartialOrd v_Self v_Rhs |} -> self_: v_Self -> y: v_Rhs\n    -> pred: Type0{true ==> pred};\n  f_lt_post:{| i1: t_PartialOrd v_Self v_Rhs |} -> v_Self -> v_Rhs -> bool -> Type0;\n  f_lt:{| i1: t_PartialOrd v_Self v_Rhs |} -> x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure bool (f_lt_pre #i1 x0 x1) (fun result -> f_lt_post #i1 x0 x1 result);\n  f_le_pre:{| i1: t_PartialOrd v_Self v_Rhs |} -> self_: v_Self -> y: v_Rhs\n    -> pred: Type0{true ==> pred};\n  f_le_post:{| i1: t_PartialOrd v_Self v_Rhs |} -> v_Self -> v_Rhs -> bool -> Type0;\n  f_le:{| i1: t_PartialOrd v_Self v_Rhs |} -> x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure bool (f_le_pre #i1 x0 x1) (fun result -> f_le_post #i1 x0 x1 result);\n  f_gt_pre:{| i1: t_PartialOrd v_Self v_Rhs |} -> self_: v_Self -> y: v_Rhs\n    -> pred: Type0{true ==> pred};\n  f_gt_post:{| i1: t_PartialOrd v_Self v_Rhs |} -> v_Self -> v_Rhs -> bool -> Type0;\n  f_gt:{| i1: t_PartialOrd v_Self v_Rhs |} -> x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure bool (f_gt_pre #i1 x0 x1) (fun result -> f_gt_post #i1 x0 x1 result);\n  f_ge_pre:{| i1: t_PartialOrd v_Self v_Rhs |} -> self_: v_Self -> y: v_Rhs\n    -> pred: Type0{true ==> pred};\n  f_ge_post:{| i1: t_PartialOrd v_Self v_Rhs |} -> v_Self -> v_Rhs -> bool -> Type0;\n  f_ge:{| i1: t_PartialOrd v_Self v_Rhs |} -> x0: v_Self -> 
x1: v_Rhs\n    -> Prims.Pure bool (f_ge_pre #i1 x0 x1) (fun result -> f_ge_post #i1 x0 x1 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_PartialOrd v_T v_T)\n    : t_PartialOrdDefaults v_T v_T =\n  {\n    f_lt_pre\n    =\n    (fun\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T)\n        (self: v_T)\n        (y: v_T)\n        ->\n        true);\n    f_lt_post\n    =\n    (fun\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T)\n        (self: v_T)\n        (y: v_T)\n        (out: bool)\n        ->\n        true);\n    f_lt\n    =\n    (fun\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T)\n        (self: v_T)\n        (y: v_T)\n        ->\n        match\n          f_partial_cmp #v_T #v_T #FStar.Tactics.Typeclasses.solve self y\n          <:\n          Core_models.Option.t_Option t_Ordering\n        with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> true\n        | _ -> false);\n    f_le_pre\n    =\n    (fun\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T)\n        (self: v_T)\n        (y: v_T)\n        ->\n        true);\n    f_le_post\n    =\n    (fun\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T)\n        (self: v_T)\n        (y: v_T)\n        (out: bool)\n        ->\n        true);\n    f_le\n    =\n    (fun\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T)\n        (self: v_T)\n        (y: v_T)\n        ->\n        match\n          f_partial_cmp #v_T #v_T #FStar.Tactics.Typeclasses.solve self y\n          <:\n          Core_models.Option.t_Option t_Ordering\n        with\n        | Core_models.Option.Option_Some (Ordering_Less )\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> true\n        | _ -> false);\n    f_gt_pre\n    =\n    (fun\n        
(#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T)\n        (self: v_T)\n        (y: v_T)\n        ->\n        true);\n    f_gt_post\n    =\n    (fun\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T)\n        (self: v_T)\n        (y: v_T)\n        (out: bool)\n        ->\n        true);\n    f_gt\n    =\n    (fun\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T)\n        (self: v_T)\n        (y: v_T)\n        ->\n        match\n          f_partial_cmp #v_T #v_T #FStar.Tactics.Typeclasses.solve self y\n          <:\n          Core_models.Option.t_Option t_Ordering\n        with\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> true\n        | _ -> false);\n    f_ge_pre\n    =\n    (fun\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T)\n        (self: v_T)\n        (y: v_T)\n        ->\n        true);\n    f_ge_post\n    =\n    (fun\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T)\n        (self: v_T)\n        (y: v_T)\n        (out: bool)\n        ->\n        true);\n    f_ge\n    =\n    fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_PartialOrd v_T v_T) (self: v_T) (y: v_T) ->\n      match\n        f_partial_cmp #v_T #v_T #FStar.Tactics.Typeclasses.solve self y\n        <:\n        Core_models.Option.t_Option t_Ordering\n      with\n      | Core_models.Option.Option_Some (Ordering_Greater )\n      | Core_models.Option.Option_Some (Ordering_Equal ) -> true\n      | _ -> false\n  }\n\nclass t_Ord (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_Eq v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i1:t_PartialOrd v_Self v_Self;\n  f_cmp_pre:self_: v_Self -> other: v_Self -> pred: Type0{true ==> pred};\n  f_cmp_post:v_Self -> v_Self -> t_Ordering -> Type0;\n  f_cmp:x0: v_Self -> x1: v_Self\n    -> Prims.Pure t_Ordering (f_cmp_pre x0 x1) (fun result -> f_cmp_post x0 x1 result)\n}\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) {|i: t_Ord v_Self|} -> i._super_i0\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) {|i: t_Ord v_Self|} -> i._super_i1\n\nlet max (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Ord v_T) (v1 v2: v_T) : v_T =\n  match f_cmp #v_T #FStar.Tactics.Typeclasses.solve v1 v2 <: t_Ordering with\n  | Ordering_Greater  -> v1\n  | _ -> v2\n\nlet min (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Ord v_T) (v1 v2: v_T) : v_T =\n  match f_cmp #v_T #FStar.Tactics.Typeclasses.solve v1 v2 <: t_Ordering with\n  | Ordering_Greater  -> v2\n  | _ -> v1\n\ntype t_Reverse (v_T: Type0) = | Reverse : v_T -> t_Reverse v_T\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_3 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_PartialEq v_T v_T)\n    : t_PartialEq (t_Reverse v_T) (t_Reverse v_T) =\n  {\n    f_eq_pre = (fun (self: t_Reverse v_T) (other: t_Reverse v_T) -> true);\n    f_eq_post = (fun (self: t_Reverse v_T) (other: t_Reverse v_T) (out: bool) -> true);\n    f_eq\n    =\n    fun (self: t_Reverse v_T) (other: t_Reverse v_T) ->\n      f_eq #v_T #v_T #FStar.Tactics.Typeclasses.solve other._0 self._0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_2 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_PartialOrd v_T v_T)\n    : t_PartialOrd (t_Reverse v_T) (t_Reverse v_T) =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: t_Reverse v_T) (other: t_Reverse v_T) -> true);\n    f_partial_cmp_post\n    =\n    (fun\n        (self: t_Reverse v_T)\n        (other: t_Reverse v_T)\n        (out: Core_models.Option.t_Option t_Ordering)\n        ->\n        true);\n    f_partial_cmp\n    =\n    fun (self: t_Reverse v_T) (other: t_Reverse v_T) ->\n      f_partial_cmp #v_T #v_T #FStar.Tactics.Typeclasses.solve other._0 self._0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_4 (#v_T: Type0) 
(#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Eq v_T)\n    : t_Eq (t_Reverse v_T) = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_5 (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Ord v_T)\n    : t_Ord (t_Reverse v_T) =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: t_Reverse v_T) (other: t_Reverse v_T) -> true);\n    f_cmp_post = (fun (self: t_Reverse v_T) (other: t_Reverse v_T) (out: t_Ordering) -> true);\n    f_cmp\n    =\n    fun (self: t_Reverse v_T) (other: t_Reverse v_T) ->\n      f_cmp #v_T #FStar.Tactics.Typeclasses.solve other._0 self._0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_6: t_PartialEq u8 u8 =\n  {\n    f_eq_pre = (fun (self: u8) (other: u8) -> true);\n    f_eq_post = (fun (self: u8) (other: u8) (out: bool) -> true);\n    f_eq = fun (self: u8) (other: u8) -> self =. other\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_30: t_PartialOrd u8 u8 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: u8) (other: u8) -> true);\n    f_partial_cmp_post\n    =\n    (fun (self_: u8) (other: u8) (res: Core_models.Option.t_Option t_Ordering) ->\n        match res <: Core_models.Option.t_Option t_Ordering with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other\n        | Core_models.Option.Option_None  -> false);\n    f_partial_cmp\n    =\n    fun (self: u8) (other: u8) ->\n      if self <. other\n      then\n        Core_models.Option.Option_Some (Ordering_Less <: t_Ordering)\n        <:\n        Core_models.Option.t_Option t_Ordering\n      else\n        if self >. 
other\n        then\n          Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n        else\n          Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Eq_for_u8: t_Eq u8 = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Ord_for_u8: t_Ord u8 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: u8) (other: u8) -> true);\n    f_cmp_post\n    =\n    (fun (self_: u8) (other: u8) (res: t_Ordering) ->\n        match res <: t_Ordering with\n        | Ordering_Less  -> self_ <. other\n        | Ordering_Equal  -> self_ =. other\n        | Ordering_Greater  -> self_ >. other);\n    f_cmp\n    =\n    fun (self: u8) (other: u8) ->\n      if self <. other\n      then Ordering_Less <: t_Ordering\n      else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_8: t_PartialEq i8 i8 =\n  {\n    f_eq_pre = (fun (self: i8) (other: i8) -> true);\n    f_eq_post = (fun (self: i8) (other: i8) (out: bool) -> true);\n    f_eq = fun (self: i8) (other: i8) -> self =. other\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_32: t_PartialOrd i8 i8 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: i8) (other: i8) -> true);\n    f_partial_cmp_post\n    =\n    (fun (self_: i8) (other: i8) (res: Core_models.Option.t_Option t_Ordering) ->\n        match res <: Core_models.Option.t_Option t_Ordering with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. 
other\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other\n        | Core_models.Option.Option_None  -> false);\n    f_partial_cmp\n    =\n    fun (self: i8) (other: i8) ->\n      if self <. other\n      then\n        Core_models.Option.Option_Some (Ordering_Less <: t_Ordering)\n        <:\n        Core_models.Option.t_Option t_Ordering\n      else\n        if self >. other\n        then\n          Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n        else\n          Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Eq_for_i8: t_Eq i8 = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Ord_for_i8: t_Ord i8 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: i8) (other: i8) -> true);\n    f_cmp_post\n    =\n    (fun (self_: i8) (other: i8) (res: t_Ordering) ->\n        match res <: t_Ordering with\n        | Ordering_Less  -> self_ <. other\n        | Ordering_Equal  -> self_ =. other\n        | Ordering_Greater  -> self_ >. other);\n    f_cmp\n    =\n    fun (self: i8) (other: i8) ->\n      if self <. other\n      then Ordering_Less <: t_Ordering\n      else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_10: t_PartialEq u16 u16 =\n  {\n    f_eq_pre = (fun (self: u16) (other: u16) -> true);\n    f_eq_post = (fun (self: u16) (other: u16) (out: bool) -> true);\n    f_eq = fun (self: u16) (other: u16) -> self =. 
other\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_34: t_PartialOrd u16 u16 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: u16) (other: u16) -> true);\n    f_partial_cmp_post\n    =\n    (fun (self_: u16) (other: u16) (res: Core_models.Option.t_Option t_Ordering) ->\n        match res <: Core_models.Option.t_Option t_Ordering with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other\n        | Core_models.Option.Option_None  -> false);\n    f_partial_cmp\n    =\n    fun (self: u16) (other: u16) ->\n      if self <. other\n      then\n        Core_models.Option.Option_Some (Ordering_Less <: t_Ordering)\n        <:\n        Core_models.Option.t_Option t_Ordering\n      else\n        if self >. other\n        then\n          Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n        else\n          Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Eq_for_u16: t_Eq u16 = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Ord_for_u16: t_Ord u16 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: u16) (other: u16) -> true);\n    f_cmp_post\n    =\n    (fun (self_: u16) (other: u16) (res: t_Ordering) ->\n        match res <: t_Ordering with\n        | Ordering_Less  -> self_ <. other\n        | Ordering_Equal  -> self_ =. other\n        | Ordering_Greater  -> self_ >. other);\n    f_cmp\n    =\n    fun (self: u16) (other: u16) ->\n      if self <. 
other\n      then Ordering_Less <: t_Ordering\n      else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_12: t_PartialEq i16 i16 =\n  {\n    f_eq_pre = (fun (self: i16) (other: i16) -> true);\n    f_eq_post = (fun (self: i16) (other: i16) (out: bool) -> true);\n    f_eq = fun (self: i16) (other: i16) -> self =. other\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_36: t_PartialOrd i16 i16 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: i16) (other: i16) -> true);\n    f_partial_cmp_post\n    =\n    (fun (self_: i16) (other: i16) (res: Core_models.Option.t_Option t_Ordering) ->\n        match res <: Core_models.Option.t_Option t_Ordering with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other\n        | Core_models.Option.Option_None  -> false);\n    f_partial_cmp\n    =\n    fun (self: i16) (other: i16) ->\n      if self <. other\n      then\n        Core_models.Option.Option_Some (Ordering_Less <: t_Ordering)\n        <:\n        Core_models.Option.t_Option t_Ordering\n      else\n        if self >. 
other\n        then\n          Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n        else\n          Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Eq_for_i16: t_Eq i16 = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Ord_for_i16: t_Ord i16 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: i16) (other: i16) -> true);\n    f_cmp_post\n    =\n    (fun (self_: i16) (other: i16) (res: t_Ordering) ->\n        match res <: t_Ordering with\n        | Ordering_Less  -> self_ <. other\n        | Ordering_Equal  -> self_ =. other\n        | Ordering_Greater  -> self_ >. other);\n    f_cmp\n    =\n    fun (self: i16) (other: i16) ->\n      if self <. other\n      then Ordering_Less <: t_Ordering\n      else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_14: t_PartialEq u32 u32 =\n  {\n    f_eq_pre = (fun (self: u32) (other: u32) -> true);\n    f_eq_post = (fun (self: u32) (other: u32) (out: bool) -> true);\n    f_eq = fun (self: u32) (other: u32) -> self =. other\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_38: t_PartialOrd u32 u32 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: u32) (other: u32) -> true);\n    f_partial_cmp_post\n    =\n    (fun (self_: u32) (other: u32) (res: Core_models.Option.t_Option t_Ordering) ->\n        match res <: Core_models.Option.t_Option t_Ordering with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. 
other\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other\n        | Core_models.Option.Option_None  -> false);\n    f_partial_cmp\n    =\n    fun (self: u32) (other: u32) ->\n      if self <. other\n      then\n        Core_models.Option.Option_Some (Ordering_Less <: t_Ordering)\n        <:\n        Core_models.Option.t_Option t_Ordering\n      else\n        if self >. other\n        then\n          Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n        else\n          Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Eq_for_u32: t_Eq u32 = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Ord_for_u32: t_Ord u32 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: u32) (other: u32) -> true);\n    f_cmp_post\n    =\n    (fun (self_: u32) (other: u32) (res: t_Ordering) ->\n        match res <: t_Ordering with\n        | Ordering_Less  -> self_ <. other\n        | Ordering_Equal  -> self_ =. other\n        | Ordering_Greater  -> self_ >. other);\n    f_cmp\n    =\n    fun (self: u32) (other: u32) ->\n      if self <. other\n      then Ordering_Less <: t_Ordering\n      else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_16: t_PartialEq i32 i32 =\n  {\n    f_eq_pre = (fun (self: i32) (other: i32) -> true);\n    f_eq_post = (fun (self: i32) (other: i32) (out: bool) -> true);\n    f_eq = fun (self: i32) (other: i32) -> self =. 
other\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_40: t_PartialOrd i32 i32 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: i32) (other: i32) -> true);\n    f_partial_cmp_post\n    =\n    (fun (self_: i32) (other: i32) (res: Core_models.Option.t_Option t_Ordering) ->\n        match res <: Core_models.Option.t_Option t_Ordering with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other\n        | Core_models.Option.Option_None  -> false);\n    f_partial_cmp\n    =\n    fun (self: i32) (other: i32) ->\n      if self <. other\n      then\n        Core_models.Option.Option_Some (Ordering_Less <: t_Ordering)\n        <:\n        Core_models.Option.t_Option t_Ordering\n      else\n        if self >. other\n        then\n          Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n        else\n          Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Eq_for_i32: t_Eq i32 = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Ord_for_i32: t_Ord i32 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: i32) (other: i32) -> true);\n    f_cmp_post\n    =\n    (fun (self_: i32) (other: i32) (res: t_Ordering) ->\n        match res <: t_Ordering with\n        | Ordering_Less  -> self_ <. other\n        | Ordering_Equal  -> self_ =. other\n        | Ordering_Greater  -> self_ >. other);\n    f_cmp\n    =\n    fun (self: i32) (other: i32) ->\n      if self <. 
other\n      then Ordering_Less <: t_Ordering\n      else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_18: t_PartialEq u64 u64 =\n  {\n    f_eq_pre = (fun (self: u64) (other: u64) -> true);\n    f_eq_post = (fun (self: u64) (other: u64) (out: bool) -> true);\n    f_eq = fun (self: u64) (other: u64) -> self =. other\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_42: t_PartialOrd u64 u64 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: u64) (other: u64) -> true);\n    f_partial_cmp_post\n    =\n    (fun (self_: u64) (other: u64) (res: Core_models.Option.t_Option t_Ordering) ->\n        match res <: Core_models.Option.t_Option t_Ordering with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other\n        | Core_models.Option.Option_None  -> false);\n    f_partial_cmp\n    =\n    fun (self: u64) (other: u64) ->\n      if self <. other\n      then\n        Core_models.Option.Option_Some (Ordering_Less <: t_Ordering)\n        <:\n        Core_models.Option.t_Option t_Ordering\n      else\n        if self >. 
other\n        then\n          Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n        else\n          Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Eq_for_u64: t_Eq u64 = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Ord_for_u64: t_Ord u64 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: u64) (other: u64) -> true);\n    f_cmp_post\n    =\n    (fun (self_: u64) (other: u64) (res: t_Ordering) ->\n        match res <: t_Ordering with\n        | Ordering_Less  -> self_ <. other\n        | Ordering_Equal  -> self_ =. other\n        | Ordering_Greater  -> self_ >. other);\n    f_cmp\n    =\n    fun (self: u64) (other: u64) ->\n      if self <. other\n      then Ordering_Less <: t_Ordering\n      else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_20: t_PartialEq i64 i64 =\n  {\n    f_eq_pre = (fun (self: i64) (other: i64) -> true);\n    f_eq_post = (fun (self: i64) (other: i64) (out: bool) -> true);\n    f_eq = fun (self: i64) (other: i64) -> self =. other\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_44: t_PartialOrd i64 i64 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: i64) (other: i64) -> true);\n    f_partial_cmp_post\n    =\n    (fun (self_: i64) (other: i64) (res: Core_models.Option.t_Option t_Ordering) ->\n        match res <: Core_models.Option.t_Option t_Ordering with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. 
other\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other\n        | Core_models.Option.Option_None  -> false);\n    f_partial_cmp\n    =\n    fun (self: i64) (other: i64) ->\n      if self <. other\n      then\n        Core_models.Option.Option_Some (Ordering_Less <: t_Ordering)\n        <:\n        Core_models.Option.t_Option t_Ordering\n      else\n        if self >. other\n        then\n          Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n        else\n          Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Eq_for_i64: t_Eq i64 = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Ord_for_i64: t_Ord i64 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: i64) (other: i64) -> true);\n    f_cmp_post\n    =\n    (fun (self_: i64) (other: i64) (res: t_Ordering) ->\n        match res <: t_Ordering with\n        | Ordering_Less  -> self_ <. other\n        | Ordering_Equal  -> self_ =. other\n        | Ordering_Greater  -> self_ >. other);\n    f_cmp\n    =\n    fun (self: i64) (other: i64) ->\n      if self <. other\n      then Ordering_Less <: t_Ordering\n      else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_22: t_PartialEq u128 u128 =\n  {\n    f_eq_pre = (fun (self: u128) (other: u128) -> true);\n    f_eq_post = (fun (self: u128) (other: u128) (out: bool) -> true);\n    f_eq = fun (self: u128) (other: u128) -> self =. 
other\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_46: t_PartialOrd u128 u128 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: u128) (other: u128) -> true);\n    f_partial_cmp_post\n    =\n    (fun (self_: u128) (other: u128) (res: Core_models.Option.t_Option t_Ordering) ->\n        match res <: Core_models.Option.t_Option t_Ordering with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other\n        | Core_models.Option.Option_None  -> false);\n    f_partial_cmp\n    =\n    fun (self: u128) (other: u128) ->\n      if self <. other\n      then\n        Core_models.Option.Option_Some (Ordering_Less <: t_Ordering)\n        <:\n        Core_models.Option.t_Option t_Ordering\n      else\n        if self >. other\n        then\n          Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n        else\n          Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Eq_for_u128: t_Eq u128 = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Ord_for_u128: t_Ord u128 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: u128) (other: u128) -> true);\n    f_cmp_post\n    =\n    (fun (self_: u128) (other: u128) (res: t_Ordering) ->\n        match res <: t_Ordering with\n        | Ordering_Less  -> self_ <. other\n        | Ordering_Equal  -> self_ =. other\n        | Ordering_Greater  -> self_ >. other);\n    f_cmp\n    =\n    fun (self: u128) (other: u128) ->\n      if self <. 
other\n      then Ordering_Less <: t_Ordering\n      else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_24: t_PartialEq i128 i128 =\n  {\n    f_eq_pre = (fun (self: i128) (other: i128) -> true);\n    f_eq_post = (fun (self: i128) (other: i128) (out: bool) -> true);\n    f_eq = fun (self: i128) (other: i128) -> self =. other\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_48: t_PartialOrd i128 i128 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: i128) (other: i128) -> true);\n    f_partial_cmp_post\n    =\n    (fun (self_: i128) (other: i128) (res: Core_models.Option.t_Option t_Ordering) ->\n        match res <: Core_models.Option.t_Option t_Ordering with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other\n        | Core_models.Option.Option_None  -> false);\n    f_partial_cmp\n    =\n    fun (self: i128) (other: i128) ->\n      if self <. other\n      then\n        Core_models.Option.Option_Some (Ordering_Less <: t_Ordering)\n        <:\n        Core_models.Option.t_Option t_Ordering\n      else\n        if self >. 
other\n        then\n          Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n        else\n          Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Eq_for_i128: t_Eq i128 = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Ord_for_i128: t_Ord i128 =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: i128) (other: i128) -> true);\n    f_cmp_post\n    =\n    (fun (self_: i128) (other: i128) (res: t_Ordering) ->\n        match res <: t_Ordering with\n        | Ordering_Less  -> self_ <. other\n        | Ordering_Equal  -> self_ =. other\n        | Ordering_Greater  -> self_ >. other);\n    f_cmp\n    =\n    fun (self: i128) (other: i128) ->\n      if self <. other\n      then Ordering_Less <: t_Ordering\n      else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_26: t_PartialEq usize usize =\n  {\n    f_eq_pre = (fun (self: usize) (other: usize) -> true);\n    f_eq_post = (fun (self: usize) (other: usize) (out: bool) -> true);\n    f_eq = fun (self: usize) (other: usize) -> self =. other\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_50: t_PartialOrd usize usize =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: usize) (other: usize) -> true);\n    f_partial_cmp_post\n    =\n    (fun (self_: usize) (other: usize) (res: Core_models.Option.t_Option t_Ordering) ->\n        match res <: Core_models.Option.t_Option t_Ordering with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. 
other\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other\n        | Core_models.Option.Option_None  -> false);\n    f_partial_cmp\n    =\n    fun (self: usize) (other: usize) ->\n      if self <. other\n      then\n        Core_models.Option.Option_Some (Ordering_Less <: t_Ordering)\n        <:\n        Core_models.Option.t_Option t_Ordering\n      else\n        if self >. other\n        then\n          Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n        else\n          Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Eq_for_usize: t_Eq usize = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Ord_for_usize: t_Ord usize =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: usize) (other: usize) -> true);\n    f_cmp_post\n    =\n    (fun (self_: usize) (other: usize) (res: t_Ordering) ->\n        match res <: t_Ordering with\n        | Ordering_Less  -> self_ <. other\n        | Ordering_Equal  -> self_ =. other\n        | Ordering_Greater  -> self_ >. other);\n    f_cmp\n    =\n    fun (self: usize) (other: usize) ->\n      if self <. other\n      then Ordering_Less <: t_Ordering\n      else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_28: t_PartialEq isize isize =\n  {\n    f_eq_pre = (fun (self: isize) (other: isize) -> true);\n    f_eq_post = (fun (self: isize) (other: isize) (out: bool) -> true);\n    f_eq = fun (self: isize) (other: isize) -> self =. 
other\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_52: t_PartialOrd isize isize =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: isize) (other: isize) -> true);\n    f_partial_cmp_post\n    =\n    (fun (self_: isize) (other: isize) (res: Core_models.Option.t_Option t_Ordering) ->\n        match res <: Core_models.Option.t_Option t_Ordering with\n        | Core_models.Option.Option_Some (Ordering_Less ) -> self_ <. other\n        | Core_models.Option.Option_Some (Ordering_Equal ) -> self_ =. other\n        | Core_models.Option.Option_Some (Ordering_Greater ) -> self_ >. other\n        | Core_models.Option.Option_None  -> false);\n    f_partial_cmp\n    =\n    fun (self: isize) (other: isize) ->\n      if self <. other\n      then\n        Core_models.Option.Option_Some (Ordering_Less <: t_Ordering)\n        <:\n        Core_models.Option.t_Option t_Ordering\n      else\n        if self >. other\n        then\n          Core_models.Option.Option_Some (Ordering_Greater <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n        else\n          Core_models.Option.Option_Some (Ordering_Equal <: t_Ordering)\n          <:\n          Core_models.Option.t_Option t_Ordering\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Eq_for_isize: t_Eq isize = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Ord_for_isize: t_Ord isize =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: isize) (other: isize) -> true);\n    f_cmp_post\n    =\n    (fun (self_: isize) (other: isize) (res: t_Ordering) ->\n        match res <: t_Ordering with\n        | Ordering_Less  -> self_ <. other\n        | Ordering_Equal  -> self_ =. other\n        | Ordering_Greater  -> self_ >. other);\n    f_cmp\n    =\n    fun (self: isize) (other: isize) ->\n      if self <. 
other\n      then Ordering_Less <: t_Ordering\n      else if self >. other then Ordering_Greater <: t_Ordering else Ordering_Equal <: t_Ordering\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Convert.fst",
    "content": "module Core_models.Convert\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Into (v_Self: Type0) (v_T: Type0) = {\n  f_into_pre:self_: v_Self -> pred: Type0{true ==> pred};\n  f_into_post:v_Self -> v_T -> Type0;\n  f_into:x0: v_Self -> Prims.Pure v_T (f_into_pre x0) (fun result -> f_into_post x0 result)\n}\n\nclass t_From (v_Self: Type0) (v_T: Type0) = {\n  f_from_pre:x: v_T -> pred: Type0{true ==> pred};\n  f_from_post:v_T -> v_Self -> Type0;\n  f_from:x0: v_T -> Prims.Pure v_Self (f_from_pre x0) (fun result -> f_from_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl (#v_T #v_U: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_From v_U v_T)\n    : t_Into v_T v_U =\n  {\n    f_into_pre = (fun (self: v_T) -> true);\n    f_into_post = (fun (self: v_T) (out: v_U) -> true);\n    f_into = fun (self: v_T) -> f_from #v_U #v_T #FStar.Tactics.Typeclasses.solve self\n  }\n\ntype t_Infallible = | Infallible : t_Infallible\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_4 (#v_T: Type0) : t_From v_T v_T =\n  {\n    f_from_pre = (fun (x: v_T) -> true);\n    f_from_post = (fun (x: v_T) (out: v_T) -> true);\n    f_from = fun (x: v_T) -> x\n  }\n\nclass t_AsRef (v_Self: Type0) (v_T: Type0) = {\n  f_as_ref_pre:self_: v_Self -> pred: Type0{true ==> pred};\n  f_as_ref_post:v_Self -> v_T -> Type0;\n  f_as_ref:x0: v_Self -> Prims.Pure v_T (f_as_ref_pre x0) (fun result -> f_as_ref_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_5 (#v_T: Type0) : t_AsRef v_T v_T =\n  {\n    f_as_ref_pre = (fun (self: v_T) -> true);\n    f_as_ref_post = (fun (self: v_T) (out: v_T) -> true);\n    f_as_ref = fun (self: v_T) -> self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_6: t_From u16 u8 =\n  {\n    f_from_pre = (fun (x: u8) -> true);\n    f_from_post = (fun (x: u8) (out: u16) -> true);\n    f_from = fun (x: u8) -> cast (x <: u8) <: u16\n  }\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nlet impl_7: t_From u32 u8 =\n  {\n    f_from_pre = (fun (x: u8) -> true);\n    f_from_post = (fun (x: u8) (out: u32) -> true);\n    f_from = fun (x: u8) -> cast (x <: u8) <: u32\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_8: t_From u32 u16 =\n  {\n    f_from_pre = (fun (x: u16) -> true);\n    f_from_post = (fun (x: u16) (out: u32) -> true);\n    f_from = fun (x: u16) -> cast (x <: u16) <: u32\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_9: t_From u64 u8 =\n  {\n    f_from_pre = (fun (x: u8) -> true);\n    f_from_post = (fun (x: u8) (out: u64) -> true);\n    f_from = fun (x: u8) -> cast (x <: u8) <: u64\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_10: t_From u64 u16 =\n  {\n    f_from_pre = (fun (x: u16) -> true);\n    f_from_post = (fun (x: u16) (out: u64) -> true);\n    f_from = fun (x: u16) -> cast (x <: u16) <: u64\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_11: t_From u64 u32 =\n  {\n    f_from_pre = (fun (x: u32) -> true);\n    f_from_post = (fun (x: u32) (out: u64) -> true);\n    f_from = fun (x: u32) -> cast (x <: u32) <: u64\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_12: t_From u128 u8 =\n  {\n    f_from_pre = (fun (x: u8) -> true);\n    f_from_post = (fun (x: u8) (out: u128) -> true);\n    f_from = fun (x: u8) -> cast (x <: u8) <: u128\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_13: t_From u128 u16 =\n  {\n    f_from_pre = (fun (x: u16) -> true);\n    f_from_post = (fun (x: u16) (out: u128) -> true);\n    f_from = fun (x: u16) -> cast (x <: u16) <: u128\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_14: t_From u128 u32 =\n  {\n    f_from_pre = (fun (x: u32) -> true);\n    f_from_post = (fun (x: u32) (out: u128) -> true);\n    f_from = fun (x: u32) -> cast (x <: u32) <: u128\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_15: t_From u128 u64 =\n  {\n    f_from_pre = (fun (x: u64) -> true);\n    
f_from_post = (fun (x: u64) (out: u128) -> true);\n    f_from = fun (x: u64) -> cast (x <: u64) <: u128\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_16: t_From u128 usize =\n  {\n    f_from_pre = (fun (x: usize) -> true);\n    f_from_post = (fun (x: usize) (out: u128) -> true);\n    f_from = fun (x: usize) -> cast (x <: usize) <: u128\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_17: t_From usize u8 =\n  {\n    f_from_pre = (fun (x: u8) -> true);\n    f_from_post = (fun (x: u8) (out: usize) -> true);\n    f_from = fun (x: u8) -> cast (x <: u8) <: usize\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_18: t_From usize u16 =\n  {\n    f_from_pre = (fun (x: u16) -> true);\n    f_from_post = (fun (x: u16) (out: usize) -> true);\n    f_from = fun (x: u16) -> cast (x <: u16) <: usize\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_19: t_From i16 i8 =\n  {\n    f_from_pre = (fun (x: i8) -> true);\n    f_from_post = (fun (x: i8) (out: i16) -> true);\n    f_from = fun (x: i8) -> cast (x <: i8) <: i16\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_20: t_From i32 i8 =\n  {\n    f_from_pre = (fun (x: i8) -> true);\n    f_from_post = (fun (x: i8) (out: i32) -> true);\n    f_from = fun (x: i8) -> cast (x <: i8) <: i32\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_21: t_From i32 i16 =\n  {\n    f_from_pre = (fun (x: i16) -> true);\n    f_from_post = (fun (x: i16) (out: i32) -> true);\n    f_from = fun (x: i16) -> cast (x <: i16) <: i32\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_22: t_From i64 i8 =\n  {\n    f_from_pre = (fun (x: i8) -> true);\n    f_from_post = (fun (x: i8) (out: i64) -> true);\n    f_from = fun (x: i8) -> cast (x <: i8) <: i64\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_23: t_From i64 i16 =\n  {\n    f_from_pre = (fun (x: i16) -> true);\n    f_from_post = (fun (x: i16) (out: i64) -> true);\n    f_from = fun (x: i16) -> cast (x <: i16) <: i64\n  
}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_24: t_From i64 i32 =\n  {\n    f_from_pre = (fun (x: i32) -> true);\n    f_from_post = (fun (x: i32) (out: i64) -> true);\n    f_from = fun (x: i32) -> cast (x <: i32) <: i64\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_25: t_From i128 i8 =\n  {\n    f_from_pre = (fun (x: i8) -> true);\n    f_from_post = (fun (x: i8) (out: i128) -> true);\n    f_from = fun (x: i8) -> cast (x <: i8) <: i128\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_26: t_From i128 i16 =\n  {\n    f_from_pre = (fun (x: i16) -> true);\n    f_from_post = (fun (x: i16) (out: i128) -> true);\n    f_from = fun (x: i16) -> cast (x <: i16) <: i128\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_27: t_From i128 i32 =\n  {\n    f_from_pre = (fun (x: i32) -> true);\n    f_from_post = (fun (x: i32) (out: i128) -> true);\n    f_from = fun (x: i32) -> cast (x <: i32) <: i128\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_28: t_From i128 i64 =\n  {\n    f_from_pre = (fun (x: i64) -> true);\n    f_from_post = (fun (x: i64) (out: i128) -> true);\n    f_from = fun (x: i64) -> cast (x <: i64) <: i128\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_29: t_From i128 isize =\n  {\n    f_from_pre = (fun (x: isize) -> true);\n    f_from_post = (fun (x: isize) (out: i128) -> true);\n    f_from = fun (x: isize) -> cast (x <: isize) <: i128\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_30: t_From isize i8 =\n  {\n    f_from_pre = (fun (x: i8) -> true);\n    f_from_post = (fun (x: i8) (out: isize) -> true);\n    f_from = fun (x: i8) -> cast (x <: i8) <: isize\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_31: t_From isize i16 =\n  {\n    f_from_pre = (fun (x: i16) -> true);\n    f_from_post = (fun (x: i16) (out: isize) -> true);\n    f_from = fun (x: i16) -> cast (x <: i16) <: isize\n  }\n\nclass t_TryInto (v_Self: Type0) (v_T: Type0) = {\n  [@@@ 
FStar.Tactics.Typeclasses.no_method]f_Error:Type0;\n  f_try_into_pre:self_: v_Self -> pred: Type0{true ==> pred};\n  f_try_into_post:v_Self -> Core_models.Result.t_Result v_T f_Error -> Type0;\n  f_try_into:x0: v_Self\n    -> Prims.Pure (Core_models.Result.t_Result v_T f_Error)\n        (f_try_into_pre x0)\n        (fun result -> f_try_into_post x0 result)\n}\n\nclass t_TryFrom (v_Self: Type0) (v_T: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Error:Type0;\n  f_try_from_pre:x: v_T -> pred: Type0{true ==> pred};\n  f_try_from_post:v_T -> Core_models.Result.t_Result v_Self f_Error -> Type0;\n  f_try_from:x0: v_T\n    -> Prims.Pure (Core_models.Result.t_Result v_Self f_Error)\n        (f_try_from_pre x0)\n        (fun result -> f_try_from_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1 (#v_T #v_U: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_From v_U v_T)\n    : t_TryFrom v_U v_T =\n  {\n    f_Error = t_Infallible;\n    f_try_from_pre = (fun (x: v_T) -> true);\n    f_try_from_post = (fun (x: v_T) (out: Core_models.Result.t_Result v_U t_Infallible) -> true);\n    f_try_from\n    =\n    fun (x: v_T) ->\n      Core_models.Result.Result_Ok (f_from #v_U #v_T #FStar.Tactics.Typeclasses.solve x)\n      <:\n      Core_models.Result.t_Result v_U t_Infallible\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_2\n      (#v_T: Type0)\n      (v_N: usize)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T)\n    : t_TryFrom (t_Array v_T v_N) (t_Slice v_T) =\n  {\n    f_Error = Core_models.Array.t_TryFromSliceError;\n    f_try_from_pre = (fun (x: t_Slice v_T) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: t_Slice v_T)\n        (out: Core_models.Result.t_Result (t_Array v_T v_N) Core_models.Array.t_TryFromSliceError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: t_Slice v_T) ->\n      if (Core_models.Slice.impl__len #v_T x <: usize) =. 
v_N\n      then\n        Core_models.Result.Result_Ok\n        (Rust_primitives.Slice.array_from_fn #v_T\n            v_N\n            #(usize -> v_T)\n            (fun i ->\n                let i:usize = i in\n                Rust_primitives.Slice.slice_index #v_T x i <: v_T))\n        <:\n        Core_models.Result.t_Result (t_Array v_T v_N) Core_models.Array.t_TryFromSliceError\n      else\n        Core_models.Result.Result_Err\n        (Core_models.Array.TryFromSliceError <: Core_models.Array.t_TryFromSliceError)\n        <:\n        Core_models.Result.t_Result (t_Array v_T v_N) Core_models.Array.t_TryFromSliceError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_3 (#v_T #v_U: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_TryFrom v_U v_T)\n    : t_TryInto v_T v_U =\n  {\n    f_Error = i0.f_Error;\n    f_try_into_pre = (fun (self: v_T) -> true);\n    f_try_into_post = (fun (self: v_T) (out: Core_models.Result.t_Result v_U i0.f_Error) -> true);\n    f_try_into = fun (self: v_T) -> f_try_from #v_U #v_T #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_32: t_TryFrom u8 u16 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u16) -> true);\n    f_try_from_post\n    =\n    (fun (x: u16) (out: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: u16) ->\n      if\n        x >. (cast (Core_models.Num.impl_u8__MAX <: u8) <: u16) ||\n        x <. 
(cast (Core_models.Num.impl_u8__MIN <: u8) <: u16)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u16) <: u8)\n        <:\n        Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_33: t_TryFrom u8 u32 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u32) -> true);\n    f_try_from_post\n    =\n    (fun (x: u32) (out: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: u32) ->\n      if\n        x >. (cast (Core_models.Num.impl_u8__MAX <: u8) <: u32) ||\n        x <. (cast (Core_models.Num.impl_u8__MIN <: u8) <: u32)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u32) <: u8)\n        <:\n        Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_34: t_TryFrom u16 u32 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u32) -> true);\n    f_try_from_post\n    =\n    (fun (x: u32) (out: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: u32) ->\n      if\n        x >. (cast (Core_models.Num.impl_u16__MAX <: u16) <: u32) ||\n        x <. 
(cast (Core_models.Num.impl_u16__MIN <: u16) <: u32)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u32) <: u16)\n        <:\n        Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_35: t_TryFrom usize u32 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u32) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: u32)\n        (out: Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: u32) ->\n      if\n        x >. (cast (Core_models.Num.impl_usize__MAX <: usize) <: u32) ||\n        x <. (cast (Core_models.Num.impl_usize__MIN <: usize) <: u32)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u32) <: usize)\n        <:\n        Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_36: t_TryFrom u8 u64 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u64) -> true);\n    f_try_from_post\n    =\n    (fun (x: u64) (out: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: u64) ->\n      if\n        x >. (cast (Core_models.Num.impl_u8__MAX <: u8) <: u64) ||\n        x <. 
(cast (Core_models.Num.impl_u8__MIN <: u8) <: u64)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u64) <: u8)\n        <:\n        Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_37: t_TryFrom u16 u64 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u64) -> true);\n    f_try_from_post\n    =\n    (fun (x: u64) (out: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: u64) ->\n      if\n        x >. (cast (Core_models.Num.impl_u16__MAX <: u16) <: u64) ||\n        x <. (cast (Core_models.Num.impl_u16__MIN <: u16) <: u64)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u64) <: u16)\n        <:\n        Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_38: t_TryFrom u32 u64 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u64) -> true);\n    f_try_from_post\n    =\n    (fun (x: u64) (out: Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: u64) ->\n      if\n        x >. (cast (Core_models.Num.impl_u32__MAX <: u32) <: u64) ||\n        x <. 
(cast (Core_models.Num.impl_u32__MIN <: u32) <: u64)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u64) <: u32)\n        <:\n        Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_39: t_TryFrom usize u64 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u64) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: u64)\n        (out: Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: u64) ->\n      if\n        x >. (cast (Core_models.Num.impl_usize__MAX <: usize) <: u64) ||\n        x <. (cast (Core_models.Num.impl_usize__MIN <: usize) <: u64)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u64) <: usize)\n        <:\n        Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_40: t_TryFrom u8 u128 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u128) -> true);\n    f_try_from_post\n    =\n    (fun (x: u128) (out: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: u128) ->\n      if\n        x >. (cast (Core_models.Num.impl_u8__MAX <: u8) <: u128) ||\n        x <. 
(cast (Core_models.Num.impl_u8__MIN <: u8) <: u128)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u128) <: u8)\n        <:\n        Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_41: t_TryFrom u16 u128 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u128) -> true);\n    f_try_from_post\n    =\n    (fun (x: u128) (out: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: u128) ->\n      if\n        x >. (cast (Core_models.Num.impl_u16__MAX <: u16) <: u128) ||\n        x <. (cast (Core_models.Num.impl_u16__MIN <: u16) <: u128)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u128) <: u16)\n        <:\n        Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_42: t_TryFrom u32 u128 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u128) -> true);\n    f_try_from_post\n    =\n    (fun (x: u128) (out: Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: u128) ->\n      if\n        x >. (cast (Core_models.Num.impl_u32__MAX <: u32) <: u128) ||\n        x <. 
(cast (Core_models.Num.impl_u32__MIN <: u32) <: u128)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u128) <: u32)\n        <:\n        Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_43: t_TryFrom u64 u128 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u128) -> true);\n    f_try_from_post\n    =\n    (fun (x: u128) (out: Core_models.Result.t_Result u64 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: u128) ->\n      if\n        x >. (cast (Core_models.Num.impl_u64__MAX <: u64) <: u128) ||\n        x <. (cast (Core_models.Num.impl_u64__MIN <: u64) <: u128)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u64 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u128) <: u64)\n        <:\n        Core_models.Result.t_Result u64 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_44: t_TryFrom usize u128 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: u128) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: u128)\n        (out: Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: u128) ->\n      if\n        x >. (cast (Core_models.Num.impl_usize__MAX <: usize) <: u128) ||\n        x <. 
(cast (Core_models.Num.impl_usize__MIN <: usize) <: u128)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: u128) <: usize)\n        <:\n        Core_models.Result.t_Result usize Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_45: t_TryFrom u8 usize =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: usize) -> true);\n    f_try_from_post\n    =\n    (fun (x: usize) (out: Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: usize) ->\n      if\n        x >. (cast (Core_models.Num.impl_u8__MAX <: u8) <: usize) ||\n        x <. (cast (Core_models.Num.impl_u8__MIN <: u8) <: usize)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: usize) <: u8)\n        <:\n        Core_models.Result.t_Result u8 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_46: t_TryFrom u16 usize =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: usize) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: usize)\n        (out: Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: usize) ->\n      if\n        x >. (cast (Core_models.Num.impl_u16__MAX <: u16) <: usize) ||\n        x <. 
(cast (Core_models.Num.impl_u16__MIN <: u16) <: usize)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: usize) <: u16)\n        <:\n        Core_models.Result.t_Result u16 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_47: t_TryFrom u32 usize =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: usize) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: usize)\n        (out: Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: usize) ->\n      if\n        x >. (cast (Core_models.Num.impl_u32__MAX <: u32) <: usize) ||\n        x <. (cast (Core_models.Num.impl_u32__MIN <: u32) <: usize)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: usize) <: u32)\n        <:\n        Core_models.Result.t_Result u32 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_48: t_TryFrom u64 usize =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: usize) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: usize)\n        (out: Core_models.Result.t_Result u64 Core_models.Num.Error.t_TryFromIntError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: usize) ->\n      if\n        x >. 
(cast (Core_models.Num.impl_u64__MAX <: u64) <: usize) ||\n        x <. (cast (Core_models.Num.impl_u64__MIN <: u64) <: usize)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result u64 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: usize) <: u64)\n        <:\n        Core_models.Result.t_Result u64 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_49: t_TryFrom i8 i16 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i16) -> true);\n    f_try_from_post\n    =\n    (fun (x: i16) (out: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: i16) ->\n      if\n        x >. (cast (Core_models.Num.impl_i8__MAX <: i8) <: i16) ||\n        x <. (cast (Core_models.Num.impl_i8__MIN <: i8) <: i16)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i16) <: i8)\n        <:\n        Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_50: t_TryFrom i8 i32 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i32) -> true);\n    f_try_from_post\n    =\n    (fun (x: i32) (out: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: i32) ->\n      if\n        x >. 
(cast (Core_models.Num.impl_i8__MAX <: i8) <: i32) ||\n        x <. (cast (Core_models.Num.impl_i8__MIN <: i8) <: i32)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i32) <: i8)\n        <:\n        Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_51: t_TryFrom i16 i32 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i32) -> true);\n    f_try_from_post\n    =\n    (fun (x: i32) (out: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: i32) ->\n      if\n        x >. (cast (Core_models.Num.impl_i16__MAX <: i16) <: i32) ||\n        x <. (cast (Core_models.Num.impl_i16__MIN <: i16) <: i32)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i32) <: i16)\n        <:\n        Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_52: t_TryFrom isize i32 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i32) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: i32)\n        (out: Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: i32) ->\n      if\n        x >. 
(cast (Core_models.Num.impl_isize__MAX <: isize) <: i32) ||\n        x <. (cast (Core_models.Num.impl_isize__MIN <: isize) <: i32)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i32) <: isize)\n        <:\n        Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_53: t_TryFrom i8 i64 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i64) -> true);\n    f_try_from_post\n    =\n    (fun (x: i64) (out: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: i64) ->\n      if\n        x >. (cast (Core_models.Num.impl_i8__MAX <: i8) <: i64) ||\n        x <. (cast (Core_models.Num.impl_i8__MIN <: i8) <: i64)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i64) <: i8)\n        <:\n        Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_54: t_TryFrom i16 i64 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i64) -> true);\n    f_try_from_post\n    =\n    (fun (x: i64) (out: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: i64) ->\n      if\n        x >. 
(cast (Core_models.Num.impl_i16__MAX <: i16) <: i64) ||\n        x <. (cast (Core_models.Num.impl_i16__MIN <: i16) <: i64)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i64) <: i16)\n        <:\n        Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_55: t_TryFrom i32 i64 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i64) -> true);\n    f_try_from_post\n    =\n    (fun (x: i64) (out: Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: i64) ->\n      if\n        x >. (cast (Core_models.Num.impl_i32__MAX <: i32) <: i64) ||\n        x <. (cast (Core_models.Num.impl_i32__MIN <: i32) <: i64)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i64) <: i32)\n        <:\n        Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_56: t_TryFrom isize i64 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i64) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: i64)\n        (out: Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: i64) ->\n      if\n        x >. 
(cast (Core_models.Num.impl_isize__MAX <: isize) <: i64) ||\n        x <. (cast (Core_models.Num.impl_isize__MIN <: isize) <: i64)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i64) <: isize)\n        <:\n        Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_57: t_TryFrom i8 i128 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i128) -> true);\n    f_try_from_post\n    =\n    (fun (x: i128) (out: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: i128) ->\n      if\n        x >. (cast (Core_models.Num.impl_i8__MAX <: i8) <: i128) ||\n        x <. (cast (Core_models.Num.impl_i8__MIN <: i8) <: i128)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i128) <: i8)\n        <:\n        Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_58: t_TryFrom i16 i128 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i128) -> true);\n    f_try_from_post\n    =\n    (fun (x: i128) (out: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: i128) ->\n      if\n        x >. 
(cast (Core_models.Num.impl_i16__MAX <: i16) <: i128) ||\n        x <. (cast (Core_models.Num.impl_i16__MIN <: i16) <: i128)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i128) <: i16)\n        <:\n        Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_59: t_TryFrom i32 i128 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i128) -> true);\n    f_try_from_post\n    =\n    (fun (x: i128) (out: Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: i128) ->\n      if\n        x >. (cast (Core_models.Num.impl_i32__MAX <: i32) <: i128) ||\n        x <. (cast (Core_models.Num.impl_i32__MIN <: i32) <: i128)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i128) <: i32)\n        <:\n        Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_60: t_TryFrom i64 i128 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i128) -> true);\n    f_try_from_post\n    =\n    (fun (x: i128) (out: Core_models.Result.t_Result i64 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: i128) ->\n      if\n        x >. 
(cast (Core_models.Num.impl_i64__MAX <: i64) <: i128) ||\n        x <. (cast (Core_models.Num.impl_i64__MIN <: i64) <: i128)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i64 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i128) <: i64)\n        <:\n        Core_models.Result.t_Result i64 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_61: t_TryFrom isize i128 =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: i128) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: i128)\n        (out: Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: i128) ->\n      if\n        x >. (cast (Core_models.Num.impl_isize__MAX <: isize) <: i128) ||\n        x <. 
(cast (Core_models.Num.impl_isize__MIN <: isize) <: i128)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: i128) <: isize)\n        <:\n        Core_models.Result.t_Result isize Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_62: t_TryFrom i8 isize =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: isize) -> true);\n    f_try_from_post\n    =\n    (fun (x: isize) (out: Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError) ->\n        true);\n    f_try_from\n    =\n    fun (x: isize) ->\n      if\n        x >. (cast (Core_models.Num.impl_i8__MAX <: i8) <: isize) ||\n        x <. (cast (Core_models.Num.impl_i8__MIN <: i8) <: isize)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: isize) <: i8)\n        <:\n        Core_models.Result.t_Result i8 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_63: t_TryFrom i16 isize =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: isize) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: isize)\n        (out: Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: isize) ->\n      if\n        x >. (cast (Core_models.Num.impl_i16__MAX <: i16) <: isize) ||\n        x <. 
(cast (Core_models.Num.impl_i16__MIN <: i16) <: isize)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: isize) <: i16)\n        <:\n        Core_models.Result.t_Result i16 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_64: t_TryFrom i32 isize =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: isize) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: isize)\n        (out: Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: isize) ->\n      if\n        x >. (cast (Core_models.Num.impl_i32__MAX <: i32) <: isize) ||\n        x <. (cast (Core_models.Num.impl_i32__MIN <: i32) <: isize)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: isize) <: i32)\n        <:\n        Core_models.Result.t_Result i32 Core_models.Num.Error.t_TryFromIntError\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_65: t_TryFrom i64 isize =\n  {\n    f_Error = Core_models.Num.Error.t_TryFromIntError;\n    f_try_from_pre = (fun (x: isize) -> true);\n    f_try_from_post\n    =\n    (fun\n        (x: isize)\n        (out: Core_models.Result.t_Result i64 Core_models.Num.Error.t_TryFromIntError)\n        ->\n        true);\n    f_try_from\n    =\n    fun (x: isize) ->\n      if\n        x >. 
(cast (Core_models.Num.impl_i64__MAX <: i64) <: isize) ||\n        x <. (cast (Core_models.Num.impl_i64__MIN <: i64) <: isize)\n      then\n        Core_models.Result.Result_Err\n        (Core_models.Num.Error.TryFromIntError (() <: Prims.unit)\n          <:\n          Core_models.Num.Error.t_TryFromIntError)\n        <:\n        Core_models.Result.t_Result i64 Core_models.Num.Error.t_TryFromIntError\n      else\n        Core_models.Result.Result_Ok (cast (x <: isize) <: i64)\n        <:\n        Core_models.Result.t_Result i64 Core_models.Num.Error.t_TryFromIntError\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Core_arch.Arm_shared.Neon.fsti",
    "content": "module Core_models.Core_arch.Arm_shared.Neon\n\nval t_int8x8_t:Type0\nval t_int8x16_t:Type0\nval t_int16x4_t:Type0\nval t_int16x8_t:Type0\nval t_int32x2_t:Type0\nval t_int32x4_t:Type0\nval t_int64x1_t:Type0\nval t_int64x2_t:Type0\n\nval t_uint8x8_t:Type0\nval t_uint8x16_t:Type0\nval t_uint16x4_t:Type0\nval t_uint16x8_t:Type0\nval t_uint32x2_t:Type0\nval t_uint32x4_t:Type0\nval t_uint64x1_t:Type0\nval t_uint64x2_t:Type0\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Core_arch.X86.Pclmulqdq.fsti",
    "content": "module Core_models.Core_arch.X86.Pclmulqdq\n\nval e_mm_clmulepi64_si128 : Rust_primitives.Integers.i32 -> Core_models.Core_arch.X86.t_e_ee_m128i -> Core_models.Core_arch.X86.t_e_ee_m128i -> Core_models.Core_arch.X86.t_e_ee_m128i\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Core_arch.X86.Sse2.fsti",
    "content": "module Core_models.Core_arch.X86.Sse2\n\nval e_mm_set_epi64x: Rust_primitives.Integers.i64 -> Rust_primitives.Integers.i64 -> Core_models.Core_arch.X86.t_e_ee_m128i\nval e_mm_cvtsi128_si32: Core_models.Core_arch.X86.t_e_ee_m128i -> Rust_primitives.Integers.i32\nval e_mm_srli_si128: Rust_primitives.Integers.i32 -> Core_models.Core_arch.X86.t_e_ee_m128i -> Core_models.Core_arch.X86.t_e_ee_m128i"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Core_arch.X86.fsti",
    "content": "module Core_models.Core_arch.X86\n\nval t_e_ee_m128i:Type0\n\nval t_e_ee_m256i:Type0\n\nval t_e_ee_m256:Type0"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Core_arch.X86_64_.Sse2.fsti",
    "content": "module Core_models.Core_arch.X86_64_.Sse2\n\nval e_mm_cvtsi128_si64: Core_models.Core_arch.X86.t_e_ee_m128i -> Rust_primitives.Integers.i64\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Core_arch.fsti",
    "content": "module Core_models.Core_arch\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Default.fsti",
    "content": "module Core_models.Default\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Default (v_Self: Type0) = {\n  f_default_pre:x: Prims.unit\n    -> pred:\n      Type0\n        { (let _:Prims.unit = x in\n            true) ==>\n          pred };\n  f_default_post:Prims.unit -> v_Self -> Type0;\n  f_default:x0: Prims.unit\n    -> Prims.Pure v_Self (f_default_pre x0) (fun result -> f_default_post x0 result)\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Error.fsti",
    "content": "module Core_models.Error\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Error (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:Core_models.Fmt.t_Display v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i1:Core_models.Fmt.t_Debug v_Self\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) {|i: t_Error v_Self|} -> i._super_i0\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) {|i: t_Error v_Self|} -> i._super_i1\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.F32.fst",
    "content": "module Core_models.F32\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nassume\nval impl_f32__abs': x: float -> float\n\nunfold\nlet impl_f32__abs = impl_f32__abs'\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Fmt.Rt.fsti",
    "content": "module Core_models.Fmt.Rt\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nval t_ArgumentType:eqtype\n\ntype t_Argument = { f_ty:t_ArgumentType }\n\nval impl__new_display (#v_T: Type0) (x: v_T)\n    : Prims.Pure t_Argument Prims.l_True (fun _ -> Prims.l_True)\n\nval impl__new_debug (#v_T: Type0) (x: v_T)\n    : Prims.Pure t_Argument Prims.l_True (fun _ -> Prims.l_True)\n\nval impl__new_lower_hex (#v_T: Type0) (x: v_T)\n    : Prims.Pure t_Argument Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_1__new_binary (#v_T: Type0) (x: v_T)\n    : Prims.Pure t_Argument Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_1__new_const (#v_T #v_U: Type0) (x: v_T) (y: v_U)\n    : Prims.Pure Core_models.Fmt.t_Arguments Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_1__new_v1 (#v_T #v_U #v_V #v_W: Type0) (x: v_T) (y: v_U) (z: v_V) (t: v_W)\n    : Prims.Pure Core_models.Fmt.t_Arguments Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_1__none: Prims.unit\n  -> Prims.Pure (t_Array t_Argument (mk_usize 0)) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_1__new_v1_formatted (#v_T #v_U #v_V: Type0) (x: v_T) (y: v_U) (z: v_V)\n    : Prims.Pure Core_models.Fmt.t_Arguments Prims.l_True (fun _ -> Prims.l_True)\n\ntype t_Count =\n  | Count_Is : u16 -> t_Count\n  | Count_Param : u16 -> t_Count\n  | Count_Implied : t_Count\n\ntype t_Placeholder = {\n  f_position:usize;\n  f_flags:u32;\n  f_precision:t_Count;\n  f_width:t_Count\n}\n\ntype t_UnsafeArg = | UnsafeArg : t_UnsafeArg\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Fmt.fsti",
    "content": "module Core_models.Fmt\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_Error = | Error : t_Error\n\ntype t_Formatter = | Formatter : t_Formatter\n\nclass t_Display (v_Self: Type0) = {\n  f_fmt_pre:v_Self -> t_Formatter -> Type0;\n  f_fmt_post:v_Self -> t_Formatter -> (t_Formatter & Core_models.Result.t_Result Prims.unit t_Error)\n    -> Type0;\n  f_fmt:x0: v_Self -> x1: t_Formatter\n    -> Prims.Pure (t_Formatter & Core_models.Result.t_Result Prims.unit t_Error)\n        (f_fmt_pre x0 x1)\n        (fun result -> f_fmt_post x0 x1 result)\n}\n\nclass t_Debug (v_Self: Type0) = {\n  f_dbg_fmt_pre:v_Self -> t_Formatter -> Type0;\n  f_dbg_fmt_post:\n      v_Self ->\n      t_Formatter ->\n      (t_Formatter & Core_models.Result.t_Result Prims.unit t_Error)\n    -> Type0;\n  f_dbg_fmt:x0: v_Self -> x1: t_Formatter\n    -> Prims.Pure (t_Formatter & Core_models.Result.t_Result Prims.unit t_Error)\n        (f_dbg_fmt_pre x0 x1)\n        (fun result -> f_dbg_fmt_post x0 x1 result)\n}\n\ntype t_Arguments = | Arguments : Prims.unit -> t_Arguments\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl (#v_T: Type0) : t_Debug v_T\n\nval impl_11__write_fmt (f: t_Formatter) (args: t_Arguments)\n    : Prims.Pure (t_Formatter & Core_models.Result.t_Result Prims.unit t_Error)\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Hash.fsti",
    "content": "module Core_models.Hash\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Hasher (v_Self: Type0) = { __marker_trait_t_Hasher:Prims.unit }\n\nclass t_Hash (v_Self: Type0) = {\n  f_hash_pre:#v_H: Type0 -> {| i1: t_Hasher v_H |} -> self_: v_Self -> h: v_H\n    -> pred: Type0{true ==> pred};\n  f_hash_post:#v_H: Type0 -> {| i1: t_Hasher v_H |} -> v_Self -> v_H -> v_H -> Type0;\n  f_hash:#v_H: Type0 -> {| i1: t_Hasher v_H |} -> x0: v_Self -> x1: v_H\n    -> Prims.Pure v_H (f_hash_pre #v_H #i1 x0 x1) (fun result -> f_hash_post #v_H #i1 x0 x1 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl (#v_T: Type0) : t_Hash v_T\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Hint.fsti",
    "content": "module Core_models.Hint\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nval black_box (#v_T: Type0) (dummy: v_T)\n    : Prims.Pure v_T\n      Prims.l_True\n      (ensures\n        fun res ->\n          let res:v_T = res in\n          res == dummy)\n\nval must_use (#v_T: Type0) (value: v_T)\n    : Prims.Pure v_T\n      Prims.l_True\n      (ensures\n        fun res ->\n          let res:v_T = res in\n          res == value)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Enumerate.fst",
    "content": "module Core_models.Iter.Adapters.Enumerate\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ninclude Core_models.Iter.Bundle {t_Enumerate as t_Enumerate}\n\ninclude Core_models.Iter.Bundle {impl__new as impl__new}\n\ninclude Core_models.Iter.Bundle {impl_1 as impl_1}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Flat_map.fst",
    "content": "module Core_models.Iter.Adapters.Flat_map\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ninclude Core_models.Iter.Bundle {t_FlatMap as t_FlatMap}\n\ninclude Core_models.Iter.Bundle {impl__new__from__flat_map as impl__new}\n\ninclude Core_models.Iter.Bundle {impl_1__from__flat_map as impl_1}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Flatten.fst",
    "content": "module Core_models.Iter.Adapters.Flatten\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ninclude Core_models.Iter.Bundle {t_Flatten as t_Flatten}\n\ninclude Core_models.Iter.Bundle {impl__new__from__flatten as impl__new}\n\ninclude Core_models.Iter.Bundle {impl_1__from__flatten as impl_1}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Map.fst",
    "content": "module Core_models.Iter.Adapters.Map\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ninclude Core_models.Iter.Bundle {t_Map as t_Map}\n\ninclude Core_models.Iter.Bundle {impl__new__from__map as impl__new}\n\ninclude Core_models.Iter.Bundle {impl_1__from__map as impl_1}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Rev.fsti",
    "content": "module Core_models.Iter.Adapters.Rev\n\ntype t_Rev (t:Type0)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Step_by.fst",
    "content": "module Core_models.Iter.Adapters.Step_by\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ninclude Core_models.Iter.Bundle {t_StepBy as t_StepBy}\n\ninclude Core_models.Iter.Bundle {impl__new__from__step_by as impl__new}\n\ninclude Core_models.Iter.Bundle {impl_1__from__step_by as impl_1}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Take.fst",
    "content": "module Core_models.Iter.Adapters.Take\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ninclude Core_models.Iter.Bundle {t_Take as t_Take}\n\ninclude Core_models.Iter.Bundle {impl__new__from__take as impl__new}\n\ninclude Core_models.Iter.Bundle {impl_1__from__take as impl_1}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Adapters.Zip.fst",
    "content": "module Core_models.Iter.Adapters.Zip\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ninclude Core_models.Iter.Bundle {t_Zip as t_Zip}\n\ninclude Core_models.Iter.Bundle {impl__new__from__zip as impl__new}\n\ninclude Core_models.Iter.Bundle {impl_1__from__zip as impl_1}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Bundle.fst",
    "content": "module Core_models.Iter.Bundle\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_Enumerate (v_I: Type0) = {\n  f_iter:v_I;\n  f_count:usize\n}\n\nlet impl__new (#v_I: Type0) (iter: v_I) : t_Enumerate v_I =\n  { f_iter = iter; f_count = mk_usize 0 } <: t_Enumerate v_I\n\ntype t_FlatMap (v_I: Type0) (v_U: Type0) (v_F: Type0) = {\n  f_it:v_I;\n  f_f:v_F;\n  f_current:Core_models.Option.t_Option v_U\n}\n\ntype t_Map (v_I: Type0) (v_F: Type0) = {\n  f_iter:v_I;\n  f_f:v_F\n}\n\nlet impl__new__from__map (#v_I #v_F: Type0) (iter: v_I) (f: v_F) : t_Map v_I v_F =\n  { f_iter = iter; f_f = f } <: t_Map v_I v_F\n\ntype t_StepBy (v_I: Type0) = {\n  f_iter:v_I;\n  f_step:usize\n}\n\nlet impl__new__from__step_by (#v_I: Type0) (iter: v_I) (step: usize) : t_StepBy v_I =\n  { f_iter = iter; f_step = step } <: t_StepBy v_I\n\ntype t_Take (v_I: Type0) = {\n  f_iter:v_I;\n  f_n:usize\n}\n\nlet impl__new__from__take (#v_I: Type0) (iter: v_I) (n: usize) : t_Take v_I =\n  { f_iter = iter; f_n = n } <: t_Take v_I\n\ntype t_Zip (v_I1: Type0) (v_I2: Type0) = {\n  f_it1:v_I1;\n  f_it2:v_I2\n}\n\nclass t_Iterator (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Item:Type0;\n  f_next_pre:self_: v_Self -> pred: Type0{true ==> pred};\n  f_next_post:v_Self -> (v_Self & Core_models.Option.t_Option f_Item) -> Type0;\n  f_next:x0: v_Self\n    -> Prims.Pure (v_Self & Core_models.Option.t_Option f_Item)\n        (f_next_pre x0)\n        (fun result -> f_next_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1 (#v_I: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)\n    : t_Iterator (t_Enumerate v_I) =\n  {\n    f_Item = (usize & i0.f_Item);\n    f_next_pre = (fun (self: t_Enumerate v_I) -> true);\n    f_next_post\n    =\n    (fun\n        (self: t_Enumerate v_I)\n        (out1: (t_Enumerate v_I & Core_models.Option.t_Option (usize & i0.f_Item)))\n        ->\n        true);\n   
 f_next\n    =\n    fun (self: t_Enumerate v_I) ->\n      let (tmp0: v_I), (out: Core_models.Option.t_Option i0.f_Item) =\n        f_next #v_I #FStar.Tactics.Typeclasses.solve self.f_iter\n      in\n      let self:t_Enumerate v_I = { self with f_iter = tmp0 } <: t_Enumerate v_I in\n      let\n      (self: t_Enumerate v_I), (hax_temp_output: Core_models.Option.t_Option (usize & i0.f_Item)) =\n        match out <: Core_models.Option.t_Option i0.f_Item with\n        | Core_models.Option.Option_Some a ->\n          let i:usize = self.f_count in\n          let _:Prims.unit =\n            Hax_lib.v_assume (b2t (self.f_count <. Core_models.Num.impl_usize__MAX <: bool))\n          in\n          let self:t_Enumerate v_I =\n            { self with f_count = self.f_count +! mk_usize 1 } <: t_Enumerate v_I\n          in\n          self,\n          (Core_models.Option.Option_Some (i, a <: (usize & i0.f_Item))\n            <:\n            Core_models.Option.t_Option (usize & i0.f_Item))\n          <:\n          (t_Enumerate v_I & Core_models.Option.t_Option (usize & i0.f_Item))\n        | Core_models.Option.Option_None  ->\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option (usize & i0.f_Item))\n          <:\n          (t_Enumerate v_I & Core_models.Option.t_Option (usize & i0.f_Item))\n      in\n      self, hax_temp_output <: (t_Enumerate v_I & Core_models.Option.t_Option (usize & i0.f_Item))\n  }\n\nlet impl__new__from__flat_map\n      (#v_I #v_U #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_U)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i2:\n          Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)\n      (#_: unit{i2.Core_models.Ops.Function.f_Output == v_U})\n      (it: v_I)\n      (f: v_F)\n    : t_FlatMap v_I v_U v_F =\n  {\n    f_it = it;\n    f_f = f;\n    f_current = Core_models.Option.Option_None <: 
Core_models.Option.t_Option v_U\n  }\n  <:\n  t_FlatMap v_I v_U v_F\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_1__from__flat_map':\n    #v_I: Type0 ->\n    #v_U: Type0 ->\n    #v_F: Type0 ->\n    {| i0: t_Iterator v_I |} ->\n    {| i1: t_Iterator v_U |} ->\n    {| i2: Core_models.Ops.Function.t_FnOnce v_F i0.f_Item |} ->\n    #_: unit{i2.Core_models.Ops.Function.f_Output == v_U}\n  -> t_Iterator (t_FlatMap v_I v_U v_F)\n\nunfold\nlet impl_1__from__flat_map\n      (#v_I #v_U #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_U)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i2:\n          Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)\n      (#_: unit{i2.Core_models.Ops.Function.f_Output == v_U})\n     = impl_1__from__flat_map' #v_I #v_U #v_F #i0 #i1 #i2 #_\n\nnoeq\n\ntype t_Flatten (v_I: Type0) {| i0: t_Iterator v_I |} {| i1: t_Iterator i0.f_Item |} = {\n  f_it:v_I;\n  f_current:Core_models.Option.t_Option i0.f_Item\n}\n\nlet impl__new__from__flatten\n      (#v_I: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator i0.f_Item)\n      (it: v_I)\n    : t_Flatten v_I =\n  { f_it = it; f_current = Core_models.Option.Option_None <: Core_models.Option.t_Option i0.f_Item }\n  <:\n  t_Flatten v_I\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_1__from__flatten':\n    #v_I: Type0 ->\n    {| i0: t_Iterator v_I |} ->\n    {| i1: t_Iterator i0.f_Item |}\n  -> t_Iterator (t_Flatten v_I)\n\nunfold\nlet impl_1__from__flatten\n      (#v_I: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator i0.f_Item)\n     = impl_1__from__flatten' #v_I #i0 #i1\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1__from__map\n      (#v_I #v_O #v_F: Type0)\n      
(#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)\n      (#_: unit{i1.Core_models.Ops.Function.f_Output == v_O})\n    : t_Iterator (t_Map v_I v_F) =\n  {\n    f_Item = v_O;\n    f_next_pre = (fun (self: t_Map v_I v_F) -> true);\n    f_next_post\n    =\n    (fun (self: t_Map v_I v_F) (out1: (t_Map v_I v_F & Core_models.Option.t_Option v_O)) -> true);\n    f_next\n    =\n    fun (self: t_Map v_I v_F) ->\n      let (tmp0: v_I), (out: Core_models.Option.t_Option i0.f_Item) =\n        f_next #v_I #FStar.Tactics.Typeclasses.solve self.f_iter\n      in\n      let self:t_Map v_I v_F = { self with f_iter = tmp0 } <: t_Map v_I v_F in\n      let hax_temp_output:Core_models.Option.t_Option v_O =\n        match out <: Core_models.Option.t_Option i0.f_Item with\n        | Core_models.Option.Option_Some v ->\n          Core_models.Option.Option_Some\n          (Core_models.Ops.Function.f_call_once #v_F\n              #i0.f_Item\n              #FStar.Tactics.Typeclasses.solve\n              self.f_f\n              v)\n          <:\n          Core_models.Option.t_Option v_O\n        | Core_models.Option.Option_None  ->\n          Core_models.Option.Option_None <: Core_models.Option.t_Option v_O\n      in\n      self, hax_temp_output <: (t_Map v_I v_F & Core_models.Option.t_Option v_O)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_1__from__step_by': #v_I: Type0 -> {| i0: t_Iterator v_I |} -> t_Iterator (t_StepBy v_I)\n\nunfold\nlet impl_1__from__step_by\n      (#v_I: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)\n     = impl_1__from__step_by' #v_I #i0\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1__from__take (#v_I: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)\n    : t_Iterator (t_Take v_I) =\n  {\n    f_Item = i0.f_Item;\n    f_next_pre = (fun (self: t_Take 
v_I) -> true);\n    f_next_post\n    =\n    (fun (self: t_Take v_I) (out1: (t_Take v_I & Core_models.Option.t_Option i0.f_Item)) -> true);\n    f_next\n    =\n    fun (self: t_Take v_I) ->\n      let (self: t_Take v_I), (hax_temp_output: Core_models.Option.t_Option i0.f_Item) =\n        if self.f_n <>. mk_usize 0\n        then\n          let self:t_Take v_I = { self with f_n = self.f_n -! mk_usize 1 } <: t_Take v_I in\n          let (tmp0: v_I), (out: Core_models.Option.t_Option i0.f_Item) =\n            f_next #v_I #FStar.Tactics.Typeclasses.solve self.f_iter\n          in\n          let self:t_Take v_I = { self with f_iter = tmp0 } <: t_Take v_I in\n          self, out <: (t_Take v_I & Core_models.Option.t_Option i0.f_Item)\n        else\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option i0.f_Item)\n          <:\n          (t_Take v_I & Core_models.Option.t_Option i0.f_Item)\n      in\n      self, hax_temp_output <: (t_Take v_I & Core_models.Option.t_Option i0.f_Item)\n  }\n\nlet impl__new__from__zip\n      (#v_I1 #v_I2: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I1)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_I2)\n      (it1: v_I1)\n      (it2: v_I2)\n    : t_Zip v_I1 v_I2 = { f_it1 = it1; f_it2 = it2 } <: t_Zip v_I1 v_I2\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_1__from__zip':\n    #v_I1: Type0 ->\n    #v_I2: Type0 ->\n    {| i0: t_Iterator v_I1 |} ->\n    {| i1: t_Iterator v_I2 |}\n  -> t_Iterator (t_Zip v_I1 v_I2)\n\nunfold\nlet impl_1__from__zip\n      (#v_I1 #v_I2: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I1)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_I2)\n     = impl_1__from__zip' #v_I1 #v_I2 #i0 #i1\n\nclass t_IteratorMethods (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_Iterator v_Self;\n  f_fold_pre:\n      #v_B: Type0 ->\n      #v_F: Type0 ->\n      {| i1: 
Core_models.Ops.Function.t_FnOnce v_F (v_B & (_super_i0).f_Item) |} ->\n      #_: unit{i1.Core_models.Ops.Function.f_Output == v_B} ->\n      v_Self ->\n      v_B ->\n      v_F\n    -> Type0;\n  f_fold_post:\n      #v_B: Type0 ->\n      #v_F: Type0 ->\n      {| i1: Core_models.Ops.Function.t_FnOnce v_F (v_B & (_super_i0).f_Item) |} ->\n      #_: unit{i1.Core_models.Ops.Function.f_Output == v_B} ->\n      v_Self ->\n      v_B ->\n      v_F ->\n      v_B\n    -> Type0;\n  f_fold:\n      #v_B: Type0 ->\n      #v_F: Type0 ->\n      {| i1: Core_models.Ops.Function.t_FnOnce v_F (v_B & (_super_i0).f_Item) |} ->\n      #_: unit{i1.Core_models.Ops.Function.f_Output == v_B} ->\n      x0: v_Self ->\n      x1: v_B ->\n      x2: v_F\n    -> Prims.Pure v_B\n        (f_fold_pre #v_B #v_F #i1 #_ x0 x1 x2)\n        (fun result -> f_fold_post #v_B #v_F #i1 #_ x0 x1 x2 result);\n  f_enumerate_pre:v_Self -> Type0;\n  f_enumerate_post:v_Self -> t_Enumerate v_Self -> Type0;\n  f_enumerate:x0: v_Self\n    -> Prims.Pure (t_Enumerate v_Self)\n        (f_enumerate_pre x0)\n        (fun result -> f_enumerate_post x0 result);\n  f_step_by_pre:v_Self -> usize -> Type0;\n  f_step_by_post:v_Self -> usize -> t_StepBy v_Self -> Type0;\n  f_step_by:x0: v_Self -> x1: usize\n    -> Prims.Pure (t_StepBy v_Self)\n        (f_step_by_pre x0 x1)\n        (fun result -> f_step_by_post x0 x1 result);\n  f_map_pre:\n      #v_O: Type0 ->\n      #v_F: Type0 ->\n      {| i1: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} ->\n      #_: unit{i1.Core_models.Ops.Function.f_Output == v_O} ->\n      v_Self ->\n      v_F\n    -> Type0;\n  f_map_post:\n      #v_O: Type0 ->\n      #v_F: Type0 ->\n      {| i1: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} ->\n      #_: unit{i1.Core_models.Ops.Function.f_Output == v_O} ->\n      v_Self ->\n      v_F ->\n      t_Map v_Self v_F\n    -> Type0;\n  f_map:\n      #v_O: Type0 ->\n      #v_F: Type0 ->\n      {| i1: Core_models.Ops.Function.t_FnOnce v_F 
(_super_i0).f_Item |} ->\n      #_: unit{i1.Core_models.Ops.Function.f_Output == v_O} ->\n      x0: v_Self ->\n      x1: v_F\n    -> Prims.Pure (t_Map v_Self v_F)\n        (f_map_pre #v_O #v_F #i1 #_ x0 x1)\n        (fun result -> f_map_post #v_O #v_F #i1 #_ x0 x1 result);\n  f_all_pre:\n      #v_F: Type0 ->\n      {| i1: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} ->\n      #_: unit{i1.Core_models.Ops.Function.f_Output == bool} ->\n      v_Self ->\n      v_F\n    -> Type0;\n  f_all_post:\n      #v_F: Type0 ->\n      {| i1: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} ->\n      #_: unit{i1.Core_models.Ops.Function.f_Output == bool} ->\n      v_Self ->\n      v_F ->\n      bool\n    -> Type0;\n  f_all:\n      #v_F: Type0 ->\n      {| i1: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} ->\n      #_: unit{i1.Core_models.Ops.Function.f_Output == bool} ->\n      x0: v_Self ->\n      x1: v_F\n    -> Prims.Pure bool\n        (f_all_pre #v_F #i1 #_ x0 x1)\n        (fun result -> f_all_post #v_F #i1 #_ x0 x1 result);\n  f_take_pre:v_Self -> usize -> Type0;\n  f_take_post:v_Self -> usize -> t_Take v_Self -> Type0;\n  f_take:x0: v_Self -> x1: usize\n    -> Prims.Pure (t_Take v_Self) (f_take_pre x0 x1) (fun result -> f_take_post x0 x1 result);\n  f_flat_map_pre:\n      #v_U: Type0 ->\n      #v_F: Type0 ->\n      {| i1: t_Iterator v_U |} ->\n      {| i2: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} ->\n      #_: unit{i2.Core_models.Ops.Function.f_Output == v_U} ->\n      v_Self ->\n      v_F\n    -> Type0;\n  f_flat_map_post:\n      #v_U: Type0 ->\n      #v_F: Type0 ->\n      {| i1: t_Iterator v_U |} ->\n      {| i2: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} ->\n      #_: unit{i2.Core_models.Ops.Function.f_Output == v_U} ->\n      v_Self ->\n      v_F ->\n      t_FlatMap v_Self v_U v_F\n    -> Type0;\n  f_flat_map:\n      #v_U: Type0 ->\n      #v_F: Type0 ->\n      {| i1: t_Iterator v_U |} ->\n      {| 
i2: Core_models.Ops.Function.t_FnOnce v_F (_super_i0).f_Item |} ->\n      #_: unit{i2.Core_models.Ops.Function.f_Output == v_U} ->\n      x0: v_Self ->\n      x1: v_F\n    -> Prims.Pure (t_FlatMap v_Self v_U v_F)\n        (f_flat_map_pre #v_U #v_F #i1 #i2 #_ x0 x1)\n        (fun result -> f_flat_map_post #v_U #v_F #i1 #i2 #_ x0 x1 result);\n  f_flatten_pre:{| i1: t_Iterator (_super_i0).f_Item |} -> v_Self -> Type0;\n  f_flatten_post:{| i1: t_Iterator (_super_i0).f_Item |} -> v_Self -> t_Flatten v_Self -> Type0;\n  f_flatten:{| i1: t_Iterator (_super_i0).f_Item |} -> x0: v_Self\n    -> Prims.Pure (t_Flatten v_Self)\n        (f_flatten_pre #i1 x0)\n        (fun result -> f_flatten_post #i1 x0 result);\n  f_zip_pre:#v_I2: Type0 -> {| i1: t_Iterator v_I2 |} -> v_Self -> v_I2 -> Type0;\n  f_zip_post:#v_I2: Type0 -> {| i1: t_Iterator v_I2 |} -> v_Self -> v_I2 -> t_Zip v_Self v_I2\n    -> Type0;\n  f_zip:#v_I2: Type0 -> {| i1: t_Iterator v_I2 |} -> x0: v_Self -> x1: v_I2\n    -> Prims.Pure (t_Zip v_Self v_I2)\n        (f_zip_pre #v_I2 #i1 x0 x1)\n        (fun result -> f_zip_post #v_I2 #i1 x0 x1 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) {|i: t_IteratorMethods v_Self|} -> i._super_i0\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl (#v_I: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)\n    : t_IteratorMethods v_I =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_fold_pre\n    =\n    (fun\n        (#v_B: Type0)\n        (#v_F: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_FnOnce v_F (v_B & i0.f_Item))\n        (self: v_I)\n        (init: v_B)\n        (f: v_F)\n        ->\n        true);\n    f_fold_post\n    =\n    (fun\n        (#v_B: Type0)\n        (#v_F: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_FnOnce v_F (v_B & i0.f_Item))\n        (self: v_I)\n      
  (init: v_B)\n        (f: v_F)\n        (out: v_B)\n        ->\n        true);\n    f_fold\n    =\n    (fun\n        (#v_B: Type0)\n        (#v_F: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_FnOnce v_F (v_B & i0.f_Item))\n        (self: v_I)\n        (init: v_B)\n        (f: v_F)\n        ->\n        init);\n    f_enumerate_pre = (fun (self: v_I) -> true);\n    f_enumerate_post = (fun (self: v_I) (out: t_Enumerate v_I) -> true);\n    f_enumerate = (fun (self: v_I) -> impl__new #v_I self);\n    f_step_by_pre = (fun (self: v_I) (step: usize) -> true);\n    f_step_by_post = (fun (self: v_I) (step: usize) (out: t_StepBy v_I) -> true);\n    f_step_by = (fun (self: v_I) (step: usize) -> impl__new__from__step_by #v_I self step);\n    f_map_pre\n    =\n    (fun\n        (#v_O: Type0)\n        (#v_F: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)\n        (self: v_I)\n        (f: v_F)\n        ->\n        true);\n    f_map_post\n    =\n    (fun\n        (#v_O: Type0)\n        (#v_F: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)\n        (self: v_I)\n        (f: v_F)\n        (out: t_Map v_I v_F)\n        ->\n        true);\n    f_map\n    =\n    (fun\n        (#v_O: Type0)\n        (#v_F: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)\n        (self: v_I)\n        (f: v_F)\n        ->\n        impl__new__from__map #v_I #v_F self f);\n    f_all_pre\n    =\n    (fun\n        (#v_F: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)\n        (self: v_I)\n        (f: v_F)\n        ->\n        true);\n    f_all_post\n    =\n    (fun\n        (#v_F: Type0)\n        
(#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)\n        (self: v_I)\n        (f: v_F)\n        (out: bool)\n        ->\n        true);\n    f_all\n    =\n    (fun\n        (#v_F: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)\n        (self: v_I)\n        (f: v_F)\n        ->\n        true);\n    f_take_pre = (fun (self: v_I) (n: usize) -> true);\n    f_take_post = (fun (self: v_I) (n: usize) (out: t_Take v_I) -> true);\n    f_take = (fun (self: v_I) (n: usize) -> impl__new__from__take #v_I self n);\n    f_flat_map_pre\n    =\n    (fun\n        (#v_U: Type0)\n        (#v_F: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_U)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i2:\n          Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)\n        (self: v_I)\n        (f: v_F)\n        ->\n        true);\n    f_flat_map_post\n    =\n    (fun\n        (#v_U: Type0)\n        (#v_F: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_U)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i2:\n          Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)\n        (self: v_I)\n        (f: v_F)\n        (out: t_FlatMap v_I v_U v_F)\n        ->\n        true);\n    f_flat_map\n    =\n    (fun\n        (#v_U: Type0)\n        (#v_F: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_U)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i2:\n          Core_models.Ops.Function.t_FnOnce v_F i0.f_Item)\n        (self: v_I)\n        (f: v_F)\n        ->\n        impl__new__from__flat_map #v_I #v_U #v_F self f);\n    f_flatten_pre\n    =\n    (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator i0.f_Item) (self: v_I) -> true);\n    f_flatten_post\n    =\n    (fun\n        (#[FStar.Tactics.Typeclasses.tcresolve 
()] i1: t_Iterator i0.f_Item)\n        (self: v_I)\n        (out: t_Flatten v_I)\n        ->\n        true);\n    f_flatten\n    =\n    (fun (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator i0.f_Item) (self: v_I) ->\n        impl__new__from__flatten #v_I self);\n    f_zip_pre\n    =\n    (fun\n        (#v_I2: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_I2)\n        (self: v_I)\n        (it2: v_I2)\n        ->\n        true);\n    f_zip_post\n    =\n    (fun\n        (#v_I2: Type0)\n        (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_I2)\n        (self: v_I)\n        (it2: v_I2)\n        (out: t_Zip v_I v_I2)\n        ->\n        true);\n    f_zip\n    =\n    fun\n      (#v_I2: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_Iterator v_I2)\n      (self: v_I)\n      (it2: v_I2)\n      ->\n      impl__new__from__zip #v_I #v_I2 self it2\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1__from__iterator\n      (#v_I: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Iterator v_I)\n    : Core_models.Iter.Traits.Collect.t_IntoIterator v_I =\n  {\n    f_IntoIter = v_I;\n    f_into_iter_pre = (fun (self: v_I) -> true);\n    f_into_iter_post = (fun (self: v_I) (out: v_I) -> true);\n    f_into_iter = fun (self: v_I) -> self\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Sources.Repeat_with.fsti",
    "content": "module Core_models.Iter.Sources.Repeat_with\n\nval t_RepeatWith: Type0 -> Type0\n\nval repeat_with #t (y: Prims.unit -> t): t_RepeatWith (Prims.unit -> t)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Traits.Collect.fst",
    "content": "module Core_models.Iter.Traits.Collect\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_IntoIterator (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_IntoIter:Type0;\n  f_into_iter_pre:v_Self -> Type0;\n  f_into_iter_post:v_Self -> f_IntoIter -> Type0;\n  f_into_iter:x0: v_Self\n    -> Prims.Pure f_IntoIter (f_into_iter_pre x0) (fun result -> f_into_iter_post x0 result)\n}\n\nclass t_FromIterator (v_Self: Type0) (v_A: Type0) = {\n  f_from_iter_pre:#v_T: Type0 -> {| i1: t_IntoIterator v_T |} -> iter: v_T\n    -> pred: Type0{true ==> pred};\n  f_from_iter_post:#v_T: Type0 -> {| i1: t_IntoIterator v_T |} -> v_T -> v_Self -> Type0;\n  f_from_iter:#v_T: Type0 -> {| i1: t_IntoIterator v_T |} -> x0: v_T\n    -> Prims.Pure v_Self\n        (f_from_iter_pre #v_T #i1 x0)\n        (fun result -> f_from_iter_post #v_T #i1 x0 result)\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Traits.Iterator.fst",
    "content": "module Core_models.Iter.Traits.Iterator\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ninclude Core_models.Iter.Bundle {t_Iterator as t_Iterator}\n\ninclude Core_models.Iter.Bundle {f_Item as f_Item}\n\ninclude Core_models.Iter.Bundle {f_next_pre as f_next_pre}\n\ninclude Core_models.Iter.Bundle {f_next_post as f_next_post}\n\ninclude Core_models.Iter.Bundle {f_next as f_next}\n\ninclude Core_models.Iter.Bundle {t_IteratorMethods as t_IteratorMethods}\n\ninclude Core_models.Iter.Bundle {f_fold_pre as f_fold_pre}\n\ninclude Core_models.Iter.Bundle {f_fold_post as f_fold_post}\n\ninclude Core_models.Iter.Bundle {f_fold as f_fold}\n\ninclude Core_models.Iter.Bundle {f_enumerate_pre as f_enumerate_pre}\n\ninclude Core_models.Iter.Bundle {f_enumerate_post as f_enumerate_post}\n\ninclude Core_models.Iter.Bundle {f_enumerate as f_enumerate}\n\ninclude Core_models.Iter.Bundle {f_step_by_pre as f_step_by_pre}\n\ninclude Core_models.Iter.Bundle {f_step_by_post as f_step_by_post}\n\ninclude Core_models.Iter.Bundle {f_step_by as f_step_by}\n\ninclude Core_models.Iter.Bundle {f_map_pre as f_map_pre}\n\ninclude Core_models.Iter.Bundle {f_map_post as f_map_post}\n\ninclude Core_models.Iter.Bundle {f_map as f_map}\n\ninclude Core_models.Iter.Bundle {f_all_pre as f_all_pre}\n\ninclude Core_models.Iter.Bundle {f_all_post as f_all_post}\n\ninclude Core_models.Iter.Bundle {f_all as f_all}\n\ninclude Core_models.Iter.Bundle {f_take_pre as f_take_pre}\n\ninclude Core_models.Iter.Bundle {f_take_post as f_take_post}\n\ninclude Core_models.Iter.Bundle {f_take as f_take}\n\ninclude Core_models.Iter.Bundle {f_flat_map_pre as f_flat_map_pre}\n\ninclude Core_models.Iter.Bundle {f_flat_map_post as f_flat_map_post}\n\ninclude Core_models.Iter.Bundle {f_flat_map as f_flat_map}\n\ninclude Core_models.Iter.Bundle {f_flatten_pre as f_flatten_pre}\n\ninclude Core_models.Iter.Bundle {f_flatten_post as f_flatten_post}\n\ninclude 
Core_models.Iter.Bundle {f_flatten as f_flatten}\n\ninclude Core_models.Iter.Bundle {f_zip_pre as f_zip_pre}\n\ninclude Core_models.Iter.Bundle {f_zip_post as f_zip_post}\n\ninclude Core_models.Iter.Bundle {f_zip as f_zip}\n\ninclude Core_models.Iter.Bundle {impl as impl}\n\ninclude Core_models.Iter.Bundle {impl_1__from__iterator as impl_1}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Iter.Traits.fst",
    "content": "module Core_models.Iter.Traits\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Iterator (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Item:Type0;\n  f_next_pre:v_Self -> Type0;\n  f_next_post:v_Self -> (v_Self & Core_models.Option.t_Option f_Item) -> Type0;\n  f_next:x0: v_Self\n    -> Prims.Pure (v_Self & Core_models.Option.t_Option f_Item)\n        (f_next_pre x0)\n        (fun result -> f_next_post x0 result)\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Marker.fst",
    "content": "module Core_models.Marker\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Copy (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:Core_models.Clone.t_Clone v_Self\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) {|i: t_Copy v_Self|} -> i._super_i0\n\nclass t_Send (v_Self: Type0) = { __marker_trait_t_Send:Prims.unit }\n\nclass t_Sync (v_Self: Type0) = { __marker_trait_t_Sync:Prims.unit }\n\nclass t_Sized (v_Self: Type0) = { __marker_trait_t_Sized:Prims.unit }\n\nclass t_StructuralPartialEq (v_Self: Type0) = { __marker_trait_t_StructuralPartialEq:Prims.unit }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl (#v_T: Type0) : t_Send v_T = { __marker_trait_t_Send = () }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1 (#v_T: Type0) : t_Sync v_T = { __marker_trait_t_Sync = () }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_2 (#v_T: Type0) : t_Sized v_T = { __marker_trait_t_Sized = () }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_3\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Clone.t_Clone v_T)\n    : t_Copy v_T = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\ntype t_PhantomData (v_T: Type0) = | PhantomData : t_PhantomData v_T\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Mem.Manually_drop.fsti",
    "content": "module Core_models.Mem.Manually_drop\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_ManuallyDrop (v_T: Type0) = { f_value:v_T }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Mem.Maybe_uninit.fsti",
    "content": "module Core_models.Mem.Maybe_uninit\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen Core_models\nopen FStar.Mul\n\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i1: Core_models.Marker.t_Copy v_T |}\n    : Core_models.Clone.t_Clone (t_MaybeUninit v_T)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_9 (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i1: Core_models.Marker.t_Copy v_T |}\n    : Core_models.Marker.t_Copy (t_MaybeUninit v_T)\n\nval f_clone__impl__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_1 (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} : Core_models.Fmt.t_Debug (t_MaybeUninit v_T)\n\nval f_fmt__impl_1__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__new (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (v_val: v_T)\n    : Prims.Pure (t_MaybeUninit v_T) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__uninit: #v_T: Type0 -> {| i0: Core_models.Marker.t_Sized v_T |} -> Prims.unit\n  -> Prims.Pure (t_MaybeUninit v_T) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__zeroed: #v_T: Type0 -> {| i0: Core_models.Marker.t_Sized v_T |} -> Prims.unit\n  -> Prims.Pure (t_MaybeUninit v_T) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__write\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (self: t_MaybeUninit v_T)\n      (v_val: v_T)\n    : Prims.Pure (t_MaybeUninit v_T & Rust_primitives.Hax.t_MutRef v_T)\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__as_ptr (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T)\n    : Prims.Pure Rust_primitives.Hax.failure Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__as_mut_ptr (#v_T: Type0) {| i0: Core_models.Marker.t_Sized 
v_T |} (self: t_MaybeUninit v_T)\n    : Prims.Pure (t_MaybeUninit v_T & Rust_primitives.Hax.failure)\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__assume_init (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T)\n    : Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__assume_init_read\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (self: t_MaybeUninit v_T)\n    : Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__assume_init_drop\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (self: t_MaybeUninit v_T)\n    : Prims.Pure (t_MaybeUninit v_T) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__assume_init_ref\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (self: t_MaybeUninit v_T)\n    : Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__assume_init_mut\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (self: t_MaybeUninit v_T)\n    : Prims.Pure (t_MaybeUninit v_T & Rust_primitives.Hax.t_MutRef v_T)\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__array_assume_init\n      (#v_T: Type0)\n      (v_N: usize)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (array: t_Array (t_MaybeUninit v_T) v_N)\n    : Prims.Pure (t_Array v_T v_N) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__as_bytes (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T)\n    : Prims.Pure (t_Slice (t_MaybeUninit u8)) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__as_bytes_mut (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} (self: t_MaybeUninit v_T)\n    : Prims.Pure (t_MaybeUninit v_T & Rust_primitives.Hax.t_MutRef (t_Slice (t_MaybeUninit u8)))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__slice_assume_init_ref\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (slice: t_Slice 
(t_MaybeUninit v_T))\n    : Prims.Pure (t_Slice v_T) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__slice_assume_init_mut\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (slice: t_Slice (t_MaybeUninit v_T))\n    : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__slice_as_ptr\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (this: t_Slice (t_MaybeUninit v_T))\n    : Prims.Pure Rust_primitives.Hax.failure Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__slice_as_mut_ptr\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (this: t_Slice (t_MaybeUninit v_T))\n    : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.failure)\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__copy_from_slice\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      {| i1: Core_models.Marker.t_Copy v_T |}\n      (this: t_Slice (t_MaybeUninit v_T))\n      (src: t_Slice v_T)\n    : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__clone_from_slice\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      {| i2: Core_models.Clone.t_Clone v_T |}\n      (this: t_Slice (t_MaybeUninit v_T))\n      (src: t_Slice v_T)\n    : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__fill\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      {| i2: Core_models.Clone.t_Clone v_T |}\n      (this: t_Slice (t_MaybeUninit v_T))\n      (value: v_T)\n    : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__fill_with\n      (#v_T #v_F: Type0)\n    
  {| i0: Core_models.Marker.t_Sized v_T |}\n      {| i3: Core_models.Marker.t_Sized v_F |}\n      {| i4: Core_models.Ops.Function.t_FnMut v_F Prims.unit |}\n      (this: t_Slice (t_MaybeUninit v_T))\n      (f: v_F)\n    : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__fill_from\n      (#v_T #v_I: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      {| i5: Core_models.Marker.t_Sized v_I |}\n      {| i6: Core_models.Iter.Traits.Collect.t_IntoIterator v_I |}\n      (this: t_Slice (t_MaybeUninit v_T))\n      (it: v_I)\n    : Prims.Pure\n      (t_Slice (t_MaybeUninit v_T) &\n        (Rust_primitives.Hax.t_MutRef (t_Slice v_T) &\n          Rust_primitives.Hax.t_MutRef (t_Slice (t_MaybeUninit v_T))))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__slice_as_bytes\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (this: t_Slice (t_MaybeUninit v_T))\n    : Prims.Pure (t_Slice (t_MaybeUninit u8)) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__slice_as_bytes_mut\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (this: t_Slice (t_MaybeUninit v_T))\n    : Prims.Pure\n      (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice (t_MaybeUninit u8)))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__assume_init_drop__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__copy_from_slice__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__clone_from_slice__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__fill__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval 
impl_2__fill_with__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__fill_from__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__slice_as_bytes__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__slice_as_bytes_mut__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_3__write_copy_of_slice\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      {| i1: Core_models.Marker.t_Copy v_T |}\n      (self: t_Slice (t_MaybeUninit v_T))\n      (src: t_Slice v_T)\n    : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_3__write_clone_of_slice\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      {| i2: Core_models.Clone.t_Clone v_T |}\n      (self: t_Slice (t_MaybeUninit v_T))\n      (src: t_Slice v_T)\n    : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_3__write_filled\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      {| i2: Core_models.Clone.t_Clone v_T |}\n      (self: t_Slice (t_MaybeUninit v_T))\n      (value: v_T)\n    : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_3__write_with\n      (#v_T #v_F: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      {| i3: Core_models.Marker.t_Sized v_F |}\n      {| i4: Core_models.Ops.Function.t_FnMut v_F usize |}\n      (self: t_Slice (t_MaybeUninit v_T))\n      (f: v_F)\n    : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef 
(t_Slice v_T))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_3__write_iter\n      (#v_T #v_I: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      {| i5: Core_models.Marker.t_Sized v_I |}\n      {| i6: Core_models.Iter.Traits.Collect.t_IntoIterator v_I |}\n      (self: t_Slice (t_MaybeUninit v_T))\n      (it: v_I)\n    : Prims.Pure\n      (t_Slice (t_MaybeUninit v_T) &\n        (Rust_primitives.Hax.t_MutRef (t_Slice v_T) &\n          Rust_primitives.Hax.t_MutRef (t_Slice (t_MaybeUninit v_T))))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_3__as_bytes\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (self: t_Slice (t_MaybeUninit v_T))\n    : Prims.Pure (t_Slice (t_MaybeUninit u8)) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_3__as_bytes_mut\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (self: t_Slice (t_MaybeUninit v_T))\n    : Prims.Pure\n      (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice (t_MaybeUninit u8)))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_3__assume_init_drop\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (self: t_Slice (t_MaybeUninit v_T))\n    : Prims.Pure (t_Slice (t_MaybeUninit v_T)) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_3__assume_init_ref\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (self: t_Slice (t_MaybeUninit v_T))\n    : Prims.Pure (t_Slice v_T) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_3__assume_init_mut\n      (#v_T: Type0)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (self: t_Slice (t_MaybeUninit v_T))\n    : Prims.Pure (t_Slice (t_MaybeUninit v_T) & Rust_primitives.Hax.t_MutRef (t_Slice v_T))\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_4__transpose\n      (#v_T: Type0)\n      (v_N: usize)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (self: t_MaybeUninit (t_Array v_T v_N))\n    : 
Prims.Pure (t_Array (t_MaybeUninit v_T) v_N) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_5__transpose\n      (#v_T: Type0)\n      (v_N: usize)\n      {| i0: Core_models.Marker.t_Sized v_T |}\n      (self: t_Array (t_MaybeUninit v_T) v_N)\n    : Prims.Pure (t_MaybeUninit (t_Array v_T v_N)) Prims.l_True (fun _ -> Prims.l_True)\n\ntype t_Guard (v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} = {\n  f_slice:Rust_primitives.Hax.t_MutRef (t_Slice (t_MaybeUninit v_T));\n  f_initialized:usize\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_6 (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} : Core_models.Ops.Drop.t_Drop (t_Guard v_T)\n\nval f_drop__impl_6__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nclass t_SpecFill (v_Self: Type0) (v_T: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_15671470021555116719:Core_models.Marker.t_Sized v_T;\n  f_spec_fill_pre:v_Self -> v_T -> Type0;\n  f_spec_fill_post:v_Self -> v_T -> v_Self -> Type0;\n  f_spec_fill:x0: v_Self -> x1: v_T\n    -> Prims.Pure v_Self (f_spec_fill_pre x0 x1) (fun result -> f_spec_fill_post x0 x1 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_7 (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i1: Core_models.Clone.t_Clone v_T |}\n    : t_SpecFill (t_Slice (t_MaybeUninit v_T)) v_T\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_8 (#v_T: Type0) {| i0: Core_models.Marker.t_Sized v_T |} {| i1: Core_models.Marker.t_Copy v_T |}\n    : t_SpecFill (t_Slice (t_MaybeUninit v_T)) v_T\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Mem.Transmutability.fsti",
    "content": "module Core_models.Mem.Transmutability\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen Core_models\nopen FStar.Mul\n\n\nval f_transmute__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\ntype t_Assume = {\n  f_alignment:bool;\n  f_lifetimes:bool;\n  f_safety:bool;\n  f_validity:bool\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_5:Core_models.Marker.t_StructuralPartialEq t_Assume\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_6:Core_models.Cmp.t_PartialEq t_Assume t_Assume\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_7:Core_models.Cmp.t_Eq t_Assume\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_8:Core_models.Clone.t_Clone t_Assume\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_9:Core_models.Marker.t_Copy t_Assume\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_10:Core_models.Fmt.t_Debug t_Assume\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_1:Core_models.Marker.t_UnsizedConstParamTy t_Assume\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl:Core_models.Marker.t_ConstParamTy_ t_Assume\n\nlet impl_Assume__NOTHING: t_Assume = () <: t_Assume\n\nlet impl_Assume__ALIGNMENT: t_Assume = () <: t_Assume\n\nlet impl_Assume__LIFETIMES: t_Assume = () <: t_Assume\n\nlet impl_Assume__SAFETY: t_Assume = () <: t_Assume\n\nlet impl_Assume__VALIDITY: t_Assume = () <: t_Assume\n\nval impl_Assume__and (self other_assumptions: t_Assume)\n    : Prims.Pure t_Assume Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Assume__but_not (self other_assumptions: t_Assume)\n    : Prims.Pure t_Assume Prims.l_True (fun _ -> Prims.l_True)\n\nval f_add__impl_3__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval f_sub__impl_4__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nlet impl_3: Core_models.Ops.Arith.t_Add t_Assume t_Assume =\n  {\n    f_Output = t_Assume;\n    f_Output_11695847888444666345 = FStar.Tactics.Typeclasses.solve;\n    f_add_pre = (fun (self: t_Assume) (other_assumptions: t_Assume) -> true);\n    f_add_post = (fun (self: t_Assume) (other_assumptions: t_Assume) (out: t_Assume) -> true);\n    f_add = fun (self: t_Assume) (other_assumptions: t_Assume) -> () <: t_Assume\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_4: Core_models.Ops.Arith.t_Sub t_Assume t_Assume =\n  {\n    f_Output = t_Assume;\n    f_Output_9381071510542709353 = FStar.Tactics.Typeclasses.solve;\n    f_sub_pre = (fun (self: t_Assume) (other_assumptions: t_Assume) -> true);\n    f_sub_post = (fun (self: t_Assume) (other_assumptions: t_Assume) (out: t_Assume) -> true);\n    f_sub = fun (self: t_Assume) (other_assumptions: t_Assume) -> () <: t_Assume\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Mem.fsti",
    "content": "module Core_models.Mem\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nval forget (#v_T: Type0) (t: v_T) : Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True)\n\nval forget_unsized (#v_T: Type0) (t: v_T)\n    : Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True)\n\nval size_of: #v_T: Type0 -> Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)\n\nval size_of_val (#v_T: Type0) (v_val: v_T) : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)\n\nval min_align_of: #v_T: Type0 -> Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)\n\nval min_align_of_val (#v_T: Type0) (v_val: v_T)\n    : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)\n\nval align_of: #v_T: Type0 -> Prims.unit -> Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)\n\nval align_of_val (#v_T: Type0) (v_val: v_T) : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)\n\nval align_of_val_raw (#v_T: Type0) (v_val: v_T)\n    : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)\n\nval needs_drop: #v_T: Type0 -> Prims.unit -> Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True)\n\nval uninitialized: #v_T: Type0 -> Prims.unit -> Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True)\n\nval swap (#v_T: Type0) (x y: v_T) : Prims.Pure (v_T & v_T) Prims.l_True (fun _ -> Prims.l_True)\n\nval replace (#v_T: Type0) (dest src: v_T)\n    : Prims.Pure (v_T & v_T) Prims.l_True (fun _ -> Prims.l_True)\n\nval drop (#v_T: Type0) (e_x: v_T) : Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True)\n\nval copy (#v_T: Type0) {| i0: Core_models.Marker.t_Copy v_T |} (x: v_T)\n    : Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True)\n\nval take (#v_T: Type0) (x: v_T) : Prims.Pure (v_T & v_T) Prims.l_True (fun _ -> Prims.l_True)\n\nval transmute_copy (#v_Src #v_Dst: Type0) (src: v_Src)\n    : Prims.Pure v_Dst Prims.l_True (fun _ -> Prims.l_True)\n\nval variant_count: #v_T: Type0 -> Prims.unit\n  -> Prims.Pure 
usize Prims.l_True (fun _ -> Prims.l_True)\n\nval zeroed: #v_T: Type0 -> Prims.unit -> Prims.Pure v_T Prims.l_True (fun _ -> Prims.l_True)\n\nval transmute (#v_Src #v_Dst: Type0) (src: v_Src)\n    : Prims.Pure v_Dst Prims.l_True (fun _ -> Prims.l_True)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Num.Error.fsti",
    "content": "module Core_models.Num.Error\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_TryFromIntError = | TryFromIntError : Prims.unit -> t_TryFromIntError\n\ntype t_IntErrorKind = | IntErrorKind : t_IntErrorKind\n\ntype t_ParseIntError = { f_kind:t_IntErrorKind }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Num.Niche_types.fsti",
    "content": "module Core_models.Num.Niche_types\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen Core_models\nopen FStar.Mul\n\ntype t_Nanoseconds = | Nanoseconds : u32 -> t_Nanoseconds\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_13:Core_models.Clone.t_Clone t_Nanoseconds\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_14:Core_models.Marker.t_Copy t_Nanoseconds\n\nval e_: Prims.unit \n\nval impl_Nanoseconds__new (v_val: u32)\n    : Prims.Pure (Core_models.Option.t_Option t_Nanoseconds) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Nanoseconds__new_unchecked (v_val: u32)\n    : Prims.Pure t_Nanoseconds Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Nanoseconds__as_inner (self: t_Nanoseconds)\n    : Prims.Pure u32 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_7:Core_models.Marker.t_StructuralPartialEq t_Nanoseconds\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_8:Core_models.Cmp.t_PartialEq t_Nanoseconds t_Nanoseconds\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_15:Core_models.Cmp.t_Eq t_Nanoseconds\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_10:Core_models.Cmp.t_PartialOrd t_Nanoseconds t_Nanoseconds\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_9:Core_models.Cmp.t_Ord t_Nanoseconds\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_11:Core_models.Hash.t_Hash t_Nanoseconds\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_12:Core_models.Fmt.t_Debug t_Nanoseconds\n\nval impl_Nanoseconds__ZERO: t_Nanoseconds \n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_1:Core_models.Default.t_Default t_Nanoseconds\n\nval f_default__impl_1__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\ntype t_NonZeroU8Inner = | NonZeroU8Inner : u8 -> t_NonZeroU8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_86:Core_models.Clone.t_Clone t_NonZeroU8Inner\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nval impl_87:Core_models.Marker.t_Copy t_NonZeroU8Inner\n\nval e_ee_1: Prims.unit\n\nval impl_NonZeroU8Inner__new (v_val: u8)\n    : Prims.Pure (Core_models.Option.t_Option t_NonZeroU8Inner) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroU8Inner__new_unchecked (v_val: u8)\n    : Prims.Pure t_NonZeroU8Inner Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroU8Inner__as_inner (self: t_NonZeroU8Inner)\n    : Prims.Pure u8 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_17:Core_models.Marker.t_StructuralPartialEq t_NonZeroU8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_18:Core_models.Cmp.t_PartialEq t_NonZeroU8Inner t_NonZeroU8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_88:Core_models.Cmp.t_Eq t_NonZeroU8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_20:Core_models.Cmp.t_PartialOrd t_NonZeroU8Inner t_NonZeroU8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_19:Core_models.Cmp.t_Ord t_NonZeroU8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_21:Core_models.Hash.t_Hash t_NonZeroU8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_22:Core_models.Fmt.t_Debug t_NonZeroU8Inner\n\ntype t_NonZeroU16Inner = | NonZeroU16Inner : u16 -> t_NonZeroU16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_89:Core_models.Clone.t_Clone t_NonZeroU16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_90:Core_models.Marker.t_Copy t_NonZeroU16Inner\n\nval e_ee_2: Prims.unit\n\nval impl_NonZeroU16Inner__new (v_val: u16)\n    : Prims.Pure (Core_models.Option.t_Option t_NonZeroU16Inner) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroU16Inner__new_unchecked (v_val: u16)\n    : Prims.Pure t_NonZeroU16Inner Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroU16Inner__as_inner (self: t_NonZeroU16Inner)\n    : Prims.Pure u16 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nval impl_24:Core_models.Marker.t_StructuralPartialEq t_NonZeroU16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_25:Core_models.Cmp.t_PartialEq t_NonZeroU16Inner t_NonZeroU16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_91:Core_models.Cmp.t_Eq t_NonZeroU16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_27:Core_models.Cmp.t_PartialOrd t_NonZeroU16Inner t_NonZeroU16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_26:Core_models.Cmp.t_Ord t_NonZeroU16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_28:Core_models.Hash.t_Hash t_NonZeroU16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_29:Core_models.Fmt.t_Debug t_NonZeroU16Inner\n\ntype t_NonZeroU32Inner = | NonZeroU32Inner : u32 -> t_NonZeroU32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_92:Core_models.Clone.t_Clone t_NonZeroU32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_93:Core_models.Marker.t_Copy t_NonZeroU32Inner\n\nval e_ee_3: Prims.unit\n\nval impl_NonZeroU32Inner__new (v_val: u32)\n    : Prims.Pure (Core_models.Option.t_Option t_NonZeroU32Inner) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroU32Inner__new_unchecked (v_val: u32)\n    : Prims.Pure t_NonZeroU32Inner Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroU32Inner__as_inner (self: t_NonZeroU32Inner)\n    : Prims.Pure u32 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_31:Core_models.Marker.t_StructuralPartialEq t_NonZeroU32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_32:Core_models.Cmp.t_PartialEq t_NonZeroU32Inner t_NonZeroU32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_94:Core_models.Cmp.t_Eq t_NonZeroU32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_34:Core_models.Cmp.t_PartialOrd t_NonZeroU32Inner t_NonZeroU32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_33:Core_models.Cmp.t_Ord 
t_NonZeroU32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_35:Core_models.Hash.t_Hash t_NonZeroU32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_36:Core_models.Fmt.t_Debug t_NonZeroU32Inner\n\ntype t_NonZeroU64Inner = | NonZeroU64Inner : u64 -> t_NonZeroU64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_95:Core_models.Clone.t_Clone t_NonZeroU64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_96:Core_models.Marker.t_Copy t_NonZeroU64Inner\n\nval e_ee_4: Prims.unit\n\nval impl_NonZeroU64Inner__new (v_val: u64)\n    : Prims.Pure (Core_models.Option.t_Option t_NonZeroU64Inner) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroU64Inner__new_unchecked (v_val: u64)\n    : Prims.Pure t_NonZeroU64Inner Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroU64Inner__as_inner (self: t_NonZeroU64Inner)\n    : Prims.Pure u64 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_38:Core_models.Marker.t_StructuralPartialEq t_NonZeroU64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_39:Core_models.Cmp.t_PartialEq t_NonZeroU64Inner t_NonZeroU64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_97:Core_models.Cmp.t_Eq t_NonZeroU64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_41:Core_models.Cmp.t_PartialOrd t_NonZeroU64Inner t_NonZeroU64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_40:Core_models.Cmp.t_Ord t_NonZeroU64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_42:Core_models.Hash.t_Hash t_NonZeroU64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_43:Core_models.Fmt.t_Debug t_NonZeroU64Inner\n\ntype t_NonZeroU128Inner = | NonZeroU128Inner : u128 -> t_NonZeroU128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_98:Core_models.Clone.t_Clone t_NonZeroU128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_99:Core_models.Marker.t_Copy t_NonZeroU128Inner\n\nval e_ee_5: Prims.unit\n\nval 
impl_NonZeroU128Inner__new (v_val: u128)\n    : Prims.Pure (Core_models.Option.t_Option t_NonZeroU128Inner) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroU128Inner__new_unchecked (v_val: u128)\n    : Prims.Pure t_NonZeroU128Inner Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroU128Inner__as_inner (self: t_NonZeroU128Inner)\n    : Prims.Pure u128 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_45:Core_models.Marker.t_StructuralPartialEq t_NonZeroU128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_46:Core_models.Cmp.t_PartialEq t_NonZeroU128Inner t_NonZeroU128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_100:Core_models.Cmp.t_Eq t_NonZeroU128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_48:Core_models.Cmp.t_PartialOrd t_NonZeroU128Inner t_NonZeroU128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_47:Core_models.Cmp.t_Ord t_NonZeroU128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_49:Core_models.Hash.t_Hash t_NonZeroU128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_50:Core_models.Fmt.t_Debug t_NonZeroU128Inner\n\ntype t_NonZeroI8Inner = | NonZeroI8Inner : i8 -> t_NonZeroI8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_101:Core_models.Clone.t_Clone t_NonZeroI8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_102:Core_models.Marker.t_Copy t_NonZeroI8Inner\n\nval e_ee_6: Prims.unit\n\nval impl_NonZeroI8Inner__new (v_val: i8)\n    : Prims.Pure (Core_models.Option.t_Option t_NonZeroI8Inner) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroI8Inner__new_unchecked (v_val: i8)\n    : Prims.Pure t_NonZeroI8Inner Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroI8Inner__as_inner (self: t_NonZeroI8Inner)\n    : Prims.Pure i8 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_52:Core_models.Marker.t_StructuralPartialEq t_NonZeroI8Inner\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nval impl_53:Core_models.Cmp.t_PartialEq t_NonZeroI8Inner t_NonZeroI8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_103:Core_models.Cmp.t_Eq t_NonZeroI8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_55:Core_models.Cmp.t_PartialOrd t_NonZeroI8Inner t_NonZeroI8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_54:Core_models.Cmp.t_Ord t_NonZeroI8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_56:Core_models.Hash.t_Hash t_NonZeroI8Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_57:Core_models.Fmt.t_Debug t_NonZeroI8Inner\n\ntype t_NonZeroI16Inner = | NonZeroI16Inner : i16 -> t_NonZeroI16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_104:Core_models.Clone.t_Clone t_NonZeroI16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_105:Core_models.Marker.t_Copy t_NonZeroI16Inner\n\nval e_ee_7: Prims.unit\n\nval impl_NonZeroI16Inner__new (v_val: i16)\n    : Prims.Pure (Core_models.Option.t_Option t_NonZeroI16Inner) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroI16Inner__new_unchecked (v_val: i16)\n    : Prims.Pure t_NonZeroI16Inner Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroI16Inner__as_inner (self: t_NonZeroI16Inner)\n    : Prims.Pure i16 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_59:Core_models.Marker.t_StructuralPartialEq t_NonZeroI16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_60:Core_models.Cmp.t_PartialEq t_NonZeroI16Inner t_NonZeroI16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_106:Core_models.Cmp.t_Eq t_NonZeroI16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_62:Core_models.Cmp.t_PartialOrd t_NonZeroI16Inner t_NonZeroI16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_61:Core_models.Cmp.t_Ord t_NonZeroI16Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_63:Core_models.Hash.t_Hash t_NonZeroI16Inner\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nval impl_64:Core_models.Fmt.t_Debug t_NonZeroI16Inner\n\ntype t_NonZeroI32Inner = | NonZeroI32Inner : i32 -> t_NonZeroI32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_107:Core_models.Clone.t_Clone t_NonZeroI32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_108:Core_models.Marker.t_Copy t_NonZeroI32Inner\n\nval e_ee_8: Prims.unit\n\nval impl_NonZeroI32Inner__new (v_val: i32)\n    : Prims.Pure (Core_models.Option.t_Option t_NonZeroI32Inner) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroI32Inner__new_unchecked (v_val: i32)\n    : Prims.Pure t_NonZeroI32Inner Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroI32Inner__as_inner (self: t_NonZeroI32Inner)\n    : Prims.Pure i32 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_66:Core_models.Marker.t_StructuralPartialEq t_NonZeroI32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_67:Core_models.Cmp.t_PartialEq t_NonZeroI32Inner t_NonZeroI32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_109:Core_models.Cmp.t_Eq t_NonZeroI32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_69:Core_models.Cmp.t_PartialOrd t_NonZeroI32Inner t_NonZeroI32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_68:Core_models.Cmp.t_Ord t_NonZeroI32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_70:Core_models.Hash.t_Hash t_NonZeroI32Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_71:Core_models.Fmt.t_Debug t_NonZeroI32Inner\n\ntype t_NonZeroI64Inner = | NonZeroI64Inner : i64 -> t_NonZeroI64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_110:Core_models.Clone.t_Clone t_NonZeroI64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_111:Core_models.Marker.t_Copy t_NonZeroI64Inner\n\nval e_ee_9: Prims.unit\n\nval impl_NonZeroI64Inner__new (v_val: i64)\n    : Prims.Pure (Core_models.Option.t_Option t_NonZeroI64Inner) Prims.l_True (fun _ -> 
Prims.l_True)\n\nval impl_NonZeroI64Inner__new_unchecked (v_val: i64)\n    : Prims.Pure t_NonZeroI64Inner Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroI64Inner__as_inner (self: t_NonZeroI64Inner)\n    : Prims.Pure i64 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_73:Core_models.Marker.t_StructuralPartialEq t_NonZeroI64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_74:Core_models.Cmp.t_PartialEq t_NonZeroI64Inner t_NonZeroI64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_112:Core_models.Cmp.t_Eq t_NonZeroI64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_76:Core_models.Cmp.t_PartialOrd t_NonZeroI64Inner t_NonZeroI64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_75:Core_models.Cmp.t_Ord t_NonZeroI64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_77:Core_models.Hash.t_Hash t_NonZeroI64Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_78:Core_models.Fmt.t_Debug t_NonZeroI64Inner\n\ntype t_NonZeroI128Inner = | NonZeroI128Inner : i128 -> t_NonZeroI128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_113:Core_models.Clone.t_Clone t_NonZeroI128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_114:Core_models.Marker.t_Copy t_NonZeroI128Inner\n\nval e_ee_10: Prims.unit\n\nval impl_NonZeroI128Inner__new (v_val: i128)\n    : Prims.Pure (Core_models.Option.t_Option t_NonZeroI128Inner) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroI128Inner__new_unchecked (v_val: i128)\n    : Prims.Pure t_NonZeroI128Inner Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroI128Inner__as_inner (self: t_NonZeroI128Inner)\n    : Prims.Pure i128 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_80:Core_models.Marker.t_StructuralPartialEq t_NonZeroI128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_81:Core_models.Cmp.t_PartialEq t_NonZeroI128Inner t_NonZeroI128Inner\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nval impl_115:Core_models.Cmp.t_Eq t_NonZeroI128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_83:Core_models.Cmp.t_PartialOrd t_NonZeroI128Inner t_NonZeroI128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_82:Core_models.Cmp.t_Ord t_NonZeroI128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_84:Core_models.Hash.t_Hash t_NonZeroI128Inner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_85:Core_models.Fmt.t_Debug t_NonZeroI128Inner\n\ntype t_UsizeNoHighBit = | UsizeNoHighBit : usize -> t_UsizeNoHighBit\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_137:Core_models.Clone.t_Clone t_UsizeNoHighBit\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_138:Core_models.Marker.t_Copy t_UsizeNoHighBit\n\nval e_ee_11: Prims.unit\n\nval impl_UsizeNoHighBit__new (v_val: usize)\n    : Prims.Pure (Core_models.Option.t_Option t_UsizeNoHighBit) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_UsizeNoHighBit__new_unchecked (v_val: usize)\n    : Prims.Pure t_UsizeNoHighBit Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_UsizeNoHighBit__as_inner (self: t_UsizeNoHighBit)\n    : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_117:Core_models.Marker.t_StructuralPartialEq t_UsizeNoHighBit\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_118:Core_models.Cmp.t_PartialEq t_UsizeNoHighBit t_UsizeNoHighBit\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_139:Core_models.Cmp.t_Eq t_UsizeNoHighBit\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_120:Core_models.Cmp.t_PartialOrd t_UsizeNoHighBit t_UsizeNoHighBit\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_119:Core_models.Cmp.t_Ord t_UsizeNoHighBit\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_121:Core_models.Hash.t_Hash t_UsizeNoHighBit\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_122:Core_models.Fmt.t_Debug t_UsizeNoHighBit\n\ntype 
t_NonZeroUsizeInner = | NonZeroUsizeInner : usize -> t_NonZeroUsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_140:Core_models.Clone.t_Clone t_NonZeroUsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_141:Core_models.Marker.t_Copy t_NonZeroUsizeInner\n\nval e_ee_12: Prims.unit\n\nval impl_NonZeroUsizeInner__new (v_val: usize)\n    : Prims.Pure (Core_models.Option.t_Option t_NonZeroUsizeInner) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroUsizeInner__new_unchecked (v_val: usize)\n    : Prims.Pure t_NonZeroUsizeInner Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroUsizeInner__as_inner (self: t_NonZeroUsizeInner)\n    : Prims.Pure usize Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_124:Core_models.Marker.t_StructuralPartialEq t_NonZeroUsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_125:Core_models.Cmp.t_PartialEq t_NonZeroUsizeInner t_NonZeroUsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_142:Core_models.Cmp.t_Eq t_NonZeroUsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_127:Core_models.Cmp.t_PartialOrd t_NonZeroUsizeInner t_NonZeroUsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_126:Core_models.Cmp.t_Ord t_NonZeroUsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_128:Core_models.Hash.t_Hash t_NonZeroUsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_129:Core_models.Fmt.t_Debug t_NonZeroUsizeInner\n\ntype t_NonZeroIsizeInner = | NonZeroIsizeInner : isize -> t_NonZeroIsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_143:Core_models.Clone.t_Clone t_NonZeroIsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_144:Core_models.Marker.t_Copy t_NonZeroIsizeInner\n\nval e_ee_13: Prims.unit\n\nval impl_NonZeroIsizeInner__new (v_val: isize)\n    : Prims.Pure (Core_models.Option.t_Option t_NonZeroIsizeInner) Prims.l_True (fun _ -> Prims.l_True)\n\nval 
impl_NonZeroIsizeInner__new_unchecked (v_val: isize)\n    : Prims.Pure t_NonZeroIsizeInner Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_NonZeroIsizeInner__as_inner (self: t_NonZeroIsizeInner)\n    : Prims.Pure isize Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_131:Core_models.Marker.t_StructuralPartialEq t_NonZeroIsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_132:Core_models.Cmp.t_PartialEq t_NonZeroIsizeInner t_NonZeroIsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_145:Core_models.Cmp.t_Eq t_NonZeroIsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_134:Core_models.Cmp.t_PartialOrd t_NonZeroIsizeInner t_NonZeroIsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_133:Core_models.Cmp.t_Ord t_NonZeroIsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_135:Core_models.Hash.t_Hash t_NonZeroIsizeInner\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_136:Core_models.Fmt.t_Debug t_NonZeroIsizeInner\n\ntype t_U32NotAllOnes = | U32NotAllOnes : u32 -> t_U32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_174:Core_models.Clone.t_Clone t_U32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_175:Core_models.Marker.t_Copy t_U32NotAllOnes\n\nval e_ee_14: Prims.unit\n\nval impl_U32NotAllOnes__new (v_val: u32)\n    : Prims.Pure (Core_models.Option.t_Option t_U32NotAllOnes) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_U32NotAllOnes__new_unchecked (v_val: u32)\n    : Prims.Pure t_U32NotAllOnes Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_U32NotAllOnes__as_inner (self: t_U32NotAllOnes)\n    : Prims.Pure u32 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_147:Core_models.Marker.t_StructuralPartialEq t_U32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_148:Core_models.Cmp.t_PartialEq t_U32NotAllOnes t_U32NotAllOnes\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nval impl_176:Core_models.Cmp.t_Eq t_U32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_150:Core_models.Cmp.t_PartialOrd t_U32NotAllOnes t_U32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_149:Core_models.Cmp.t_Ord t_U32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_151:Core_models.Hash.t_Hash t_U32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_152:Core_models.Fmt.t_Debug t_U32NotAllOnes\n\ntype t_I32NotAllOnes = | I32NotAllOnes : i32 -> t_I32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_177:Core_models.Clone.t_Clone t_I32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_178:Core_models.Marker.t_Copy t_I32NotAllOnes\n\nval e_ee_15: Prims.unit\n\nval impl_I32NotAllOnes__new (v_val: i32)\n    : Prims.Pure (Core_models.Option.t_Option t_I32NotAllOnes) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_I32NotAllOnes__new_unchecked (v_val: i32)\n    : Prims.Pure t_I32NotAllOnes Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_I32NotAllOnes__as_inner (self: t_I32NotAllOnes)\n    : Prims.Pure i32 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_154:Core_models.Marker.t_StructuralPartialEq t_I32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_155:Core_models.Cmp.t_PartialEq t_I32NotAllOnes t_I32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_179:Core_models.Cmp.t_Eq t_I32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_157:Core_models.Cmp.t_PartialOrd t_I32NotAllOnes t_I32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_156:Core_models.Cmp.t_Ord t_I32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_158:Core_models.Hash.t_Hash t_I32NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_159:Core_models.Fmt.t_Debug t_I32NotAllOnes\n\ntype t_U64NotAllOnes = | U64NotAllOnes : u64 -> 
t_U64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_180:Core_models.Clone.t_Clone t_U64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_181:Core_models.Marker.t_Copy t_U64NotAllOnes\n\nval e_ee_16: Prims.unit\n\nval impl_U64NotAllOnes__new (v_val: u64)\n    : Prims.Pure (Core_models.Option.t_Option t_U64NotAllOnes) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_U64NotAllOnes__new_unchecked (v_val: u64)\n    : Prims.Pure t_U64NotAllOnes Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_U64NotAllOnes__as_inner (self: t_U64NotAllOnes)\n    : Prims.Pure u64 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_161:Core_models.Marker.t_StructuralPartialEq t_U64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_162:Core_models.Cmp.t_PartialEq t_U64NotAllOnes t_U64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_182:Core_models.Cmp.t_Eq t_U64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_164:Core_models.Cmp.t_PartialOrd t_U64NotAllOnes t_U64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_163:Core_models.Cmp.t_Ord t_U64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_165:Core_models.Hash.t_Hash t_U64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_166:Core_models.Fmt.t_Debug t_U64NotAllOnes\n\ntype t_I64NotAllOnes = | I64NotAllOnes : i64 -> t_I64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_183:Core_models.Clone.t_Clone t_I64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_184:Core_models.Marker.t_Copy t_I64NotAllOnes\n\nval e_ee_17: Prims.unit\n\nval impl_I64NotAllOnes__new (v_val: i64)\n    : Prims.Pure (Core_models.Option.t_Option t_I64NotAllOnes) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_I64NotAllOnes__new_unchecked (v_val: i64)\n    : Prims.Pure t_I64NotAllOnes Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_I64NotAllOnes__as_inner (self: 
t_I64NotAllOnes)\n    : Prims.Pure i64 Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_168:Core_models.Marker.t_StructuralPartialEq t_I64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_169:Core_models.Cmp.t_PartialEq t_I64NotAllOnes t_I64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_185:Core_models.Cmp.t_Eq t_I64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_171:Core_models.Cmp.t_PartialOrd t_I64NotAllOnes t_I64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_170:Core_models.Cmp.t_Ord t_I64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_172:Core_models.Hash.t_Hash t_I64NotAllOnes\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_173:Core_models.Fmt.t_Debug t_I64NotAllOnes\n\nclass t_NotAllOnesHelper (v_Self: Type0) = {\n  f_Type:Type0;\n  f_Type_659097508213326199:Core_models.Marker.t_Sized f_Type\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_NotAllOnesHelper_for_u32: t_NotAllOnesHelper u32 =\n  { f_Type = t_U32NotAllOnes; f_Type_659097508213326199 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_NotAllOnesHelper_for_i32: t_NotAllOnesHelper i32 =\n  { f_Type = t_I32NotAllOnes; f_Type_659097508213326199 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_NotAllOnesHelper_for_u64: t_NotAllOnesHelper u64 =\n  { f_Type = t_U64NotAllOnes; f_Type_659097508213326199 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_NotAllOnesHelper_for_i64: t_NotAllOnesHelper i64 =\n  { f_Type = t_I64NotAllOnes; f_Type_659097508213326199 = FStar.Tactics.Typeclasses.solve }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Num.fst",
    "content": "module Core_models.Num\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nlet impl_u8__MIN: u8 = mk_u8 0\n\nlet impl_u8__MAX: u8 = mk_u8 255\n\nlet impl_u8__BITS: u32 = mk_u32 8\n\nlet impl_u8__wrapping_add (x y: u8) : u8 = Rust_primitives.Arithmetic.wrapping_add_u8 x y\n\nlet impl_u8__saturating_add (x y: u8) : u8 = Rust_primitives.Arithmetic.saturating_add_u8 x y\n\nlet impl_u8__overflowing_add (x y: u8) : (u8 & bool) =\n  Rust_primitives.Arithmetic.overflowing_add_u8 x y\n\nlet impl_u8__checked_add (x y: u8) : Core_models.Option.t_Option u8 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u8__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u8__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option u8\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u8\n\nlet impl_u8__wrapping_sub (x y: u8) : u8 = Rust_primitives.Arithmetic.wrapping_sub_u8 x y\n\nlet impl_u8__saturating_sub (x y: u8) : u8 = Rust_primitives.Arithmetic.saturating_sub_u8 x y\n\nlet impl_u8__overflowing_sub (x y: u8) : (u8 & bool) =\n  Rust_primitives.Arithmetic.overflowing_sub_u8 x y\n\nlet impl_u8__checked_sub (x y: u8) : Core_models.Option.t_Option u8 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u8__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u8__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option u8\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u8\n\nlet impl_u8__wrapping_mul (x y: u8) : u8 = Rust_primitives.Arithmetic.wrapping_mul_u8 x y\n\nlet impl_u8__saturating_mul (x y: u8) : u8 = Rust_primitives.Arithmetic.saturating_mul_u8 x y\n\nlet impl_u8__overflowing_mul (x y: u8) : (u8 & bool) =\n  Rust_primitives.Arithmetic.overflowing_mul_u8 x y\n\nlet impl_u8__checked_mul (x y: u8) : Core_models.Option.t_Option u8 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u8__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u8__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option u8\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u8\n\nlet impl_u8__pow (x: u8) (exp: u32) : u8 = Rust_primitives.Arithmetic.pow_u8 x exp\n\nlet impl_u8__count_ones (x: u8) : u32 = Rust_primitives.Arithmetic.count_ones_u8 x\n\nassume\nval impl_u8__rotate_right': x: u8 -> n: u32 -> u8\n\nunfold\nlet impl_u8__rotate_right = impl_u8__rotate_right'\n\nassume\nval impl_u8__rotate_left': x: u8 -> n: u32 -> u8\n\nunfold\nlet impl_u8__rotate_left = impl_u8__rotate_left'\n\nassume\nval impl_u8__leading_zeros': x: u8 -> u32\n\nunfold\nlet impl_u8__leading_zeros = impl_u8__leading_zeros'\n\nassume\nval impl_u8__ilog2': x: u8 -> u32\n\nunfold\nlet impl_u8__ilog2 = impl_u8__ilog2'\n\nassume\nval impl_u8__from_str_radix': src: string -> radix: u32\n  -> Core_models.Result.t_Result u8 Core_models.Num.Error.t_ParseIntError\n\nunfold\nlet impl_u8__from_str_radix = impl_u8__from_str_radix'\n\nassume\nval impl_u8__from_be_bytes': bytes: t_Array u8 (mk_usize 1) -> u8\n\nunfold\nlet impl_u8__from_be_bytes = impl_u8__from_be_bytes'\n\nassume\nval impl_u8__from_le_bytes': bytes: t_Array u8 (mk_usize 1) -> u8\n\nunfold\nlet impl_u8__from_le_bytes = impl_u8__from_le_bytes'\n\nassume\nval impl_u8__to_be_bytes': bytes: u8 -> t_Array u8 (mk_usize 1)\n\nunfold\nlet impl_u8__to_be_bytes = impl_u8__to_be_bytes'\n\nassume\nval impl_u8__to_le_bytes': bytes: u8 -> t_Array u8 (mk_usize 1)\n\nunfold\nlet impl_u8__to_le_bytes = impl_u8__to_le_bytes'\n\nlet impl_u8__rem_euclid (x y: u8) : Prims.Pure u8 (requires y <>. 
mk_u8 0) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.rem_euclid_u8 x y\n\nlet impl_u16__MIN: u16 = mk_u16 0\n\nlet impl_u16__MAX: u16 = mk_u16 65535\n\nlet impl_u16__BITS: u32 = mk_u32 16\n\nlet impl_u16__wrapping_add (x y: u16) : u16 = Rust_primitives.Arithmetic.wrapping_add_u16 x y\n\nlet impl_u16__saturating_add (x y: u16) : u16 = Rust_primitives.Arithmetic.saturating_add_u16 x y\n\nlet impl_u16__overflowing_add (x y: u16) : (u16 & bool) =\n  Rust_primitives.Arithmetic.overflowing_add_u16 x y\n\nlet impl_u16__checked_add (x y: u16) : Core_models.Option.t_Option u16 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u16__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u16__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option u16\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u16\n\nlet impl_u16__wrapping_sub (x y: u16) : u16 = Rust_primitives.Arithmetic.wrapping_sub_u16 x y\n\nlet impl_u16__saturating_sub (x y: u16) : u16 = Rust_primitives.Arithmetic.saturating_sub_u16 x y\n\nlet impl_u16__overflowing_sub (x y: u16) : (u16 & bool) =\n  Rust_primitives.Arithmetic.overflowing_sub_u16 x y\n\nlet impl_u16__checked_sub (x y: u16) : Core_models.Option.t_Option u16 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u16__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u16__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option u16\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u16\n\nlet impl_u16__wrapping_mul (x y: u16) : u16 = Rust_primitives.Arithmetic.wrapping_mul_u16 x y\n\nlet impl_u16__saturating_mul (x y: u16) : u16 = Rust_primitives.Arithmetic.saturating_mul_u16 x y\n\nlet impl_u16__overflowing_mul (x y: u16) : (u16 & bool) =\n  Rust_primitives.Arithmetic.overflowing_mul_u16 x y\n\nlet impl_u16__checked_mul (x y: u16) : Core_models.Option.t_Option u16 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u16__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u16__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option u16\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u16\n\nlet impl_u16__pow (x: u16) (exp: u32) : u16 = Rust_primitives.Arithmetic.pow_u16 x exp\n\nlet impl_u16__count_ones (x: u16) : u32 = Rust_primitives.Arithmetic.count_ones_u16 x\n\nassume\nval impl_u16__rotate_right': x: u16 -> n: u32 -> u16\n\nunfold\nlet impl_u16__rotate_right = impl_u16__rotate_right'\n\nassume\nval impl_u16__rotate_left': x: u16 -> n: u32 -> u16\n\nunfold\nlet impl_u16__rotate_left = impl_u16__rotate_left'\n\nassume\nval impl_u16__leading_zeros': x: u16 -> u32\n\nunfold\nlet impl_u16__leading_zeros = impl_u16__leading_zeros'\n\nassume\nval impl_u16__ilog2': x: u16 -> u32\n\nunfold\nlet impl_u16__ilog2 = impl_u16__ilog2'\n\nassume\nval impl_u16__from_str_radix': src: string -> radix: u32\n  -> Core_models.Result.t_Result u16 Core_models.Num.Error.t_ParseIntError\n\nunfold\nlet impl_u16__from_str_radix = impl_u16__from_str_radix'\n\nassume\nval impl_u16__from_be_bytes': bytes: t_Array u8 (mk_usize 2) -> u16\n\nunfold\nlet impl_u16__from_be_bytes = impl_u16__from_be_bytes'\n\nassume\nval impl_u16__from_le_bytes': bytes: t_Array u8 (mk_usize 2) -> u16\n\nunfold\nlet impl_u16__from_le_bytes = impl_u16__from_le_bytes'\n\nassume\nval impl_u16__to_be_bytes': bytes: u16 -> t_Array u8 (mk_usize 2)\n\nunfold\nlet impl_u16__to_be_bytes = impl_u16__to_be_bytes'\n\nassume\nval impl_u16__to_le_bytes': bytes: u16 -> t_Array u8 (mk_usize 2)\n\nunfold\nlet impl_u16__to_le_bytes = impl_u16__to_le_bytes'\n\nlet impl_u16__rem_euclid (x y: u16)\n    : Prims.Pure u16 (requires y <>. 
mk_u16 0) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.rem_euclid_u16 x y\n\nlet impl_u32__MIN: u32 = mk_u32 0\n\nlet impl_u32__MAX: u32 = mk_u32 4294967295\n\nlet impl_u32__BITS: u32 = mk_u32 32\n\nlet impl_u32__wrapping_add (x y: u32) : u32 = Rust_primitives.Arithmetic.wrapping_add_u32 x y\n\nlet impl_u32__saturating_add (x y: u32) : u32 = Rust_primitives.Arithmetic.saturating_add_u32 x y\n\nlet impl_u32__overflowing_add (x y: u32) : (u32 & bool) =\n  Rust_primitives.Arithmetic.overflowing_add_u32 x y\n\nlet impl_u32__checked_add (x y: u32) : Core_models.Option.t_Option u32 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u32__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u32__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option u32\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u32\n\nlet impl_u32__wrapping_sub (x y: u32) : u32 = Rust_primitives.Arithmetic.wrapping_sub_u32 x y\n\nlet impl_u32__saturating_sub (x y: u32) : u32 = Rust_primitives.Arithmetic.saturating_sub_u32 x y\n\nlet impl_u32__overflowing_sub (x y: u32) : (u32 & bool) =\n  Rust_primitives.Arithmetic.overflowing_sub_u32 x y\n\nlet impl_u32__checked_sub (x y: u32) : Core_models.Option.t_Option u32 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u32__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u32__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option u32\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u32\n\nlet impl_u32__wrapping_mul (x y: u32) : u32 = Rust_primitives.Arithmetic.wrapping_mul_u32 x y\n\nlet impl_u32__saturating_mul (x y: u32) : u32 = Rust_primitives.Arithmetic.saturating_mul_u32 x y\n\nlet impl_u32__overflowing_mul (x y: u32) : (u32 & bool) =\n  Rust_primitives.Arithmetic.overflowing_mul_u32 x y\n\nlet impl_u32__checked_mul (x y: u32) : Core_models.Option.t_Option u32 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u32__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u32__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option u32\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u32\n\nlet impl_u32__pow (x exp: u32) : u32 = Rust_primitives.Arithmetic.pow_u32 x exp\n\nlet impl_u32__count_ones (x: u32) : u32 = Rust_primitives.Arithmetic.count_ones_u32 x\n\nassume\nval impl_u32__rotate_right': x: u32 -> n: u32 -> u32\n\nunfold\nlet impl_u32__rotate_right = impl_u32__rotate_right'\n\nassume\nval impl_u32__rotate_left': x: u32 -> n: u32 -> u32\n\nunfold\nlet impl_u32__rotate_left = impl_u32__rotate_left'\n\nassume\nval impl_u32__leading_zeros': x: u32 -> u32\n\nunfold\nlet impl_u32__leading_zeros = impl_u32__leading_zeros'\n\nassume\nval impl_u32__ilog2': x: u32 -> u32\n\nunfold\nlet impl_u32__ilog2 = impl_u32__ilog2'\n\nassume\nval impl_u32__from_str_radix': src: string -> radix: u32\n  -> Core_models.Result.t_Result u32 Core_models.Num.Error.t_ParseIntError\n\nunfold\nlet impl_u32__from_str_radix = impl_u32__from_str_radix'\n\nassume\nval impl_u32__from_be_bytes': bytes: t_Array u8 (mk_usize 4) -> u32\n\nunfold\nlet impl_u32__from_be_bytes = impl_u32__from_be_bytes'\n\nassume\nval impl_u32__from_le_bytes': bytes: t_Array u8 (mk_usize 4) -> u32\n\nunfold\nlet impl_u32__from_le_bytes = impl_u32__from_le_bytes'\n\nassume\nval impl_u32__to_be_bytes': bytes: u32 -> t_Array u8 (mk_usize 4)\n\nunfold\nlet impl_u32__to_be_bytes = impl_u32__to_be_bytes'\n\nassume\nval impl_u32__to_le_bytes': bytes: u32 -> t_Array u8 (mk_usize 4)\n\nunfold\nlet impl_u32__to_le_bytes = impl_u32__to_le_bytes'\n\nlet impl_u32__rem_euclid (x y: u32)\n    : Prims.Pure u32 (requires y <>. 
mk_u32 0) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.rem_euclid_u32 x y\n\nlet impl_u64__MIN: u64 = mk_u64 0\n\nlet impl_u64__MAX: u64 = mk_u64 18446744073709551615\n\nlet impl_u64__BITS: u32 = mk_u32 64\n\nlet impl_u64__wrapping_add (x y: u64) : u64 = Rust_primitives.Arithmetic.wrapping_add_u64 x y\n\nlet impl_u64__saturating_add (x y: u64) : u64 = Rust_primitives.Arithmetic.saturating_add_u64 x y\n\nlet impl_u64__overflowing_add (x y: u64) : (u64 & bool) =\n  Rust_primitives.Arithmetic.overflowing_add_u64 x y\n\nlet impl_u64__checked_add (x y: u64) : Core_models.Option.t_Option u64 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u64__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u64__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option u64\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u64\n\nlet impl_u64__wrapping_sub (x y: u64) : u64 = Rust_primitives.Arithmetic.wrapping_sub_u64 x y\n\nlet impl_u64__saturating_sub (x y: u64) : u64 = Rust_primitives.Arithmetic.saturating_sub_u64 x y\n\nlet impl_u64__overflowing_sub (x y: u64) : (u64 & bool) =\n  Rust_primitives.Arithmetic.overflowing_sub_u64 x y\n\nlet impl_u64__checked_sub (x y: u64) : Core_models.Option.t_Option u64 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u64__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u64__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option u64\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u64\n\nlet impl_u64__wrapping_mul (x y: u64) : u64 = Rust_primitives.Arithmetic.wrapping_mul_u64 x y\n\nlet impl_u64__saturating_mul (x y: u64) : u64 = Rust_primitives.Arithmetic.saturating_mul_u64 x y\n\nlet impl_u64__overflowing_mul (x y: u64) : (u64 & bool) =\n  Rust_primitives.Arithmetic.overflowing_mul_u64 x y\n\nlet impl_u64__checked_mul (x y: u64) : Core_models.Option.t_Option u64 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u64__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u64__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option u64\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u64\n\nlet impl_u64__pow (x: u64) (exp: u32) : u64 = Rust_primitives.Arithmetic.pow_u64 x exp\n\nlet impl_u64__count_ones (x: u64) : u32 = Rust_primitives.Arithmetic.count_ones_u64 x\n\nassume\nval impl_u64__rotate_right': x: u64 -> n: u32 -> u64\n\nunfold\nlet impl_u64__rotate_right = impl_u64__rotate_right'\n\nassume\nval impl_u64__rotate_left': x: u64 -> n: u32 -> u64\n\nunfold\nlet impl_u64__rotate_left = impl_u64__rotate_left'\n\nassume\nval impl_u64__leading_zeros': x: u64 -> u32\n\nunfold\nlet impl_u64__leading_zeros = impl_u64__leading_zeros'\n\nassume\nval impl_u64__ilog2': x: u64 -> u32\n\nunfold\nlet impl_u64__ilog2 = impl_u64__ilog2'\n\nassume\nval impl_u64__from_str_radix': src: string -> radix: u32\n  -> Core_models.Result.t_Result u64 Core_models.Num.Error.t_ParseIntError\n\nunfold\nlet impl_u64__from_str_radix = impl_u64__from_str_radix'\n\nassume\nval impl_u64__from_be_bytes': bytes: t_Array u8 (mk_usize 8) -> u64\n\nunfold\nlet impl_u64__from_be_bytes = impl_u64__from_be_bytes'\n\nassume\nval impl_u64__from_le_bytes': bytes: t_Array u8 (mk_usize 8) -> u64\n\nunfold\nlet impl_u64__from_le_bytes = impl_u64__from_le_bytes'\n\nassume\nval impl_u64__to_be_bytes': bytes: u64 -> t_Array u8 (mk_usize 8)\n\nunfold\nlet impl_u64__to_be_bytes = impl_u64__to_be_bytes'\n\nassume\nval impl_u64__to_le_bytes': bytes: u64 -> t_Array u8 (mk_usize 8)\n\nunfold\nlet impl_u64__to_le_bytes = impl_u64__to_le_bytes'\n\nlet impl_u64__rem_euclid (x y: u64)\n    : Prims.Pure u64 (requires y <>. 
mk_u64 0) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.rem_euclid_u64 x y\n\nlet impl_u128__MIN: u128 = mk_u128 0\n\nlet impl_u128__MAX: u128 = mk_u128 340282366920938463463374607431768211455\n\nlet impl_u128__BITS: u32 = mk_u32 128\n\nlet impl_u128__wrapping_add (x y: u128) : u128 = Rust_primitives.Arithmetic.wrapping_add_u128 x y\n\nlet impl_u128__saturating_add (x y: u128) : u128 =\n  Rust_primitives.Arithmetic.saturating_add_u128 x y\n\nlet impl_u128__overflowing_add (x y: u128) : (u128 & bool) =\n  Rust_primitives.Arithmetic.overflowing_add_u128 x y\n\nlet impl_u128__checked_add (x y: u128) : Core_models.Option.t_Option u128 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u128__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u128__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option u128\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u128\n\nlet impl_u128__wrapping_sub (x y: u128) : u128 = Rust_primitives.Arithmetic.wrapping_sub_u128 x y\n\nlet impl_u128__saturating_sub (x y: u128) : u128 =\n  Rust_primitives.Arithmetic.saturating_sub_u128 x y\n\nlet impl_u128__overflowing_sub (x y: u128) : (u128 & bool) =\n  Rust_primitives.Arithmetic.overflowing_sub_u128 x y\n\nlet impl_u128__checked_sub (x y: u128) : Core_models.Option.t_Option u128 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u128__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u128__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option u128\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u128\n\nlet impl_u128__wrapping_mul (x y: u128) : u128 = Rust_primitives.Arithmetic.wrapping_mul_u128 x y\n\nlet impl_u128__saturating_mul (x y: u128) : u128 =\n  Rust_primitives.Arithmetic.saturating_mul_u128 x y\n\nlet impl_u128__overflowing_mul (x y: u128) : (u128 & bool) =\n  Rust_primitives.Arithmetic.overflowing_mul_u128 x y\n\nlet impl_u128__checked_mul (x y: u128) : Core_models.Option.t_Option u128 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_u128__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_u128__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option u128\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option u128\n\nlet impl_u128__pow (x: u128) (exp: u32) : u128 = Rust_primitives.Arithmetic.pow_u128 x exp\n\nlet impl_u128__count_ones (x: u128) : u32 = Rust_primitives.Arithmetic.count_ones_u128 x\n\nassume\nval impl_u128__rotate_right': x: u128 -> n: u32 -> u128\n\nunfold\nlet impl_u128__rotate_right = impl_u128__rotate_right'\n\nassume\nval impl_u128__rotate_left': x: u128 -> n: u32 -> u128\n\nunfold\nlet impl_u128__rotate_left = impl_u128__rotate_left'\n\nassume\nval impl_u128__leading_zeros': x: u128 -> u32\n\nunfold\nlet impl_u128__leading_zeros = impl_u128__leading_zeros'\n\nassume\nval impl_u128__ilog2': x: u128 -> u32\n\nunfold\nlet impl_u128__ilog2 = impl_u128__ilog2'\n\nassume\nval impl_u128__from_str_radix': src: string -> radix: u32\n  -> Core_models.Result.t_Result u128 Core_models.Num.Error.t_ParseIntError\n\nunfold\nlet impl_u128__from_str_radix = impl_u128__from_str_radix'\n\nassume\nval impl_u128__from_be_bytes': bytes: t_Array u8 (mk_usize 16) -> u128\n\nunfold\nlet impl_u128__from_be_bytes = impl_u128__from_be_bytes'\n\nassume\nval impl_u128__from_le_bytes': bytes: t_Array u8 (mk_usize 16) -> u128\n\nunfold\nlet impl_u128__from_le_bytes = impl_u128__from_le_bytes'\n\nassume\nval impl_u128__to_be_bytes': bytes: u128 -> t_Array u8 (mk_usize 16)\n\nunfold\nlet impl_u128__to_be_bytes = impl_u128__to_be_bytes'\n\nassume\nval impl_u128__to_le_bytes': bytes: u128 -> t_Array u8 (mk_usize 16)\n\nunfold\nlet impl_u128__to_le_bytes = impl_u128__to_le_bytes'\n\nlet impl_u128__rem_euclid (x y: u128)\n    : Prims.Pure u128 (requires y <>. 
mk_u128 0) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.rem_euclid_u128 x y\n\nlet impl_usize__MIN: usize = mk_usize 0\n\nlet impl_usize__MAX: usize = Rust_primitives.Arithmetic.v_USIZE_MAX\n\nlet impl_usize__BITS: u32 = Rust_primitives.Arithmetic.v_SIZE_BITS\n\nlet impl_usize__wrapping_add (x y: usize) : usize =\n  Rust_primitives.Arithmetic.wrapping_add_usize x y\n\nlet impl_usize__saturating_add (x y: usize) : usize =\n  Rust_primitives.Arithmetic.saturating_add_usize x y\n\nlet impl_usize__overflowing_add (x y: usize) : (usize & bool) =\n  Rust_primitives.Arithmetic.overflowing_add_usize x y\n\nlet impl_usize__checked_add (x y: usize) : Core_models.Option.t_Option usize =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_usize__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_usize__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option usize\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option usize\n\nlet impl_usize__wrapping_sub (x y: usize) : usize =\n  Rust_primitives.Arithmetic.wrapping_sub_usize x y\n\nlet impl_usize__saturating_sub (x y: usize) : usize =\n  Rust_primitives.Arithmetic.saturating_sub_usize x y\n\nlet impl_usize__overflowing_sub (x y: usize) : (usize & bool) =\n  Rust_primitives.Arithmetic.overflowing_sub_usize x y\n\nlet impl_usize__checked_sub (x y: usize) : Core_models.Option.t_Option usize =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_usize__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_usize__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option usize\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option usize\n\nlet impl_usize__wrapping_mul (x y: usize) : usize =\n  Rust_primitives.Arithmetic.wrapping_mul_usize x y\n\nlet impl_usize__saturating_mul (x y: usize) : usize =\n  Rust_primitives.Arithmetic.saturating_mul_usize x y\n\nlet impl_usize__overflowing_mul (x y: usize) : (usize & bool) =\n  Rust_primitives.Arithmetic.overflowing_mul_usize x y\n\nlet impl_usize__checked_mul (x y: usize) : Core_models.Option.t_Option usize =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_usize__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_usize__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option usize\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option usize\n\nlet impl_usize__pow (x: usize) (exp: u32) : usize = Rust_primitives.Arithmetic.pow_usize x exp\n\nlet impl_usize__count_ones (x: usize) : u32 = Rust_primitives.Arithmetic.count_ones_usize x\n\nassume\nval impl_usize__rotate_right': x: usize -> n: u32 -> usize\n\nunfold\nlet impl_usize__rotate_right = impl_usize__rotate_right'\n\nassume\nval impl_usize__rotate_left': x: usize -> n: u32 -> usize\n\nunfold\nlet impl_usize__rotate_left = impl_usize__rotate_left'\n\nassume\nval impl_usize__leading_zeros': x: usize -> u32\n\nunfold\nlet impl_usize__leading_zeros = impl_usize__leading_zeros'\n\nassume\nval impl_usize__ilog2': x: usize -> u32\n\nunfold\nlet impl_usize__ilog2 = impl_usize__ilog2'\n\nassume\nval impl_usize__from_str_radix': src: string -> radix: u32\n  -> Core_models.Result.t_Result usize Core_models.Num.Error.t_ParseIntError\n\nunfold\nlet impl_usize__from_str_radix = impl_usize__from_str_radix'\n\nassume\nval impl_usize__from_be_bytes': bytes: t_Array u8 (mk_usize 8) -> usize\n\nunfold\nlet impl_usize__from_be_bytes = impl_usize__from_be_bytes'\n\nassume\nval impl_usize__from_le_bytes': bytes: t_Array u8 (mk_usize 8) -> usize\n\nunfold\nlet impl_usize__from_le_bytes = impl_usize__from_le_bytes'\n\nassume\nval impl_usize__to_be_bytes': bytes: usize -> t_Array u8 (mk_usize 8)\n\nunfold\nlet impl_usize__to_be_bytes = impl_usize__to_be_bytes'\n\nassume\nval impl_usize__to_le_bytes': bytes: usize -> t_Array u8 (mk_usize 8)\n\nunfold\nlet impl_usize__to_le_bytes = impl_usize__to_le_bytes'\n\nlet impl_usize__rem_euclid (x y: usize)\n    : Prims.Pure usize (requires y <>. 
mk_usize 0) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.rem_euclid_usize x y\n\nlet impl_i8__MIN: i8 = mk_i8 (-128)\n\nlet impl_i8__MAX: i8 = mk_i8 127\n\nlet impl_i8__BITS: u32 = mk_u32 8\n\nlet impl_i8__wrapping_add (x y: i8) : i8 = Rust_primitives.Arithmetic.wrapping_add_i8 x y\n\nlet impl_i8__saturating_add (x y: i8) : i8 = Rust_primitives.Arithmetic.saturating_add_i8 x y\n\nlet impl_i8__overflowing_add (x y: i8) : (i8 & bool) =\n  Rust_primitives.Arithmetic.overflowing_add_i8 x y\n\nlet impl_i8__checked_add (x y: i8) : Core_models.Option.t_Option i8 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i8__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i8__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option i8\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i8\n\nlet impl_i8__wrapping_sub (x y: i8) : i8 = Rust_primitives.Arithmetic.wrapping_sub_i8 x y\n\nlet impl_i8__saturating_sub (x y: i8) : i8 = Rust_primitives.Arithmetic.saturating_sub_i8 x y\n\nlet impl_i8__overflowing_sub (x y: i8) : (i8 & bool) =\n  Rust_primitives.Arithmetic.overflowing_sub_i8 x y\n\nlet impl_i8__checked_sub (x y: i8) : Core_models.Option.t_Option i8 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i8__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i8__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option i8\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i8\n\nlet impl_i8__wrapping_mul (x y: i8) : i8 = Rust_primitives.Arithmetic.wrapping_mul_i8 x y\n\nlet impl_i8__saturating_mul (x y: i8) : i8 = Rust_primitives.Arithmetic.saturating_mul_i8 x y\n\nlet impl_i8__overflowing_mul (x y: i8) : (i8 & bool) =\n  Rust_primitives.Arithmetic.overflowing_mul_i8 x y\n\nlet impl_i8__checked_mul (x y: i8) : Core_models.Option.t_Option i8 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i8__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i8__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option i8\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i8\n\nlet impl_i8__pow (x: i8) (exp: u32) : i8 = Rust_primitives.Arithmetic.pow_i8 x exp\n\nlet impl_i8__count_ones (x: i8) : u32 = Rust_primitives.Arithmetic.count_ones_i8 x\n\nassume\nval impl_i8__rotate_right': x: i8 -> n: u32 -> i8\n\nunfold\nlet impl_i8__rotate_right = impl_i8__rotate_right'\n\nassume\nval impl_i8__rotate_left': x: i8 -> n: u32 -> i8\n\nunfold\nlet impl_i8__rotate_left = impl_i8__rotate_left'\n\nassume\nval impl_i8__leading_zeros': x: i8 -> u32\n\nunfold\nlet impl_i8__leading_zeros = impl_i8__leading_zeros'\n\nassume\nval impl_i8__ilog2': x: i8 -> u32\n\nunfold\nlet impl_i8__ilog2 = impl_i8__ilog2'\n\nassume\nval impl_i8__from_str_radix': src: string -> radix: u32\n  -> Core_models.Result.t_Result i8 Core_models.Num.Error.t_ParseIntError\n\nunfold\nlet impl_i8__from_str_radix = impl_i8__from_str_radix'\n\nassume\nval impl_i8__from_be_bytes': bytes: t_Array u8 (mk_usize 1) -> i8\n\nunfold\nlet impl_i8__from_be_bytes = impl_i8__from_be_bytes'\n\nassume\nval impl_i8__from_le_bytes': bytes: t_Array u8 (mk_usize 1) -> i8\n\nunfold\nlet impl_i8__from_le_bytes = impl_i8__from_le_bytes'\n\nassume\nval impl_i8__to_be_bytes': bytes: i8 -> t_Array u8 (mk_usize 1)\n\nunfold\nlet impl_i8__to_be_bytes = impl_i8__to_be_bytes'\n\nassume\nval impl_i8__to_le_bytes': bytes: i8 -> t_Array u8 (mk_usize 1)\n\nunfold\nlet impl_i8__to_le_bytes = impl_i8__to_le_bytes'\n\nlet impl_i8__rem_euclid (x y: i8) : Prims.Pure i8 (requires y <>. mk_i8 0) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.rem_euclid_i8 x y\n\nlet impl_i8__abs (x: i8) : Prims.Pure i8 (requires x >. 
impl_i8__MIN) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.abs_i8 x\n\nlet impl_i16__MIN: i16 = mk_i16 (-32768)\n\nlet impl_i16__MAX: i16 = mk_i16 32767\n\nlet impl_i16__BITS: u32 = mk_u32 16\n\nlet impl_i16__wrapping_add (x y: i16) : i16 = Rust_primitives.Arithmetic.wrapping_add_i16 x y\n\nlet impl_i16__saturating_add (x y: i16) : i16 = Rust_primitives.Arithmetic.saturating_add_i16 x y\n\nlet impl_i16__overflowing_add (x y: i16) : (i16 & bool) =\n  Rust_primitives.Arithmetic.overflowing_add_i16 x y\n\nlet impl_i16__checked_add (x y: i16) : Core_models.Option.t_Option i16 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i16__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i16__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option i16\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i16\n\nlet impl_i16__wrapping_sub (x y: i16) : i16 = Rust_primitives.Arithmetic.wrapping_sub_i16 x y\n\nlet impl_i16__saturating_sub (x y: i16) : i16 = Rust_primitives.Arithmetic.saturating_sub_i16 x y\n\nlet impl_i16__overflowing_sub (x y: i16) : (i16 & bool) =\n  Rust_primitives.Arithmetic.overflowing_sub_i16 x y\n\nlet impl_i16__checked_sub (x y: i16) : Core_models.Option.t_Option i16 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i16__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i16__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option i16\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i16\n\nlet impl_i16__wrapping_mul (x y: i16) : i16 = Rust_primitives.Arithmetic.wrapping_mul_i16 x y\n\nlet impl_i16__saturating_mul (x y: i16) : i16 = Rust_primitives.Arithmetic.saturating_mul_i16 x y\n\nlet impl_i16__overflowing_mul (x y: i16) : (i16 & bool) =\n  Rust_primitives.Arithmetic.overflowing_mul_i16 x y\n\nlet impl_i16__checked_mul (x y: i16) : Core_models.Option.t_Option i16 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i16__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i16__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option i16\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i16\n\nlet impl_i16__pow (x: i16) (exp: u32) : i16 = Rust_primitives.Arithmetic.pow_i16 x exp\n\nlet impl_i16__count_ones (x: i16) : u32 = Rust_primitives.Arithmetic.count_ones_i16 x\n\nassume\nval impl_i16__rotate_right': x: i16 -> n: u32 -> i16\n\nunfold\nlet impl_i16__rotate_right = impl_i16__rotate_right'\n\nassume\nval impl_i16__rotate_left': x: i16 -> n: u32 -> i16\n\nunfold\nlet impl_i16__rotate_left = impl_i16__rotate_left'\n\nassume\nval impl_i16__leading_zeros': x: i16 -> u32\n\nunfold\nlet impl_i16__leading_zeros = impl_i16__leading_zeros'\n\nassume\nval impl_i16__ilog2': x: i16 -> u32\n\nunfold\nlet impl_i16__ilog2 = impl_i16__ilog2'\n\nassume\nval impl_i16__from_str_radix': src: string -> radix: u32\n  -> Core_models.Result.t_Result i16 Core_models.Num.Error.t_ParseIntError\n\nunfold\nlet impl_i16__from_str_radix = impl_i16__from_str_radix'\n\nassume\nval impl_i16__from_be_bytes': bytes: t_Array u8 (mk_usize 2) -> i16\n\nunfold\nlet impl_i16__from_be_bytes = impl_i16__from_be_bytes'\n\nassume\nval impl_i16__from_le_bytes': bytes: t_Array u8 (mk_usize 2) -> i16\n\nunfold\nlet impl_i16__from_le_bytes = impl_i16__from_le_bytes'\n\nassume\nval impl_i16__to_be_bytes': bytes: i16 -> t_Array u8 (mk_usize 2)\n\nunfold\nlet impl_i16__to_be_bytes = impl_i16__to_be_bytes'\n\nassume\nval impl_i16__to_le_bytes': bytes: i16 -> t_Array u8 (mk_usize 2)\n\nunfold\nlet impl_i16__to_le_bytes = impl_i16__to_le_bytes'\n\nlet impl_i16__rem_euclid (x y: i16)\n    : Prims.Pure i16 (requires y <>. mk_i16 0) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.rem_euclid_i16 x y\n\nlet impl_i16__abs (x: i16) : Prims.Pure i16 (requires x >. 
impl_i16__MIN) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.abs_i16 x\n\nlet impl_i32__MIN: i32 = mk_i32 (-2147483648)\n\nlet impl_i32__MAX: i32 = mk_i32 2147483647\n\nlet impl_i32__BITS: u32 = mk_u32 32\n\nlet impl_i32__wrapping_add (x y: i32) : i32 = Rust_primitives.Arithmetic.wrapping_add_i32 x y\n\nlet impl_i32__saturating_add (x y: i32) : i32 = Rust_primitives.Arithmetic.saturating_add_i32 x y\n\nlet impl_i32__overflowing_add (x y: i32) : (i32 & bool) =\n  Rust_primitives.Arithmetic.overflowing_add_i32 x y\n\nlet impl_i32__checked_add (x y: i32) : Core_models.Option.t_Option i32 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i32__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i32__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option i32\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i32\n\nlet impl_i32__wrapping_sub (x y: i32) : i32 = Rust_primitives.Arithmetic.wrapping_sub_i32 x y\n\nlet impl_i32__saturating_sub (x y: i32) : i32 = Rust_primitives.Arithmetic.saturating_sub_i32 x y\n\nlet impl_i32__overflowing_sub (x y: i32) : (i32 & bool) =\n  Rust_primitives.Arithmetic.overflowing_sub_i32 x y\n\nlet impl_i32__checked_sub (x y: i32) : Core_models.Option.t_Option i32 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i32__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i32__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option i32\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i32\n\nlet impl_i32__wrapping_mul (x y: i32) : i32 = Rust_primitives.Arithmetic.wrapping_mul_i32 x y\n\nlet impl_i32__saturating_mul (x y: i32) : i32 = Rust_primitives.Arithmetic.saturating_mul_i32 x y\n\nlet impl_i32__overflowing_mul (x y: i32) : (i32 & bool) =\n  Rust_primitives.Arithmetic.overflowing_mul_i32 x y\n\nlet impl_i32__checked_mul (x y: i32) : Core_models.Option.t_Option i32 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i32__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i32__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option i32\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i32\n\nlet impl_i32__pow (x: i32) (exp: u32) : i32 = Rust_primitives.Arithmetic.pow_i32 x exp\n\nlet impl_i32__count_ones (x: i32) : u32 = Rust_primitives.Arithmetic.count_ones_i32 x\n\nassume\nval impl_i32__rotate_right': x: i32 -> n: u32 -> i32\n\nunfold\nlet impl_i32__rotate_right = impl_i32__rotate_right'\n\nassume\nval impl_i32__rotate_left': x: i32 -> n: u32 -> i32\n\nunfold\nlet impl_i32__rotate_left = impl_i32__rotate_left'\n\nassume\nval impl_i32__leading_zeros': x: i32 -> u32\n\nunfold\nlet impl_i32__leading_zeros = impl_i32__leading_zeros'\n\nassume\nval impl_i32__ilog2': x: i32 -> u32\n\nunfold\nlet impl_i32__ilog2 = impl_i32__ilog2'\n\nassume\nval impl_i32__from_str_radix': src: string -> radix: u32\n  -> Core_models.Result.t_Result i32 Core_models.Num.Error.t_ParseIntError\n\nunfold\nlet impl_i32__from_str_radix = impl_i32__from_str_radix'\n\nassume\nval impl_i32__from_be_bytes': bytes: t_Array u8 (mk_usize 4) -> i32\n\nunfold\nlet impl_i32__from_be_bytes = impl_i32__from_be_bytes'\n\nassume\nval impl_i32__from_le_bytes': bytes: t_Array u8 (mk_usize 4) -> i32\n\nunfold\nlet impl_i32__from_le_bytes = impl_i32__from_le_bytes'\n\nassume\nval impl_i32__to_be_bytes': bytes: i32 -> t_Array u8 (mk_usize 4)\n\nunfold\nlet impl_i32__to_be_bytes = impl_i32__to_be_bytes'\n\nassume\nval impl_i32__to_le_bytes': bytes: i32 -> t_Array u8 (mk_usize 4)\n\nunfold\nlet impl_i32__to_le_bytes = impl_i32__to_le_bytes'\n\nlet impl_i32__rem_euclid (x y: i32)\n    : Prims.Pure i32 (requires y <>. mk_i32 0) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.rem_euclid_i32 x y\n\nlet impl_i32__abs (x: i32) : Prims.Pure i32 (requires x >. 
impl_i32__MIN) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.abs_i32 x\n\nlet impl_i64__MIN: i64 = mk_i64 (-9223372036854775808)\n\nlet impl_i64__MAX: i64 = mk_i64 9223372036854775807\n\nlet impl_i64__BITS: u32 = mk_u32 64\n\nlet impl_i64__wrapping_add (x y: i64) : i64 = Rust_primitives.Arithmetic.wrapping_add_i64 x y\n\nlet impl_i64__saturating_add (x y: i64) : i64 = Rust_primitives.Arithmetic.saturating_add_i64 x y\n\nlet impl_i64__overflowing_add (x y: i64) : (i64 & bool) =\n  Rust_primitives.Arithmetic.overflowing_add_i64 x y\n\nlet impl_i64__checked_add (x y: i64) : Core_models.Option.t_Option i64 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i64__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i64__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option i64\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i64\n\nlet impl_i64__wrapping_sub (x y: i64) : i64 = Rust_primitives.Arithmetic.wrapping_sub_i64 x y\n\nlet impl_i64__saturating_sub (x y: i64) : i64 = Rust_primitives.Arithmetic.saturating_sub_i64 x y\n\nlet impl_i64__overflowing_sub (x y: i64) : (i64 & bool) =\n  Rust_primitives.Arithmetic.overflowing_sub_i64 x y\n\nlet impl_i64__checked_sub (x y: i64) : Core_models.Option.t_Option i64 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i64__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i64__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option i64\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i64\n\nlet impl_i64__wrapping_mul (x y: i64) : i64 = Rust_primitives.Arithmetic.wrapping_mul_i64 x y\n\nlet impl_i64__saturating_mul (x y: i64) : i64 = Rust_primitives.Arithmetic.saturating_mul_i64 x y\n\nlet impl_i64__overflowing_mul (x y: i64) : (i64 & bool) =\n  Rust_primitives.Arithmetic.overflowing_mul_i64 x y\n\nlet impl_i64__checked_mul (x y: i64) : Core_models.Option.t_Option i64 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i64__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i64__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option i64\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i64\n\nlet impl_i64__pow (x: i64) (exp: u32) : i64 = Rust_primitives.Arithmetic.pow_i64 x exp\n\nlet impl_i64__count_ones (x: i64) : u32 = Rust_primitives.Arithmetic.count_ones_i64 x\n\nassume\nval impl_i64__rotate_right': x: i64 -> n: u32 -> i64\n\nunfold\nlet impl_i64__rotate_right = impl_i64__rotate_right'\n\nassume\nval impl_i64__rotate_left': x: i64 -> n: u32 -> i64\n\nunfold\nlet impl_i64__rotate_left = impl_i64__rotate_left'\n\nassume\nval impl_i64__leading_zeros': x: i64 -> u32\n\nunfold\nlet impl_i64__leading_zeros = impl_i64__leading_zeros'\n\nassume\nval impl_i64__ilog2': x: i64 -> u32\n\nunfold\nlet impl_i64__ilog2 = impl_i64__ilog2'\n\nassume\nval impl_i64__from_str_radix': src: string -> radix: u32\n  -> Core_models.Result.t_Result i64 Core_models.Num.Error.t_ParseIntError\n\nunfold\nlet impl_i64__from_str_radix = impl_i64__from_str_radix'\n\nassume\nval impl_i64__from_be_bytes': bytes: t_Array u8 (mk_usize 8) -> i64\n\nunfold\nlet impl_i64__from_be_bytes = impl_i64__from_be_bytes'\n\nassume\nval impl_i64__from_le_bytes': bytes: t_Array u8 (mk_usize 8) -> i64\n\nunfold\nlet impl_i64__from_le_bytes = impl_i64__from_le_bytes'\n\nassume\nval impl_i64__to_be_bytes': bytes: i64 -> t_Array u8 (mk_usize 8)\n\nunfold\nlet impl_i64__to_be_bytes = impl_i64__to_be_bytes'\n\nassume\nval impl_i64__to_le_bytes': bytes: i64 -> t_Array u8 (mk_usize 8)\n\nunfold\nlet impl_i64__to_le_bytes = impl_i64__to_le_bytes'\n\nlet impl_i64__rem_euclid (x y: i64)\n    : Prims.Pure i64 (requires y <>. mk_i64 0) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.rem_euclid_i64 x y\n\nlet impl_i64__abs (x: i64) : Prims.Pure i64 (requires x >. 
impl_i64__MIN) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.abs_i64 x\n\nlet impl_i128__MIN: i128 = mk_i128 (-170141183460469231731687303715884105728)\n\nlet impl_i128__MAX: i128 = mk_i128 170141183460469231731687303715884105727\n\nlet impl_i128__BITS: u32 = mk_u32 128\n\nlet impl_i128__wrapping_add (x y: i128) : i128 = Rust_primitives.Arithmetic.wrapping_add_i128 x y\n\nlet impl_i128__saturating_add (x y: i128) : i128 =\n  Rust_primitives.Arithmetic.saturating_add_i128 x y\n\nlet impl_i128__overflowing_add (x y: i128) : (i128 & bool) =\n  Rust_primitives.Arithmetic.overflowing_add_i128 x y\n\nlet impl_i128__checked_add (x y: i128) : Core_models.Option.t_Option i128 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i128__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i128__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option i128\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i128\n\nlet impl_i128__wrapping_sub (x y: i128) : i128 = Rust_primitives.Arithmetic.wrapping_sub_i128 x y\n\nlet impl_i128__saturating_sub (x y: i128) : i128 =\n  Rust_primitives.Arithmetic.saturating_sub_i128 x y\n\nlet impl_i128__overflowing_sub (x y: i128) : (i128 & bool) =\n  Rust_primitives.Arithmetic.overflowing_sub_i128 x y\n\nlet impl_i128__checked_sub (x y: i128) : Core_models.Option.t_Option i128 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i128__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i128__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option i128\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i128\n\nlet impl_i128__wrapping_mul (x y: i128) : i128 = Rust_primitives.Arithmetic.wrapping_mul_i128 x y\n\nlet impl_i128__saturating_mul (x y: i128) : i128 =\n  Rust_primitives.Arithmetic.saturating_mul_i128 x y\n\nlet impl_i128__overflowing_mul (x y: i128) : (i128 & bool) =\n  Rust_primitives.Arithmetic.overflowing_mul_i128 x y\n\nlet impl_i128__checked_mul (x y: i128) : Core_models.Option.t_Option i128 =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_i128__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_i128__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option i128\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option i128\n\nlet impl_i128__pow (x: i128) (exp: u32) : i128 = Rust_primitives.Arithmetic.pow_i128 x exp\n\nlet impl_i128__count_ones (x: i128) : u32 = Rust_primitives.Arithmetic.count_ones_i128 x\n\nassume\nval impl_i128__rotate_right': x: i128 -> n: u32 -> i128\n\nunfold\nlet impl_i128__rotate_right = impl_i128__rotate_right'\n\nassume\nval impl_i128__rotate_left': x: i128 -> n: u32 -> i128\n\nunfold\nlet impl_i128__rotate_left = impl_i128__rotate_left'\n\nassume\nval impl_i128__leading_zeros': x: i128 -> u32\n\nunfold\nlet impl_i128__leading_zeros = impl_i128__leading_zeros'\n\nassume\nval impl_i128__ilog2': x: i128 -> u32\n\nunfold\nlet impl_i128__ilog2 = impl_i128__ilog2'\n\nassume\nval impl_i128__from_str_radix': src: string -> radix: u32\n  -> Core_models.Result.t_Result i128 Core_models.Num.Error.t_ParseIntError\n\nunfold\nlet impl_i128__from_str_radix = impl_i128__from_str_radix'\n\nassume\nval impl_i128__from_be_bytes': bytes: t_Array u8 (mk_usize 16) -> i128\n\nunfold\nlet impl_i128__from_be_bytes = impl_i128__from_be_bytes'\n\nassume\nval impl_i128__from_le_bytes': bytes: t_Array u8 (mk_usize 16) -> i128\n\nunfold\nlet impl_i128__from_le_bytes = impl_i128__from_le_bytes'\n\nassume\nval impl_i128__to_be_bytes': bytes: i128 -> t_Array u8 (mk_usize 16)\n\nunfold\nlet impl_i128__to_be_bytes = impl_i128__to_be_bytes'\n\nassume\nval impl_i128__to_le_bytes': bytes: i128 -> t_Array u8 (mk_usize 16)\n\nunfold\nlet impl_i128__to_le_bytes = impl_i128__to_le_bytes'\n\nlet impl_i128__rem_euclid (x y: i128)\n    : Prims.Pure i128 (requires y <>. mk_i128 0) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.rem_euclid_i128 x y\n\nlet impl_i128__abs (x: i128)\n    : Prims.Pure i128 (requires x >. 
impl_i128__MIN) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.abs_i128 x\n\nlet impl_isize__MIN: isize = Rust_primitives.Arithmetic.v_ISIZE_MIN\n\nlet impl_isize__MAX: isize = Rust_primitives.Arithmetic.v_ISIZE_MAX\n\nlet impl_isize__BITS: u32 = Rust_primitives.Arithmetic.v_SIZE_BITS\n\nlet impl_isize__wrapping_add (x y: isize) : isize =\n  Rust_primitives.Arithmetic.wrapping_add_isize x y\n\nlet impl_isize__saturating_add (x y: isize) : isize =\n  Rust_primitives.Arithmetic.saturating_add_isize x y\n\nlet impl_isize__overflowing_add (x y: isize) : (isize & bool) =\n  Rust_primitives.Arithmetic.overflowing_add_isize x y\n\nlet impl_isize__checked_add (x y: isize) : Core_models.Option.t_Option isize =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_isize__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_isize__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x +! 
y) <: Core_models.Option.t_Option isize\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option isize\n\nlet impl_isize__wrapping_sub (x y: isize) : isize =\n  Rust_primitives.Arithmetic.wrapping_sub_isize x y\n\nlet impl_isize__saturating_sub (x y: isize) : isize =\n  Rust_primitives.Arithmetic.saturating_sub_isize x y\n\nlet impl_isize__overflowing_sub (x y: isize) : (isize & bool) =\n  Rust_primitives.Arithmetic.overflowing_sub_isize x y\n\nlet impl_isize__checked_sub (x y: isize) : Core_models.Option.t_Option isize =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_isize__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) -\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_isize__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x -! 
y) <: Core_models.Option.t_Option isize\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option isize\n\nlet impl_isize__wrapping_mul (x y: isize) : isize =\n  Rust_primitives.Arithmetic.wrapping_mul_isize x y\n\nlet impl_isize__saturating_mul (x y: isize) : isize =\n  Rust_primitives.Arithmetic.saturating_mul_isize x y\n\nlet impl_isize__overflowing_mul (x y: isize) : (isize & bool) =\n  Rust_primitives.Arithmetic.overflowing_mul_isize x y\n\nlet impl_isize__checked_mul (x y: isize) : Core_models.Option.t_Option isize =\n  if\n    (Rust_primitives.Hax.Int.from_machine impl_isize__MIN <: Hax_lib.Int.t_Int) <=\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) &&\n    ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) *\n      (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n      <:\n      Hax_lib.Int.t_Int) <=\n    (Rust_primitives.Hax.Int.from_machine impl_isize__MAX <: Hax_lib.Int.t_Int)\n  then Core_models.Option.Option_Some (x *! 
y) <: Core_models.Option.t_Option isize\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option isize\n\nlet impl_isize__pow (x: isize) (exp: u32) : isize = Rust_primitives.Arithmetic.pow_isize x exp\n\nlet impl_isize__count_ones (x: isize) : u32 = Rust_primitives.Arithmetic.count_ones_isize x\n\nassume\nval impl_isize__rotate_right': x: isize -> n: u32 -> isize\n\nunfold\nlet impl_isize__rotate_right = impl_isize__rotate_right'\n\nassume\nval impl_isize__rotate_left': x: isize -> n: u32 -> isize\n\nunfold\nlet impl_isize__rotate_left = impl_isize__rotate_left'\n\nassume\nval impl_isize__leading_zeros': x: isize -> u32\n\nunfold\nlet impl_isize__leading_zeros = impl_isize__leading_zeros'\n\nassume\nval impl_isize__ilog2': x: isize -> u32\n\nunfold\nlet impl_isize__ilog2 = impl_isize__ilog2'\n\nassume\nval impl_isize__from_str_radix': src: string -> radix: u32\n  -> Core_models.Result.t_Result isize Core_models.Num.Error.t_ParseIntError\n\nunfold\nlet impl_isize__from_str_radix = impl_isize__from_str_radix'\n\nassume\nval impl_isize__from_be_bytes': bytes: t_Array u8 (mk_usize 8) -> isize\n\nunfold\nlet impl_isize__from_be_bytes = impl_isize__from_be_bytes'\n\nassume\nval impl_isize__from_le_bytes': bytes: t_Array u8 (mk_usize 8) -> isize\n\nunfold\nlet impl_isize__from_le_bytes = impl_isize__from_le_bytes'\n\nassume\nval impl_isize__to_be_bytes': bytes: isize -> t_Array u8 (mk_usize 8)\n\nunfold\nlet impl_isize__to_be_bytes = impl_isize__to_be_bytes'\n\nassume\nval impl_isize__to_le_bytes': bytes: isize -> t_Array u8 (mk_usize 8)\n\nunfold\nlet impl_isize__to_le_bytes = impl_isize__to_le_bytes'\n\nlet impl_isize__rem_euclid (x y: isize)\n    : Prims.Pure isize (requires y <>. mk_isize 0) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.rem_euclid_isize x y\n\nlet impl_isize__abs (x: isize)\n    : Prims.Pure isize (requires x >. 
impl_isize__MIN) (fun _ -> Prims.l_True) =\n  Rust_primitives.Arithmetic.abs_isize x\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_18: Core_models.Default.t_Default u8 =\n  {\n    f_default_pre = (fun (_: Prims.unit) -> true);\n    f_default_post = (fun (_: Prims.unit) (out: u8) -> true);\n    f_default = fun (_: Prims.unit) -> mk_u8 0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_19: Core_models.Default.t_Default u16 =\n  {\n    f_default_pre = (fun (_: Prims.unit) -> true);\n    f_default_post = (fun (_: Prims.unit) (out: u16) -> true);\n    f_default = fun (_: Prims.unit) -> mk_u16 0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_20: Core_models.Default.t_Default u32 =\n  {\n    f_default_pre = (fun (_: Prims.unit) -> true);\n    f_default_post = (fun (_: Prims.unit) (out: u32) -> true);\n    f_default = fun (_: Prims.unit) -> mk_u32 0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_21: Core_models.Default.t_Default u64 =\n  {\n    f_default_pre = (fun (_: Prims.unit) -> true);\n    f_default_post = (fun (_: Prims.unit) (out: u64) -> true);\n    f_default = fun (_: Prims.unit) -> mk_u64 0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_22: Core_models.Default.t_Default u128 =\n  {\n    f_default_pre = (fun (_: Prims.unit) -> true);\n    f_default_post = (fun (_: Prims.unit) (out: u128) -> true);\n    f_default = fun (_: Prims.unit) -> mk_u128 0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_23: Core_models.Default.t_Default usize =\n  {\n    f_default_pre = (fun (_: Prims.unit) -> true);\n    f_default_post = (fun (_: Prims.unit) (out: usize) -> true);\n    f_default = fun (_: Prims.unit) -> mk_usize 0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_24: Core_models.Default.t_Default i8 =\n  {\n    f_default_pre = (fun (_: Prims.unit) -> true);\n    f_default_post = (fun (_: Prims.unit) (out: i8) -> true);\n    f_default = fun (_: Prims.unit) -> mk_i8 0\n  }\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nlet impl_25: Core_models.Default.t_Default i16 =\n  {\n    f_default_pre = (fun (_: Prims.unit) -> true);\n    f_default_post = (fun (_: Prims.unit) (out: i16) -> true);\n    f_default = fun (_: Prims.unit) -> mk_i16 0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_26: Core_models.Default.t_Default i32 =\n  {\n    f_default_pre = (fun (_: Prims.unit) -> true);\n    f_default_post = (fun (_: Prims.unit) (out: i32) -> true);\n    f_default = fun (_: Prims.unit) -> mk_i32 0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_27: Core_models.Default.t_Default i64 =\n  {\n    f_default_pre = (fun (_: Prims.unit) -> true);\n    f_default_post = (fun (_: Prims.unit) (out: i64) -> true);\n    f_default = fun (_: Prims.unit) -> mk_i64 0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_28: Core_models.Default.t_Default i128 =\n  {\n    f_default_pre = (fun (_: Prims.unit) -> true);\n    f_default_post = (fun (_: Prims.unit) (out: i128) -> true);\n    f_default = fun (_: Prims.unit) -> mk_i128 0\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_29: Core_models.Default.t_Default isize =\n  {\n    f_default_pre = (fun (_: Prims.unit) -> true);\n    f_default_post = (fun (_: Prims.unit) (out: isize) -> true);\n    f_default = fun (_: Prims.unit) -> mk_isize 0\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Ops.Arith.fsti",
    "content": "module Core_models.Ops.Arith\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_AddAssign (v_Self: Type0) (v_Rhs: Type0) = {\n  f_add_assign_pre:v_Self -> v_Rhs -> Type0;\n  f_add_assign_post:v_Self -> v_Rhs -> v_Self -> Type0;\n  f_add_assign:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure v_Self (f_add_assign_pre x0 x1) (fun result -> f_add_assign_post x0 x1 result)\n}\n\nclass t_SubAssign (v_Self: Type0) (v_Rhs: Type0) = {\n  f_sub_assign_pre:v_Self -> v_Rhs -> Type0;\n  f_sub_assign_post:v_Self -> v_Rhs -> v_Self -> Type0;\n  f_sub_assign:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure v_Self (f_sub_assign_pre x0 x1) (fun result -> f_sub_assign_post x0 x1 result)\n}\n\nclass t_MulAssign (v_Self: Type0) (v_Rhs: Type0) = {\n  f_mul_assign_pre:v_Self -> v_Rhs -> Type0;\n  f_mul_assign_post:v_Self -> v_Rhs -> v_Self -> Type0;\n  f_mul_assign:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure v_Self (f_mul_assign_pre x0 x1) (fun result -> f_mul_assign_post x0 x1 result)\n}\n\nclass t_DivAssign (v_Self: Type0) (v_Rhs: Type0) = {\n  f_div_assign_pre:v_Self -> v_Rhs -> Type0;\n  f_div_assign_post:v_Self -> v_Rhs -> v_Self -> Type0;\n  f_div_assign:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure v_Self (f_div_assign_pre x0 x1) (fun result -> f_div_assign_post x0 x1 result)\n}\n\nclass t_RemAssign (v_Self: Type0) (v_Rhs: Type0) = {\n  f_rem_assign_pre:v_Self -> v_Rhs -> Type0;\n  f_rem_assign_post:v_Self -> v_Rhs -> v_Self -> Type0;\n  f_rem_assign:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure v_Self (f_rem_assign_pre x0 x1) (fun result -> f_rem_assign_post x0 x1 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl:t_AddAssign u8 u8\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_1:t_SubAssign u8 u8\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_2:t_AddAssign u16 u16\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_3:t_SubAssign u16 u16\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval 
impl_4:t_AddAssign u32 u32\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_5:t_SubAssign u32 u32\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_6:t_AddAssign u64 u64\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_7:t_SubAssign u64 u64\n\nclass t_Add (v_Self: Type0) (v_Rhs: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_add_pre:v_Self -> v_Rhs -> Type0;\n  f_add_post:v_Self -> v_Rhs -> f_Output -> Type0;\n  f_add:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output (f_add_pre x0 x1) (fun result -> f_add_post x0 x1 result)\n}\n\nclass t_Sub (v_Self: Type0) (v_Rhs: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_sub_pre:v_Self -> v_Rhs -> Type0;\n  f_sub_post:v_Self -> v_Rhs -> f_Output -> Type0;\n  f_sub:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output (f_sub_pre x0 x1) (fun result -> f_sub_post x0 x1 result)\n}\n\nclass t_Mul (v_Self: Type0) (v_Rhs: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_mul_pre:v_Self -> v_Rhs -> Type0;\n  f_mul_post:v_Self -> v_Rhs -> f_Output -> Type0;\n  f_mul:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output (f_mul_pre x0 x1) (fun result -> f_mul_post x0 x1 result)\n}\n\nclass t_Div (v_Self: Type0) (v_Rhs: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_div_pre:v_Self -> v_Rhs -> Type0;\n  f_div_post:v_Self -> v_Rhs -> f_Output -> Type0;\n  f_div:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output (f_div_pre x0 x1) (fun result -> f_div_post x0 x1 result)\n}\n\nclass t_Neg (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_neg_pre:v_Self -> Type0;\n  f_neg_post:v_Self -> f_Output -> Type0;\n  f_neg:x0: v_Self -> Prims.Pure f_Output (f_neg_pre x0) (fun result -> f_neg_post x0 result)\n}\n\nclass t_Rem (v_Self: Type0) (v_Rhs: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_rem_pre:v_Self -> v_Rhs -> Type0;\n  f_rem_post:v_Self -> v_Rhs -> 
f_Output -> Type0;\n  f_rem:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output (f_rem_pre x0 x1) (fun result -> f_rem_post x0 x1 result)\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Ops.Bit.fsti",
    "content": "module Core_models.Ops.Bit\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Shr (v_Self: Type0) (v_Rhs: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_shr_pre:v_Self -> v_Rhs -> Type0;\n  f_shr_post:v_Self -> v_Rhs -> f_Output -> Type0;\n  f_shr:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output (f_shr_pre x0 x1) (fun result -> f_shr_post x0 x1 result)\n}\n\nclass t_Shl (v_Self: Type0) (v_Rhs: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_shl_pre:v_Self -> v_Rhs -> Type0;\n  f_shl_post:v_Self -> v_Rhs -> f_Output -> Type0;\n  f_shl:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output (f_shl_pre x0 x1) (fun result -> f_shl_post x0 x1 result)\n}\n\nclass t_BitXor (v_Self: Type0) (v_Rhs: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_bitxor_pre:v_Self -> v_Rhs -> Type0;\n  f_bitxor_post:v_Self -> v_Rhs -> f_Output -> Type0;\n  f_bitxor:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output (f_bitxor_pre x0 x1) (fun result -> f_bitxor_post x0 x1 result)\n}\n\nclass t_BitAnd (v_Self: Type0) (v_Rhs: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_bitand_pre:v_Self -> v_Rhs -> Type0;\n  f_bitand_post:v_Self -> v_Rhs -> f_Output -> Type0;\n  f_bitand:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output (f_bitand_pre x0 x1) (fun result -> f_bitand_post x0 x1 result)\n}\n\nclass t_BitOr (v_Self: Type0) (v_Rhs: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_bitor_pre:v_Self -> v_Rhs -> Type0;\n  f_bitor_post:v_Self -> v_Rhs -> f_Output -> Type0;\n  f_bitor:x0: v_Self -> x1: v_Rhs\n    -> Prims.Pure f_Output (f_bitor_pre x0 x1) (fun result -> f_bitor_post x0 x1 result)\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Ops.Control_flow.fst",
    "content": "module Core_models.Ops.Control_flow\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_ControlFlow (v_B: Type0) (v_C: Type0) =\n  | ControlFlow_Continue : v_C -> t_ControlFlow v_B v_C\n  | ControlFlow_Break : v_B -> t_ControlFlow v_B v_C\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Ops.Deref.fst",
    "content": "module Core_models.Ops.Deref\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Deref (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Target:Type0;\n  f_deref_pre:v_Self -> Type0;\n  f_deref_post:v_Self -> f_Target -> Type0;\n  f_deref:x0: v_Self -> Prims.Pure f_Target (f_deref_pre x0) (fun result -> f_deref_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl (#v_T: Type0) : t_Deref v_T =\n  {\n    f_Target = v_T;\n    f_deref_pre = (fun (self: v_T) -> true);\n    f_deref_post = (fun (self: v_T) (out: v_T) -> true);\n    f_deref = fun (self: v_T) -> self\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Ops.Drop.fst",
    "content": "module Core_models.Ops.Drop\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Drop (v_Self: Type0) = {\n  f_drop_pre:v_Self -> Type0;\n  f_drop_post:v_Self -> v_Self -> Type0;\n  f_drop:x0: v_Self -> Prims.Pure v_Self (f_drop_pre x0) (fun result -> f_drop_post x0 result)\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Ops.Function.fst",
    "content": "module Core_models.Ops.Function\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_FnOnce (v_Self: Type0) (v_Args: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_call_once_pre:self_: v_Self -> args: v_Args -> pred: Type0{true ==> pred};\n  f_call_once_post:v_Self -> v_Args -> f_Output -> Type0;\n  f_call_once:x0: v_Self -> x1: v_Args\n    -> Prims.Pure f_Output (f_call_once_pre x0 x1) (fun result -> f_call_once_post x0 x1 result)\n}\n\nclass t_Fn (v_Self: Type0) (v_Args: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_FnOnce v_Self v_Args;\n  f_call_pre:self_: v_Self -> args: v_Args -> pred: Type0{true ==> pred};\n  f_call_post:v_Self -> v_Args -> (_super_i0).f_Output -> Type0;\n  f_call:x0: v_Self -> x1: v_Args\n    -> Prims.Pure (_super_i0).f_Output (f_call_pre x0 x1) (fun result -> f_call_post x0 x1 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) (v_Args:Type0) {|i: t_Fn v_Self v_Args|} -> i._super_i0\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_2 (#v_Arg #v_Out: Type0) : t_FnOnce (v_Arg -> v_Out) v_Arg =\n  {\n    f_Output = v_Out;\n    f_call_once_pre = (fun (self: (v_Arg -> v_Out)) (arg: v_Arg) -> true);\n    f_call_once_post = (fun (self: (v_Arg -> v_Out)) (arg: v_Arg) (out: v_Out) -> true);\n    f_call_once = fun (self: (v_Arg -> v_Out)) (arg: v_Arg) -> self arg\n  }\n\nunfold instance fnonce_arrow_binder t u\n  : t_FnOnce (_:t -> u) t = {\n    f_Output = u;\n    f_call_once_pre = (fun _ _ -> true);\n    f_call_once_post = (fun (x0: (_:t -> u)) (x1: t) (res: u) -> res == x0 x1);\n    f_call_once = (fun (x0: (_:t -> u)) (x1: t) -> x0 x1);\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl (#v_Arg1 #v_Arg2 #v_Out: Type0) : t_FnOnce (v_Arg1 -> v_Arg2 -> v_Out) (v_Arg1 & v_Arg2) =\n  {\n    f_Output = v_Out;\n    f_call_once_pre = (fun (self: (v_Arg1 -> v_Arg2 -> v_Out)) (arg: (v_Arg1 & v_Arg2)) 
-> true);\n    f_call_once_post\n    =\n    (fun (self: (v_Arg1 -> v_Arg2 -> v_Out)) (arg: (v_Arg1 & v_Arg2)) (out: v_Out) -> true);\n    f_call_once\n    =\n    fun (self: (v_Arg1 -> v_Arg2 -> v_Out)) (arg: (v_Arg1 & v_Arg2)) -> self arg._1 arg._2\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1 (#v_Arg1 #v_Arg2 #v_Arg3 #v_Out: Type0)\n    : t_FnOnce (v_Arg1 -> v_Arg2 -> v_Arg3 -> v_Out) (v_Arg1 & v_Arg2 & v_Arg3) =\n  {\n    f_Output = v_Out;\n    f_call_once_pre\n    =\n    (fun (self: (v_Arg1 -> v_Arg2 -> v_Arg3 -> v_Out)) (arg: (v_Arg1 & v_Arg2 & v_Arg3)) -> true);\n    f_call_once_post\n    =\n    (fun\n        (self: (v_Arg1 -> v_Arg2 -> v_Arg3 -> v_Out))\n        (arg: (v_Arg1 & v_Arg2 & v_Arg3))\n        (out: v_Out)\n        ->\n        true);\n    f_call_once\n    =\n    fun (self: (v_Arg1 -> v_Arg2 -> v_Arg3 -> v_Out)) (arg: (v_Arg1 & v_Arg2 & v_Arg3)) ->\n      self arg._1 arg._2 arg._3\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Ops.Index.IndexMut.fst",
    "content": "module Core_models.Ops.Index.IndexMut\n\nclass t_IndexMut t_Self t_Idx = {\n  f_Input: Type0;\n  in_range: t_Self -> t_Idx -> Type0;\n  f_index_mut: s:t_Self -> i:t_Idx{in_range s i} -> v:f_Input -> t_Self;\n}\n\nopen Rust_primitives\ninstance impl__index_mut t l n: t_IndexMut (t_Array t l) (int_t n)\n  = { f_Input = t;\n      in_range = (fun (s: t_Array t l) (i: int_t n) -> v i >= 0 && v i < v l);\n      f_index_mut = (fun s i x -> Seq.upd s (v i) x);\n    }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Ops.Index.Index_mut.fst",
    "content": "module Core_models.Ops.Index.Index_mut\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\n(* item error backend: The mutation of this \u001b[1m&mut\u001b[0m is not allowed here.\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/420.\nPlease upvote or comment this issue if you see this error message.\n\u001b[90mNote: the error was labeled with context `DirectAndMut`.\n\u001b[0m\nLast available AST for this item:\n\n#[allow(dead_code)]\n#[feature(register_tool)]\n#[register_tool(_hax)]\ntrait t_IndexMut<Self_, Idx>\nwhere\n    _: core_models::ops::index::t_Index<Self, Idx>,\n{\n    #[allow(dead_code)]\n    #[feature(register_tool)]\n    #[register_tool(_hax)]\n    fn f_index_mut<Anonymous: 'unk>(\n        _: Self,\n        _: Idx,\n    ) -> tuple2<Self, &mut proj_asso_type!()>;\n}\n\n\nLast AST:\n/** print_rust: pitem: not implemented  (item: { Concrete_ident.T.def_id =\n  { Explicit_def_id.T.is_constructor = false;\n    def_id =\n    { Types.index = (0, 0, None); is_local = true; kind = Types.Trait;\n      krate = \"core_models\";\n      parent =\n      (Some { Types.contents =\n              { Types.id = 0;\n                value =\n                { Types.index = (0, 0, None); is_local = true;\n                  kind = Types.Mod; krate = \"core_models\";\n                  parent =\n                  (Some { Types.contents =\n                          { Types.id = 0;\n                            value =\n                            { Types.index = (0, 0, None); is_local = true;\n                              kind = Types.Mod; krate = \"core_models\";\n                              parent =\n                              (Some { Types.contents =\n                                      { Types.id = 0;\n                                        value =\n                                        { Types.index = (0, 0, None);\n                                          is_local = true; kind = 
Types.Mod;\n                                          krate = \"core_models\";\n                                          parent =\n                                          (Some { Types.contents =\n                                                  { Types.id = 0;\n                                                    value =\n                                                    { Types.index =\n                                                      (0, 0, None);\n                                                      is_local = true;\n                                                      kind = Types.Mod;\n                                                      krate = \"core_models\";\n                                                      parent = None;\n                                                      path = [] }\n                                                    }\n                                                  });\n                                          path =\n                                          [{ Types.data =\n                                             (Types.TypeNs \"ops\");\n                                             disambiguator = 0 }\n                                            ]\n                                          }\n                                        }\n                                      });\n                              path =\n                              [{ Types.data = (Types.TypeNs \"ops\");\n                                 disambiguator = 0 };\n                                { Types.data = (Types.TypeNs \"index\");\n                                  disambiguator = 0 }\n                                ]\n                              }\n                            }\n                          });\n                  path =\n                  [{ Types.data = (Types.TypeNs \"ops\"); disambiguator = 0 };\n                    { Types.data = (Types.TypeNs \"index\"); disambiguator = 0\n                      };\n    
                { Types.data = (Types.TypeNs \"index_mut\");\n                      disambiguator = 0 }\n                    ]\n                  }\n                }\n              });\n      path =\n      [{ Types.data = (Types.TypeNs \"ops\"); disambiguator = 0 };\n        { Types.data = (Types.TypeNs \"index\"); disambiguator = 0 };\n        { Types.data = (Types.TypeNs \"index_mut\"); disambiguator = 0 };\n        { Types.data = (Types.TypeNs \"IndexMut\"); disambiguator = 0 }]\n      }\n    };\n  moved = None; suffix = None }) */\nconst _: () = ();\n *)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Ops.Index.fst",
    "content": "module Core_models.Ops.Index\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Index (v_Self: Type0) (v_Idx: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_index_pre:v_Self -> v_Idx -> Type0;\n  f_index_post:v_Self -> v_Idx -> f_Output -> Type0;\n  f_index:x0: v_Self -> x1: v_Idx\n    -> Prims.Pure f_Output (f_index_pre x0 x1) (fun result -> f_index_post x0 x1 result)\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Ops.Range.fst",
    "content": "module Core_models.Ops.Range\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_RangeTo (v_T: Type0) = { f_end:v_T }\n\ntype t_RangeFrom (v_T: Type0) = { f_start:v_T }\n\ntype t_Range (v_T: Type0) = {\n  f_start:v_T;\n  f_end:v_T\n}\n\ntype t_RangeFull = | RangeFull : t_RangeFull\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range u8) =\n  {\n    f_Item = u8;\n    f_next_pre = (fun (self: t_Range u8) -> true);\n    f_next_post\n    =\n    (fun (self: t_Range u8) (out: (t_Range u8 & Core_models.Option.t_Option u8)) -> true);\n    f_next\n    =\n    fun (self: t_Range u8) ->\n      let (self: t_Range u8), (hax_temp_output: Core_models.Option.t_Option u8) =\n        if self.f_start >=. self.f_end\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n          <:\n          (t_Range u8 & Core_models.Option.t_Option u8)\n        else\n          let res:u8 = self.f_start in\n          let self:t_Range u8 = { self with f_start = self.f_start +! mk_u8 1 } <: t_Range u8 in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option u8)\n          <:\n          (t_Range u8 & Core_models.Option.t_Option u8)\n      in\n      self, hax_temp_output <: (t_Range u8 & Core_models.Option.t_Option u8)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range u16) =\n  {\n    f_Item = u16;\n    f_next_pre = (fun (self: t_Range u16) -> true);\n    f_next_post\n    =\n    (fun (self: t_Range u16) (out: (t_Range u16 & Core_models.Option.t_Option u16)) -> true);\n    f_next\n    =\n    fun (self: t_Range u16) ->\n      let (self: t_Range u16), (hax_temp_output: Core_models.Option.t_Option u16) =\n        if self.f_start >=. 
self.f_end\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option u16)\n          <:\n          (t_Range u16 & Core_models.Option.t_Option u16)\n        else\n          let res:u16 = self.f_start in\n          let self:t_Range u16 = { self with f_start = self.f_start +! mk_u16 1 } <: t_Range u16 in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option u16)\n          <:\n          (t_Range u16 & Core_models.Option.t_Option u16)\n      in\n      self, hax_temp_output <: (t_Range u16 & Core_models.Option.t_Option u16)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_2: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range u32) =\n  {\n    f_Item = u32;\n    f_next_pre = (fun (self: t_Range u32) -> true);\n    f_next_post\n    =\n    (fun (self: t_Range u32) (out: (t_Range u32 & Core_models.Option.t_Option u32)) -> true);\n    f_next\n    =\n    fun (self: t_Range u32) ->\n      let (self: t_Range u32), (hax_temp_output: Core_models.Option.t_Option u32) =\n        if self.f_start >=. self.f_end\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option u32)\n          <:\n          (t_Range u32 & Core_models.Option.t_Option u32)\n        else\n          let res:u32 = self.f_start in\n          let self:t_Range u32 = { self with f_start = self.f_start +! 
mk_u32 1 } <: t_Range u32 in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option u32)\n          <:\n          (t_Range u32 & Core_models.Option.t_Option u32)\n      in\n      self, hax_temp_output <: (t_Range u32 & Core_models.Option.t_Option u32)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_3: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range u64) =\n  {\n    f_Item = u64;\n    f_next_pre = (fun (self: t_Range u64) -> true);\n    f_next_post\n    =\n    (fun (self: t_Range u64) (out: (t_Range u64 & Core_models.Option.t_Option u64)) -> true);\n    f_next\n    =\n    fun (self: t_Range u64) ->\n      let (self: t_Range u64), (hax_temp_output: Core_models.Option.t_Option u64) =\n        if self.f_start >=. self.f_end\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option u64)\n          <:\n          (t_Range u64 & Core_models.Option.t_Option u64)\n        else\n          let res:u64 = self.f_start in\n          let self:t_Range u64 = { self with f_start = self.f_start +! mk_u64 1 } <: t_Range u64 in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option u64)\n          <:\n          (t_Range u64 & Core_models.Option.t_Option u64)\n      in\n      self, hax_temp_output <: (t_Range u64 & Core_models.Option.t_Option u64)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_4: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range u128) =\n  {\n    f_Item = u128;\n    f_next_pre = (fun (self: t_Range u128) -> true);\n    f_next_post\n    =\n    (fun (self: t_Range u128) (out: (t_Range u128 & Core_models.Option.t_Option u128)) -> true);\n    f_next\n    =\n    fun (self: t_Range u128) ->\n      let (self: t_Range u128), (hax_temp_output: Core_models.Option.t_Option u128) =\n        if self.f_start >=. 
self.f_end\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option u128)\n          <:\n          (t_Range u128 & Core_models.Option.t_Option u128)\n        else\n          let res:u128 = self.f_start in\n          let self:t_Range u128 =\n            { self with f_start = self.f_start +! mk_u128 1 } <: t_Range u128\n          in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option u128)\n          <:\n          (t_Range u128 & Core_models.Option.t_Option u128)\n      in\n      self, hax_temp_output <: (t_Range u128 & Core_models.Option.t_Option u128)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_5: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range usize) =\n  {\n    f_Item = usize;\n    f_next_pre = (fun (self: t_Range usize) -> true);\n    f_next_post\n    =\n    (fun (self: t_Range usize) (out: (t_Range usize & Core_models.Option.t_Option usize)) -> true);\n    f_next\n    =\n    fun (self: t_Range usize) ->\n      let (self: t_Range usize), (hax_temp_output: Core_models.Option.t_Option usize) =\n        if self.f_start >=. self.f_end\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option usize)\n          <:\n          (t_Range usize & Core_models.Option.t_Option usize)\n        else\n          let res:usize = self.f_start in\n          let self:t_Range usize =\n            { self with f_start = self.f_start +! 
mk_usize 1 } <: t_Range usize\n          in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option usize)\n          <:\n          (t_Range usize & Core_models.Option.t_Option usize)\n      in\n      self, hax_temp_output <: (t_Range usize & Core_models.Option.t_Option usize)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_6: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range i8) =\n  {\n    f_Item = i8;\n    f_next_pre = (fun (self: t_Range i8) -> true);\n    f_next_post\n    =\n    (fun (self: t_Range i8) (out: (t_Range i8 & Core_models.Option.t_Option i8)) -> true);\n    f_next\n    =\n    fun (self: t_Range i8) ->\n      let (self: t_Range i8), (hax_temp_output: Core_models.Option.t_Option i8) =\n        if self.f_start >=. self.f_end\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option i8)\n          <:\n          (t_Range i8 & Core_models.Option.t_Option i8)\n        else\n          let res:i8 = self.f_start in\n          let self:t_Range i8 = { self with f_start = self.f_start +! mk_i8 1 } <: t_Range i8 in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option i8)\n          <:\n          (t_Range i8 & Core_models.Option.t_Option i8)\n      in\n      self, hax_temp_output <: (t_Range i8 & Core_models.Option.t_Option i8)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_7: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range i16) =\n  {\n    f_Item = i16;\n    f_next_pre = (fun (self: t_Range i16) -> true);\n    f_next_post\n    =\n    (fun (self: t_Range i16) (out: (t_Range i16 & Core_models.Option.t_Option i16)) -> true);\n    f_next\n    =\n    fun (self: t_Range i16) ->\n      let (self: t_Range i16), (hax_temp_output: Core_models.Option.t_Option i16) =\n        if self.f_start >=. 
self.f_end\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option i16)\n          <:\n          (t_Range i16 & Core_models.Option.t_Option i16)\n        else\n          let res:i16 = self.f_start in\n          let self:t_Range i16 = { self with f_start = self.f_start +! mk_i16 1 } <: t_Range i16 in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option i16)\n          <:\n          (t_Range i16 & Core_models.Option.t_Option i16)\n      in\n      self, hax_temp_output <: (t_Range i16 & Core_models.Option.t_Option i16)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_8: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range i32) =\n  {\n    f_Item = i32;\n    f_next_pre = (fun (self: t_Range i32) -> true);\n    f_next_post\n    =\n    (fun (self: t_Range i32) (out: (t_Range i32 & Core_models.Option.t_Option i32)) -> true);\n    f_next\n    =\n    fun (self: t_Range i32) ->\n      let (self: t_Range i32), (hax_temp_output: Core_models.Option.t_Option i32) =\n        if self.f_start >=. self.f_end\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option i32)\n          <:\n          (t_Range i32 & Core_models.Option.t_Option i32)\n        else\n          let res:i32 = self.f_start in\n          let self:t_Range i32 = { self with f_start = self.f_start +! 
mk_i32 1 } <: t_Range i32 in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option i32)\n          <:\n          (t_Range i32 & Core_models.Option.t_Option i32)\n      in\n      self, hax_temp_output <: (t_Range i32 & Core_models.Option.t_Option i32)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_9: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range i64) =\n  {\n    f_Item = i64;\n    f_next_pre = (fun (self: t_Range i64) -> true);\n    f_next_post\n    =\n    (fun (self: t_Range i64) (out: (t_Range i64 & Core_models.Option.t_Option i64)) -> true);\n    f_next\n    =\n    fun (self: t_Range i64) ->\n      let (self: t_Range i64), (hax_temp_output: Core_models.Option.t_Option i64) =\n        if self.f_start >=. self.f_end\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option i64)\n          <:\n          (t_Range i64 & Core_models.Option.t_Option i64)\n        else\n          let res:i64 = self.f_start in\n          let self:t_Range i64 = { self with f_start = self.f_start +! mk_i64 1 } <: t_Range i64 in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option i64)\n          <:\n          (t_Range i64 & Core_models.Option.t_Option i64)\n      in\n      self, hax_temp_output <: (t_Range i64 & Core_models.Option.t_Option i64)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_10: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range i128) =\n  {\n    f_Item = i128;\n    f_next_pre = (fun (self: t_Range i128) -> true);\n    f_next_post\n    =\n    (fun (self: t_Range i128) (out: (t_Range i128 & Core_models.Option.t_Option i128)) -> true);\n    f_next\n    =\n    fun (self: t_Range i128) ->\n      let (self: t_Range i128), (hax_temp_output: Core_models.Option.t_Option i128) =\n        if self.f_start >=. 
self.f_end\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option i128)\n          <:\n          (t_Range i128 & Core_models.Option.t_Option i128)\n        else\n          let res:i128 = self.f_start in\n          let self:t_Range i128 =\n            { self with f_start = self.f_start +! mk_i128 1 } <: t_Range i128\n          in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option i128)\n          <:\n          (t_Range i128 & Core_models.Option.t_Option i128)\n      in\n      self, hax_temp_output <: (t_Range i128 & Core_models.Option.t_Option i128)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_11: Core_models.Iter.Traits.Iterator.t_Iterator (t_Range isize) =\n  {\n    f_Item = isize;\n    f_next_pre = (fun (self: t_Range isize) -> true);\n    f_next_post\n    =\n    (fun (self: t_Range isize) (out: (t_Range isize & Core_models.Option.t_Option isize)) -> true);\n    f_next\n    =\n    fun (self: t_Range isize) ->\n      let (self: t_Range isize), (hax_temp_output: Core_models.Option.t_Option isize) =\n        if self.f_start >=. self.f_end\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option isize)\n          <:\n          (t_Range isize & Core_models.Option.t_Option isize)\n        else\n          let res:isize = self.f_start in\n          let self:t_Range isize =\n            { self with f_start = self.f_start +! mk_isize 1 } <: t_Range isize\n          in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option isize)\n          <:\n          (t_Range isize & Core_models.Option.t_Option isize)\n      in\n      self, hax_temp_output <: (t_Range isize & Core_models.Option.t_Option isize)\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Ops.Try_trait.fst",
    "content": "module Core_models.Ops.Try_trait\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_FromResidual (v_Self: Type0) (v_R: Type0) = {\n  f_from_residual_pre:v_R -> Type0;\n  f_from_residual_post:v_R -> v_Self -> Type0;\n  f_from_residual:x0: v_R\n    -> Prims.Pure v_Self (f_from_residual_pre x0) (fun result -> f_from_residual_post x0 result)\n}\n\nclass t_Try (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Residual:Type0;\n  f_from_output_pre:f_Output -> Type0;\n  f_from_output_post:f_Output -> v_Self -> Type0;\n  f_from_output:x0: f_Output\n    -> Prims.Pure v_Self (f_from_output_pre x0) (fun result -> f_from_output_post x0 result);\n  f_branch_pre:v_Self -> Type0;\n  f_branch_post:v_Self -> Core_models.Ops.Control_flow.t_ControlFlow f_Residual f_Output -> Type0;\n  f_branch:x0: v_Self\n    -> Prims.Pure (Core_models.Ops.Control_flow.t_ControlFlow f_Residual f_Output)\n        (f_branch_pre x0)\n        (fun result -> f_branch_post x0 result)\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Option.fst",
    "content": "module Core_models.Option\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ninclude Core_models.Bundle {t_Option as t_Option}\n\ninclude Core_models.Bundle {Option_Some as Option_Some}\n\ninclude Core_models.Bundle {Option_None as Option_None}\n\ninclude Core_models.Bundle {impl__is_some as impl__is_some}\n\ninclude Core_models.Bundle {impl__is_some_and as impl__is_some_and}\n\ninclude Core_models.Bundle {impl__is_none as impl__is_none}\n\ninclude Core_models.Bundle {impl__is_none_or as impl__is_none_or}\n\ninclude Core_models.Bundle {impl__as_ref as impl__as_ref}\n\ninclude Core_models.Bundle {impl__expect as impl__expect}\n\ninclude Core_models.Bundle {impl__unwrap as impl__unwrap}\n\ninclude Core_models.Bundle {impl__unwrap_or as impl__unwrap_or}\n\ninclude Core_models.Bundle {impl__unwrap_or_else as impl__unwrap_or_else}\n\ninclude Core_models.Bundle {impl__unwrap_or_default as impl__unwrap_or_default}\n\ninclude Core_models.Bundle {impl__map as impl__map}\n\ninclude Core_models.Bundle {impl__map_or as impl__map_or}\n\ninclude Core_models.Bundle {impl__map_or_else as impl__map_or_else}\n\ninclude Core_models.Bundle {impl__map_or_default as impl__map_or_default}\n\ninclude Core_models.Bundle {impl__ok_or as impl__ok_or}\n\ninclude Core_models.Bundle {impl__ok_or_else as impl__ok_or_else}\n\ninclude Core_models.Bundle {impl__and_then as impl__and_then}\n\ninclude Core_models.Bundle {impl__take as impl__take}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Panicking.Internal.fsti",
    "content": "module Core_models.Panicking.Internal\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nval panic: #v_T: Type0 -> Prims.unit -> Prims.Pure v_T (requires false) (fun _ -> Prims.l_True)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Panicking.fst",
    "content": "module Core_models.Panicking\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nassume\nval panic_explicit': Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never (requires false) (fun _ -> Prims.l_True)\n\nunfold\nlet panic_explicit = panic_explicit'\n\nassume\nval panic': e_msg: string\n  -> Prims.Pure Rust_primitives.Hax.t_Never (requires false) (fun _ -> Prims.l_True)\n\nunfold\nlet panic = panic'\n\nassume\nval panic_fmt': e_fmt: Core_models.Fmt.t_Arguments\n  -> Prims.Pure Rust_primitives.Hax.t_Never (requires false) (fun _ -> Prims.l_True)\n\nunfold\nlet panic_fmt = panic_fmt'\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Result.fst",
    "content": "module Core_models.Result\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ninclude Core_models.Bundle {t_Result as t_Result}\n\ninclude Core_models.Bundle {Result_Ok as Result_Ok}\n\ninclude Core_models.Bundle {Result_Err as Result_Err}\n\ninclude Core_models.Bundle {impl__unwrap__from__result as impl__unwrap}\n\ninclude Core_models.Bundle {impl__unwrap_or__from__result as impl__unwrap_or}\n\ninclude Core_models.Bundle {impl__expect__from__result as impl__expect}\n\ninclude Core_models.Bundle {impl__map__from__result as impl__map}\n\ninclude Core_models.Bundle {impl__map_or__from__result as impl__map_or}\n\ninclude Core_models.Bundle {impl__map_or_else__from__result as impl__map_or_else}\n\ninclude Core_models.Bundle {impl__map_err as impl__map_err}\n\ninclude Core_models.Bundle {impl__is_ok as impl__is_ok}\n\ninclude Core_models.Bundle {impl__and_then__from__result as impl__and_then}\n\ninclude Core_models.Bundle {impl__ok as impl__ok}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Slice.Iter.fst",
    "content": "module Core_models.Slice.Iter\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_Chunks (v_T: Type0) = {\n  f_cs:usize;\n  f_elements:t_Slice v_T\n}\n\nlet impl__new (#v_T: Type0) (cs: usize) (elements: t_Slice v_T) : t_Chunks v_T =\n  { f_cs = cs; f_elements = elements } <: t_Chunks v_T\n\ntype t_ChunksExact (v_T: Type0) = {\n  f_cs:usize;\n  f_elements:t_Slice v_T\n}\n\nlet impl_1__new (#v_T: Type0) (cs: usize) (elements: t_Slice v_T) : t_ChunksExact v_T =\n  { f_cs = cs; f_elements = elements } <: t_ChunksExact v_T\n\ntype t_Iter (v_T: Type0) = | Iter : Rust_primitives.Sequence.t_Seq v_T -> t_Iter v_T\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_2 (#v_T: Type0) : Core_models.Iter.Traits.Iterator.t_Iterator (t_Iter v_T) =\n  {\n    f_Item = v_T;\n    f_next_pre = (fun (self: t_Iter v_T) -> true);\n    f_next_post\n    =\n    (fun (self: t_Iter v_T) (out: (t_Iter v_T & Core_models.Option.t_Option v_T)) -> true);\n    f_next\n    =\n    fun (self: t_Iter v_T) ->\n      let (self: t_Iter v_T), (hax_temp_output: Core_models.Option.t_Option v_T) =\n        if (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize) =. 
mk_usize 0\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option v_T)\n          <:\n          (t_Iter v_T & Core_models.Option.t_Option v_T)\n        else\n          let res:v_T = Rust_primitives.Sequence.seq_first #v_T self._0 in\n          let self:t_Iter v_T =\n            {\n              self with\n              _0\n              =\n              Rust_primitives.Sequence.seq_slice #v_T\n                self._0\n                (mk_usize 1)\n                (Rust_primitives.Sequence.seq_len #v_T self._0 <: usize)\n            }\n            <:\n            t_Iter v_T\n          in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option v_T)\n          <:\n          (t_Iter v_T & Core_models.Option.t_Option v_T)\n      in\n      self, hax_temp_output <: (t_Iter v_T & Core_models.Option.t_Option v_T)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_3 (#v_T: Type0) : Core_models.Iter.Traits.Iterator.t_Iterator (t_Chunks v_T) =\n  {\n    f_Item = t_Slice v_T;\n    f_next_pre = (fun (self: t_Chunks v_T) -> true);\n    f_next_post\n    =\n    (fun (self: t_Chunks v_T) (out: (t_Chunks v_T & Core_models.Option.t_Option (t_Slice v_T))) ->\n        true);\n    f_next\n    =\n    fun (self: t_Chunks v_T) ->\n      let (self: t_Chunks v_T), (hax_temp_output: Core_models.Option.t_Option (t_Slice v_T)) =\n        if (Rust_primitives.Slice.slice_length #v_T self.f_elements <: usize) =. mk_usize 0\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option (t_Slice v_T))\n          <:\n          (t_Chunks v_T & Core_models.Option.t_Option (t_Slice v_T))\n        else\n          if (Rust_primitives.Slice.slice_length #v_T self.f_elements <: usize) <. 
self.f_cs\n          then\n            let res:t_Slice v_T = self.f_elements in\n            let self:t_Chunks v_T =\n              {\n                self with\n                f_elements\n                =\n                Rust_primitives.Slice.slice_slice #v_T self.f_elements (mk_usize 0) (mk_usize 0)\n              }\n              <:\n              t_Chunks v_T\n            in\n            self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option (t_Slice v_T))\n            <:\n            (t_Chunks v_T & Core_models.Option.t_Option (t_Slice v_T))\n          else\n            let (res: t_Slice v_T), (new_elements: t_Slice v_T) =\n              Rust_primitives.Slice.slice_split_at #v_T self.f_elements self.f_cs\n            in\n            let self:t_Chunks v_T = { self with f_elements = new_elements } <: t_Chunks v_T in\n            self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option (t_Slice v_T))\n            <:\n            (t_Chunks v_T & Core_models.Option.t_Option (t_Slice v_T))\n      in\n      self, hax_temp_output <: (t_Chunks v_T & Core_models.Option.t_Option (t_Slice v_T))\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_4 (#v_T: Type0) : Core_models.Iter.Traits.Iterator.t_Iterator (t_ChunksExact v_T) =\n  {\n    f_Item = t_Slice v_T;\n    f_next_pre = (fun (self: t_ChunksExact v_T) -> true);\n    f_next_post\n    =\n    (fun\n        (self: t_ChunksExact v_T)\n        (out: (t_ChunksExact v_T & Core_models.Option.t_Option (t_Slice v_T)))\n        ->\n        true);\n    f_next\n    =\n    fun (self: t_ChunksExact v_T) ->\n      let (self: t_ChunksExact v_T), (hax_temp_output: Core_models.Option.t_Option (t_Slice v_T)) =\n        if (Rust_primitives.Slice.slice_length #v_T self.f_elements <: usize) <. 
self.f_cs\n        then\n          self, (Core_models.Option.Option_None <: Core_models.Option.t_Option (t_Slice v_T))\n          <:\n          (t_ChunksExact v_T & Core_models.Option.t_Option (t_Slice v_T))\n        else\n          let (res: t_Slice v_T), (new_elements: t_Slice v_T) =\n            Rust_primitives.Slice.slice_split_at #v_T self.f_elements self.f_cs\n          in\n          let self:t_ChunksExact v_T =\n            { self with f_elements = new_elements } <: t_ChunksExact v_T\n          in\n          self, (Core_models.Option.Option_Some res <: Core_models.Option.t_Option (t_Slice v_T))\n          <:\n          (t_ChunksExact v_T & Core_models.Option.t_Option (t_Slice v_T))\n      in\n      self, hax_temp_output <: (t_ChunksExact v_T & Core_models.Option.t_Option (t_Slice v_T))\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Slice.fst",
    "content": "module Core_models.Slice\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nlet impl__len (#v_T: Type0) (s: t_Slice v_T) : usize = Rust_primitives.Slice.slice_length #v_T s\n\nlet impl__chunks (#v_T: Type0) (s: t_Slice v_T) (cs: usize) : Core_models.Slice.Iter.t_Chunks v_T =\n  Core_models.Slice.Iter.impl__new #v_T cs s\n\nlet impl__iter (#v_T: Type0) (s: t_Slice v_T) : Core_models.Slice.Iter.t_Iter v_T =\n  Core_models.Slice.Iter.Iter (Rust_primitives.Sequence.seq_from_slice #v_T s)\n  <:\n  Core_models.Slice.Iter.t_Iter v_T\n\nlet impl__chunks_exact (#v_T: Type0) (s: t_Slice v_T) (cs: usize)\n    : Core_models.Slice.Iter.t_ChunksExact v_T = Core_models.Slice.Iter.impl_1__new #v_T cs s\n\nlet impl__is_empty (#v_T: Type0) (s: t_Slice v_T) : bool = (impl__len #v_T s <: usize) =. mk_usize 0\n\nassume\nval impl__contains': #v_T: Type0 -> s: t_Slice v_T -> v: v_T -> bool\n\nunfold\nlet impl__contains (#v_T: Type0) = impl__contains' #v_T\n\nassume\nval impl__copy_within':\n    #v_T: Type0 ->\n    #v_R: Type0 ->\n    {| i0: Core_models.Marker.t_Copy v_T |} ->\n    s: t_Slice v_T ->\n    src: v_R ->\n    dest: usize\n  -> t_Slice v_T\n\nunfold\nlet impl__copy_within\n      (#v_T #v_R: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T)\n     = impl__copy_within' #v_T #v_R #i0\n\nassume\nval impl__binary_search': #v_T: Type0 -> s: t_Slice v_T -> x: v_T\n  -> Core_models.Result.t_Result usize usize\n\nunfold\nlet impl__binary_search (#v_T: Type0) = impl__binary_search' #v_T\n\nlet impl__copy_from_slice\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T)\n      (s src: t_Slice v_T)\n    : Prims.Pure (t_Slice v_T)\n      (requires (impl__len #v_T s <: usize) =. 
(impl__len #v_T src <: usize))\n      (fun _ -> Prims.l_True) =\n  let (tmp0: t_Slice v_T), (out: t_Slice v_T) = Rust_primitives.Mem.replace #(t_Slice v_T) s src in\n  let s:t_Slice v_T = tmp0 in\n  let _:t_Slice v_T = out in\n  s\n\nlet impl__clone_from_slice\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Clone.t_Clone v_T)\n      (s src: t_Slice v_T)\n    : Prims.Pure (t_Slice v_T)\n      (requires (impl__len #v_T s <: usize) =. (impl__len #v_T src <: usize))\n      (fun _ -> Prims.l_True) =\n  let (tmp0: t_Slice v_T), (out: t_Slice v_T) = Rust_primitives.Mem.replace #(t_Slice v_T) s src in\n  let s:t_Slice v_T = tmp0 in\n  let _:t_Slice v_T = out in\n  s\n\nlet impl__split_at (#v_T: Type0) (s: t_Slice v_T) (mid: usize)\n    : Prims.Pure (t_Slice v_T & t_Slice v_T)\n      (requires mid <=. (impl__len #v_T s <: usize))\n      (fun _ -> Prims.l_True) = Rust_primitives.Slice.slice_split_at #v_T s mid\n\nlet impl__split_at_checked (#v_T: Type0) (s: t_Slice v_T) (mid: usize)\n    : Core_models.Option.t_Option (t_Slice v_T & t_Slice v_T) =\n  if mid <=. 
(impl__len #v_T s <: usize)\n  then\n    Core_models.Option.Option_Some (impl__split_at #v_T s mid)\n    <:\n    Core_models.Option.t_Option (t_Slice v_T & t_Slice v_T)\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option (t_Slice v_T & t_Slice v_T)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1 (#v_T: Type0) : Core_models.Iter.Traits.Collect.t_IntoIterator (t_Slice v_T) =\n  {\n    f_IntoIter = Core_models.Slice.Iter.t_Iter v_T;\n    f_into_iter_pre = (fun (self: t_Slice v_T) -> true);\n    f_into_iter_post = (fun (self: t_Slice v_T) (out: Core_models.Slice.Iter.t_Iter v_T) -> true);\n    f_into_iter = fun (self: t_Slice v_T) -> impl__iter #v_T self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_7 (#v_T: Type0)\n    : Core_models.Ops.Index.t_Index (t_Slice v_T) (Core_models.Ops.Range.t_Range usize) =\n  {\n    f_Output = t_Slice v_T;\n    f_index_pre\n    =\n    (fun (self_: t_Slice v_T) (i: Core_models.Ops.Range.t_Range usize) ->\n        i.Core_models.Ops.Range.f_start <=. i.Core_models.Ops.Range.f_end &&\n        i.Core_models.Ops.Range.f_end <=. (impl__len #v_T self_ <: usize));\n    f_index_post\n    =\n    (fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_Range usize) (out: t_Slice v_T) -> true);\n    f_index\n    =\n    fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_Range usize) ->\n      Rust_primitives.Slice.slice_slice #v_T\n        self\n        i.Core_models.Ops.Range.f_start\n        i.Core_models.Ops.Range.f_end\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_8 (#v_T: Type0)\n    : Core_models.Ops.Index.t_Index (t_Slice v_T) (Core_models.Ops.Range.t_RangeTo usize) =\n  {\n    f_Output = t_Slice v_T;\n    f_index_pre\n    =\n    (fun (self_: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeTo usize) ->\n        i.Core_models.Ops.Range.f_end <=. 
(impl__len #v_T self_ <: usize));\n    f_index_post\n    =\n    (fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeTo usize) (out: t_Slice v_T) -> true);\n    f_index\n    =\n    fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeTo usize) ->\n      Rust_primitives.Slice.slice_slice #v_T self (mk_usize 0) i.Core_models.Ops.Range.f_end\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_9 (#v_T: Type0)\n    : Core_models.Ops.Index.t_Index (t_Slice v_T) (Core_models.Ops.Range.t_RangeFrom usize) =\n  {\n    f_Output = t_Slice v_T;\n    f_index_pre\n    =\n    (fun (self_: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeFrom usize) ->\n        i.Core_models.Ops.Range.f_start <=. (impl__len #v_T self_ <: usize));\n    f_index_post\n    =\n    (fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeFrom usize) (out: t_Slice v_T) -> true\n    );\n    f_index\n    =\n    fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeFrom usize) ->\n      Rust_primitives.Slice.slice_slice #v_T\n        self\n        i.Core_models.Ops.Range.f_start\n        (Rust_primitives.Slice.slice_length #v_T self <: usize)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_10 (#v_T: Type0)\n    : Core_models.Ops.Index.t_Index (t_Slice v_T) Core_models.Ops.Range.t_RangeFull =\n  {\n    f_Output = t_Slice v_T;\n    f_index_pre = (fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeFull) -> true);\n    f_index_post\n    =\n    (fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeFull) (out: t_Slice v_T) -> true);\n    f_index\n    =\n    fun (self: t_Slice v_T) (i: Core_models.Ops.Range.t_RangeFull) ->\n      Rust_primitives.Slice.slice_slice #v_T\n        self\n        (mk_usize 0)\n        (Rust_primitives.Slice.slice_length #v_T self <: usize)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_11 (#v_T: Type0) : Core_models.Ops.Index.t_Index (t_Slice v_T) usize =\n  {\n    f_Output = v_T;\n    f_index_pre = (fun (self_: t_Slice v_T) (i: usize) 
-> i <. (impl__len #v_T self_ <: usize));\n    f_index_post = (fun (self: t_Slice v_T) (i: usize) (out: v_T) -> true);\n    f_index = fun (self: t_Slice v_T) (i: usize) -> Rust_primitives.Slice.slice_index #v_T self i\n  }\n\nclass t_SliceIndex (v_Self: Type0) (v_T: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Output:Type0;\n  f_get_pre:self_: v_Self -> slice: v_T -> pred: Type0{true ==> pred};\n  f_get_post:v_Self -> v_T -> Core_models.Option.t_Option f_Output -> Type0;\n  f_get:x0: v_Self -> x1: v_T\n    -> Prims.Pure (Core_models.Option.t_Option f_Output)\n        (f_get_pre x0 x1)\n        (fun result -> f_get_post x0 x1 result)\n}\n\nlet impl__get\n      (#v_T #v_I: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_SliceIndex v_I (t_Slice v_T))\n      (s: t_Slice v_T)\n      (index: v_I)\n    : Core_models.Option.t_Option i0.f_Output =\n  f_get #v_I #(t_Slice v_T) #FStar.Tactics.Typeclasses.solve index s\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_2 (#v_T: Type0) : t_SliceIndex usize (t_Slice v_T) =\n  {\n    f_Output = v_T;\n    f_get_pre = (fun (self: usize) (slice: t_Slice v_T) -> true);\n    f_get_post\n    =\n    (fun (self: usize) (slice: t_Slice v_T) (out: Core_models.Option.t_Option v_T) -> true);\n    f_get\n    =\n    fun (self: usize) (slice: t_Slice v_T) ->\n      if self <. 
(impl__len #v_T slice <: usize)\n      then\n        Core_models.Option.Option_Some (Rust_primitives.Slice.slice_index #v_T slice self)\n        <:\n        Core_models.Option.t_Option v_T\n      else Core_models.Option.Option_None <: Core_models.Option.t_Option v_T\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_3 (#v_T: Type0) : t_SliceIndex Core_models.Ops.Range.t_RangeFull (t_Slice v_T) =\n  {\n    f_Output = t_Slice v_T;\n    f_get_pre = (fun (self: Core_models.Ops.Range.t_RangeFull) (slice: t_Slice v_T) -> true);\n    f_get_post\n    =\n    (fun\n        (self: Core_models.Ops.Range.t_RangeFull)\n        (slice: t_Slice v_T)\n        (out: Core_models.Option.t_Option (t_Slice v_T))\n        ->\n        true);\n    f_get\n    =\n    fun (self: Core_models.Ops.Range.t_RangeFull) (slice: t_Slice v_T) ->\n      Core_models.Option.Option_Some slice <: Core_models.Option.t_Option (t_Slice v_T)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_4 (#v_T: Type0) : t_SliceIndex (Core_models.Ops.Range.t_RangeFrom usize) (t_Slice v_T) =\n  {\n    f_Output = t_Slice v_T;\n    f_get_pre = (fun (self: Core_models.Ops.Range.t_RangeFrom usize) (slice: t_Slice v_T) -> true);\n    f_get_post\n    =\n    (fun\n        (self: Core_models.Ops.Range.t_RangeFrom usize)\n        (slice: t_Slice v_T)\n        (out: Core_models.Option.t_Option (t_Slice v_T))\n        ->\n        true);\n    f_get\n    =\n    fun (self: Core_models.Ops.Range.t_RangeFrom usize) (slice: t_Slice v_T) ->\n      if self.Core_models.Ops.Range.f_start <. 
(impl__len #v_T slice <: usize)\n      then\n        Core_models.Option.Option_Some\n        (Rust_primitives.Slice.slice_slice #v_T\n            slice\n            self.Core_models.Ops.Range.f_start\n            (impl__len #v_T slice <: usize))\n        <:\n        Core_models.Option.t_Option (t_Slice v_T)\n      else Core_models.Option.Option_None <: Core_models.Option.t_Option (t_Slice v_T)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_5 (#v_T: Type0) : t_SliceIndex (Core_models.Ops.Range.t_RangeTo usize) (t_Slice v_T) =\n  {\n    f_Output = t_Slice v_T;\n    f_get_pre = (fun (self: Core_models.Ops.Range.t_RangeTo usize) (slice: t_Slice v_T) -> true);\n    f_get_post\n    =\n    (fun\n        (self: Core_models.Ops.Range.t_RangeTo usize)\n        (slice: t_Slice v_T)\n        (out: Core_models.Option.t_Option (t_Slice v_T))\n        ->\n        true);\n    f_get\n    =\n    fun (self: Core_models.Ops.Range.t_RangeTo usize) (slice: t_Slice v_T) ->\n      if self.Core_models.Ops.Range.f_end <=. (impl__len #v_T slice <: usize)\n      then\n        Core_models.Option.Option_Some\n        (Rust_primitives.Slice.slice_slice #v_T slice (mk_usize 0) self.Core_models.Ops.Range.f_end)\n        <:\n        Core_models.Option.t_Option (t_Slice v_T)\n      else Core_models.Option.Option_None <: Core_models.Option.t_Option (t_Slice v_T)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_6 (#v_T: Type0) : t_SliceIndex (Core_models.Ops.Range.t_Range usize) (t_Slice v_T) =\n  {\n    f_Output = t_Slice v_T;\n    f_get_pre = (fun (self: Core_models.Ops.Range.t_Range usize) (slice: t_Slice v_T) -> true);\n    f_get_post\n    =\n    (fun\n        (self: Core_models.Ops.Range.t_Range usize)\n        (slice: t_Slice v_T)\n        (out: Core_models.Option.t_Option (t_Slice v_T))\n        ->\n        true);\n    f_get\n    =\n    fun (self: Core_models.Ops.Range.t_Range usize) (slice: t_Slice v_T) ->\n      if\n        self.Core_models.Ops.Range.f_start <. 
self.Core_models.Ops.Range.f_end &&\n        self.Core_models.Ops.Range.f_end <=. (impl__len #v_T slice <: usize)\n      then\n        Core_models.Option.Option_Some\n        (Rust_primitives.Slice.slice_slice #v_T\n            slice\n            self.Core_models.Ops.Range.f_start\n            self.Core_models.Ops.Range.f_end)\n        <:\n        Core_models.Option.t_Option (t_Slice v_T)\n      else Core_models.Option.Option_None <: Core_models.Option.t_Option (t_Slice v_T)\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Str.Converts.fsti",
    "content": "module Core_models.Str.Converts\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nval from_utf8 (s: t_Slice u8)\n    : Prims.Pure (Core_models.Result.t_Result string Core_models.Str.Error.t_Utf8Error)\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Str.Error.fsti",
    "content": "module Core_models.Str.Error\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_Utf8Error = | Utf8Error : t_Utf8Error\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Str.Iter.fsti",
    "content": "module Core_models.Str.Iter\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_Split (v_T: Type0) = | Split : v_T -> t_Split v_T\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Str.Traits.fsti",
    "content": "module Core_models.Str.Traits\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_FromStr (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Err:Type0;\n  f_from_str_pre:string -> Type0;\n  f_from_str_post:string -> Core_models.Result.t_Result v_Self f_Err -> Type0;\n  f_from_str:x0: string\n    -> Prims.Pure (Core_models.Result.t_Result v_Self f_Err)\n        (f_from_str_pre x0)\n        (fun result -> f_from_str_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl:t_FromStr u64\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Str.fsti",
    "content": "module Core_models.Str\nopen Rust_primitives\n\nval impl_str__len: string -> usize\nval impl_str__as_bytes: string -> t_Slice u8\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.Time.fsti",
    "content": "module Core_models.Time\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen Core_models\nopen FStar.Mul\n\nval v_NANOS_PER_SEC: u32\n\nval v_NANOS_PER_MILLI: u32\n\nval v_NANOS_PER_MICRO: u32\n\nval v_MILLIS_PER_SEC: u64\n\nval v_MICROS_PER_SEC: u64\n\nval v_SECS_PER_MINUTE: u64\n\nval v_MINS_PER_HOUR: u64\n\nval v_HOURS_PER_DAY: u64\n\nval v_DAYS_PER_WEEK: u64\n\ntype t_Duration = {\n  f_secs:u64;\n  f_nanos:Core_models.Num.Niche_types.t_Nanoseconds\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_16:Core_models.Clone.t_Clone t_Duration\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_17:Core_models.Marker.t_Copy t_Duration\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_18:Core_models.Marker.t_StructuralPartialEq t_Duration\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_19:Core_models.Cmp.t_PartialEq t_Duration t_Duration\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_20:Core_models.Cmp.t_Eq t_Duration\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_21:Core_models.Cmp.t_PartialOrd t_Duration t_Duration\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_22:Core_models.Cmp.t_Ord t_Duration\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_23:Core_models.Hash.t_Hash t_Duration\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_24:Core_models.Default.t_Default t_Duration\n\nval impl_Duration__SECOND: t_Duration\n\nval impl_Duration__MILLISECOND: t_Duration\n\nval impl_Duration__MICROSECOND: t_Duration\n\nval impl_Duration__NANOSECOND: t_Duration\n\nval impl_Duration__ZERO: t_Duration\n\nval impl_Duration__MAX: t_Duration\n\nval impl_Duration__new (secs: u64) (nanos: u32)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__from_secs (secs: u64)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__from_millis (millis: u64)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__from_micros 
(micros: u64)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__from_nanos (nanos: u64)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__from_weeks (weeks: u64)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__from_days (days: u64)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__from_hours (hours: u64)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__from_mins (mins: u64)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__is_zero (self: t_Duration) : Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__as_secs (self: t_Duration) : Prims.Pure u64 Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__subsec_millis (self: t_Duration)\n    : Prims.Pure u32 Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__subsec_micros (self: t_Duration)\n    : Prims.Pure u32 Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__subsec_nanos (self: t_Duration)\n    : Prims.Pure u32 Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__as_millis (self: t_Duration)\n    : Prims.Pure u128 Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__as_micros (self: t_Duration)\n    : Prims.Pure u128 Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__as_nanos (self: t_Duration)\n    : Prims.Pure u128 Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__abs_diff (self other: t_Duration)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__checked_add (self rhs: t_Duration)\n    : Prims.Pure (Core_models.Option.t_Option t_Duration) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__saturating_add (self rhs: t_Duration)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__checked_sub (self rhs: t_Duration)\n    : Prims.Pure 
(Core_models.Option.t_Option t_Duration) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__saturating_sub (self rhs: t_Duration)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__checked_mul (self: t_Duration) (rhs: u32)\n    : Prims.Pure (Core_models.Option.t_Option t_Duration) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__saturating_mul (self: t_Duration) (rhs: u32)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__checked_div (self: t_Duration) (rhs: u32)\n    : Prims.Pure (Core_models.Option.t_Option t_Duration) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__as_secs_f64 (self: t_Duration)\n    : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__as_secs_f32 (self: t_Duration)\n    : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__as_millis_f64 (self: t_Duration)\n    : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__as_millis_f32 (self: t_Duration)\n    : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__from_secs_f64 (secs: float)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__from_secs_f32 (secs: float)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__mul_f64 (self: t_Duration) (rhs: float)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__mul_f32 (self: t_Duration) (rhs: float)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__div_f64 (self: t_Duration) (rhs: float)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__div_f32 (self: t_Duration) (rhs: float)\n    : Prims.Pure t_Duration Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__div_duration_f64 (self rhs: t_Duration)\n    : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True)\n\nval 
impl_Duration__div_duration_f32 (self rhs: t_Duration)\n    : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__from_nanos__v_NANOS_PER_SEC: u64\n\nval impl_Duration__from_secs_f64__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__from_secs_f32__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__mul_f64__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__mul_f32__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__div_f64__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Duration__div_f32__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval f_add__impl_1__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_2:Core_models.Ops.Arith.t_AddAssign t_Duration t_Duration\n\nval f_add_assign__impl_2__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval f_sub__impl_3__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_4:Core_models.Ops.Arith.t_SubAssign t_Duration t_Duration\n\nval f_sub_assign__impl_4__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval f_mul__impl_5__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval 
f_mul__impl_6__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_7:Core_models.Ops.Arith.t_MulAssign t_Duration u32\n\nval f_mul_assign__impl_7__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval f_div__impl_8__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_9:Core_models.Ops.Arith.t_DivAssign t_Duration u32\n\nval f_div_assign__impl_9__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\n(* [@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_10:Core_models.Iter.Traits.Accum.t_Sum t_Duration t_Duration *)\n\nval f_sum__impl_10__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\n(* [@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_11:Core_models.Iter.Traits.Accum.t_Sum t_Duration t_Duration *)\n\nval f_sum__impl_11__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_12:Core_models.Fmt.t_Debug t_Duration\n\nval f_fmt__impl_12__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\nval f_fmt__impl_14__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\ntype t_TryFromFloatSecsErrorKind =\n  | TryFromFloatSecsErrorKind_Negative : t_TryFromFloatSecsErrorKind\n  | TryFromFloatSecsErrorKind_OverflowOrNan : t_TryFromFloatSecsErrorKind\n\ntype t_TryFromFloatSecsError = { f_kind:t_TryFromFloatSecsErrorKind }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_25:Core_models.Fmt.t_Debug 
t_TryFromFloatSecsError\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_26:Core_models.Clone.t_Clone t_TryFromFloatSecsError\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_27:Core_models.Marker.t_StructuralPartialEq t_TryFromFloatSecsError\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_28:Core_models.Cmp.t_PartialEq t_TryFromFloatSecsError t_TryFromFloatSecsError\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_29:Core_models.Cmp.t_Eq t_TryFromFloatSecsError\n\nval impl_TryFromFloatSecsError__description (self: t_TryFromFloatSecsError)\n    : Prims.Pure string Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_14:Core_models.Fmt.t_Display t_TryFromFloatSecsError\n\nval t_TryFromFloatSecsErrorKind_cast_to_repr (x: t_TryFromFloatSecsErrorKind)\n    : Prims.Pure isize Prims.l_True (fun _ -> Prims.l_True)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_30:Core_models.Fmt.t_Debug t_TryFromFloatSecsErrorKind\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_31:Core_models.Clone.t_Clone t_TryFromFloatSecsErrorKind\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_32:Core_models.Marker.t_StructuralPartialEq t_TryFromFloatSecsErrorKind\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_33:Core_models.Cmp.t_PartialEq t_TryFromFloatSecsErrorKind t_TryFromFloatSecsErrorKind\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_34:Core_models.Cmp.t_Eq t_TryFromFloatSecsErrorKind\n\nval impl_Duration__try_from_secs_f32 (secs: float)\n    : Prims.Pure (Core_models.Result.t_Result t_Duration t_TryFromFloatSecsError)\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_Duration__try_from_secs_f64 (secs: float)\n    : Prims.Pure (Core_models.Result.t_Result t_Duration t_TryFromFloatSecsError)\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_Duration__try_from_secs_f32__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> 
Prims.l_True)\n\nval impl_Duration__try_from_secs_f64__panic_cold_explicit: Prims.unit\n  -> Prims.Pure Rust_primitives.Hax.t_Never Prims.l_True (fun _ -> Prims.l_True)\n\n(* [@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1: Core_models.Ops.Arith.t_Add t_Duration t_Duration =\n  {\n    f_Output = t_Duration;\n    f_Output_11695847888444666345 = FStar.Tactics.Typeclasses.solve;\n    f_add_pre = (fun (self: t_Duration) (rhs: t_Duration) -> true);\n    f_add_post = (fun (self: t_Duration) (rhs: t_Duration) (out: t_Duration) -> true);\n    f_add = fun (self: t_Duration) (rhs: t_Duration) -> () <: t_Duration\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_3: Core_models.Ops.Arith.t_Sub t_Duration t_Duration =\n  {\n    f_Output = t_Duration;\n    f_Output_9381071510542709353 = FStar.Tactics.Typeclasses.solve;\n    f_sub_pre = (fun (self: t_Duration) (rhs: t_Duration) -> true);\n    f_sub_post = (fun (self: t_Duration) (rhs: t_Duration) (out: t_Duration) -> true);\n    f_sub = fun (self: t_Duration) (rhs: t_Duration) -> () <: t_Duration\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_5: Core_models.Ops.Arith.t_Mul t_Duration u32 =\n  {\n    f_Output = t_Duration;\n    f_Output_11167888388700478202 = FStar.Tactics.Typeclasses.solve;\n    f_mul_pre = (fun (self: t_Duration) (rhs: u32) -> true);\n    f_mul_post = (fun (self: t_Duration) (rhs: u32) (out: t_Duration) -> true);\n    f_mul = fun (self: t_Duration) (rhs: u32) -> () <: t_Duration\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_6: Core_models.Ops.Arith.t_Mul u32 t_Duration =\n  {\n    f_Output = t_Duration;\n    f_Output_11167888388700478202 = FStar.Tactics.Typeclasses.solve;\n    f_mul_pre = (fun (self: u32) (rhs: t_Duration) -> true);\n    f_mul_post = (fun (self: u32) (rhs: t_Duration) (out: t_Duration) -> true);\n    f_mul = fun (self: u32) (rhs: t_Duration) -> () <: t_Duration\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_8: Core_models.Ops.Arith.t_Div 
t_Duration u32 =\n  {\n    f_Output = t_Duration;\n    f_Output_10117503193521621741 = FStar.Tactics.Typeclasses.solve;\n    f_div_pre = (fun (self: t_Duration) (rhs: u32) -> true);\n    f_div_post = (fun (self: t_Duration) (rhs: u32) (out: t_Duration) -> true);\n    f_div = fun (self: t_Duration) (rhs: u32) -> () <: t_Duration\n  } *)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.TypeClassPlaceHolder.fst",
    "content": "module Core_models.TypeClassPlaceHolder\n(* This module defines a dummy type-class that acts as a placeholder for\nresolution, when an argument is useless. See Core_models.Alloc.Borrow for example. *)\n\nclass t_Placeholder = {\n  content : unit\n}\n\ninstance placeholder : t_Placeholder = {\n  content = ()\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Core_models.fst",
    "content": "module Core_models\n\ninclude Rust_primitives\ninclude Core_models.Num\ninclude Rust_primitives.Notations\ninclude Rust_primitives.Hax\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect FSTAR_HOME to be set to your FSTAR repo/install directory\n# We expect HAX_PROOF_LIBS_HOME to be set to the folder containing core, rust_primitives etc.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\n\nHAX_HOME      ?= $(shell git rev-parse --show-toplevel)\nFSTAR_HOME    ?= $(HAX_LIBS_HOME)/../../../FStar\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nHAX_PROOF_LIBS_HOME ?= $(HAX_HOME)/proof-libs/fstar\nHAX_LIBS_HOME       ?= $(HAX_HOME)/hax-lib\n\nCACHE_DIR     ?= ../core/.cache\nHINT_DIR      ?= ../core/.hints\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\n# By default, we process all the files in the current directory. 
Here, we\n# *extend* the set of relevant files with the tests.\nROOTS = $(wildcard *.fst)\n\nFSTAR_INCLUDE_DIRS = $(HAX_PROOF_LIBS_HOME)/rust_primitives $(HAX_PROOF_LIBS_HOME)/core $(HAX_LIBS_HOME)/proofs/fstar/extraction/\n\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS)\n\n\n.depend: $(HINT_DIR) $(CACHE_DIR)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nSHELL=/usr/bin/env bash\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/README.md",
    "content": "# Core (and alloc) library\n\nThis directory contains a model for the [Core Rust\nlibrary](https://doc.rust-lang.org/core/): the minimal Rust foundation\nbehind the [standard libarary of\nRust](https://doc.rust-lang.org/std/index.html). This also includes a\nmodel for some part of the [`alloc` Rust\nlibrary](https://doc.rust-lang.org/stable/alloc/).\n\nCore is self-contained, and is dependency-free: it links to no\nupstream or system libraries. Thus, even if it is minimal, it is not\nsmall: it is around **75k LoC**, comments excluded.\n\nIn this directory, you will find the first stage of our approach to\n`core` in F\\*: a hand-written model. Note that this model tries to\nfollow as much as possible the structure and naming found in the Rust\ncore library.\n\nThe second stage of our approach to `core` is automatic generation\nwith specifications and models.\nOur plan is to annotate the Rust `core` library with specifications\nand models written directly as Rust annotations.\nThis will enable automatic generation of `core` models with consistent\nsemantics in all of hax backends (for now F\\* and Coq).\n\nNote that we already started experimenting with this second approach:\nhax is already able to digest and generate signature-only F\\* for\nmore than 80% of core definitions.\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Rand.Distr.Distribution.fsti",
    "content": "module Rand.Distr.Distribution\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Rand.Distr.Integer.fsti",
    "content": "module Rand.Distr.Integer\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Rand.Distributions.Distribution.fsti",
    "content": "module Rand.Distributions.Distribution\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Rand.Distributions.Integer.fsti",
    "content": "module Rand.Distributions.Integer\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Rand.Rng.fsti",
    "content": "module Rand.Rng\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Rand_core.Os.fsti",
    "content": "module Rand_core.Os\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Rand_core in\n  ()\n\ntype t_OsRng = | OsRng : t_OsRng\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl:Rand_core.t_RngCore t_OsRng\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_1:Rand_core.t_CryptoRng t_OsRng\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Rand_core.fsti",
    "content": "module Rand_core\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_RngCore (v_Self: Type0) = {\n  f_next_u32_pre:v_Self -> Type0;\n  f_next_u32_post:v_Self -> (v_Self & u32) -> Type0;\n  f_next_u32:x0: v_Self\n    -> Prims.Pure (v_Self & u32) (f_next_u32_pre x0) (fun result -> f_next_u32_post x0 result);\n  f_next_u64_pre:v_Self -> Type0;\n  f_next_u64_post:v_Self -> (v_Self & u64) -> Type0;\n  f_next_u64:x0: v_Self\n    -> Prims.Pure (v_Self & u64) (f_next_u64_pre x0) (fun result -> f_next_u64_post x0 result);\n  f_fill_bytes_pre:v_Self -> t_Slice u8 -> Type0;\n  f_fill_bytes_post:v_Self -> t_Slice u8 -> (v_Self & t_Slice u8) -> Type0;\n  f_fill_bytes:x0: v_Self -> x1: t_Slice u8\n    -> Prims.Pure (v_Self & t_Slice u8)\n        (f_fill_bytes_pre x0 x1)\n        (fun result -> f_fill_bytes_post x0 x1 result)\n}\n\nclass t_CryptoRng (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_RngCore v_Self\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) {|i: t_CryptoRng v_Self|} -> i._super_i0\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Std.Collections.Hash.Map.fsti",
    "content": "module Std.Collections.Hash.Map\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nval t_HashMap (v_K v_V v_S: Type0) : eqtype\n\nval impl__new: #v_K: Type0 -> #v_V: Type0 -> Prims.unit\n  -> Prims.Pure (t_HashMap v_K v_V Std.Hash.Random.t_RandomState)\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n\nval impl_2__get (#v_K #v_V #v_S #v_Y: Type0) (m: t_HashMap v_K v_V v_S) (k: v_K)\n    : Prims.Pure (Core_models.Option.t_Option v_V) Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_2__insert (#v_K #v_V #v_S: Type0) (m: t_HashMap v_K v_V v_S) (k: v_K) (v: v_V)\n    : Prims.Pure (t_HashMap v_K v_V v_S & Core_models.Option.t_Option v_V)\n      Prims.l_True\n      (fun _ -> Prims.l_True)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Std.F64.fsti",
    "content": "module Std.F64\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nval impl_f64__powf (x y: float) : Prims.Pure float Prims.l_True (fun _ -> Prims.l_True)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Std.Hash.Random.fsti",
    "content": "module Std.Hash.Random\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_RandomState = | RandomState : t_RandomState\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Std.Io.Error.fsti",
    "content": "module Std.Io.Error\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\ntype t_Error = | Error : t_Error\n\ntype t_ErrorKind =\n  | ErrorKind_NotFound : t_ErrorKind\n  | ErrorKind_PermissionDenied : t_ErrorKind\n  | ErrorKind_ConnectionRefused : t_ErrorKind\n  | ErrorKind_ConnectionReset : t_ErrorKind\n  | ErrorKind_HostUnreachable : t_ErrorKind\n  | ErrorKind_NetworkUnreachable : t_ErrorKind\n  | ErrorKind_ConnectionAborted : t_ErrorKind\n  | ErrorKind_NotConnected : t_ErrorKind\n  | ErrorKind_AddrInUse : t_ErrorKind\n  | ErrorKind_AddrNotAvailable : t_ErrorKind\n  | ErrorKind_NetworkDown : t_ErrorKind\n  | ErrorKind_BrokenPipe : t_ErrorKind\n  | ErrorKind_AlreadyExists : t_ErrorKind\n  | ErrorKind_WouldBlock : t_ErrorKind\n  | ErrorKind_NotADirectory : t_ErrorKind\n  | ErrorKind_IsADirectory : t_ErrorKind\n  | ErrorKind_DirectoryNotEmpty : t_ErrorKind\n  | ErrorKind_ReadOnlyFilesystem : t_ErrorKind\n  | ErrorKind_FilesystemLoop : t_ErrorKind\n  | ErrorKind_StaleNetworkFileHandle : t_ErrorKind\n  | ErrorKind_InvalidInput : t_ErrorKind\n  | ErrorKind_InvalidData : t_ErrorKind\n  | ErrorKind_TimedOut : t_ErrorKind\n  | ErrorKind_WriteZero : t_ErrorKind\n  | ErrorKind_StorageFull : t_ErrorKind\n  | ErrorKind_NotSeekable : t_ErrorKind\n  | ErrorKind_QuotaExceeded : t_ErrorKind\n  | ErrorKind_FileTooLarge : t_ErrorKind\n  | ErrorKind_ResourceBusy : t_ErrorKind\n  | ErrorKind_ExecutableFileBusy : t_ErrorKind\n  | ErrorKind_Deadlock : t_ErrorKind\n  | ErrorKind_CrossesDevices : t_ErrorKind\n  | ErrorKind_TooManyLinks : t_ErrorKind\n  | ErrorKind_InvalidFilename : t_ErrorKind\n  | ErrorKind_ArgumentListTooLong : t_ErrorKind\n  | ErrorKind_Interrupted : t_ErrorKind\n  | ErrorKind_Unsupported : t_ErrorKind\n  | ErrorKind_UnexpectedEof : t_ErrorKind\n  | ErrorKind_OutOfMemory : t_ErrorKind\n  | ErrorKind_InProgress : t_ErrorKind\n  | ErrorKind_Other : t_ErrorKind\n\nval t_ErrorKind_cast_to_repr (x: t_ErrorKind)\n    : 
Prims.Pure isize Prims.l_True (fun _ -> Prims.l_True)\n\nval impl_Error__kind (self: t_Error) : Prims.Pure t_ErrorKind Prims.l_True (fun _ -> Prims.l_True)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Std.Io.Impls.fsti",
    "content": "module Std.Io.Impls\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl:Std.Io.t_Read (t_Slice u8)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_1:Std.Io.t_Write (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Std.Io.Stdio.fsti",
    "content": "module Std.Io.Stdio\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nval e_print (args: Core_models.Fmt.t_Arguments)\n    : Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/core/Std.Io.fsti",
    "content": "module Std.Io\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Rust_primitives\n\nclass t_Read (v_Self: Type0) = {\n  f_read_pre:self_: v_Self -> buf: t_Slice u8 -> pred: Type0{true ==> pred};\n  f_read_post:\n      self_: v_Self ->\n      buf: t_Slice u8 ->\n      x: (v_Self & t_Slice u8 & Core_models.Result.t_Result usize Std.Io.Error.t_Error)\n    -> pred:\n      Type0\n        { pred ==>\n          (let\n            (self_e_future: v_Self),\n            (buf_future: t_Slice u8),\n            (_: Core_models.Result.t_Result usize Std.Io.Error.t_Error) =\n              x\n            in\n            (Core_models.Slice.impl__len #u8 buf_future <: usize) =.\n            (Core_models.Slice.impl__len #u8 buf <: usize)) };\n  f_read:x0: v_Self -> x1: t_Slice u8\n    -> Prims.Pure (v_Self & t_Slice u8 & Core_models.Result.t_Result usize Std.Io.Error.t_Error)\n        (f_read_pre x0 x1)\n        (fun result -> f_read_post x0 x1 result);\n  f_read_exact_pre:self_: v_Self -> buf: t_Slice u8 -> pred: Type0{true ==> pred};\n  f_read_exact_post:\n      self_: v_Self ->\n      buf: t_Slice u8 ->\n      x: (v_Self & t_Slice u8 & Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error)\n    -> pred:\n      Type0\n        { pred ==>\n          (let\n            (self_e_future: v_Self),\n            (buf_future: t_Slice u8),\n            (_: Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error) =\n              x\n            in\n            (Core_models.Slice.impl__len #u8 buf_future <: usize) =.\n            (Core_models.Slice.impl__len #u8 buf <: usize)) };\n  f_read_exact:x0: v_Self -> x1: t_Slice u8\n    -> Prims.Pure\n        (v_Self & t_Slice u8 & Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error)\n        (f_read_exact_pre x0 x1)\n        (fun result -> f_read_exact_post x0 x1 result)\n}\n\nclass t_Write (v_Self: Type0) = {\n  f_write_pre:self_: v_Self -> buf: t_Slice u8 -> pred: Type0{true ==> pred};\n  
f_write_post:\n      v_Self ->\n      t_Slice u8 ->\n      (v_Self & Core_models.Result.t_Result usize Std.Io.Error.t_Error)\n    -> Type0;\n  f_write:x0: v_Self -> x1: t_Slice u8\n    -> Prims.Pure (v_Self & Core_models.Result.t_Result usize Std.Io.Error.t_Error)\n        (f_write_pre x0 x1)\n        (fun result -> f_write_post x0 x1 result);\n  f_flush_pre:self_: v_Self -> pred: Type0{true ==> pred};\n  f_flush_post:v_Self -> (v_Self & Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error)\n    -> Type0;\n  f_flush:x0: v_Self\n    -> Prims.Pure (v_Self & Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error)\n        (f_flush_pre x0)\n        (fun result -> f_flush_post x0 result);\n  f_write_all_pre:self_: v_Self -> buf: t_Slice u8 -> pred: Type0{true ==> pred};\n  f_write_all_post:\n      v_Self ->\n      t_Slice u8 ->\n      (v_Self & Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error)\n    -> Type0;\n  f_write_all:x0: v_Self -> x1: t_Slice u8\n    -> Prims.Pure (v_Self & Core_models.Result.t_Result Prims.unit Std.Io.Error.t_Error)\n        (f_write_all_pre x0 x1)\n        (fun result -> f_write_all_post x0 x1 result)\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/hax_lib/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect FSTAR_HOME to be set to your FSTAR repo/install directory\n# We expect HACL_HOME to be set to your HACL* repo location\n# We expect HAX_LIBS_HOME to be set to the folder containing core, rust_primitives etc.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHAX_LIBS_HOME ?= $(shell git rev-parse --show-toplevel)/proof-libs/fstar\nFSTAR_HOME    ?= $(HAX_LIBS_HOME)/../../../FStar\nHACL_HOME     ?= $(HAX_LIBS_HOME)/../../../hacl-star\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= $(HAX_LIBS_HOME)/.cache\nHINT_DIR      ?= $(HAX_LIBS_HOME)/.hints\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\n# By default, we process all the files in the current directory. 
Here, we\n# *extend* the set of relevant files with the tests.\nROOTS = $(wildcard *.fst)\n\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(HAX_LIBS_HOME)/rust_primitives $(HAX_LIBS_HOME)/core $(HAX_LIBS_HOME)/hax_lib\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS)\n\n\n.depend: $(HINT_DIR) $(CACHE_DIR)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nSHELL=/usr/bin/env bash\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect FSTAR_HOME to be set to your FSTAR repo/install directory\n# We expect HAX_PROOF_LIBS_HOME to be set to the folder containing core, rust_primitives etc.\n# We expect HAX_LIBS_HOME to be set to the hax-lib folder\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHAX_HOME      ?= $(shell git rev-parse --show-toplevel)\nFSTAR_HOME    ?= $(HAX_LIBS_HOME)/../../../FStar\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nHAX_PROOF_LIBS_HOME ?= $(HAX_HOME)/proof-libs/fstar\nHAX_LIBS_HOME       ?= $(HAX_HOME)/hax-lib\n\nCACHE_DIR     ?= $(HAX_LIBS_HOME)/.cache\nHINT_DIR      ?= $(HAX_LIBS_HOME)/.hints\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\n# By default, we process all the files in the current directory. 
Here, we\n# *extend* the set of relevant files with the tests.\nROOTS = $(wildcard *.fst)\n\nFSTAR_INCLUDE_DIRS = $(HAX_PROOF_LIBS_HOME)/rust_primitives $(HAX_PROOF_LIBS_HOME)/core $(HAX_LIBS_HOME)/proofs/fstar/extraction/\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS)\n\n\n.depend: $(HINT_DIR) $(CACHE_DIR)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nSHELL=/usr/bin/env bash\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Arithmetic.fsti",
    "content": "module Rust_primitives.Arithmetic\n\nopen FStar.Mul\nopen Rust_primitives.Integers\n\nlet wrapping_add_u8 : u8 -> u8 -> u8 = add_mod\nlet saturating_add_u8 : u8 -> u8 -> u8 = add_sat\nval overflowing_add_u8 : u8 -> u8 -> u8 & bool\nlet wrapping_sub_u8 : u8 -> u8 -> u8 = sub_mod\nlet saturating_sub_u8 : u8 -> u8 -> u8 = sub_sat\nlet overflowing_sub_u8 (x y: u8): u8 & bool\n  = let sub = v x - v y in\n    let borrow = sub < 0 in\n    let out = if borrow then pow2 8 + sub else sub in\n    (mk_u8 out, borrow)\nlet wrapping_mul_u8 : u8 -> u8 -> u8 = mul_mod\nval saturating_mul_u8 : u8 -> u8 -> u8\nlet overflowing_mul_u8 : u8 -> u8 -> u8 & bool = mul_overflow\nlet rem_euclid_u8 (x: u8) (y: u8 {v y <> 0}): u8 = x %! y\nval pow_u8 : u8 -> u32 -> u8\nval count_ones_u8 : u8 -> r:u32{v r <= 8}\n\nlet wrapping_add_u16 : u16 -> u16 -> u16 = add_mod\nlet saturating_add_u16 : u16 -> u16 -> u16 = add_sat\nval overflowing_add_u16 : u16 -> u16 -> u16 & bool\nlet wrapping_sub_u16 : u16 -> u16 -> u16 = sub_mod\nlet saturating_sub_u16 : u16 -> u16 -> u16 = sub_sat\nlet overflowing_sub_u16 (x y: u16): u16 & bool\n  = let sub = v x - v y in\n    let borrow = sub < 0 in\n    let out = if borrow then pow2 16 + sub else sub in\n    (mk_u16 out, borrow)\nlet wrapping_mul_u16 : u16 -> u16 -> u16 = mul_mod\nval saturating_mul_u16 : u16 -> u16 -> u16\nlet overflowing_mul_u16 : u16 -> u16 -> u16 & bool = mul_overflow\nlet rem_euclid_u16 (x: u16) (y: u16 {v y <> 0}): u16 = x %! 
y\nval pow_u16 : x:u16 -> y:u32 -> result : u16 {v x == 2 /\\ v y < 16 ==> result == mk_u16 (pow2 (v y))}\nval count_ones_u16 : u16 -> r:u32{v r <= 16}\n\nlet wrapping_add_u32 : u32 -> u32 -> u32 = add_mod\nlet saturating_add_u32 : u32 -> u32 -> u32 = add_sat\nval overflowing_add_u32 : u32 -> u32 -> u32 & bool\nlet wrapping_sub_u32 : u32 -> u32 -> u32 = sub_mod\nlet saturating_sub_u32 : u32 -> u32 -> u32 = sub_sat\nlet overflowing_sub_u32 (x y: u32): u32 & bool\n  = let sub = v x - v y in\n    let borrow = sub < 0 in\n    let out = if borrow then pow2 32 + sub else sub in\n    (mk_u32 out, borrow)\nlet wrapping_mul_u32 : u32 -> u32 -> u32 = mul_mod\nval saturating_mul_u32 : u32 -> u32 -> u32\nlet overflowing_mul_u32 : u32 -> u32 -> u32 & bool = mul_overflow\nlet rem_euclid_u32 (x: u32) (y: u32 {v y <> 0}): u32 = x %! y\nval pow_u32 : x:u32 -> y:u32 -> result : u32 {v x == 2 /\\ v y <= 16 ==> result == mk_u32 (pow2 (v y))}\nval count_ones_u32 : u32 -> r:u32{v r <= 32}\n\nlet wrapping_add_u64 : u64 -> u64 -> u64 = add_mod\nlet saturating_add_u64 : u64 -> u64 -> u64 = add_sat\nval overflowing_add_u64 : u64 -> u64 -> u64 & bool\nlet wrapping_sub_u64 : u64 -> u64 -> u64 = sub_mod\nlet saturating_sub_u64 : u64 -> u64 -> u64 = sub_sat\nlet overflowing_sub_u64 (x y: u64): u64 & bool\n  = let sub = v x - v y in\n    let borrow = sub < 0 in\n    let out = if borrow then pow2 64 + sub else sub in\n    (mk_u64 out, borrow)\nlet wrapping_mul_u64 : u64 -> u64 -> u64 = mul_mod\nval saturating_mul_u64 : u64 -> u64 -> u64\nlet overflowing_mul_u64 : u64 -> u64 -> u64 & bool = mul_overflow\nlet rem_euclid_u64 (x: u64) (y: u64 {v y <> 0}): u64 = x %! 
y\nval pow_u64 : u64 -> u32 -> u64\nval count_ones_u64 : u64 -> r:u32{v r <= 64}\n\nlet wrapping_add_u128 : u128 -> u128 -> u128 = add_mod\nlet saturating_add_u128 : u128 -> u128 -> u128 = add_sat\nval overflowing_add_u128 : u128 -> u128 -> u128 & bool\nlet wrapping_sub_u128 : u128 -> u128 -> u128 = sub_mod\nlet saturating_sub_u128 : u128 -> u128 -> u128 = sub_sat\nlet overflowing_sub_u128 (x y: u128): u128 & bool\n  = let sub = v x - v y in\n    let borrow = sub < 0 in\n    let out = if borrow then pow2 128 + sub else sub in\n    (mk_u128 out, borrow)\nlet wrapping_mul_u128 : u128 -> u128 -> u128 = mul_mod\nval saturating_mul_u128 : u128 -> u128 -> u128\nlet overflowing_mul_u128 : u128 -> u128 -> u128 & bool = mul_overflow\nlet rem_euclid_u128 (x: u128) (y: u128 {v y <> 0}): u128 = x %! y\nval pow_u128 : u128 -> u32 -> u128\nval count_ones_u128 : u128 -> r:u32{v r <= 128}\n\nlet wrapping_add_usize : usize -> usize -> usize = add_mod\nlet saturating_add_usize : usize -> usize -> usize = add_sat\nval overflowing_add_usize : usize -> usize -> usize & bool\nlet wrapping_sub_usize : usize -> usize -> usize = sub_mod\nlet saturating_sub_usize : usize -> usize -> usize = sub_sat\nlet overflowing_sub_usize (x y: usize): usize & bool\n  = let sub = v x - v y in\n    let borrow = sub < 0 in\n    let out = if borrow then pow2 size_bits + sub else sub in\n    (mk_usize out, borrow)\nlet wrapping_mul_usize : usize -> usize -> usize = mul_mod\nval saturating_mul_usize : usize -> usize -> usize\nlet overflowing_mul_usize : usize -> usize -> usize & bool = mul_overflow\nlet rem_euclid_usize (x: usize) (y: usize {v y <> 0}): usize = x %! 
y\nval pow_usize : usize -> u32 -> usize\nval count_ones_usize : usize -> r:u32{v r <= size_bits}\n\nlet wrapping_add_i8 : i8 -> i8 -> i8 = add_mod\nlet saturating_add_i8 : i8 -> i8 -> i8 = add_sat\nval overflowing_add_i8 : i8 -> i8 -> i8 & bool\nlet wrapping_sub_i8 : i8 -> i8 -> i8 = sub_mod\nlet saturating_sub_i8 : i8 -> i8 -> i8 = sub_sat\nval overflowing_sub_i8 (x y: i8): i8 & bool\nlet wrapping_mul_i8 : i8 -> i8 -> i8 = mul_mod\nval saturating_mul_i8 : i8 -> i8 -> i8\nlet overflowing_mul_i8 : i8 -> i8 -> i8 & bool = mul_overflow\nlet rem_euclid_i8 (x: i8) (y: i8 {v y <> 0}): i8 = x %! y\nval pow_i8 : i8 -> u32 -> i8\nval count_ones_i8 : i8 -> r:u32{v r <= 8}\nval abs_i8 : i8 -> i8\n\nlet wrapping_add_i16 : i16 -> i16 -> i16 = add_mod\nlet saturating_add_i16 : i16 -> i16 -> i16 = add_sat\nval overflowing_add_i16 : i16 -> i16 -> i16 & bool\nlet wrapping_sub_i16 : i16 -> i16 -> i16 = sub_mod\nlet saturating_sub_i16 : i16 -> i16 -> i16 = sub_sat\nval overflowing_sub_i16 (x y: i16): i16 & bool\nlet wrapping_mul_i16 : i16 -> i16 -> i16 = mul_mod\nval saturating_mul_i16 : i16 -> i16 -> i16\nlet overflowing_mul_i16 : i16 -> i16 -> i16 & bool = mul_overflow\nlet rem_euclid_i16 (x: i16) (y: i16 {v y <> 0}): i16 = x %! y\nval pow_i16 : x: i16 -> y:u32 -> result: i16 {v x == 2 /\\ v y < 15 ==> (Math.Lemmas.pow2_lt_compat 15 (v y); result == mk_i16 (pow2 (v y)))}\nval count_ones_i16 : i16 -> r:u32{v r <= 16}\nval abs_i16 : i16 -> i16\n\nlet wrapping_add_i32 : i32 -> i32 -> i32 = add_mod\nlet saturating_add_i32 : i32 -> i32 -> i32 = add_sat\nval overflowing_add_i32 : i32 -> i32 -> i32 & bool\nlet wrapping_sub_i32 : i32 -> i32 -> i32 = sub_mod\nlet saturating_sub_i32 : i32 -> i32 -> i32 = sub_sat\nval overflowing_sub_i32 (x y: i32): i32 & bool\nlet wrapping_mul_i32 : i32 -> i32 -> i32 = mul_mod\nval saturating_mul_i32 : i32 -> i32 -> i32\nlet overflowing_mul_i32 : i32 -> i32 -> i32 & bool = mul_overflow\nlet rem_euclid_i32 (x: i32) (y: i32 {v y <> 0}): i32 = x %! y\nval pow_i32 : x : i32 -> y:u32 -> result: i32 {v x == 2 /\\ v y <= 16 ==> result == mk_i32 (pow2 (v y))}\nval count_ones_i32 : i32 -> r:u32{v r <= 32}\nval abs_i32 : i32 -> i32\n\nlet wrapping_add_i64 : i64 -> i64 -> i64 = add_mod\nlet saturating_add_i64 : i64 -> i64 -> i64 = add_sat\nval overflowing_add_i64 : i64 -> i64 -> i64 & bool\nlet wrapping_sub_i64 : i64 -> i64 -> i64 = sub_mod\nlet saturating_sub_i64 : i64 -> i64 -> i64 = sub_sat\nval overflowing_sub_i64 (x y: i64): i64 & bool\nlet wrapping_mul_i64 : i64 -> i64 -> i64 = mul_mod\nval saturating_mul_i64 : i64 -> i64 -> i64\nlet overflowing_mul_i64 : i64 -> i64 -> i64 & bool = mul_overflow\nlet rem_euclid_i64 (x: i64) (y: i64 {v y <> 0}): i64 = x %! y\nval pow_i64 : i64 -> u32 -> i64\nval count_ones_i64 : i64 -> r:u32{v r <= 64}\nval abs_i64 : i64 -> i64\n\nlet wrapping_add_i128 : i128 -> i128 -> i128 = add_mod\nlet saturating_add_i128 : i128 -> i128 -> i128 = add_sat\nval overflowing_add_i128 : i128 -> i128 -> i128 & bool\nlet wrapping_sub_i128 : i128 -> i128 -> i128 = sub_mod\nlet saturating_sub_i128 : i128 -> i128 -> i128 = sub_sat\nval overflowing_sub_i128 (x y: i128): i128 & bool\nlet wrapping_mul_i128 : i128 -> i128 -> i128 = mul_mod\nval saturating_mul_i128 : i128 -> i128 -> i128\nlet overflowing_mul_i128 : i128 -> i128 -> i128 & bool = mul_overflow\nlet rem_euclid_i128 (x: i128) (y: i128 {v y <> 0}): i128 = x %! y\nval pow_i128 : i128 -> u32 -> i128\nval count_ones_i128 : i128 -> r:u32{v r <= 128}\nval abs_i128 : i128 -> i128\n\nlet wrapping_add_isize : isize -> isize -> isize = add_mod\nlet saturating_add_isize : isize -> isize -> isize = add_sat\nval overflowing_add_isize : isize -> isize -> isize & bool\nlet wrapping_sub_isize : isize -> isize -> isize = sub_mod\nlet saturating_sub_isize : isize -> isize -> isize = sub_sat\nval overflowing_sub_isize (x y: isize): isize & bool\nlet wrapping_mul_isize : isize -> isize -> isize = mul_mod\nval saturating_mul_isize : isize -> isize -> isize\nlet overflowing_mul_isize : isize -> isize -> isize & bool = mul_overflow\nlet rem_euclid_isize (x: isize) (y: isize {v y <> 0}): isize = x %! y\nval pow_isize : isize -> u32 -> isize\nval count_ones_isize : isize -> r:u32{v r <= size_bits}\nval abs_isize : isize -> isize\n\nlet v_USIZE_MAX = mk_usize max_usize\nlet v_ISIZE_MAX = mk_isize max_isize\nlet v_ISIZE_MIN = mk_isize (minint ISIZE)\nlet v_SIZE_BITS = mk_u32 size_bits\n\nlet neg #t x = zero #t -! x\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Arrays.fsti",
    "content": "module Rust_primitives.Arrays\n\nopen Rust_primitives.Integers\nopen FStar.Mul\n\n/// Rust slices and arrays are represented as sequences\ntype t_Slice t = s:Seq.seq t{Seq.length s <= max_usize}\ntype t_Array t (l:usize) = s: Seq.seq t { Seq.length s == v l }\n\n/// Length of a slice\nlet length (#a: Type) (s: t_Slice a): usize = sz (Seq.length s)\n\n/// Check whether a slice contains an item\nlet contains (#t: eqtype) (s: t_Slice t) (x: t): bool = Seq.mem x s\n\n/// Converts an F* list into an array\nval of_list (#t:Type) (l: list t {FStar.List.Tot.length l < maxint U16}):\n    t_Array t (sz (FStar.List.Tot.length l))\n/// Converts an slice into a F* list\nval to_list (#t:Type) (s: t_Slice t): list t\n\nval map_array (#a #b: Type) #n (arr: t_Array a n) (f: a -> b): t_Array b n\n\n/// Creates an array of size `l` using a function `f`\nval createi #t (l:usize) (f:(u:usize{u <. l} -> t))\n    : Pure (t_Array t l)\n      (requires True)\n      (ensures (fun res -> (forall i. Seq.index res (v i) == f i)))\n\nunfold let map #a #b #p\n  (f:(x:a{p x} -> b))\n  (s: t_Slice a {forall (i:nat). i < Seq.length s ==> p (Seq.index s i)}): t_Slice b\n  = createi (length s) (fun i -> f (Seq.index s (v i)))\n\n/// Concatenates two slices\nlet concat #t (x:t_Slice t) (y:t_Slice t{range (v (length x) + v (length y)) usize_inttype}) :\n           r:t_Array t (length x +! length y) = Seq.append x y\n\n/// Translate indexes of `concat x y` into indexes of `x` or of `y`\nval lemma_index_concat #t (x:t_Slice t) (y:t_Slice t{range (v (length x) + v (length y)) usize_inttype}) (i:usize{i <. length x +! length y}):\n           Lemma (if i <. length x then\n                    Seq.index (concat x y) (v i) == Seq.index x (v i)\n                  else \n                    Seq.index (concat x y) (v i) == Seq.index y (v (i -! 
length x)))\n           [SMTPat (Seq.index (concat #t x y) (v i))]\n\n/// Take a subslice given `x` a slice and `i` and `j` two indexes\nlet slice #t (x:t_Slice t) (i:usize{i <=. length x}) (j:usize{i <=. j /\\ j <=. length x}):\n           r:t_Array t (j -! i) = Seq.slice x (v i) (v j)\n\n/// Translate indexes for subslices\nval lemma_index_slice #t (x:t_Slice t) (i:usize{i <=. length x}) (j:usize{i <=. j /\\ j <=. length x})\n                                (k:usize{k <. j -! i}):\n           Lemma (Seq.index (slice x i j) (v k) == Seq.index x (v (i +! k)))\n           [SMTPat (Seq.index (slice x i j) (v k))]\n\n/// Introduce bitwise equality principle for sequences\nval eq_intro #t (a : Seq.seq t) (b:Seq.seq t{Seq.length a == Seq.length b}):\n       Lemma\n       (requires forall i. {:pattern Seq.index a i; Seq.index b i}\n                      i < Seq.length a ==>\n                      Seq.index a i == Seq.index b i)\n       (ensures Seq.equal a b)\n       [SMTPat (Seq.equal a b)]\n\n/// Split a slice in two at index `m`\nlet split #t (a:t_Slice t) (m:usize{m <=. length a}):\n       Pure (t_Array t m & t_Array t (length a -! m))\n       True (ensures (fun (x,y) ->\n         x == slice a (sz 0) m /\\\n         y == slice a m (length a) /\\\n         concat #t x y == a)) = \n         let x = Seq.slice a 0 (v m) in\n         let y = Seq.slice a (v m) (Seq.length a) in\n         assert (Seq.equal a (concat x y));\n         (x,y)\n\nlet lemma_slice_append #t (x:t_Slice t) (y:t_Slice t) (z:t_Slice t):\n  Lemma (requires (range (v (length y) + v (length z)) usize_inttype /\\\n                   length y +! 
length z == length x /\\\n                   y == slice x (sz 0) (length y) /\\ \n                   z == slice x (length y) (length x)))\n        (ensures (x == concat y z)) = \n        assert (Seq.equal x (concat y z))\n\nlet lemma_slice_append_3 #t (x:t_Slice t) (y:t_Slice t) (z:t_Slice t) (w:t_Slice t):\n  Lemma (requires (range (v (length y) + v (length z) + v (length w)) usize_inttype /\\\n                   length y +! length z +! length w == length x /\\\n                   y == slice x (sz 0) (length y) /\\ \n                   z == slice x (length y) (length y +! length z) /\\\n                   w == slice x (length y +! length z) (length x)))\n        (ensures (x == concat y (concat z w))) =\n         assert (Seq.equal x (Seq.append y (Seq.append z w)))\n\nlet lemma_slice_append_4 #t (x y z w u:t_Slice t) :\n  Lemma (requires (range (v (length y) + v (length z) + v (length w) + v (length u)) usize_inttype /\\\n                   length y +! length z +! length w +! length u == length x /\\\n                   y == slice x (sz 0) (length y) /\\ \n                   z == slice x (length y) (length y +! length z) /\\\n                   w == slice x (length y +! length z) (length y +! length z +! length w) /\\\n                   u == slice x (length y +! length z +! length w) (length x)))\n        (ensures (x == concat y (concat z (concat w u)))) =\n         assert (Seq.equal x (Seq.append y (Seq.append z (Seq.append w u))))\n\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.BitVectors.fsti",
    "content": "module Rust_primitives.BitVectors\n\nopen FStar.Mul\nopen Rust_primitives.Arrays\nopen Rust_primitives.Integers\n\n// TODO: relate `num_bits` with a notion of bounded integer\n/// Number of bits carried by an integer of type `t`\ntype num_bits t = d: nat {d > 0 /\\ d <= bits t /\\ (signed t ==> d <= bits t)}\n\n/// States that `x` is a positive integer that fits in `d` bits\ntype bounded #t (x:int_t t) (d:num_bits t) =\n  v x >= 0 /\\ v x < pow2 d\n\n/// Integer of type `t` that carries at most `d` bits\ntype int_t_d t (d: num_bits t) =\n  n: int_t t {bounded n d}\n\n/// If `x` fits in `d` bits, then upper bits are zero\nval lemma_get_bit_bounded #t (x:int_t t) (d:num_bits t) (i:usize):\n  Lemma ((bounded x d /\\ v i >= d /\\ v i < bits t) ==>\n         get_bit x i == 0)\n        [SMTPat (get_bit #t x i); SMTPat (bounded x d)]\n\n/// If upper bits of `x` are zero, then `x` is bounded accordingly\nval lemma_get_bit_bounded' #t (x:int_t t) (d:num_bits t):\n  Lemma (requires forall i. 
v i > d ==> get_bit x i == 0)\n        (ensures bounded x d)\n\nopen FStar.FunctionalExtensionality\n\n/// A bit vector is a partial map from indexes to bits\ntype bit_vec (len: nat) = i:nat {i < len} ^-> bit\n\n/// Transform an array of integers to a bit vector\n#push-options \"--fuel 0 --ifuel 1 --z3rlimit 50\"\nlet bit_vec_of_int_t_array (#n: inttype) (#len: usize) \n                (arr: t_Array (int_t n) len)\n                (d: num_bits n): bit_vec (v len * d)\n  = on (i: nat {i < v len * d}) \n       (fun i -> get_bit (Seq.index arr (i / d)) (sz (i % d)))\n\nlet bit_vec_of_refined_int_t_array (#n: inttype) (#len: usize) \n                #refinement\n                (arr: t_Array (x: int_t n {refinement x}) len)\n                (d: num_bits n): bit_vec (v len * d)\n  = on (i: nat {i < v len * d})\n       (fun i -> get_bit (Seq.index arr (i / d)) (sz (i % d)))\n#pop-options\n\n/// Transform an array of `nat`s to a bit vector\n#push-options \"--fuel 0 --ifuel 1 --z3rlimit 50\"\nlet bit_vec_of_nat_array (#len: usize)\n                       (arr: t_Array nat len)\n                       (d: nat)\n                       : bit_vec (v len * d)\n  = on (i: nat {i < v len * d})\n       (fun i -> get_bit_nat (Seq.index arr (i / d)) (i % d))\n#pop-options\n\n/// Transforms a bit vector to an integer\nval bit_vec_to_int_t #t (d: num_bits t) (bv: bit_vec d): int_t t\n\n/// `bit_vec_to_int_t` and `get_bit` are (modulo usize) inverse\nval bit_vec_to_int_t_lemma\n    #t (d: num_bits t) (bv: bit_vec d)\n    i\n  : Lemma (get_bit (bit_vec_to_int_t d bv) (sz i) == bv i)\n\n/// Transforms a bit vector into an array of integers\nval bit_vec_to_int_t_array #t (#len: usize) (d: num_bits t) (bv: bit_vec (v len * d))\n  : Pure (t_Array (int_t t) len)\n         (requires True)\n         (ensures fun r -> (forall i. 
bit_vec_of_int_t_array r d i == bv i))\n\n/// Transforms a bit vector into an array of integers\nval bit_vec_to_nat_array (#len: usize) (d: nat) (bv: bit_vec (v len * d))\n  : Pure (t_Array nat len)\n         (requires True)\n         (ensures fun r -> (forall i. bit_vec_of_nat_array r d i == bv i)\n                      /\\ (forall i. Seq.index r i < pow2 d))\n\n/// Bit-wise semantics of `2^n-1`\nval get_bit_pow2_minus_one #t\n  (n: nat {pow2 n - 1 <= maxint t}) \n  (nth: usize {v nth < bits t})\n  : Lemma (  get_bit (mk_int #t (pow2 n - 1)) nth\n          == (if v nth < n then 1 else 0))\n\n/// Log2 table\nunfold let mask_inv_opt =\n  function | 0   -> Some 0\n           | 1   -> Some 1\n           | 3   -> Some 2\n           | 7   -> Some 3\n           | 15  -> Some 4\n           | 31  -> Some 5\n           | 63  -> Some 6\n           | 127 -> Some 7\n           | 255 -> Some 8\n           | 511 -> Some 9\n           | 1023  -> Some  10\n           | 2047  -> Some  11\n           | 4095  -> Some  12\n           | _   -> None\n\n/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns\n/// targetting machine integer literals of type `i32`\nval get_bit_pow2_minus_one_i32\n  (x: int {x < pow2 31 /\\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 32})\n  : Lemma ( get_bit (mk_i32 x) nth \n        == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))\n  [SMTPat (get_bit (mk_i32 x) nth)]\n\n/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns\n/// targetting machine integer literals of type `i16`\nval get_bit_pow2_minus_one_i16\n  (x: int {x < pow2 15 /\\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 16})\n  : Lemma ( get_bit (mk_i16 x) nth \n        == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))\n  [SMTPat (get_bit (mk_i16 x) nth)]\n\n/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns\n/// targetting machine integer literals of type `u32`\nval get_bit_pow2_minus_one_u32\n  (x: int {x < pow2 32 /\\ Some? 
(mask_inv_opt x)}) (nth: usize {v nth < 32})\n  : Lemma ( get_bit (mk_u32 x) nth \n        == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))\n  [SMTPat (get_bit (mk_u32 x) nth)]\n\n/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns\n/// targetting machine integer literals of type `u16`\nval get_bit_pow2_minus_one_u16\n  (x: int {x < pow2 16 /\\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 16})\n  : Lemma ( get_bit (mk_u16 x) nth \n        == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))\n  [SMTPat (get_bit (mk_u16 x) nth)]\n\n/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns\n/// targetting machine integer literals of type `u8`  \nval get_bit_pow2_minus_one_u8\n  (t: _ {t == u8_inttype})  \n  (x: int {x < pow2 8 /\\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 8})\n  : Lemma ( get_bit #t (mk_u8 x) nth \n        == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))\n  [SMTPat (get_bit #t (mk_u8 x) nth)]\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Char.fsti",
    "content": "module Rust_primitives.Char\n\n#set-options \"--max_fuel 0 --max_ifuel 1 --z3rlimit 20\"\n\ntype char : eqtype = FStar.Char.char\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Float.fsti",
    "content": "module Rust_primitives.Float\n\n#set-options \"--max_fuel 0 --max_ifuel 1 --z3rlimit 20\"\n\ntype float : eqtype\n\nval mk_float : string -> float\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Control_flow_monad.Mexception.fst",
    "content": "module Rust_primitives.Hax.Control_flow_monad.Mexception\nopen Core_models.Ops.Control_flow\n\nlet run #a: t_ControlFlow a a -> a\n    = function | ControlFlow_Continue v | ControlFlow_Break v -> v\n\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Control_flow_monad.Moption.fst",
    "content": "module Rust_primitives.Hax.Control_flow_monad.Moption\n\nlet run #a (f: Core_models.Option.t_Option (Core_models.Option.t_Option a)): Core_models.Option.t_Option a\n    = match f with\n    | Core_models.Option.Option_Some x -> x \n    | Core_models.Option.Option_None -> Core_models.Option.Option_None\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Control_flow_monad.Mresult.fst",
    "content": "module Rust_primitives.Hax.Control_flow_monad.Mresult\n\nlet run #a #e (f: Core_models.Result.t_Result (Core_models.Result.t_Result a e) e): Core_models.Result.t_Result a e\n    = match f with\n    | Core_models.Result.Result_Ok x -> x \n    | Core_models.Result.Result_Err e -> Core_models.Result.Result_Err e\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Folds.fsti",
    "content": "module Rust_primitives.Hax.Folds\n\nopen Rust_primitives\nopen Core_models.Ops.Range\nopen FStar.Mul\n\n(**** `s.chunks_exact(chunk_size).enumerate()` *)\n/// Predicate that asserts a slice `s_chunk` is exactly the nth chunk\n/// of the sequence `s`\nlet nth_chunk_of #t\n  (s: Seq.seq t)\n  (s_chunk: Seq.seq t {Seq.length s_chunk > 0})\n  (chunk_nth: nat {chunk_nth < Seq.length s / Seq.length s_chunk})\n  =  Seq.slice s (Seq.length s_chunk * chunk_nth) (Seq.length s_chunk * (chunk_nth + 1))\n  == s_chunk\n\n/// Fold function that is generated for `for` loops iterating on\n/// `s.chunks_exact(chunk_size).enumerate()`-like iterators\nval fold_enumerated_chunked_slice\n  (#t: Type0) (#acc_t: Type0)\n  (chunk_size: usize {v chunk_size > 0})\n  (s: t_Slice t)\n  (inv: acc_t -> (i:usize{v i <= Seq.length s / v chunk_size}) -> Type0)\n  (init: acc_t {inv init (sz 0)})\n  (f: ( acc:acc_t\n      -> item:(usize & t_Slice t) {\n        let (i, s_chunk) = item in\n          v i < Seq.length s / v chunk_size\n        /\\ length s_chunk == chunk_size\n        /\\ nth_chunk_of s s_chunk (v i)\n        /\\ inv acc i\n      }\n      -> acc':acc_t {\n        inv acc' (fst item +! 
sz 1)\n      }\n      )\n  )\n  : result: acc_t {inv result (mk_int (Seq.length s / v chunk_size))}\n\n/// Fold function that is generated for `for` loops iterating on\n/// `s.chunks_exact(chunk_size)`-like iterators\nval fold_chunked_slice\n  (#t: Type0) (#acc_t: Type0)\n  (chunk_size: usize {v chunk_size > 0})\n  (s: t_Slice t)\n  (inv: acc_t -> (i:usize) -> Type0)\n  (init: acc_t {inv init (sz 0)})\n  (f: ( acc:acc_t\n      -> item:(t_Slice t) {\n        length item == chunk_size /\\\n        inv acc (sz 0)\n      }\n      -> acc':acc_t {\n        inv acc' (sz 0)\n      }\n      )\n  )\n  : result: acc_t {inv result (mk_int 0)}\n\n(**** `s.enumerate()` *)\n/// Fold function that is generated for `for` loops iterating on\n/// `s.enumerate()`-like iterators\nval fold_enumerated_slice\n  (#t: Type0) (#acc_t: Type0)\n  (s: t_Slice t)\n  (inv: acc_t -> (i:usize{v i <= v (length s)}) -> Type0)\n  (init: acc_t {inv init (sz 0)})\n  (f: (acc:acc_t -> i:(usize & t) {v (fst i) < v (length s) /\\ snd i == Seq.index s (v (fst i)) /\\ inv acc  (fst i)}\n                 -> acc':acc_t    {v (fst i) < v (length s) /\\ inv acc' (fst i)}))\n  : result: acc_t {inv result (length s)}\n\nval fold_enumerated_slice_return\n  (#t: Type0) (#acc_t: Type0) (#ret: Type0)\n  (s: t_Slice t)\n  (inv: acc_t -> (i:usize{v i <= v (length s)}) -> Type0)\n  (init: acc_t {inv init (sz 0)})\n  (f: (acc:acc_t -> i:(usize & t) {v (fst i) < v (length s) /\\ snd i == Seq.index s (v (fst i)) (*/\\ inv acc  (fst i)*)}\n                 -> Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow ret (unit & acc_t)) (acc':acc_t)    (*{v (fst i) < v (length s) /\\ inv acc' (fst i)}*)))\n  : result: Core_models.Ops.Control_flow.t_ControlFlow ret acc_t(* {inv result (length s)} *)\n\n(**** `(start..end_).step_by(step)` *)\nunfold let fold_range_step_by_wf_index (#u: inttype)\n  (start: int_t u) (end_: int_t u)\n  (step: usize {v step > 0}) (strict: bool) (i: int)\n  = v start < v 
end_ ==> (let end_step = v end_ - 1 - ((v end_ - 1 - v start) % v step) in\n                          i >= v start \n                        /\\ (if strict then i <= end_step else i <= end_step + v step))\n  // /\\ i % v step == v start % v step\n\n#push-options \"--z3rlimit 80\"\nunfold let fold_range_step_by_upper_bound (#u: inttype)\n  (start: int_t u) (end_: int_t u)\n  (step: usize {v step > 0})\n  : end':int {fold_range_step_by_wf_index start end_ step false end'}\n  = if v end_ <= v start \n    then v end_\n    else\n      let range: nat = v end_ - v start in\n      let k: nat = range / v step in\n      let end' = v start + k * v step in\n      FStar.Math.Lemmas.division_propriety range (v step);\n      end'\n#pop-options\n\n/// Fold function that is generated for `for` loops iterating on\n/// `s.enumerate()`-like iterators\nval fold_range_step_by\n  (#acc_t: Type0) (#u: inttype)\n  (start: int_t u)\n  (end_: int_t u)\n  (step: usize {v step > 0 /\\ range (v end_ + v step) u})\n  (inv: acc_t -> (i:int_t u{fold_range_step_by_wf_index start end_ step false (v i)}) -> Type0)\n  (init: acc_t {inv init start})\n  (f: (acc:acc_t -> i:int_t u  {v i < v end_ - ((v end_ - 1 - v start) % v step) /\\ fold_range_step_by_wf_index start end_ step true (v i) /\\ inv acc i}\n                 -> acc':acc_t {(inv acc' (mk_int (v i + v step)))}))\n  : result: acc_t {inv result (mk_int (fold_range_step_by_upper_bound start end_ step))}\n\n(**** `start..end_` *)\nunfold let fold_range_wf_index (#u: inttype)\n  (start: int_t u) (end_: int_t u)\n  (strict: bool) (i: int)\n  = i >= v start \n     /\\ (if strict then i < v end_ else i <= v end_)\n\nunfold let range_empty (#u: inttype)\n  (start: int_t u) (end_: int_t u) = v start > v end_\n\nlet rec fold_range\n  (#acc_t: Type0) (#u: inttype)\n  (start: int_t u)\n  (end_: int_t u)\n  (inv: acc_t -> (i:int_t u{fold_range_wf_index start end_ false (v i)}) -> Type0)\n  (init: acc_t {~(range_empty start end_) ==> inv init start})\n  (f: 
(acc:acc_t -> i:int_t u  {v i <= v end_ /\\ fold_range_wf_index start end_ true (v i) /\\ inv acc i}\n                 -> acc':acc_t {(inv acc' (mk_int (v i + 1)))}))\n  : Tot (result: acc_t {if range_empty start end_ then result == init else inv result end_}) \n        (decreases v end_ - v start)\n  = if v start < v end_\n    then fold_range (start +! mk_int 1) end_ inv (f init start) f\n    else init\n\nlet rec fold_range_cf\n  (#acc_t: Type0) (#u: inttype)\n  (start: int_t u)\n  (end_: int_t u)\n  (inv: acc_t -> (i:int_t u{fold_range_wf_index start end_ false (v i)}) -> Type0)\n  (acc: acc_t {~(range_empty start end_) ==> inv acc start})\n  (f: (acc:acc_t -> i:int_t u {v i <= v end_ /\\ fold_range_wf_index start end_ true (v i) /\\ inv acc i}\n                  -> tuple:((Core_models.Ops.Control_flow.t_ControlFlow (unit & acc_t) acc_t))\n                    {\n                      let acc = match tuple with \n                        | Core_models.Ops.Control_flow.ControlFlow_Break ((), acc)\n                        | Core_models.Ops.Control_flow.ControlFlow_Continue acc -> acc in\n                      inv acc (mk_int (v i + 1))}))\n: Tot (res: acc_t{if range_empty start end_ then res == acc else (exists (final: int_t u). v start <= v final /\\ v final <= v end_ /\\ inv res final)}) \n (decreases v end_ - v start)\n  =\n  if v start < v end_\n  then match f acc start with\n       | Core_models.Ops.Control_flow.ControlFlow_Break ((), acc) -> acc\n       | Core_models.Ops.Control_flow.ControlFlow_Continue acc ->\n         fold_range_cf (start +! 
mk_int 1) end_ inv acc f\n  else acc\n\nlet rec fold_range_return\n  (#acc_t: Type0) (#ret_t: Type0) (#u: inttype)\n  (start: int_t u)\n  (end_: int_t u)\n  (inv: acc_t -> (i:int_t u{fold_range_wf_index start end_ false (v i)}) -> Type0)\n  (acc: acc_t )\n  (f: (acc:acc_t -> i:int_t u {v i <= v end_ /\\ fold_range_wf_index start end_ true (v i) }\n                  -> tuple:((Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Ops.Control_flow.t_ControlFlow ret_t (unit & acc_t))) acc_t)\n                    ))\n: Tot (Core_models.Ops.Control_flow.t_ControlFlow ret_t acc_t) (decreases v end_ - v start)\n  =\n  if v start < v end_\n  then match f acc start with\n       | Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Break res)-> Core_models.Ops.Control_flow.ControlFlow_Break res\n       \n       | Core_models.Ops.Control_flow.ControlFlow_Break (Core_models.Ops.Control_flow.ControlFlow_Continue ((), res)) -> Core_models.Ops.Control_flow.ControlFlow_Continue res\n       | Core_models.Ops.Control_flow.ControlFlow_Continue acc ->\n         fold_range_return (start +! mk_int 1) end_ inv acc f\n  else Core_models.Ops.Control_flow.ControlFlow_Continue acc\n\nval fold_return #it #acc #ret #item (i: it) (init: acc) \n  (f: acc -> item -> \n    Core_models.Ops.Control_flow.t_ControlFlow  \n    (Core_models.Ops.Control_flow.t_ControlFlow ret (unit & acc)) acc): \n  Core_models.Ops.Control_flow.t_ControlFlow ret acc\n  "
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Int.fst",
    "content": "module Rust_primitives.Hax.Int\n\nopen Rust_primitives\n\nunfold let from_machine (#t:inttype) (x:int_t t) : range_t t = v #t x\nunfold let into_machine (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fsti",
    "content": "module Rust_primitives.Hax.Monomorphized_update_at\n#set-options \"--z3rlimit 30\"\n\n/// Monomorphized versions of the `update_at` operator.\n\nopen Rust_primitives\nopen Rust_primitives.Hax\nopen Core_models.Ops.Range\n\nlet update_at_usize\n  (#t: Type0)\n  (s: t_Slice t)\n  (i: usize {v i < Seq.length s})\n  (x: t)\n  : t_Array t (length s)\n  = Seq.upd #t s (v i) x\n\nval update_at_range #n\n  (#t: Type0)\n  (s: t_Slice t)\n  (i: t_Range (int_t n))\n  (x: t_Slice t)\n  : Pure (t_Array t (length s))\n    (requires (v i.f_start >= 0 /\\ v i.f_start <= Seq.length s /\\\n               v i.f_end <= Seq.length s /\\\n               Seq.length x == v i.f_end - v i.f_start))\n    (ensures (fun res ->\n                Seq.slice res 0 (v i.f_start) == Seq.slice s 0 (v i.f_start) /\\\n                Seq.slice res (v i.f_start) (v i.f_end) == x /\\\n                Seq.slice res (v i.f_end) (Seq.length res) == Seq.slice s (v i.f_end) (Seq.length s)))\n\nval update_at_range_to #n\n  (#t: Type0)\n  (s: t_Slice t)\n  (i: t_RangeTo (int_t n))\n  (x: t_Slice t)\n  : Pure (t_Array t (length s))\n    (requires (v i.f_end >= 0 /\\ v i.f_end <= Seq.length s /\\\n               Seq.length x == v i.f_end))\n    (ensures (fun res ->\n                Seq.slice res 0 (v i.f_end) == x /\\\n                Seq.slice res (v i.f_end) (Seq.length res) == Seq.slice s (v i.f_end) (Seq.length s)))\n\nval update_at_range_from #n\n  (#t: Type0)\n  (s: t_Slice t)\n  (i: t_RangeFrom (int_t n))\n  (x: t_Slice t)\n  : Pure (t_Array t (length s))\n    (requires ( v i.f_start >= 0 /\\ v i.f_start <= Seq.length s /\\\n                Seq.length x == Seq.length s - v i.f_start))\n    (ensures (fun res ->\n                Seq.slice res 0 (v i.f_start) == Seq.slice s 0 (v i.f_start) /\\\n                Seq.slice res (v i.f_start) (Seq.length res) == x))\n\nval update_at_range_full\n  (#t: Type0)\n  (s: t_Slice t)\n  (i: t_RangeFull)\n  (x: t_Slice t)\n  : Pure (t_Array t (length s))\n   
 (requires (Seq.length x == Seq.length s))\n    (ensures (fun res -> res == x))\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Hax.fst",
    "content": "module Rust_primitives.Hax\n\nopen Rust_primitives.Integers\nopen Rust_primitives.Arrays\n\ntype t_Never = False\nlet never_to_any #t: t_Never -> t = (fun _ -> match () with)\n\nlet repeat #a (x: a) (len: usize): t_Array a len = \n  FStar.Seq.create (v len) x\n\nopen Core_models.Ops.Index\nclass update_at_tc self idx = {\n  [@@@FStar.Tactics.Typeclasses.tcinstance]\n  super_index: t_Index self idx;\n  update_at: s: self -> i: idx {f_index_pre s i} -> super_index.f_Output -> self;\n}\n\nopen Core_models.Slice\nopen Core_models.Array\nopen Core_models.Ops.Range\n\n/// We have an instance for `usize`, but we often work with refined\n/// `usize`, and F* typeclass inference doesn't support subtyping\n/// well, hence the instance below.\ninstance impl__index_refined t l r: t_Index (t_Array t l) (x: usize {r x})\n  = { f_Output = t;\n      f_index_pre = (fun (s: t_Array t l) (i: usize {r i}) -> v i >= 0 && v i < v l);\n      f_index_post = (fun _ _ _ -> true);\n      f_index = (fun s i -> Seq.index s (v i));\n    }\n\n/// Similarly to `impl__index_refined`, we need to define a instance\n/// for refined `usize`.\ninstance update_at_tc_array_refined t l r: update_at_tc (t_Array t l) (x: usize {r x}) = {\n  super_index = impl__index_refined t l r;\n  update_at = (fun arr i x -> FStar.Seq.upd arr (v i) x);\n}\n\ninstance impl__index t l: t_Index (t_Array t l) (usize)\n  = { f_Output = t;\n      f_index_pre = (fun (s: t_Array t l) (i: usize) -> v i >= 0 && v i < v l);\n      f_index_post = (fun _ _ _ -> true);\n      f_index = (fun s i -> Seq.index s (v i));\n    }\n\ninstance update_at_tc_array t l: update_at_tc (t_Array t l) (usize) = {\n  super_index = FStar.Tactics.Typeclasses.solve <: t_Index (t_Array t l) (usize);\n  update_at = (fun arr i x -> FStar.Seq.upd arr (v i) x);\n}\n\n\nlet update_at_tc_array_range_super t l: t_Index (t_Array t l) (t_Range (usize))\n  = FStar.Tactics.Typeclasses.solve\nlet update_at_tc_array_range_to_super t l: t_Index (t_Array 
t l) (t_RangeTo (usize))\n  = FStar.Tactics.Typeclasses.solve\nlet update_at_tc_array_range_from_super t l: t_Index (t_Array t l) (t_RangeFrom (usize))\n  = FStar.Tactics.Typeclasses.solve\nlet update_at_tc_array_range_full_super t l: t_Index (t_Array t l) t_RangeFull\n  = FStar.Tactics.Typeclasses.solve\n\nassume val update_at_array_range t l\n  (s: t_Array t l) (i: t_Range (usize) {(update_at_tc_array_range_super t l).f_index_pre s i})\n  : (update_at_tc_array_range_super t l).f_Output -> t_Array t l\nassume val update_at_array_range_to t l\n  (s: t_Array t l) (i: t_RangeTo (usize) {(update_at_tc_array_range_to_super t l).f_index_pre s i})\n  : (update_at_tc_array_range_to_super t l).f_Output -> t_Array t l\nassume val update_at_array_range_from t l\n  (s: t_Array t l) (i: t_RangeFrom (usize) {(update_at_tc_array_range_from_super t l).f_index_pre s i})\n  : (update_at_tc_array_range_from_super t l).f_Output -> t_Array t l\nassume val update_at_array_range_full t l\n  (s: t_Array t l) (i: t_RangeFull)\n  : (update_at_tc_array_range_full_super t l).f_Output -> t_Array t l\n\ninstance update_at_tc_array_range t l: update_at_tc (t_Array t l) (t_Range (usize)) = {\n  super_index = update_at_tc_array_range_super t l;\n  update_at = update_at_array_range t l\n}\ninstance update_at_tc_array_range_to t l: update_at_tc (t_Array t l) (t_RangeTo (usize)) = {\n  super_index = update_at_tc_array_range_to_super t l;\n  update_at = update_at_array_range_to t l\n}\ninstance update_at_tc_array_range_from t l: update_at_tc (t_Array t l) (t_RangeFrom (usize)) = {\n  super_index = update_at_tc_array_range_from_super t l;\n  update_at = update_at_array_range_from t l\n}\ninstance update_at_tc_array_range_full t l: update_at_tc (t_Array t l) t_RangeFull = {\n  super_index = update_at_tc_array_range_full_super t l;\n  update_at = update_at_array_range_full t l\n}\n\n\nlet (.[]<-) #self #idx {| update_at_tc self idx |} (s: self) (i: idx {f_index_pre s i})\n  = update_at s i\n\nunfold let 
array_of_list (#t:Type)\n  (n: nat {n < maxint U16})\n  (l: list t {FStar.List.Tot.length l == n})\n  : t_Array t (sz n)\n  = Seq.seq_of_list l\n\n(* class iterator_return (self: Type u#0): Type u#1 = {\n  [@@@FStar.Tactics.Typeclasses.tcresolve]\n  parent_iterator: Core_models.Iter.Traits.Iterator.t_Iterator self;\n  f_fold_return: #b:Type0 -> s:self -> b -> (b -> i:parent_iterator.f_Item{parent_iterator.f_contains s i} -> Core_models.Ops.Control_flow.t_ControlFlow b b) -> Core_models.Ops.Control_flow.t_ControlFlow b b;\n} *)\nlet while_loop #acc_t \n  (inv: acc_t -> Type0)\n  (condition: (c:acc_t {inv c}) -> bool) \n  (fuel: (a:acc_t{inv a} -> nat))\n  (init: acc_t {inv init}) \n  (f: (i:acc_t{inv i /\\ condition i} -> o:acc_t{inv o /\\ fuel o < fuel i})): \n  (res: acc_t {inv res /\\ not (condition res)})\n  = \n  let rec while_loop_internal\n  (current: acc_t {inv current}): \n  Tot (res: acc_t {inv res /\\ not (condition res)}) (decreases (fuel current))\n  = if condition current\n    then \n      let next = f current in \n      assert (fuel next < fuel current);\n      while_loop_internal next\n    else current in \n  while_loop_internal init\n\nassume val while_loop_return #acc_t #ret_t \n  (inv: acc_t -> Type0)\n  (condition: (c:acc_t {inv c}) -> bool) \n  (fuel: (a:acc_t -> nat))\n  (init: acc_t ) \n  (f: (acc_t -> Core_models.Ops.Control_flow.t_ControlFlow \n  (Core_models.Ops.Control_flow.t_ControlFlow ret_t (Prims.unit & acc_t)) acc_t))\n  : Core_models.Ops.Control_flow.t_ControlFlow ret_t acc_t\n\n/// Represents backend failures\nlet failure #t (_error: string) (_ast: string): Pure t False (fun _ -> True) = ()\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Integers.fsti",
    "content": "module Rust_primitives.Integers\n\nopen FStar.Mul\n\n#set-options \"--max_fuel 0 --max_ifuel 1 --z3rlimit 20\"\n\nval pow2_values: x:nat -> Lemma\n  (let p = pow2 x in\n   match x with\n   | 0  -> p=1\n   | 1  -> p=2\n   | 8  -> p=256\n   | 16 -> p=65536\n   | 31 -> p=2147483648\n   | 32 -> p=4294967296\n   | 63 -> p=9223372036854775808\n   | 64 -> p=18446744073709551616\n   | 127 -> p=170141183460469231731687303715884105728\n   | 128 -> p=340282366920938463463374607431768211456\n   | 2 | 3 | 4 | 5 | 6 | 7\n   | 9 | 10 | 11 | 12 | 13 | 14 | 15 \n   | 17 | 18 | 19 | 20 | 21 | 22 | 23\n   | 24 | 25 | 26 | 27 | 28 | 29 | 30\n   | 33 | 34 | 35 | 36 | 37 | 38 | 39\n   | 40 | 41 | 42 | 43 | 44 | 45 | 46\n   | 47 | 48 | 49 | 50 | 51 | 52 | 53\n   | 54 | 55 | 56 | 57 | 58 | 59 | 60\n   | 61 | 62 | 63 | 65 | 127 | 128 -> p = normalize_term (pow2 x)\n   | _ -> True)\n  [SMTPat (pow2 x)]\n\ntype inttype =\n  | I8 | I16 | I32 | I64 | I128 | ISIZE\n  | U8 | U16 | U32 | U64 | U128 | USIZE\n\nlet unsigned t = match t with\n  | U8 | U16 | U32 | U64 | U128 | USIZE -> true\n  | I8 | I16 | I32 | I64 | I128 | ISIZE -> false\n\nlet signed t = match t with\n  | U8 | U16 | U32 | U64 | U128 | USIZE -> false\n  | I8 | I16 | I32 | I64 | I128 | ISIZE -> true\n\ntype uinttype = t:inttype{unsigned t}\n\nval size_bits:n:nat{n == 32 \\/ n == 64}\n\nlet bits t = match t with\n  | U8 | I8 -> 8 \n  | U16 | I16 -> 16\n  | U32 | I32 -> 32\n  | U64 | I64 -> 64\n  | U128 | I128 -> 128 \n  | USIZE | ISIZE -> size_bits \n\nlet minint (t:inttype) =\n  if unsigned t then 0 else -(pow2 (bits t - 1))\n\nlet maxint (t:inttype) =\n  if unsigned t then pow2 (bits t) - 1\n  else pow2 (bits t - 1) - 1\n\nlet max_usize = maxint USIZE\nlet max_isize = maxint ISIZE\n\nlet range (n:int) (t:inttype) : bool =\n  minint t <= n && n <= maxint t\n\nlet included (t: inttype) (t': inttype) =\n  minint t' <= minint t && maxint t <= maxint t'\n\n\nlet range_t t = x:int{range x t}\n\ntype int_t t = | MkInt: 
range_t t -> int_t t\n\nlet u8_inttype = U8\nlet i8_inttype = I8\nlet u16_inttype = U16\nlet i16_inttype = I16\nlet u32_inttype = U32\nlet i32_inttype = I32\nlet u64_inttype = U64\nlet i64_inttype = I64\nlet u128_inttype = U128\nlet i128_inttype = I128\nlet usize_inttype = USIZE\nlet isize_inttype = ISIZE\n\ntype u8 = int_t U8 \ntype i8 = int_t I8\ntype u16 = int_t U16\ntype i16 = int_t I16\ntype u32 = int_t U32\ntype i32 = int_t I32\ntype u64 = int_t U64\ntype i64 =  int_t I64\ntype u128 = int_t U128\ntype i128 = int_t I128\ntype usize = int_t USIZE\ntype isize = int_t ISIZE\n\n[@(strict_on_arguments [0])]\nlet v (#t:inttype) (x:int_t t) : range_t t = x._0\n\n[@(strict_on_arguments [0])]\nlet mk_int (#t:inttype) (n:range_t t) : int_t t = MkInt n\n\nlet mk_int_v_lemma (#t:inttype) (a:int_t t) : Lemma\n  (mk_int #t (v #t a) == a)\n  [SMTPat (mk_int #t (v #t a))]\n  = ()\n\nlet v_mk_int_lemma (#t:inttype) (n:range_t t) : Lemma\n  (v #t (mk_int #t n) == n)\n  [SMTPat (v #t (mk_int #t n))]\n  = ()\n\nlet mk_u8 x = mk_int #U8 x\nlet mk_i8  x = mk_int #I8 x\nlet mk_u16  x = mk_int #U16 x\nlet mk_i16  x = mk_int #I16 x\nlet mk_u32 x = mk_int #U32 x\nlet mk_i32  x = mk_int #I32 x\nlet mk_u64 x = mk_int #U64 x\nlet mk_i64  x = mk_int #I64 x\nlet mk_u128 x = mk_int #U128 x\nlet mk_i128  x = mk_int #I128 x\nlet mk_usize x = mk_int #USIZE x\nlet mk_isize  x = mk_int #ISIZE x\n\nlet sz x = mk_usize x\nlet isz  x = mk_isize x\n\nlet from_uint8 (x:FStar.UInt8.t) : u8  = mk_int (FStar.UInt8.v x)\nlet from_int8 (x:FStar.Int8.t) : i8  = mk_int (FStar.Int8.v x)\nlet from_uint16 (x:FStar.UInt16.t) : u16  = mk_int (FStar.UInt16.v x)\nlet from_int16 (x:FStar.Int16.t) : i16  = mk_int (FStar.Int16.v x)\nlet from_uint32 (x:FStar.UInt32.t) : u32  = mk_int (FStar.UInt32.v x)\nlet from_int32 (x:FStar.Int32.t) : i32  = mk_int (FStar.Int32.v x)\nlet from_uint64 (x:FStar.UInt64.t) : u64  = mk_int (FStar.UInt64.v x)\nlet from_int64 (x:FStar.Int64.t) : i64  = mk_int (FStar.Int64.v x)\nlet 
from_uint128 (x:FStar.UInt128.t) : u128  = mk_int (FStar.UInt128.v x)\nlet from_int128 (x:FStar.Int128.t) : i128  = mk_int (FStar.Int128.v x)\nlet from_usize (x:FStar.UInt32.t) : usize  = mk_int (FStar.UInt32.v x)\nlet from_isize (x:FStar.Int32.t) : isize  = mk_int (FStar.Int32.v x)\n\nlet to_uint8 (x:u8) : FStar.UInt8.t = FStar.UInt8.uint_to_t (v x)\nlet to_int8 (x:i8) : FStar.Int8.t  = FStar.Int8.int_to_t (v x)\nlet to_uint16 (x:u16) : FStar.UInt16.t  = FStar.UInt16.uint_to_t (v x)\nlet to_int16 (x:i16) : FStar.Int16.t  = FStar.Int16.int_to_t (v x)\nlet to_uint32 (x:u32) : FStar.UInt32.t  = FStar.UInt32.uint_to_t (v x)\nlet to_int32 (x:i32) : FStar.Int32.t  = FStar.Int32.int_to_t (v x)\nlet to_uint64 (x:u64) : FStar.UInt64.t  = FStar.UInt64.uint_to_t (v x)\nlet to_int64 (x:i64) : FStar.Int64.t  = FStar.Int64.int_to_t (v x)\nlet to_uint128 (x:u128) : FStar.UInt128.t  = FStar.UInt128.uint_to_t (v x)\nlet to_int128 (x:i128) : FStar.Int128.t  = FStar.Int128.int_to_t (v x)\n\nlet modulus (t:inttype) = pow2 (bits t)\n\n(* Wrap-around modulo: wraps into [-p/2; p/2[ *)\nlet op_At_Percent (v:int) (p:int{p>0 /\\ p%2=0}) : Tot int =\n  let m = v % p in if m >= p/2 then m - p else m\n\nlet op_At_Percent_Dot x t : range_t t =\n  if unsigned t then x % modulus t\n  else x @% modulus t\n\nlet cast (#t:inttype) (#t':inttype)\n    (u1:int_t t{range (v u1) t'}) =\n    mk_int #t' (v u1)\nlet cast_mod (#t:inttype) (#t':inttype)\n    (u1:int_t t) = \n    mk_int #t' (v u1 @%. t')\n\n/// Simplifies double casts when possible.\n/// For example, with `x` a i32, this lemma rewrites `x as i64 as i32` into `x`.\nlet cast_identity_lemma\n  (a: inttype) (b: inttype {bits b >= bits a})\n  (n: int_t a)\n  : Lemma (cast_mod #b #a (cast_mod #a #b n) == n)\n    [SMTPat (cast_mod #b #a (cast_mod #a #b n))]\n  = FStar.Math.Lemmas.small_mod (abs (v n)) (modulus a)\n\n/// Arithmetic operations\n/// \n\nlet add_mod (#t:inttype) (a:int_t t) (b:int_t t) =\n    mk_int #t ((v a + v b) @%. 
t)\n\nlet add_sat (#t:inttype) (a:int_t t) (b:int_t t) =\n    mk_int #t (if (v a + v b) <= minint t \n              then minint t \n              else \n                if (v a + v b) >= maxint t \n                then maxint t \n                else  (v a + v b))\n\nlet sub_sat (#t:inttype) (a:int_t t) (b:int_t t) =\n    mk_int #t (if (v a - v b) <= minint t \n              then minint t \n              else \n                if (v a - v b) >= maxint t \n                then maxint t \n                else  v a - v b)\n    \nlet add (#t:inttype) (a:int_t t)\n        (b:int_t t{range (v a + v b) t}) =\n    mk_int #t (v a + v b)\n\nlet incr (#t:inttype) (a:int_t t{v a < maxint t}) =\n    mk_int #t (v a + 1)\n\nlet mul_mod (#t:inttype) (a:int_t t)\n            (b:int_t t) =\n            mk_int #t (v a * v b @%. t)\n\nlet mul_overflow (#t:inttype) (a:int_t t)\n                 (b:int_t t) =\n                 (mk_int #t (v a * v b @%. t), (v a * v b > maxint t || v a * v b < minint t))\nlet mul (#t:inttype) (a:int_t t)\n        (b:int_t t{range (v a * v b) t}) =\n        mk_int #t (v a * v b)\n\nlet sub_mod (#t:inttype) (a:int_t t) (b:int_t t) =\n    mk_int #t ((v a - v b) @%. 
t)\n\nlet sub (#t:inttype) (a:int_t t)\n        (b:int_t t{range (v a - v b) t}) =\n    mk_int #t (v a - v b)\n\nlet decr (#t:inttype) (a:int_t t{minint t < v a}) =\n    mk_int #t (v a - 1)\n\nlet div (#t:inttype) (a:int_t t) (b:int_t t{v b <> 0 /\\ (unsigned t \\/ range (v a / v b) t)}) =\n  assert (unsigned t \\/ range (v a / v b) t);\n  mk_int #t (v a / v b)\n  \nlet mod (#t:inttype) (a:int_t t) (b:int_t t{v b <> 0}) =\n  mk_int #t (v a % v b)\n\n\n/// Comparison Operators\n/// \nlet eq (#t:inttype) (a:int_t t) (b:int_t t) = v a = v b\nlet ne (#t:inttype) (a:int_t t) (b:int_t t) = v a <> v b\nlet lt (#t:inttype) (a:int_t t) (b:int_t t) = v a < v b\nlet lte (#t:inttype) (a:int_t t) (b:int_t t) = v a <= v b\nlet gt (#t:inttype) (a:int_t t) (b:int_t t) = v a > v b\nlet gte (#t:inttype) (a:int_t t) (b:int_t t) = v a >= v b\n\n\n/// Bitwise Operations\n\n/// Todo: define bitvector-based normalizable definitions\n///       for all these operations\n\nlet ones (#t:inttype) : n:int_t t =\n  if unsigned t then mk_int #t (pow2 (bits t) - 1)\n  else mk_int #t (-1)\n\nlet zero (#t:inttype) : n:int_t t =\n  mk_int #t 0\n\nval lognot: #t:inttype -> int_t t -> int_t t\nval lognot_lemma: #t:inttype -> a:int_t t -> Lemma\n  (lognot #t zero == ones /\\\n   lognot #t ones == zero /\\\n   lognot (lognot a) == a /\\\n   (signed t ==> v (lognot a) = -1 - v a) /\\\n   (unsigned t ==> v (lognot a)  = pow2 (bits t) - 1 - v a)\n   )\n\nval logxor: #t:inttype\n  -> int_t t\n  -> int_t t\n  -> int_t t\n \nval logxor_lemma: #t:inttype -> a:int_t t -> b:int_t t -> Lemma\n  (a `logxor` a == zero /\\\n   (a `logxor` b == zero ==> b == a) /\\\n   a `logxor` (a `logxor` b) == b /\\\n   a `logxor` (b `logxor` a) == b /\\\n   zero `logxor` a == a /\\\n   a `logxor` zero == a /\\\n   ones `logxor` a == lognot a /\\\n   a `logxor` ones == lognot a)\n    \nval logand: #t:inttype\n  -> int_t t\n  -> int_t t\n  -> int_t t\n\nval logand_lemma: #t:inttype -> a:int_t t -> b:int_t t ->\n  Lemma (logand a 
zero == zero /\\\n         logand zero a == zero /\\\n         logand a ones == a /\\\n         logand ones a == a /\\\n         (a == b ==> logand a b == a) /\\\n         (b == lognot a ==> logand a b == zero) /\\\n         (v a >= 0 ==> (v (logand a b) >= 0) /\\ (v (logand a b) <= v a)) /\\\n         (v b >= 0 ==> (v (logand a b) >= 0) /\\ (v (logand a b) <= v b)))\n\nval logand_mask_lemma: #t:inttype\n  -> a:int_t t\n  -> m:nat{m < bits t} ->\n  Lemma (pow2 m < maxint t /\\\n         logand a (sub #t (mk_int #t (pow2 m)) (mk_int #t 1)) ==\n         mk_int (v a % pow2 m))\n  [SMTPat (logand #t a (sub #t (mk_int #t (pow2 m)) (mk_int #t 1)))]\n\nval logor: #t:inttype\n  -> int_t t\n  -> int_t t\n  -> int_t t\n\nval logor_disjoint: #t:inttype -> a:int_t t -> b:int_t t -> m:nat{m < bits t} ->\n  Lemma\n    (requires 0 <= v a /\\ 0 <= v b /\\ v a % pow2 m == 0 /\\ v b < pow2 m)\n    (ensures  v (logor a b) == v a + v b)\n\nval logor_lemma: #t:inttype -> a:int_t t -> b:int_t t ->\n  Lemma (logor a zero == a /\\\n         logor a ones == ones /\\\n         logor zero a == a /\\\n         logor ones a == ones /\\\n         ((v a >= 0 /\\ v b >= 0) ==> (v (logor a b) >= v a /\\ v (logor a b) >= v b)))\n\nunfold type shiftval (t:inttype) (t':inttype) =\n     b:int_t t'{v b >= 0 /\\ v b < bits t}\nunfold type rotval (t:inttype) (t':inttype) =\n     b:int_t t'{v b > 0 /\\ v b < bits t}\n\n#push-options \"--z3version 4.13.3\"\n[@@\"opaque_to_smt\"]\nlet shift_right (#t:inttype) (#t':inttype)\n    (a:int_t t) (b:shiftval t t') : int_t t\n    = mk_int #t (v a / pow2 (v b))\n#pop-options\n\nval shift_right_lemma (#t:inttype) (#t':inttype)\n    (a:int_t t) (b:shiftval t t'):\n    Lemma (v (shift_right #t #t' a b) == (v a / pow2 (v b)))\n          [SMTPat (shift_right #t #t' a b)]\n    \nval shift_left (#t:inttype) (#t':inttype)\n    (a:int_t t) (b:shiftval t t') : int_t t\n\nval shift_left_positive_lemma (#t:inttype) (#t':inttype)\n    (a:int_t t) (b:shiftval t t'):\n    Lemma 
(requires (unsigned t \\/ v a >= 0))\n          (ensures ((v (shift_left #t #t' a b) == (v a * pow2 (v b)) @%. t)))\n          [SMTPat (shift_left #t #t' a b)]\n\n\nval rotate_right: #t:inttype{unsigned t} -> #t':inttype\n  -> a:int_t t\n  -> rotval t t'\n  -> int_t t\n\nval rotate_left: #t:inttype{unsigned t} -> #t':inttype\n  -> a:int_t t\n  -> rotval t t'\n  -> int_t t\n\nlet shift_right_i (#t:inttype) (#t':inttype) (s:shiftval t t') (u:int_t t) : int_t t = shift_right u s\n\nlet shift_left_i (#t:inttype) (#t':inttype) (s:shiftval t t') (u:int_t t{v u >= 0}) : int_t t = shift_left u s\n\nlet rotate_right_i (#t:inttype{unsigned t}) (#t':inttype) (s:rotval t t') (u:int_t t) : int_t t = rotate_right u s\n\nlet rotate_left_i (#t:inttype{unsigned t}) (#t':inttype) (s:rotval t t') (u:int_t t) : int_t t = rotate_left u s\n\nlet abs_int (#t:inttype) (a:int_t t{minint t < v a}) =\n    mk_int #t (abs (v a))\n\nlet neg (#t:inttype{signed t}) (a:int_t t{range (0 - v a) t}) =\n    mk_int #t (0 - (v a))\n\nval neg_equiv_lemma: #t:inttype{signed t /\\ not (I128? t)}\n  -> a:int_t t{range (0 - v a) t}\n  -> Lemma (neg a == sub #t (mk_int 0) a /\\\n          (lognot a = sub (neg a) (mk_int 1)))\n\n\n///\n/// Operators available for all machine integers\n///\n\n// Strict: with precondition\nunfold\nlet (+!) #t = add #t\n\n// Wrapping: no precondition\nunfold\nlet (+.) #t = add_mod #t\n\nunfold\nlet ( *! ) #t = mul #t\n\nunfold\nlet ( *. ) #t = mul_mod #t\n\nunfold\nlet ( -! ) #t = sub #t\n\nunfold\nlet ( -. ) #t = sub_mod #t\n\nunfold\nlet ( >>! ) #t #t' = shift_right #t #t'\n\nunfold\nlet ( <<! ) #t #t' = shift_left #t #t'\n\nunfold\nlet ( >>>. ) #t #t' = rotate_right #t #t'\n\nunfold\nlet ( <<<. ) #t #t' = rotate_left #t #t'\n\nunfold\nlet ( ^. ) #t = logxor #t\n\nunfold\nlet ( |. ) #t = logor #t\n\nunfold\nlet ( &. ) #t = logand #t\n\nunfold\nlet ( ~. ) #t = lognot #t\n\nunfold\nlet (/!) #t = div #t\n\nunfold\nlet (%!) #t = mod #t\n\nunfold\nlet (=.) = (=)\n\nunfold\nlet (<>.) 
= (<>)\n\nunfold\nlet (<.) #t = lt #t\n\nunfold\nlet (<=.) #t = lte #t\n\nunfold\nlet (>.) #t = gt #t\n\nunfold\nlet (>=.) #t = gte #t\n\ntype bit = n: nat {n < 2}\n\n/// Mathematical `get_bit` definition on `nat`s\nlet get_bit_nat (x: nat) (nth: nat): bit\n  = (x / pow2 nth) % 2\n\n/// `get_bit` definition for machine integer of any size and signedness\n[@\"opaque_to_smt\"]\nlet get_bit (#n: inttype) (x: int_t n) (nth: usize {v nth < bits n}): bit\n  = if v x >= 0 then get_bit_nat (v x) (v nth)\n               else // two's complement\n                    get_bit_nat (pow2 (bits n) + v x) (v nth)\n\nunfold let bit_and (x y: bit): bit = match x, y with | (1, 1) -> 1 | _ -> 0\nunfold let bit_or  (x y: bit): bit = match x, y with | (0, 0) -> 0 | _ -> 1\n\n/// Bit-wise semantics for `&.`\nval get_bit_and #t (x y: int_t t) (i: usize {v i < bits t})\n  : Lemma (get_bit (x &. y) i == get_bit x i `bit_and` get_bit y i)\n          [SMTPat (get_bit (x &. y) i)]\n\n/// Bit-wise semantics for `|.`\nval get_bit_or #t (x y: int_t t) (i: usize {v i < bits t})\n  : Lemma (get_bit (x |. y) i == get_bit x i `bit_or` get_bit y i)\n          [SMTPat (get_bit (x |. y) i)]\n\n/// Bit-wise semantics for `<<!`\nval get_bit_shl #t #u (x: int_t t) (y: int_t u) (i: usize {v i < bits t})\n  : Lemma (requires v y >= 0 /\\ v y < bits t)\n          (ensures get_bit (x <<! y) i \n                == (if v i < v y then 0 else get_bit x (mk_int (v i - v y))))\n    [SMTPat (get_bit (x <<! y) i)]\n\n/// Bit-wise semantics for `>>!`\nval get_bit_shr #t #u (x: int_t t) (y: int_t u) (i: usize {v i < bits t})\n  : Lemma (requires v y >= 0 /\\ v y < bits t)\n          (ensures get_bit (x >>! y) i \n                == (if v i < bits t - v y\n                    then get_bit x (mk_int (v i + v y))\n                    else if signed t\n                         then get_bit x (mk_int (bits t - 1))\n                         else 0))\n    [SMTPat (get_bit (x >>! 
y) i)]\n\n/// Bit-wise semantics of integer casts\nval get_bit_cast #t #u\n  (x: int_t t) (nth: usize)\n  : Lemma (requires v nth < bits u /\\ v nth < bits t)\n          (ensures get_bit (cast_mod #t #u x) nth == get_bit x nth)\n          [SMTPat (get_bit (cast_mod #t #u x) nth)]\n\nval get_bit_cast_extend #t #u\n  (x: int_t t) (nth: usize)\n  : Lemma (requires bits t < bits u /\\ v nth >= bits t /\\ v nth < bits u)\n          (ensures get_bit (cast_mod #t #u x) nth == 0)\n          [SMTPat (get_bit (cast_mod #t #u x) nth)]\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Iterators.fsti",
    "content": "module Rust_primitives.Iterators\n\nopen Rust_primitives\nopen Core_models.Ops.Range\nopen FStar.Mul\n\nval foldi_range  (#n:inttype) (#acc_t:Type)\n                 (#inv:(acc_t -> i:int_t n -> Type))\n                 (r: t_Range (int_t n){r.f_start <=. r.f_end}) \n                 (acc:acc_t{inv acc r.f_start})\n                 (f: (acc:acc_t -> i:int_t n{i >=. r.f_start /\\ i <. r.f_end /\\ inv acc i}\n                       -> acc':acc_t{inv acc' (i +! mk_int 1)}))\n                 : res:acc_t{inv res r.f_end}\n\nval foldi_range_step_by  (#n:inttype) (#acc_t:Type)\n                 (#inv:(acc_t -> i:int_t n -> Type))\n                 (r: t_Range (int_t n){r.f_start <=. r.f_end}) \n                 (step: usize{v step > 0 /\\ range (v step) n /\\ range (v r.f_end + v step) n})\n                 (acc:acc_t{inv acc r.f_start})\n                 (f: (acc:acc_t -> i:int_t n{i >=. r.f_start /\\ i <. r.f_end /\\ \n                                            (v i - v r.f_start) % (v step) == 0 /\\ inv acc i}\n                       -> acc':acc_t{inv acc' (i +! mk_int #n (v step))}))\n                 : res:acc_t{inv res r.f_end}\n\n/// Predicate that asserts a slice `s_chunk` is exactly the nth chunk\n/// of the sequence `s`\nlet nth_chunk_of #t\n  (s: Seq.seq t)\n  (s_chunk: Seq.seq t {Seq.length s_chunk > 0})\n  (chunk_nth: nat {chunk_nth < Seq.length s / Seq.length s_chunk})\n  =  Seq.slice s (Seq.length s_chunk * chunk_nth) (Seq.length s_chunk * (chunk_nth + 1))\n  == s_chunk\n\nval foldi_chunks_exact\n  (#t #acc_t:Type)\n  (#inv: acc_t -> usize -> Type)\n  (s: t_Slice t)\n  (chunk_len: usize {v chunk_len > 0})\n  (acc: acc_t {inv acc (sz 0)})\n  (f: ( acc:acc_t\n      -> it: (usize & t_Array t chunk_len) {\n              let (i, s_chunk) = it in\n                v i < Seq.length s / v chunk_len\n              /\\ nth_chunk_of s s_chunk (v i)\n              /\\ inv acc i\n        }\n      -> acc': acc_t {inv acc' (fst it +! 
sz 1)}\n      )\n  )\n  : res:acc_t{inv res (length s /! chunk_len)}\n\nval fold_chunks_exact\n                 (#t:Type) (#acc_t:Type)\n                 (#inv:(acc_t -> Type))\n                 (s:t_Slice t)\n                 (chunk_len:usize{v chunk_len > 0}) // /\\ Seq.length s % v chunk_len == 0})\n                 (acc:acc_t{inv acc})\n                 (f: (acc:acc_t -> it:t_Array t chunk_len{inv acc}\n                       -> acc':acc_t{inv acc'}))\n                 : res:acc_t{inv res}\n\n\nval foldi_slice  (#t:Type) (#acc_t:Type)\n                 (#inv:(acc_t -> usize -> Type))\n                 (sl: t_Slice t)\n                 (acc:acc_t{inv acc (sz 0)})\n                 (f: (acc:acc_t -> it:(usize & t){\n                                  let (i,item) = it in\n                                  v i >= 0 /\\\n                                  v i < Seq.length sl /\\\n                                  Seq.index sl (v i) == item /\\\n                                  inv acc i}\n                       -> acc':acc_t{inv acc' (fst it +! sz 1)}))\n                 : res:acc_t{inv res (length sl)}\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Mem.fsti",
    "content": "module Rust_primitives.Mem\n\nopen FStar.Mul\n\nlet copy (#t: Type0) (x: t) = x\nlet replace (#t: Type0) (dest: t) (src: t) = (src, dest)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Notations.fsti",
    "content": "module Rust_primitives.Notations\nopen Rust_primitives\n\nclass negation_tc self = {\n  ( ~. ): self -> self;\n}\n\ninstance negation_for_integers #t: negation_tc (int_t t) = {\n  ( ~. ) = fun x -> lognot x\n}\n\ninstance negation_for_bool: negation_tc bool = {\n  ( ~. ) = not\n}\n\nopen Core_models.Ops.Index\n\nlet ( .[] ) #self #idx {| inst: t_Index self idx |}\n  (s:self) (i:idx{f_index_pre s i}): inst.f_Output\n  = f_index s i\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Sequence.fst",
    "content": "module Rust_primitives.Sequence\n\nopen Rust_primitives.Integers\n\ntype t_Seq t = Rust_primitives.Arrays.t_Slice t\n\nlet seq_empty #t () : t_Seq t = FStar.Seq.empty\n\nlet seq_from_slice #t (s: Rust_primitives.Arrays.t_Slice t) : t_Seq t = s \n\nlet seq_from_array #t n (s: Rust_primitives.Arrays.t_Array t n) : t_Seq t = s \n\nlet seq_to_slice #t (s: t_Seq t) : Rust_primitives.Arrays.t_Slice t = s \n\nlet seq_len #t (s: t_Seq t): usize = mk_usize (Seq.length s)\n\nlet seq_slice #t (s: t_Seq t) (b: usize) (e: usize{e >=. b && e <=. seq_len s}): t_Seq t = Seq.slice s (v b) (v e) \n\nlet seq_index #t (s: t_Seq t) (i: usize{i <. seq_len s}): t = Rust_primitives.Slice.slice_index s i \n\nlet seq_last #t (s: t_Seq t{seq_len s >. mk_usize 0}): t = Seq.index s ((Seq.length s) - 1)\n\nlet seq_first #t (s: t_Seq t{seq_len s >. mk_usize 0}): t = Seq.index s 0\n\nlet seq_concat #t (s1: t_Seq t) (s2: t_Seq t {(Seq.length s1) + (Seq.length s2) <= max_usize}): t_Seq t = Seq.append s1 s2\n\nlet seq_one #t (x: t): t_Seq t = Seq.create 1 x\n\nlet seq_create #t (x: t) (n: usize): t_Seq t = Seq.create (v n) x\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.Slice.fsti",
    "content": "module Rust_primitives.Slice\n\nopen FStar.Mul\nopen Rust_primitives.Arrays\nopen Rust_primitives.Integers\n\nlet slice_length (#a: Type) (s: t_Slice a): res: usize {res == sz (Seq.length s)} = sz (Seq.length s)\nlet slice_split_at (#v_T: Type0) (s: t_Slice v_T) (mid: usize {mid <=. length s}): t_Slice v_T & t_Slice v_T = \n  Seq.slice s 0 (v mid), Seq.slice s (v mid) (Seq.length s)\nlet slice_contains (#a: eqtype) (s: t_Slice a) (v: a): bool = Seq.mem v s\nlet slice_index (#t: Type) (s: t_Slice t) (i: usize {i <. length s}): t = Seq.index s (v i)\nlet slice_slice (#v_T: Type0) (s: t_Slice v_T) (start: usize {start <=. length s}) (end_: usize {start <=. end_ /\\ end_ <=. length s}): t_Slice v_T =\n  Seq.slice s (v start) (v end_)\nval array_map (#t: Type) (#u: Type) (l: usize) (#ft: Type)\n  (s: t_Array t l) (f: t -> u): res: t_Array u l {forall i. Seq.index res i == f (Seq.index s i)}\nlet array_as_slice (#t: Type) (l: usize) (s: t_Array t l): t_Slice t =\n  s\nlet array_slice (#t: Type) (l: usize) (s: t_Array t l) = slice_slice s\nval array_from_fn (#t: Type) (len: usize) (#ft: Type) (f: (x: usize {x <. len}) -> t): \n  Pure (t_Array t len) (requires True) (ensures (fun a -> forall i. Seq.index a i == f (sz i)))\nlet array_index (#t: Type) (l: usize) (s: t_Array t l) (i: usize {i <. length s}): t = Seq.index s (v i)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.String.fsti",
    "content": "module Rust_primitives.String\n\nopen Rust_primitives.Integers\n\nval str_concat: string -> string -> string \n\nval str_of_char: FStar.Char.char -> string\n\nval str_sub: string -> usize -> usize -> string\n\nval str_index: string -> usize -> FStar.Char.char\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar/rust_primitives/Rust_primitives.fst",
    "content": "module Rust_primitives\n\ninclude Rust_primitives.Integers\ninclude Rust_primitives.Arrays\ninclude Rust_primitives.BitVectors\ninclude Rust_primitives.Float\ninclude Rust_primitives.Char\n\nclass cast_tc a b = {\n  cast: a -> b; \n}\n\n/// Rust's casts operations on integers are non-panicking\ninstance cast_tc_integers (t:inttype) (t':inttype)\n  : cast_tc (int_t t) (int_t t')\n  = { cast = (fun x -> Rust_primitives.Integers.cast_mod #t #t' x) }\n\ninstance cast_tc_bool_integer (t:inttype)\n  : cast_tc bool (int_t t)\n  = { cast = (fun x -> if x then Rust_primitives.Integers.mk_int 1 else Rust_primitives.Integers.mk_int 0) }\n\nclass unsize_tc source = {\n  output: Type;\n  unsize: source -> output;\n}\n\ninstance array_to_slice_unsize t n: unsize_tc (t_Array t n) = {\n  output = (x:t_Slice t{Seq.length x == v n});\n  unsize = (fun (arr: t_Array t n) -> \n            arr <: t_Slice t);\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/.envrc",
    "content": "use flake .#examples\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/Makefile.copy",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect FSTAR_HOME to be set to your FSTAR repo/install directory\n# We expect HACL_HOME to be set to your HACL* repo location\n# We expect HAX_LIBS_HOME to be set to the folder containing core, rust_primitives etc.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHAX_LIBS_HOME ?= $(shell git rev-parse --show-toplevel)/proof-libs/fstar\nFSTAR_HOME    ?= $(HAX_LIBS_HOME)/../../../FStar\nHACL_HOME     ?= $(HAX_LIBS_HOME)/../../../hacl-star\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= $(HAX_LIBS_HOME)/.cache\nHINT_DIR      ?= $(HAX_LIBS_HOME)/.hints\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\n# By default, we process all the files in the current directory. 
Here, we\n# *extend* the set of relevant files with the tests.\nROOTS = $(wildcard *.fst)\n\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(HAX_LIBS_HOME)/rust_primitives $(HAX_LIBS_HOME)/core $(HAX_LIBS_HOME)/hax_lib\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS)\n\n\n.depend: $(HINT_DIR) $(CACHE_DIR)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nSHELL=/usr/bin/env bash\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/README.md",
    "content": "## Libraries for Hax\n\nThe goal of this directory is to serve as a snapshot of the current F*\nsupporting libraries for Hax.\n\nThe dependency chain is:\n\n`rust_primitives` <- `core` <- `hax_lib`\n\n# Rust Primitives\n\nThe `/rust_primitives` directory contains hand-written models for Rust\nbuilt-in features like machine integers and arrays. In particular, the\ncode in this directory reconciles any type or semantic differences\nbetween Rust and F*. A number of files in this directory use the \n[HACL Library](https://github.com/hacl-star/hacl-star/tree/main/lib).\n\n# Core & Alloc\n\nThe `/core` directory contains hand-written models for some parts of\nthe Core and Alloc libraries of Rust.\n\nAs a first goal, we would like to typecheck the code in this directory\nagainst interfaces generated from Rust Core and Alloc.\n\nAs a second goal, we would like to generate the code in this directory\nfrom an annotated version of Rust Core and Alloc.\n\n# Hax Library\n\nThe `/hax_lib` directory contains hand-written and generated code\nfor the Hax library which adds new features and functionality to Rust\nto help programmers. For example, this library includes bounded indexes\nfor arrays, unbounded integers etc.\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Alloc.Alloc.fst",
    "content": "module Alloc.Alloc\n\nlet t_Global = ()\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Alloc.Collections.Binary_heap.fsti",
    "content": "module Alloc.Collections.Binary_heap\nopen Rust_primitives\n\nval t_BinaryHeap: Type -> eqtype\n\nval impl_9__new: #t:Type -> t_BinaryHeap t\nval impl_9__push: #t:Type -> t_BinaryHeap t -> t -> t_BinaryHeap t\nval impl_10__len: #t:Type -> t_BinaryHeap t -> usize\nval impl_10__iter: #t:Type -> t_BinaryHeap t -> t_Slice t\n\nopen Core.Option\n\nval impl_10__peek: #t:Type -> t_BinaryHeap t -> t_Option t\nval impl_9__pop: #t:Type -> t_BinaryHeap t -> t_BinaryHeap t & t_Option t\n\nunfold\nlet nonempty h = v (impl_10__len h) > 0\n\nval lemma_peek_len: #t:Type -> h: t_BinaryHeap t \n  -> Lemma (Option_Some? (impl_10__peek h) <==> nonempty h)\n  \nval lemma_pop_len: #t:Type -> h: t_BinaryHeap t \n  -> Lemma (Option_Some? (snd (impl_9__pop h)) <==> nonempty h)\n\nval lemma_peek_pop: #t:Type -> h: t_BinaryHeap t \n  -> Lemma (impl_10__peek h == snd (impl_9__pop h))\n          [SMTPat (impl_10__peek h)]\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Alloc.Slice.fst",
    "content": "module Alloc.Slice\nopen Rust_primitives.Arrays\nopen Alloc.Vec\n\nlet impl__to_vec #a (s: t_Slice a): t_Vec a Alloc.Alloc.t_Global = s\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Alloc.Vec.fst",
    "content": "module Alloc.Vec\nopen Rust_primitives\n\nunfold type t_Vec t (_: unit) = s:t_Slice t\n\nlet impl__new #t: t_Vec t () = FStar.Seq.empty\n\nlet impl_2__extend_from_slice #t (self: t_Vec t ()) (other: t_Slice t{Seq.length self + Seq.length other <= max_usize}): t_Vec t ()\n  = FStar.Seq.append self other\n\nlet impl__with_capacity (_capacity: usize) = impl__new\n\n// TODO: missing precondition For now, `impl_1__push` has a wrong\n// semantics: pushing on a \"full\" vector does nothing. It should panic\n// instead.\nlet impl_1__push #t\n  (v: t_Vec t ())// Removed: {Seq.length v + 1 <= max_usize})\n  (x: t)\n   : t_Vec t () = \n     if Seq.length v <= max_usize then v else\n     FStar.Seq.append v (FStar.Seq.create 1 x)\n\nlet impl_1__len #t (v: t_Vec t ()) =\n  let n = Seq.length v in\n  assert (n <= maxint usize_inttype);\n  mk_int #usize_inttype (Seq.length v)\n\nlet from_elem #a (item: a) (len: usize) = Seq.create (v len) item\n\nopen Rust_primitives.Hax\nopen Core.Ops.Index\ninstance update_at_tc_array t n: update_at_tc (t_Vec t ()) (int_t n) = {\n  super_index = FStar.Tactics.Typeclasses.solve <: t_Index (t_Vec t ()) (int_t n);\n  update_at = (fun arr i x -> FStar.Seq.upd arr (v i) x);\n}\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Array.Iter.fsti",
    "content": "module Core.Array.Iter\nopen Rust_primitives\n\nlet into_iter = Core.Iter.iterator_array\nlet t_IntoIter t l = t_Array t l\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Array.fst",
    "content": "module Core.Array\nopen Rust_primitives\n\ntype t_TryFromSliceError = | TryFromSliceError\n\nlet impl_23__map #a #b n (arr: t_Array a n) (f: a -> b): t_Array b n \n  = map_array arr f\n\nlet impl_23__as_slice #a len (arr: t_Array a len): t_Slice a = arr\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Clone.fst",
    "content": "module Core.Clone\n\nclass t_Clone self = {\n  f_clone: x:self -> r:self {x == r}\n}\n\n(** Everything is clonable *)\ninstance clone_all (t: Type): t_Clone t = {\n  f_clone = (fun x -> x);\n}\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Cmp.fsti",
    "content": "module Core.Cmp\nopen Rust_primitives\n\nlet min (#t:inttype) (a:int_t t) (b:int_t t) =\n  if a <. b then a else b\n\ntype t_Ordering =\n  | Ordering_Less : t_Ordering\n  | Ordering_Equal : t_Ordering\n  | Ordering_Greater : t_Ordering\n\nclass t_Ord (v_Self: Type) = {\n  f_cmp:v_Self -> v_Self -> t_Ordering;\n  // f_max:v_Self -> v_Self -> v_Self;\n  // f_min:v_Self -> v_Self -> v_Self;\n  // f_clamp:v_Self -> v_Self -> v_Self -> v_Self\n}\n\nclass t_PartialEq (v_Self: Type) (v_Rhs: Type) = {\n  // __constraint_1069563329_t_PartialEq:t_PartialEq v_Self v_Rhs;\n  f_eq:v_Self -> v_Rhs -> bool;\n  f_ne:v_Self -> v_Rhs -> bool\n}\n\ninstance all_eq (a: eqtype): t_PartialEq a a = {\n  f_eq = (fun x y -> x = y);\n  f_ne = (fun x y -> x <> y);\n}\n\nclass t_PartialOrd (v_Self: Type) (v_Rhs: Type) = {\n  __constraint_Rhs_t_PartialEq:t_PartialEq v_Self v_Rhs;\n  // __constraint_Rhs_t_PartialOrd:t_PartialOrd v_Self v_Rhs;\n  f_partial_cmp:v_Self -> v_Rhs -> Core.Option.t_Option t_Ordering;\n  // f_lt:v_Self -> v_Rhs -> bool;\n  // f_le:v_Self -> v_Rhs -> bool;\n  // f_gt:v_Self -> v_Rhs -> bool;\n  // f_ge:v_Self -> v_Rhs -> bool\n}\n\ntype t_Reverse t = | Reverse of t\n\nlet impl__then x y = x\n\n[@FStar.Tactics.Typeclasses.tcinstance]\nval ord_u64: t_Ord u64\n\n[@FStar.Tactics.Typeclasses.tcinstance]\nval ord_reverse t {| t_Ord t |}: t_Ord (t_Reverse t)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Convert.fst",
    "content": "\nmodule Core.Convert\nopen Rust_primitives\n\nclass try_into_tc self t = {\n  [@@@FStar.Tactics.Typeclasses.no_method]\n  f_Error: Type;\n  f_try_into: self -> Core.Result.t_Result t f_Error\n}\n\ninstance impl_6 (t: Type0) (len: usize): try_into_tc (t_Slice t) (t_Array t len) = {\n  f_Error = Core.Array.t_TryFromSliceError;\n  f_try_into = (fun (s: t_Slice t) -> \n    if Core.Slice.impl__len s = len\n    then Core.Result.Result_Ok (s <: t_Array t len)\n    else Core.Result.Result_Err Core.Array.TryFromSliceError\n  )\n}\n\n\ninstance impl_6_refined (t: Type0) (len: usize): try_into_tc (s: t_Slice t {Core.Slice.impl__len s == len}) (t_Array t len) = {\n  f_Error = Core.Array.t_TryFromSliceError;\n  f_try_into = (fun (s: t_Slice t {Core.Slice.impl__len s == len}) -> \n    Core.Result.Result_Ok (s <: t_Array t len)\n  )\n}\n\nclass t_Into self t = {\n  f_into: self -> t;\n}\n\nclass t_From self t = {\n  f_from: t -> self;\n}\n\nclass t_TryFrom self t = {\n  [@@@FStar.Tactics.Typeclasses.no_method]\n  f_Error: Type;\n  f_try_from: t -> Core.Result.t_Result self f_Error;\n}\n\ninstance integer_into\n  (t:inttype) (t':inttype { minint t >= minint t' /\\ maxint t <= maxint t' })\n  : t_From (int_t t') (int_t t)\n  = { f_from = (fun (x: int_t t) -> Rust_primitives.Integers.cast #t #t' x) }\n\ninstance into_from_from a b {| t_From a b |}: t_Into b a = {\n  f_into = (fun x -> f_from x)\n}\n\ninstance from_id a: t_From a a = {\n  f_from = (fun x -> x)\n}\n\nclass t_AsRef self t = {\n  f_as_ref:  self -> t;\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Iter.Adapters.Enumerate.fst",
    "content": "module Core.Iter.Adapters.Enumerate\nopen Rust_primitives\n\ntype t_Enumerate t = { iter: t; count: usize }\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Iter.Adapters.Step_by.fst",
    "content": "module Core.Iter.Adapters.Step_by\nopen Rust_primitives\n\ntype t_StepBy t = { \n  f_iter: t;\n  f_step: n: usize {v n > 0};\n  f_first_take: bool;\n}\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Iter.Traits.Collect.fst",
    "content": "module Core.Iter.Traits.Collect\n\nclass into_iterator self = {\n  f_IntoIter: Type;\n  // f_Item: Type;\n  f_into_iter: self -> f_IntoIter;\n}\n\nlet t_IntoIterator = into_iterator\n\nunfold instance impl t {| Core.Iter.Traits.Iterator.iterator t |}: into_iterator t = {\n  f_IntoIter = t;\n  f_into_iter = id;\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Iter.Traits.Iterator.fst",
    "content": "module Core.Iter.Traits.Iterator\nopen Rust_primitives\n\n(*** Definition of the `iterator` trait *)\n(** We define the types of the different methods of the iterator trait\non their own. This is handy for revealing only certain fields of the\ninstances of the `iterator` trait. *)\n\nunfold type t_next self item\n  = self -> self * option item\nunfold type t_contains self item\n  = self -> item -> Type0\nunfold type t_fold self (item: Type0) (contains: t_contains self item)\n  = #b:Type -> s:self -> b -> (b -> i:item{contains s i} -> b) -> b\nunfold type t_enumerate self\n  = self -> Core.Iter.Adapters.Enumerate.t_Enumerate self\nunfold type t_step_by self\n  = self -> usize -> Core.Iter.Adapters.Step_by.t_StepBy self\nunfold type t_all self item\n  = self -> (item -> bool) -> self * bool\n\n(* Inference behaves strangely with type synonyms... :( *)\n// class iterator (self: Type) = {\n//   f_Item: Type;\n//   f_next:      t_next      self f_Item;\n//   f_contains:  t_contains  self f_Item; (* hax-specific method *)\n//   f_fold:      t_fold      self f_Item f_contains;\n//   f_enumerate: t_enumerate self;\n// }\n\nclass iterator (self: Type u#0): Type u#1 = {\n  [@@@FStar.Tactics.Typeclasses.no_method]\n  f_Item: Type0;\n  f_next:      self -> self * option f_Item;\n  f_contains:  self -> f_Item -> Type0;\n  f_fold:      #b:Type0 -> s:self -> b -> (b -> i:f_Item{f_contains s i} -> b) -> b;\n  f_enumerate: self -> Core.Iter.Adapters.Enumerate.t_Enumerate self;\n  f_step_by:   self -> usize -> Core.Iter.Adapters.Step_by.t_StepBy self;\n  f_all:       self -> (f_Item -> bool) -> self * bool;\n}\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Iter.fsti",
    "content": "module Core.Iter\n\nopen Rust_primitives\n\nopen Core.Iter.Traits.Iterator\n\n(*** Instances for the `iterator` trait *)\n\n(**** Enumerate *)\n(** This lives in this file for cyclic dependencies reasons. *)\n\nval iterator_enumerate_contains it (i: iterator it)\n  : t_contains (Core.Iter.Adapters.Enumerate.t_Enumerate it) (usize * i.f_Item)\nval iterator_enumerate_fold it (i: iterator it)\n  : t_fold (Core.Iter.Adapters.Enumerate.t_Enumerate it) (usize * i.f_Item) (iterator_enumerate_contains it i)\nval iterator_enumerate_enumerate it\n  : t_enumerate (Core.Iter.Adapters.Enumerate.t_Enumerate it)\nval iterator_enumerate_all it (i: iterator it)\n  : t_all (Core.Iter.Adapters.Enumerate.t_Enumerate it) (usize * i.f_Item)\nval iterator_enumerate_step_by it\n  : t_step_by (Core.Iter.Adapters.Enumerate.t_Enumerate it)\n\ninstance iterator_enumerate it {| i: iterator it |}: iterator (Core.Iter.Adapters.Enumerate.t_Enumerate it) = \n  let open Core.Iter.Adapters.Enumerate in\n  {\n    f_Item = (usize * i.f_Item);\n    f_next = (fun {iter; count} -> \n      let open Core.Ops in\n      let iter, opt = f_next iter in\n      match opt with\n      | Some value -> if v count = max_usize\n                     then {iter; count                }, None\n                     else {iter; count = count +. sz 1}, Some (count, value)\n      | None -> {iter; count}, None\n    );\n    f_contains  = iterator_enumerate_contains  it i;\n    f_fold      = iterator_enumerate_fold      it i;\n    f_enumerate = iterator_enumerate_enumerate it;\n    f_step_by   = iterator_enumerate_step_by it;\n    f_all       = iterator_enumerate_all it i;\n  }\n\n(**** Step_by *)\n(** This lives in this file for cyclic dependencies reasons. 
*)\n\nval iterator_step_by_contains it (i: iterator it)\n  : t_contains (Core.Iter.Adapters.Step_by.t_StepBy it) i.f_Item\nval iterator_step_by_fold it (i: iterator it)\n  : t_fold (Core.Iter.Adapters.Step_by.t_StepBy it) i.f_Item (iterator_step_by_contains it i)\nval iterator_step_by_next it (i: iterator it)\n  : t_next (Core.Iter.Adapters.Step_by.t_StepBy it) i.f_Item\nval iterator_step_by_enumerate it\n  : t_enumerate (Core.Iter.Adapters.Step_by.t_StepBy it)\nval iterator_step_by_all it (i: iterator it)\n  : t_all (Core.Iter.Adapters.Step_by.t_StepBy it) i.f_Item\nval iterator_step_by_step_by it\n  : t_step_by (Core.Iter.Adapters.Step_by.t_StepBy it)\n\nunfold instance iterator_step_by it {| i: iterator it |}: iterator (Core.Iter.Adapters.Step_by.t_StepBy it) = \n  let open Core.Iter.Adapters.Enumerate in\n  {\n    f_Item = i.f_Item;\n    f_next      = iterator_step_by_next      it i;\n    f_contains  = iterator_step_by_contains  it i;\n    f_fold      = iterator_step_by_fold      it i;\n    f_enumerate = iterator_step_by_enumerate it  ;\n    f_step_by   = iterator_step_by_step_by   it  ;\n    f_all       = iterator_step_by_all       it i;\n  }\n\n(**** Slice *)\n(** Slices are not iterable as such in Rust. We ignore this indirection here. 
*)\nopen Core.Ops.Range\n\nval iterator_slice_next t: t_next (t_Slice t) t\nunfold\nlet iterator_slice_contains (t: eqtype): t_contains (t_Slice t) t\n  = fun s x -> Rust_primitives.Arrays.contains s x\nval iterator_slice_fold (t: eqtype): t_fold (t_Slice t) t (iterator_slice_contains t)\nval iterator_slice_enumerate (t: eqtype): t_enumerate (t_Slice t)\nval iterator_slice_step_by (t: eqtype): t_step_by (t_Slice t)\nval iterator_slice_all (t: eqtype): t_all (t_Slice t) t\n\ninstance iterator_slice (t: eqtype): iterator (t_Slice t) = {\n  f_Item = t;\n  f_next = iterator_slice_next t;\n  // size_hint = (fun s -> Some (Rust_primitives.Arrays.length s));\n  f_contains  = iterator_slice_contains  t;\n  f_fold      = iterator_slice_fold      t;\n  f_enumerate = iterator_slice_enumerate t;\n  f_step_by   = iterator_slice_step_by   t;\n  f_all       = iterator_slice_all       t;\n}\n\n(**** Array *)\n(** Arrays are not iterable as such in Rust. We ignore this indirection here. *)\nval iterator_array_next t len: t_next (t_Array t len) t\nunfold\nlet iterator_array_contains (t: eqtype) len: t_contains (t_Array t len) t\n  = fun s x -> Rust_primitives.Arrays.contains s x\nval iterator_array_fold (t: eqtype) len: t_fold (t_Array t len) t (iterator_array_contains t len)\nval iterator_array_enumerate (t: eqtype) len: t_enumerate (t_Array t len)\nval iterator_array_step_by (t: eqtype) len: t_step_by (t_Array t len)\nval iterator_array_all (t: eqtype) len: t_all (t_Array t len) t\n\ninstance iterator_array (t: eqtype) len: iterator (t_Array t len) = {\n  f_Item = t;\n  f_next = iterator_array_next t len;\n  // size_hint = (fun (_s: t_Array t len) -> Some len);\n  f_contains  = iterator_array_contains  t len;\n  f_fold      = iterator_array_fold      t len;\n  f_enumerate = iterator_array_enumerate t len;\n  f_step_by   = iterator_array_step_by t len;\n  f_all       = iterator_array_all t len;\n}\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Marker.fst",
    "content": "module Core.Marker\n\nclass t_Sized (h: Type) = {\n  dummy_field: unit\n}\n\n(** we consider everything to be sized *)\ninstance t_Sized_all t: t_Sized t = {\n  dummy_field = ()\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Num.Error.fsti",
    "content": "module Core.Num.Error\nopen Rust_primitives\n\ntype t_ParseIntError = unit\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Num.fsti",
    "content": "module Core.Num\nopen Rust_primitives\n\nlet impl__u8__wrapping_sub: u8 -> u8 -> u8 = sub_mod\nlet impl__u16__wrapping_add: u16 -> u16 ->  u16 = add_mod\nlet impl__i32__wrapping_add: i32 -> i32 -> i32 = add_mod\nlet impl__i32__abs (a:i32{minint i32_inttype < v a}) : i32 = abs_int a\n\nlet impl__u32__wrapping_add: u32 -> u32 -> u32 = add_mod\nval impl__u32__rotate_left: u32 -> u32 -> u32\nval impl__u32__from_le_bytes: t_Array u8 (sz 4) -> u32\nval impl__u32__from_be_bytes: t_Array u8 (sz 4) -> u32\nval impl__u32__to_le_bytes: u32 -> t_Array u8 (sz 4)\nval impl__u32__to_be_bytes: u32 -> t_Array u8 (sz 4)\nval impl__u32__rotate_right: u32 -> u32 -> u32\nlet impl__u32__BITS: u32 = classify (32ul <: pub_int_t u32_inttype)\n\nlet impl__u64__wrapping_add: u64 -> u64 -> u64 = add_mod\nval impl__u64__rotate_left: u32 -> u32 -> u32\nval impl__u64__from_le_bytes: t_Array u8 (sz 8) -> u64\nval impl__u64__from_be_bytes: t_Array u8 (sz 8) -> u64\nval impl__u64__to_le_bytes: u64 -> t_Array u8 (sz 8)\nval impl__u64__to_be_bytes: u64 -> t_Array u8 (sz 8)\nval impl__u64__rotate_right: u64 -> u64 -> u64\n\nlet impl__u128__wrapping_add (x: u128) (y: u128): u128 = add_mod x y //FStar.UInt128.add_underspec x y\nval impl__u128__rotate_left: u128 -> u128 -> u128\nval impl__u128__from_le_bytes: t_Array u8 (sz 16) -> u128\nval impl__u128__from_be_bytes: t_Array u8 (sz 16) -> u128\nval impl__u128__to_le_bytes: u128 -> t_Array u8 (sz 16)\nval impl__u128__to_be_bytes: u128 -> t_Array u8 (sz 16)\nval impl__u128__rotate_right: u128 -> u128 -> u128\n\nval impl__u8__pow: u8 -> u32 -> u8\nval impl__u16__pow (base: u16) (exponent: u32): result : u16 {v base == 2 /\\ v exponent < 16 ==> result == mk_int #Lib.IntTypes.U16 (pow2 (v exponent))}\nval impl__u32__pow (base: u32) (exponent: u32): result : u32 {v base == 2 /\\ v exponent <= 16 ==> result == mk_int #Lib.IntTypes.U32 (pow2 (v exponent))}\nval impl__u64__pow: u64 -> u32 -> u64\nval impl__u128__pow: u128 -> u32 -> u128\nval 
impl__i32__pow (base: i32) (exponent: u32): result: i32 {v base == 2 /\\ v exponent <= 16 ==> result == mk_int #Lib.IntTypes.S32 (pow2 (v exponent))}\n\nval impl__u8__from_str_radix: string -> u32 -> Core.Result.t_Result u8 Core.Num.Error.t_ParseIntError\n\nval impl__usize__ilog2: i32 -> u32 \n\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Arith.Neg.fsti",
    "content": "module Core.Ops.Arith.Neg\nopen Rust_primitives\n\nlet neg #t #l (x:int_t_l t l) = zero #t #l -! x\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Arith.fsti",
    "content": "module Core.Ops.Arith\nopen Rust_primitives\n\n\nclass t_Add self rhs = {\n   add_output: Type;\n   add_in_bounds: self -> rhs -> Type0;\n   ( +! ): x:self -> y:rhs {add_in_bounds x y} -> add_output;\n}\n\nclass t_Sub self rhs = {\n   sub_output: Type;\n   sub_in_bounds: self -> rhs -> Type0;\n   ( -! ): x:self -> y:rhs {sub_in_bounds x y} -> sub_output;\n}\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Control_flow.fst",
    "content": "module Core.Ops.Control_flow\n\ntype t_ControlFlow (b c: Type) = \n  | ControlFlow_Continue of c\n  | ControlFlow_Break of b\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Deref.fst",
    "content": "module Core.Ops.Deref\n\nlet f_deref = id\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Index.IndexMut.fst",
    "content": "module Core.Ops.Index.IndexMut\n\nclass t_IndexMut t_Self t_Idx = {\n  f_Input: Type;\n  in_range: t_Self -> t_Idx -> Type0;\n  f_index_mut: s:t_Self -> i:t_Idx{in_range s i} -> v:f_Input -> t_Self;\n}\n\nopen Rust_primitives\ninstance impl__index_mut t l n: t_IndexMut (t_Array t l) (int_t n)\n  = { f_Input = t;\n      in_range = (fun (s: t_Array t l) (i: int_t n) -> v i >= 0 && v i < v l);\n      f_index_mut = (fun s i x -> Seq.upd s (v i) x);\n    }\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Index.fst",
    "content": "module Core.Ops.Index\n\nclass t_Index (t_Self:Type0) (t_Idx:Type0) = {\n  f_Output: Type0;\n  in_range: t_Self -> t_Idx -> Type0;\n  f_index: s:t_Self -> i:t_Idx{in_range s i} -> f_Output;\n}\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Range.fsti",
    "content": "module Core.Ops.Range\nopen Rust_primitives\n\ntype t_RangeTo   (t: Type) = {f_end: t}\ntype t_RangeFrom (t: Type) = {f_start: t}\ntype t_Range     (t: Type) = {f_start: t; f_end: t}\ntype t_RangeFull           = | RangeFull\n\nopen Core.Iter.Traits.Iterator\nmodule LI = Lib.IntTypes\n\nlet rec fold_range' #t \n  (min: Rust_primitives.pub_int_t t) (max: Rust_primitives.pub_int_t t{v min <= v max})\n  (init: 'a) (f: ('a -> i:Rust_primitives.pub_int_t t{v i < v max /\\ v i >= v min} -> 'a))\n  : Tot 'a (decreases (v max - v min))\n  = if min =. max\n    then init\n    else fold_range' (add min (Rust_primitives.mk_int_l #_ #LI.PUB 1)) max (f init min) f\n\nval iterator_range_enumerate t: t_enumerate (t_Range (Rust_primitives.pub_int_t t))\nval iterator_range_step_by t: t_step_by (t_Range (Rust_primitives.pub_int_t t))\nval iterator_range_all t: t_all (t_Range (Rust_primitives.pub_int_t t)) (Rust_primitives.pub_int_t t)\n\ninstance iterator_range t: iterator (t_Range (Rust_primitives.pub_int_t t)) = \n  { f_Item = Rust_primitives.pub_int_t t;\n    f_next = (fun {f_start; f_end} -> \n       if f_start >=. f_end then ({f_start; f_end}, None)\n       else ({f_start = f_start +. Rust_primitives.mk_pub_int 0; f_end}, Some f_start)\n    );\n    f_contains = (fun x i -> v i < v x.f_end /\\ v i >= v x.f_start);\n    f_fold = (fun #b r init f ->  if r.f_start >=. 
r.f_end then init\n                               else fold_range' r.f_start r.f_end init (fun x i -> f x i));\n    f_enumerate = iterator_range_enumerate t;\n    f_step_by = iterator_range_step_by t;\n    f_all = iterator_range_all t;\n  }\n\nopen Core.Ops.Index\n\ninstance impl_index_range_slice t n : t_Index (t_Slice t) (t_Range (pub_int_t n)) \n  = { f_Output = t_Slice t\n    ; in_range = (fun (s: t_Slice t) {f_start; f_end} -> \n         let len = Rust_primitives.length s in\n         v f_start >= 0 /\\ v f_start <= v len /\\ v f_end <= v len)\n    ; f_index = (fun s {f_start; f_end} -> \n          if f_start <. f_end then Seq.slice s (v f_start) (v f_end)\n                              else Seq.empty)}\n\ninstance impl_index_range_to_slice t n : t_Index (t_Slice t) (t_RangeTo (pub_int_t n)) \n  = { f_Output = t_Slice t\n    ; in_range = (fun (s: t_Slice t) ({f_end}: t_RangeTo (pub_int_t n)) -> \n         let len = Rust_primitives.length s in v f_end <= v len)\n    ; f_index = (fun s {f_end} -> if 0 < v f_end then Seq.slice s 0 (v f_end) else Seq.empty)}\n\ninstance impl_index_range_from_slice t n : t_Index (t_Slice t) (t_RangeFrom (pub_int_t n)) \n  = { f_Output = t_Slice t\n    ; in_range = (fun (s: t_Slice t) ({f_start}: t_RangeFrom (pub_int_t n)) -> \n         let len = Rust_primitives.length s in v f_start >= 0 /\\ v f_start <= v len)\n    ; f_index = (fun s {f_start} -> \n         let len = Rust_primitives.length s in\n         if v f_start = v len then Seq.empty else Seq.slice s (v f_start) (v len))}\n         \ninstance impl_index_range_full_slice t : t_Index (t_Slice t) t_RangeFull\n  = { f_Output = t_Slice t\n    ; in_range = (fun (s: t_Slice t) _ -> True)\n    ; f_index = (fun s _ -> s)}\n\ninstance impl_range_index_array t len n : t_Index (t_Array t len) (t_Range (pub_int_t n)) = \n  let i = impl_index_range_slice t n in\n  { f_Output = i.f_Output\n  ; in_range = (fun (s: t_Array t len) r -> i.in_range s r)\n  ; f_index = (fun s r -> i.f_index s r) 
}\n  \ninstance impl_range_to_index_array t len n : t_Index (t_Array t len) (t_RangeTo (pub_int_t n)) = \n  let i = impl_index_range_to_slice t n in\n  { f_Output = i.f_Output\n  ; in_range = (fun (s: t_Array t len) r -> i.in_range s r)\n  ; f_index = (fun s r -> i.f_index s r) }\n\ninstance impl_range_from_index_array t len n : t_Index (t_Array t len) (t_RangeFrom (pub_int_t n)) = \n  let i = impl_index_range_from_slice t n in\n  { f_Output = i.f_Output\n  ; in_range = (fun (s: t_Array t len) r -> i.in_range s r)\n  ; f_index = (fun s r -> i.f_index s r) }\n\ninstance impl_range_full_index_array t len : t_Index (t_Array t len) t_RangeFull = \n  let i = impl_index_range_full_slice t in\n  { f_Output = i.f_Output\n  ; in_range = (fun (s: t_Array t len) r -> i.in_range s r)\n  ; f_index = (fun s r -> i.f_index s r) }\n\nopen Rust_primitives.Hax\n\nlet update_at_tc_array_range_super t l n: t_Index (t_Array t l) (t_Range (pub_int_t n))\n  = FStar.Tactics.Typeclasses.solve\nlet update_at_tc_array_range_to_super t l n: t_Index (t_Array t l) (t_RangeTo (pub_int_t n))\n  = FStar.Tactics.Typeclasses.solve\nlet update_at_tc_array_range_from_super t l n: t_Index (t_Array t l) (t_RangeFrom (pub_int_t n))\n  = FStar.Tactics.Typeclasses.solve\nlet update_at_tc_array_range_full_super t l: t_Index (t_Array t l) t_RangeFull\n  = FStar.Tactics.Typeclasses.solve\n\nval update_at_array_range t l n\n  (s: t_Array t l) (i: t_Range (pub_int_t n) {(update_at_tc_array_range_super t l n).in_range s i})\n  : (update_at_tc_array_range_super t l n).f_Output -> t_Array t l\nval update_at_array_range_to t l n\n  (s: t_Array t l) (i: t_RangeTo (pub_int_t n) {(update_at_tc_array_range_to_super t l n).in_range s i})\n  : (update_at_tc_array_range_to_super t l n).f_Output -> t_Array t l\nval update_at_array_range_from t l n\n  (s: t_Array t l) (i: t_RangeFrom (pub_int_t n) {(update_at_tc_array_range_from_super t l n).in_range s i})\n  : (update_at_tc_array_range_from_super t l n).f_Output -> t_Array 
t l\nval update_at_array_range_full t l\n  (s: t_Array t l) (i: t_RangeFull)\n  : (update_at_tc_array_range_full_super t l).f_Output -> t_Array t l\n\ninstance update_at_tc_array_range t l n: update_at_tc (t_Array t l) (t_Range (pub_int_t n)) = {\n  super_index = update_at_tc_array_range_super t l n;\n  update_at = update_at_array_range t l n\n}\ninstance update_at_tc_array_range_to t l n: update_at_tc (t_Array t l) (t_RangeTo (pub_int_t n)) = {\n  super_index = update_at_tc_array_range_to_super t l n;\n  update_at = update_at_array_range_to t l n\n}\ninstance update_at_tc_array_range_from t l n: update_at_tc (t_Array t l) (t_RangeFrom (pub_int_t n)) = {\n  super_index = update_at_tc_array_range_from_super t l n;\n  update_at = update_at_array_range_from t l n\n}\ninstance update_at_tc_array_range_full t l: update_at_tc (t_Array t l) t_RangeFull = {\n  super_index = update_at_tc_array_range_full_super t l;\n  update_at = update_at_array_range_full t l\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.Try_trait.fst",
    "content": "module Core.Ops.Try_trait\n\nclass t_FromResidual self r = {\n   f_from_residual: r -> self;\n}\n\nclass t_Try self = {\n   f_Output: Type;\n   f_Residual: Type;\n   [@@@FStar.Tactics.Typeclasses.tcresolve]\n   parent_FromResidual: t_FromResidual f_Residual f_Residual;\n\n   f_from_output: f_Output -> self;\n   f_branch: self -> Core.Ops.Control_flow.t_ControlFlow f_Residual f_Output;\n}\n\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Ops.fst",
    "content": "module Core.Ops\nopen Rust_primitives\n\n// class add_tc self rhs = {\n//   output: Type;\n//   in_bounds: self -> rhs -> Type0;\n//   ( +! ): x:self -> y:rhs {in_bounds x y} -> output;\n// }\n\nclass negation_tc self = {\n  ( ~. ): self -> self;\n}\n\ninstance negation_for_integers #t: negation_tc (int_t t) = {\n  ( ~. ) = fun x -> lognot x\n}\n\ninstance negation_for_bool: negation_tc bool = {\n  ( ~. ) = not\n}\n\nopen Core.Ops.Index\n\nlet ( .[] ) (#self:Type0) (#idx:Type0) {| inst: t_Index self idx |}\n  : s:self -> i:idx{in_range s i} -> inst.f_Output\n  = f_index\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Option.fst",
    "content": "module Core.Option\n\ntype t_Option t = | Option_Some of t | Option_None\n\nlet impl__and_then #t #t_Self (self: t_Option t_Self) (f: t_Self -> t_Option t): t_Option t = \n  match self with\n  | Option_Some x -> f x\n  | Option_None -> Option_None\n\nlet impl__unwrap #t (x: t_Option t {Option_Some? x}): t = Option_Some?._0 x\n\nlet impl__is_some #t_Self (self: t_Option t_Self): bool =  Option_Some? self\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Panicking.fst",
    "content": "module Core.Panicking\n\nopen Rust_primitives\nopen Rust_primitives.Hax\n\ntype t_AssertKind = | AssertKind_Eq\n\nlet panic (message: string {False}): t_Never\n  = match () with\n  \nlet assert_failed (k: t_AssertKind) x y (z: Core.Option.t_Option unit {False}): t_Never\n  = match () with\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Result.fst",
    "content": "module Core.Result\n\ntype t_Result t e = | Result_Ok: v:t -> t_Result t e\n                    | Result_Err of e\n\nlet impl__unwrap (x: t_Result 't 'e {Result_Ok? x}): 't = Result_Ok?.v x\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Slice.Iter.fst",
    "content": "module Core.Slice.Iter\n\nopen Rust_primitives\n\nunfold let t_Chunks a = t_Slice (t_Slice a)\nunfold let t_ChunksExact a = t_Slice (t_Slice a)\nunfold let t_Iter a = t_Slice a\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Slice.fsti",
    "content": "module Core.Slice\nopen Rust_primitives.Arrays\nopen Rust_primitives.Integers\n\nlet impl__len (#t: Type) (s: t_Slice t)\n  : len: usize {len == sz (Seq.length s)} = \n  sz (Seq.length s)\n\nopen Core.Slice.Iter\n\nval impl__chunks #a (x: t_Slice a) (cs: usize): t_Chunks a\n\nlet impl__iter #t (s: t_Slice t): t_Slice t = s\n\nval impl__chunks_exact #a (x: t_Slice a) (cs: usize):\n    Pure (t_Slice (t_Slice a))\n    (requires True)\n    (ensures (fun r -> forall i. i < v (length x) ==> length x ==  cs))\n\nopen Core.Ops.Index\n\ninstance impl__index t n: t_Index (t_Slice t) (int_t n)\n  = { f_Output = t;\n      in_range = (fun (s: t_Slice t) (i: int_t n) -> v i >= 0 && v i < v (length s));\n      f_index = (fun s i -> Seq.index s (v i));\n    }\n\nlet impl__copy_from_slice #t (x: t_Slice t) (y:t_Slice t) : t_Slice t = y\n\nval impl__split_at #t (s: t_Slice t) (mid: usize): Pure (t_Slice t * t_Slice t)\n    (requires (v mid <= Seq.length s))\n    (ensures (fun (x,y) -> Seq.length x == v mid /\\ Seq.length y == Seq.length s - v mid /\\\n                        x == Seq.slice s 0 (v mid) /\\ y == Seq.slice s (v mid) (Seq.length s) /\\\n                        s == Seq.append x y))\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Str.Converts.fsti",
    "content": "module Core.Str.Converts\nopen Rust_primitives\n\nval from_utf8 (s: t_Slice u8): Core.Result.t_Result string Core.Str.Error.t_Utf8Error\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Str.Error.fsti",
    "content": "module Core.Str.Error\n\ntype t_Utf8Error = unit\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.Str.fsti",
    "content": "module Core.Str\nopen Rust_primitives\n\nval impl__str__len: string -> usize\nval impl__str__as_bytes: string -> t_Slice u8\n\n/// Parses this string slice into another type\nval impl_str__parse (#t: Type0) (#err: Type0) (s:string) :\n  (Core.Result.t_Result t err)\n\n/// Trims trailing whitespace\nval impl_str__trim : string -> string\n\n/// Split strings on patterns\nval impl_str__split : (#pattern: Type) -> string -> pattern ->\n(Core.Str.Iter.t_Split pattern)\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Core.fst",
    "content": "module Core\n\ninclude Rust_primitives\ninclude Core.Num\ninclude Core.Iter\ninclude Core.Ops\n\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect FSTAR_HOME to be set to your FSTAR repo/install directory\n# We expect HACL_HOME to be set to your HACL* repo location\n# We expect HAX_LIBS_HOME to be set to the folder containing core, rust_primitives etc.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHAX_LIBS_HOME ?= $(shell git rev-parse --show-toplevel)/proof-libs/fstar-secret-integers\nFSTAR_HOME    ?= $(HAX_LIBS_HOME)/../../../FStar\nHACL_HOME     ?= $(HAX_LIBS_HOME)/../../../hacl-star\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= $(HAX_LIBS_HOME)/.cache\nHINT_DIR      ?= $(HAX_LIBS_HOME)/.hints\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\n# By default, we process all the files in the current directory. 
Here, we\n# *extend* the set of relevant files with the tests.\nROOTS = $(wildcard *.fst)\n\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(HAX_LIBS_HOME)/rust_primitives $(HAX_LIBS_HOME)/core $(HAX_LIBS_HOME)/hax_lib\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS)\n\n\n.depend: $(HINT_DIR) $(CACHE_DIR)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nSHELL=/usr/bin/env bash\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/core/README.md",
    "content": "# Core (and alloc) library\n\nThis directory contains a model for the [Core Rust\nlibrary](https://doc.rust-lang.org/core/): the minimal Rust foundation\nbehind the [standard library of\nRust](https://doc.rust-lang.org/std/index.html). This also includes a\nmodel for some part of the [`alloc` Rust\nlibrary](https://doc.rust-lang.org/stable/alloc/).\n\nCore is self-contained, and is dependency-free: it links to no\nupstream or system libraries. Thus, even if it is minimal, it is not\nsmall: it is around **75k LoC**, comments excluded.\n\nIn this directory, you will find the first stage of our approach to\n`core` in F\\*: a hand-written model. Note that this model tries to\nfollow as much as possible the structure and naming found in the Rust\ncore library.\n\nThe second stage of our approach to `core` is automatic generation\nwith specifications and models.\nOur plan is to annotate the Rust `core` library with specifications\nand models written directly as Rust annotations.\nThis will enable automatic generation of `core` models with consistent\nsemantics in all of hax backends (for now F\\* and Coq).\n\nNote that we already started experimenting with this second approach:\nhax is already able to digest and generate signature-only F\\* for\nmore than 80% of core definitions.\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/hax_lib/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect FSTAR_HOME to be set to your FSTAR repo/install directory\n# We expect HACL_HOME to be set to your HACL* repo location\n# We expect HAX_LIBS_HOME to be set to the folder containing core, rust_primitives etc.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHAX_LIBS_HOME ?= $(shell git rev-parse --show-toplevel)/proof-libs/fstar\nFSTAR_HOME    ?= $(HAX_LIBS_HOME)/../../../FStar\nHACL_HOME     ?= $(HAX_LIBS_HOME)/../../../hacl-star\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= $(HAX_LIBS_HOME)/.cache\nHINT_DIR      ?= $(HAX_LIBS_HOME)/.hints\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\n# By default, we process all the files in the current directory. 
Here, we\n# *extend* the set of relevant files with the tests.\nROOTS = $(wildcard *.fst)\n\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(HAX_LIBS_HOME)/rust_primitives $(HAX_LIBS_HOME)/core $(HAX_LIBS_HOME)/hax_lib\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS)\n\n\n.depend: $(HINT_DIR) $(CACHE_DIR)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nSHELL=/usr/bin/env bash\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect FSTAR_HOME to be set to your FSTAR repo/install directory\n# We expect HACL_HOME to be set to your HACL* repo location\n# We expect HAX_LIBS_HOME to be set to the folder containing core, rust_primitives etc.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verifies ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHAX_LIBS_HOME ?= $(shell git rev-parse --show-toplevel)/proof-libs/fstar-secret-integers\nFSTAR_HOME    ?= $(HAX_LIBS_HOME)/../../../FStar\nHACL_HOME     ?= $(HAX_LIBS_HOME)/../../../hacl-star\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= $(HAX_LIBS_HOME)/.cache\nHINT_DIR      ?= $(HAX_LIBS_HOME)/.hints\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\n# By default, we process all the files in the current directory. 
Here, we\n# *extend* the set of relevant files with the tests.\nROOTS = $(wildcard *.fst)\n\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(HAX_LIBS_HOME)/rust_primitives $(HAX_LIBS_HOME)/core $(HAX_LIBS_HOME)/hax_lib\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS) $(OTHERFLAGS)\n\n\n.depend: $(HINT_DIR) $(CACHE_DIR)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nSHELL=/usr/bin/env bash\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Arrays.fst",
    "content": "module Rust_primitives.Arrays\n\nopen Rust_primitives.Integers\n\nlet of_list (#t:Type) (l: list t {FStar.List.Tot.length l < maxint Lib.IntTypes.U16}): t_Slice t = Seq.seq_of_list l\nlet to_list (#t:Type) (s: t_Slice t): list t = Seq.seq_to_list s\n\nlet to_of_list_lemma t l = Seq.lemma_list_seq_bij l\nlet of_to_list_lemma t l = Seq.lemma_seq_list_bij l\n\nlet map_array #a #b #n (arr: t_Array a n) (f: a -> b): t_Array b n \n  = FStar.Seq.map_seq_len f arr;\n    FStar.Seq.map_seq f arr \n\nlet createi #t l f = admit()\n\nlet lemma_index_concat x y i = admit()\n\nlet lemma_index_slice x y i = admit()\n\nlet eq_intro a b = admit()\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Arrays.fsti",
    "content": "module Rust_primitives.Arrays\n\nopen Rust_primitives.Integers\n\ntype t_Slice t = s:Seq.seq t{Seq.length s <= max_usize}\ntype t_Array t (l:usize) = s: Seq.seq t { Seq.length s == v l }\nlet length (s: t_Slice 'a): usize = sz (Seq.length s)\nlet contains (#t: eqtype) (s: t_Slice t) (x: t): bool = Seq.mem x s\n\n\nval of_list (#t:Type) (l: list t {FStar.List.Tot.length l < maxint Lib.IntTypes.U16}):\n    t_Array t (sz (FStar.List.Tot.length l))\nval to_list (#t:Type) (s: t_Slice t): list t\n\nval map_array #n (arr: t_Array 'a n) (f: 'a -> 'b): t_Array 'b n \n\nval createi #t (l:usize) (f:(u:usize{u <. l} -> t))\n    : Pure (t_Array t l)\n      (requires True)\n      (ensures (fun res -> (forall i. Seq.index res (v i) == f i)))\n\nunfold let map #p\n  (f:(x:'a{p x} -> 'b))\n  (s: t_Slice 'a {forall (i:nat). i < Seq.length s ==> p (Seq.index s i)}): t_Slice 'b\n  = createi (length s) (fun i -> f (Seq.index s (v i)))\n\nlet concat #t (x:t_Slice t) (y:t_Slice t{range (v (length x) + v (length y)) usize_inttype}) :\n           r:t_Array t (length x +! length y) = Seq.append x y\n\nval lemma_index_concat #t (x:t_Slice t) (y:t_Slice t{range (v (length x) + v (length y)) usize_inttype}) (i:usize{i <. length x +! length y}):\n           Lemma (if i <. length x then\n                    Seq.index (concat x y) (v i) == Seq.index x (v i)\n                  else \n                    Seq.index (concat x y) (v i) == Seq.index y (v (i -! length x)))\n           [SMTPat (Seq.index (concat #t x y) i)]\n\nlet slice #t (x:t_Slice t) (i:usize{i <=. length x}) (j:usize{i <=. j /\\ j <=. length x}):\n           r:t_Array t (j -! i) = Seq.slice x (v i) (v j)\n\nval lemma_index_slice #t (x:t_Slice t) (i:usize{i <=. length x}) (j:usize{i <=. j /\\ j <=. length x})\n                                (k:usize{k <. j -! i}):\n           Lemma (Seq.index (slice x i j) (v k) == Seq.index x (v (i +! 
k)))\n           [SMTPat (Seq.index (slice x i j) (v k))]\n\nval eq_intro #t (a : Seq.seq t) (b:Seq.seq t{Seq.length a == Seq.length b}):\n       Lemma\n       (requires forall i. {:pattern Seq.index a i; Seq.index b i}\n                      i < Seq.length a ==>\n                      Seq.index a i == Seq.index b i)\n       (ensures Seq.equal a b)\n       [SMTPat (Seq.equal a b)]\n\nlet split #t (a:t_Slice t) (m:usize{m <=. length a}):\n       Pure (t_Array t m & t_Array t (length a -! m))\n       True (ensures (fun (x,y) ->\n         x == slice a (sz 0) m /\\\n         y == slice a m (length a) /\\\n         concat #t x y == a)) = \n         let x = Seq.slice a 0 (v m) in\n         let y = Seq.slice a (v m) (Seq.length a) in\n         assert (Seq.equal a (concat x y));\n         (x,y)\n\nlet lemma_slice_append #t (x:t_Slice t) (y:t_Slice t) (z:t_Slice t):\n  Lemma (requires (range (v (length y) + v (length z)) usize_inttype /\\\n                   length y +! length z == length x /\\\n                   y == slice x (sz 0) (length y) /\\ \n                   z == slice x (length y) (length x)))\n        (ensures (x == concat y z)) = \n        assert (Seq.equal x (concat y z))\n\nlet lemma_slice_append_3 #t (x:t_Slice t) (y:t_Slice t) (z:t_Slice t) (w:t_Slice t):\n  Lemma (requires (range (v (length y) + v (length z) + v (length w)) usize_inttype /\\\n                   length y +! length z +! length w == length x /\\\n                   y == slice x (sz 0) (length y) /\\ \n                   z == slice x (length y) (length y +! length z) /\\\n                   w == slice x (length y +! length z) (length x)))\n        (ensures (x == concat y (concat z w))) =\n         assert (Seq.equal x (Seq.append y (Seq.append z w)))\n\n#push-options \"--z3rlimit 100\"\nlet lemma_slice_append_4 #t (x y z w u:t_Slice t) :\n  Lemma (requires (range (v (length y) + v (length z) + v (length w) + v (length u)) usize_inttype /\\\n                   length y +! length z +! 
length w +! length u == length x /\\\n                   y == slice x (sz 0) (length y) /\\ \n                   z == slice x (length y) (length y +! length z) /\\\n                   w == slice x (length y +! length z) (length y +! length z +! length w) /\\\n                   u == slice x (length y +! length z +! length w) (length x)))\n        (ensures (x == concat y (concat z (concat w u)))) =\n         assert (Seq.equal x (Seq.append y (Seq.append z (Seq.append w u))))\n#pop-options\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.BitVectors.fst",
    "content": "module Rust_primitives.BitVectors\n\nopen FStar.Mul\nopen Rust_primitives.Arrays\nopen Rust_primitives.Integers\n\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 40\"\n\nlet lemma_get_bit_bounded #t x d i = admit()\n\nlet lemma_get_bit_bounded' #t x d = admit()\n\nlet pow2_minus_one_mod_lemma1 (n: nat) (m: nat {m < n})\n   : Lemma (((pow2 n - 1) / pow2 m) % 2 == 1)\n   = let d: pos = n - m in\n     Math.Lemmas.pow2_plus m d;\n     Math.Lemmas.lemma_div_plus (-1) (pow2 d) (pow2 m);\n     if d > 0 then Math.Lemmas.pow2_double_mult (d-1)\n\nlet pow2_minus_one_mod_lemma2 (n: nat) (m: nat {n <= m})\n  : Lemma (((pow2 n - 1) / pow2 m) % 2 == 0)\n  = Math.Lemmas.pow2_le_compat m n;\n    Math.Lemmas.small_div (pow2 n - 1) (pow2 m)\n\nlet get_bit_pow2_minus_one #t #l  n nth\n  = reveal_opaque (`%get_bit) (get_bit (mk_int_l #t #l (pow2 n - 1)) nth);\n    if v nth < n then pow2_minus_one_mod_lemma1 n (v nth)\n                 else pow2_minus_one_mod_lemma2 n (v nth)\n\nlet get_bit_pow2_minus_one_i32 x nth\n  = let n = Some?.v (mask_inv_opt x) in\n    assume (pow2 n - 1 == x);\n    mk_int_equiv_lemma #i32_inttype x;\n    get_bit_pow2_minus_one #i32_inttype #Lib.IntTypes.PUB n nth\n\nlet get_bit_pow2_minus_one_u32 x nth\n  = let n = Some?.v (mask_inv_opt x) in\n    assume (pow2 n - 1 == x);\n    mk_int_equiv_lemma #u32_inttype x;\n    get_bit_pow2_minus_one #u32_inttype #Lib.IntTypes.PUB n nth\n\nlet get_bit_pow2_minus_one_u16 x nth\n  = let n = Some?.v (mask_inv_opt x) in\n    assume (pow2 n - 1 == x);\n    mk_int_equiv_lemma #u16_inttype x;\n    get_bit_pow2_minus_one #u16_inttype #Lib.IntTypes.PUB n nth\n\nlet get_bit_pow2_minus_one_u8 t x nth\n  = let n = Some?.v (mask_inv_opt x) in\n    assume (pow2 n - 1 == x);\n    mk_int_equiv_lemma #u8_inttype x;\n    get_bit_pow2_minus_one #u8_inttype #Lib.IntTypes.PUB n nth\n\nlet get_last_bit_signed_lemma #t x\n  = admit ()\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.BitVectors.fsti",
    "content": "module Rust_primitives.BitVectors\n\nopen FStar.Mul\nopen Rust_primitives.Arrays\nopen Rust_primitives.Integers\n\n/// Number of bits carried by an integer of type `t`\ntype bit_num t = d: nat {d > 0 /\\ d <= bits t /\\ (signed t ==> d <= bits t)}\n\n/// Number of bits carried by an integer of type `t`\ntype bounded #t (x:int_t t) (d:bit_num t) =\n  v x >= 0 /\\ v x < pow2 d\n\n/// Integer of type `t` that carries `d` bits\ntype int_t_d t (d: bit_num t) =\n  n: int_t t {bounded n d}\n\nval lemma_get_bit_bounded #t (x:int_t t) (d:bit_num t) (i:usize):\n  Lemma ((bounded x d /\\ v i >= d /\\ v i < bits t) ==>\n         get_bit x i == 0)\n        [SMTPat (get_bit #t x i); SMTPat (bounded x d)]\n\nval lemma_get_bit_bounded' #t (x:int_t t) (d:bit_num t):\n  Lemma (requires forall i. v i > d ==> get_bit x i == 0)\n        (ensures bounded x d)\n\ntype bit_vec (len: nat) = i:nat {i < len} -> bit\n\n/// Transform an array of integers to a bit vector\n#push-options \"--fuel 0 --ifuel 1 --z3rlimit 50\"\nlet bit_vec_of_int_arr (#n: inttype) (#len: usize) \n                (arr: t_Array (int_t n) len)\n                (d: bit_num n): bit_vec (v len * d)\n  = fun i -> get_bit (Seq.index arr (i / d)) (sz (i % d))\n#pop-options\n\n\n/// Transform an array of `nat`s to a bit vector\n#push-options \"--fuel 0 --ifuel 1 --z3rlimit 50\"\nlet bit_vec_of_nat_arr (#len: usize)\n                       (arr: t_Array nat len)\n                       (d: nat)\n                       : bit_vec (v len * d)\n  = fun i -> get_bit_nat (Seq.index arr (i / d)) (i % d)\n#pop-options\n\n/// Bit-wise semantics of `2^n-1`\nval get_bit_pow2_minus_one #t #l\n  (n: nat {pow2 n - 1 <= maxint t}) \n  (nth: usize {v nth < bits t})\n  : Lemma (  get_bit (mk_int_l #t #l (pow2 n - 1)) nth\n          == (if v nth < n then 1 else 0))\n\n/// Log2 table\nunfold let mask_inv_opt =\n  function | 0   -> Some 0\n           | 1   -> Some 1\n           | 3   -> Some 2\n           | 7   -> Some 3\n         
  | 15  -> Some 4\n           | 31  -> Some 5\n           | 63  -> Some 6\n           | 127 -> Some 7\n           | 255 -> Some 8\n           | 511 -> Some 9\n           | 1023  -> Some  10\n           | 2047  -> Some  11\n           | 4095  -> Some  12\n(*           | 8191  -> Some  13\n           | 16383  -> Some  14\n           | 32767  -> Some  15\n           | 65535  -> Some  16\n           | 131071  -> Some  17\n           | 262143  -> Some  18\n           | 524287  -> Some  19\n           | 1048575  -> Some  20\n           | 2097151  -> Some  21\n           | 4194303  -> Some  22\n           | 8388607  -> Some  23\n           | 16777215  -> Some  24\n           | 33554431  -> Some  25\n           | 67108863  -> Some  26\n           | 134217727  -> Some  27\n           | 268435455  -> Some  28\n           | 536870911  -> Some  29\n           | 1073741823  -> Some  30\n           | 2147483647  -> Some  31\n           | 4294967295  -> Some  32\n*)           | _   -> None\n\n\n/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns\n/// targetting machine integer literals of type `i32`\nval get_bit_pow2_minus_one_i32\n  (x: int {x < pow2 31 /\\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 32})\n  : Lemma ( get_bit (FStar.Int32.int_to_t x) nth \n        == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))\n  [SMTPat (get_bit (FStar.Int32.int_to_t x) nth)]\n\n/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns\n/// targetting machine integer literals of type `u32`\nval get_bit_pow2_minus_one_u32\n  (x: int {x < pow2 32 /\\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 32})\n  : Lemma ( get_bit (FStar.UInt32.uint_to_t x) nth \n        == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))\n  [SMTPat (get_bit (FStar.UInt32.uint_to_t x) nth)]\n\n/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns\n/// targetting machine integer literals of type `u16`\nval get_bit_pow2_minus_one_u16\n  (x: int {x < pow2 16 /\\ Some? 
(mask_inv_opt x)}) (nth: usize {v nth < 16})\n  : Lemma ( get_bit (FStar.UInt16.uint_to_t x) nth \n        == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))\n  [SMTPat (get_bit (FStar.UInt16.uint_to_t x) nth)]\n\n/// Specialized `get_bit_pow2_minus_one` lemmas with SMT patterns\n/// targetting machine integer literals of type `u8`  \nval get_bit_pow2_minus_one_u8\n  (t: _ {t == u8_inttype}) (x: int {x < pow2 8 /\\ Some? (mask_inv_opt x)}) (nth: usize {v nth < 8})\n  : Lemma ( get_bit #t #Lib.IntTypes.PUB (FStar.UInt8.uint_to_t x) nth \n        == (if v nth < Some?.v (mask_inv_opt x) then 1 else 0))\n  [SMTPat (get_bit #t #Lib.IntTypes.PUB (FStar.UInt8.uint_to_t x) nth)]\n// XXX: Why the #t here and not in the ones above?\n\nval get_last_bit_signed_lemma (#t: inttype{signed t}) (x: int_t t)\n  : Lemma (   get_bit x (mk_int_l (bits t - 1)) \n          == (if v x < 0 then 1 else 0))\n    // [SMTPat (get_bit x (mk_int (bits t - 1)))]\n\n\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fst",
    "content": "module Rust_primitives.Hax.Monomorphized_update_at\n\nopen Rust_primitives\nopen Rust_primitives.Hax\nopen Core.Ops.Range\n\nlet update_at_usize s i x = \n  update_at s i x\n\nlet update_at_range #n s i x = \n  let res = update_at s i x in\n  admit(); // To be proved\n  res\n  \nlet update_at_range_to #n s i x =\n  let res = update_at s i x in\n  admit();\n  res\n  \nlet update_at_range_from #n s i x = \n  let res = update_at s i x in\n  admit();\n  res\n\nlet update_at_range_full s i x =\n  let res = update_at s i x in\n  admit();\n  res\n\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Hax.Monomorphized_update_at.fsti",
    "content": "module Rust_primitives.Hax.Monomorphized_update_at\n\nopen Rust_primitives\nopen Rust_primitives.Hax\nopen Core.Ops.Range\n\n#set-options \"--z3rlimit 30\"\n\nval update_at_usize\n  (#t: Type0)\n  (s: t_Slice t)\n  (i: usize)\n  (x: t)\n  : Pure (t_Array t (length s))\n    (requires (v i < Seq.length s))\n    (ensures (fun res -> res == Seq.upd s (v i) x))\n\nval update_at_range #n\n  (#t: Type0)\n  (s: t_Slice t)\n  (i: t_Range (pub_int_t n))\n  (x: t_Slice t)\n  : Pure (t_Array t (length s))\n    (requires (v i.f_start >= 0 /\\ v i.f_start <= Seq.length s /\\\n               v i.f_end <= Seq.length s /\\\n               Seq.length x == v i.f_end - v i.f_start))\n    (ensures (fun res ->\n                Seq.slice res 0 (v i.f_start) == Seq.slice s 0 (v i.f_start) /\\\n                Seq.slice res (v i.f_start) (v i.f_end) == x /\\\n                Seq.slice res (v i.f_end) (Seq.length res) == Seq.slice s (v i.f_end) (Seq.length s)))\n\nval update_at_range_to #n\n  (#t: Type0)\n  (s: t_Slice t)\n  (i: t_RangeTo (pub_int_t n))\n  (x: t_Slice t)\n  : Pure (t_Array t (length s))\n    (requires (v i.f_end >= 0 /\\ v i.f_end <= Seq.length s /\\\n               Seq.length x == v i.f_end))\n    (ensures (fun res ->\n                Seq.slice res 0 (v i.f_end) == x /\\\n                Seq.slice res (v i.f_end) (Seq.length res) == Seq.slice s (v i.f_end) (Seq.length s)))\n\nval update_at_range_from #n\n  (#t: Type0)\n  (s: t_Slice t)\n  (i: t_RangeFrom (pub_int_t n))\n  (x: t_Slice t)\n  : Pure (t_Array t (length s))\n    (requires ( v i.f_start >= 0 /\\ v i.f_start <= Seq.length s /\\\n                Seq.length x == Seq.length s - v i.f_start))\n    (ensures (fun res ->\n                Seq.slice res 0 (v i.f_start) == Seq.slice s 0 (v i.f_start) /\\\n                Seq.slice res (v i.f_start) (Seq.length res) == x))\n\nval update_at_range_full\n  (#t: Type0)\n  (s: t_Slice t)\n  (i: t_RangeFull)\n  (x: t_Slice t)\n  : Pure (t_Array t (length s))\n    
(requires (Seq.length x == Seq.length s))\n    (ensures (fun res -> res == x))\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Hax.fst",
    "content": "module Rust_primitives.Hax\n\nopen Rust_primitives.Integers\nopen Rust_primitives.Arrays\n\ntype t_Never = False\nlet never_to_any #t: t_Never -> t = (fun _ -> match () with)\n\nlet repeat (#t:Type0) (x: t) (len: usize): t_Array t len = \n  FStar.Seq.create (v len) x\n\nopen Core.Ops.Index\nclass update_at_tc self idx = {\n  [@@@FStar.Tactics.Typeclasses.tcinstance]\n  super_index: t_Index self idx;\n  update_at: s: self -> i: idx {in_range s i} -> super_index.f_Output -> self;\n}\n\nopen Core.Slice\n\ninstance impl__index t n: t_Index (t_Slice t) (pub_int_t n)\n  = { f_Output = t;\n      in_range = (fun (s: t_Slice t) (i: pub_int_t n) -> v i >= 0 && v i < Seq.length s);\n      f_index = (fun s i -> Seq.index s (v i));\n    }\n\ninstance impl__index_array t l n: t_Index (t_Array t l) (pub_int_t n)\n  = { f_Output = t;\n      in_range = (fun (s: t_Array t l) (i: pub_int_t n) -> v i >= 0 && v i < v l);\n      f_index = (fun s i -> Seq.index s (v i));\n    }\n\ninstance update_at_tc_slice t n: update_at_tc (t_Slice t) (pub_int_t n) = {\n  super_index = FStar.Tactics.Typeclasses.solve <: t_Index (t_Slice t) (pub_int_t n);\n  update_at = (fun arr i x -> FStar.Seq.upd arr (v i) x);\n}\n\ninstance update_at_tc_array t l n: update_at_tc (t_Array t l) (pub_int_t n) = {\n  super_index = FStar.Tactics.Typeclasses.solve <: t_Index (t_Array t l) (pub_int_t n);\n  update_at = (fun arr i x -> FStar.Seq.upd arr (v i) x);\n}\n\n\nlet (.[]<-) #self #idx {| update_at_tc self idx |} (s: self) (i: idx {in_range s i})\n  = update_at s i\n\nlet array_of_list #t = Rust_primitives.Arrays.of_list #t\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Integers.fst",
    "content": "module Rust_primitives.Integers\n\n#set-options \"--z3rlimit 100 --fuel 0 --ifuel 1\"\n\n\nlet pow2_values x = \n   let p = pow2 x in\n   assert_norm (p == normalize_term (pow2 x))\n\nlet usize_inttype = LI.U32\nlet isize_inttype = LI.S32\n\nlet v_injective #t a = LI.v_injective #t #LI.PUB a\nlet v_mk_int #t n = LI.v_mk_int #t #LI.PUB n\n\nlet usize_to_uint32 x = x\nlet usize_to_uint64 x = Int.Cast.uint32_to_uint64 x\nlet size_to_uint64 x = Int.Cast.uint32_to_uint64 x\n\nlet mk_int_l #t #l a = LI.mk_int #t #l a\nlet mk_int_equiv_lemma #_ = admit ()\nlet mk_int_v_lemma #t a = ()\nlet v_mk_int_lemma #t a = ()\n\nlet declassify #t #l #l' a = admit()\n\nlet add_mod_equiv_lemma #t #l #l' a b =\n  LI.add_mod_lemma #_ #(meet l l') (classify a) (classify b)\n  \nlet add_equiv_lemma #t #l #l' a b =\n  LI.add_lemma #_ #(meet l l') (classify a) (classify b)\n  \nlet incr_equiv_lemma #t #l a = LI.incr_lemma #t #l a\n\nlet mul_mod_equiv_lemma #t #l #l' a b =\n  LI.mul_mod_lemma #_ #(meet l l') (classify a) (classify b)\n\nlet mul_equiv_lemma #t #l #l' a b =\n  LI.mul_lemma #_ #(meet l l') (classify a) (classify b)\n\nlet sub_mod_equiv_lemma #t #l #l' a b =\n  LI.sub_mod_lemma #_ #(meet l l') (classify a) (classify b)\n\nlet sub_equiv_lemma #t #l #l' a b =\n  LI.sub_lemma #_ #(meet l l') (classify a) (classify b)\n\nlet decr_equiv_lemma #t a = LI.decr_lemma #t  a\n\nlet div_equiv_lemma #t a b = admit(); LI.div_lemma #t a b\nlet mod_equiv_lemma #t a b = admit(); LI.mod_lemma #t a b\n\nlet lognot #t a = LI.lognot #t a\nlet lognot_lemma #t a = admit()\n\nlet logxor #t #l1 #l2 a b = LI.logxor #t #(meet l1 l2) (classify a) (classify b)\nlet logxor_lemma #t a b = admit()\n\nlet logand #t #l1 #l2 a b = LI.logand #t #(meet l1 l2) (classify a) (classify b)\nlet logand_lemma #t a b = admit()\nlet logand_mask_lemma #t a b = admit()\n\nlet logor #t #l1 #l2 a b = LI.logor #t #(meet l1 l2) (classify a) (classify b)\nlet logor_lemma #t a b = admit()\n\nlet 
shift_right_equiv_lemma #t a b = admit()\nlet shift_left_equiv_lemma #t a b = admit()\n\nlet rotate_right #t a b = LI.rotate_right #t  a (cast b)\nlet rotate_right_equiv_lemma #t a b = ()\nlet rotate_left #t a b = LI.rotate_left #t  a (cast b)\nlet rotate_left_equiv_lemma #t a b = ()\n\nlet abs_int_equiv_lemma #t a = admit()\n\nlet neg_equiv_lemma #_ _ = admit()\n\nlet get_bit_and _x _y _i = admit ()\nlet get_bit_or _x _y _i = admit ()\nlet get_bit_shl _x _y _i = admit ()\nlet get_bit_shr _x _y _i = admit ()\n\nlet get_bit_cast #t #u #l x nth\n  = reveal_opaque (`%get_bit) (get_bit x nth);\n    reveal_opaque (`%get_bit) (get_bit (cast_mod #t #u x <: int_t_l u l) nth);\n    admit ()\n\nlet get_bit_cast_extend #t #u x nth\n  = admit ()\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Integers.fsti",
    "content": "module Rust_primitives.Integers\n\nopen FStar.Mul\n\nmodule LI = Lib.IntTypes\n\n#set-options \"--max_fuel 0 --max_ifuel 1 --z3rlimit 20\"\n\nval pow2_values: x:nat -> Lemma\n  (let p = pow2 x in\n   match x with\n   | 0  -> p=1\n   | 1  -> p=2\n   | 8  -> p=256\n   | 16 -> p=65536\n   | 31 -> p=2147483648\n   | 32 -> p=4294967296\n   | 63 -> p=9223372036854775808\n   | 64 -> p=18446744073709551616\n   | 2 | 3 | 4 | 5 | 6 | 7\n   | 9 | 10 | 11 | 12 | 13 | 14 | 15 \n   | 17 | 18 | 19 | 20 | 21 | 22 | 23\n   | 24 | 25 | 26 | 27 | 28 | 29 | 30\n   | 33 | 34 | 35 | 36 | 37 | 38 | 39\n   | 40 | 41 | 42 | 43 | 44 | 45 | 46\n   | 47 | 48 | 49 | 50 | 51 | 52 | 53\n   | 54 | 55 | 56 | 57 | 58 | 59 | 60\n   | 61 | 62 | 63 | 65 | 127 | 128 -> p = normalize_term (pow2 x)\n   | _ -> True)\n  [SMTPat (pow2 x)]\n\ntype inttype = LI.inttype\nlet unsigned = LI.unsigned\nlet signed = LI.signed\ntype uinttype = t:inttype{unsigned t}\nlet int_t_l t l = LI.int_t t l\nlet int_t t = int_t_l t LI.SEC\nlet pub_int_t t = int_t_l t LI.PUB\n\nlet meet (l1 l2:LI.secrecy_level) : LI.secrecy_level =\n  match l1, l2 with\n  | LI.SEC, LI.PUB -> LI.SEC\n  | LI.SEC, LI.SEC -> LI.SEC\n  | LI.PUB, LI.SEC -> LI.SEC\n  | LI.PUB, LI.PUB -> LI.PUB\n\nlet can_flow (l1 l2:LI.secrecy_level) : bool =\n  match l1, l2 with\n  | LI.PUB, LI.PUB -> true\n  | LI.SEC, LI.SEC -> true\n  | LI.PUB, LI.SEC -> true\n  | LI.SEC, LI.PUB -> false\n\nlet bits t = LI.bits t\nlet u8_inttype = LI.U8\nlet i8_inttype = LI.S8\nlet u16_inttype = LI.U16\nlet i16_inttype = LI.S16\nlet u32_inttype = LI.U32\nlet i32_inttype = LI.S32\nlet u64_inttype = LI.U64\nlet i64_inttype = LI.S64\nlet u128_inttype = LI.U128\nlet i128_inttype = LI.S128\nval usize_inttype: t:inttype{unsigned t /\\ (t = LI.U32 \\/ t = LI.U64)}\nval isize_inttype: t:inttype{signed t /\\ (t = LI.S32 \\/ t = LI.S64)}\n\ntype u8 = int_t LI.U8 \ntype i8 = int_t LI.S8\ntype u16 = int_t LI.U16\ntype i16 = int_t LI.S16\ntype u32 = int_t LI.U32\ntype i32 = 
int_t LI.S32\ntype u64 = int_t LI.U64\ntype i64=  int_t LI.S64\ntype u128 = int_t LI.U128\ntype i128 = int_t LI.S128\n\ntype pub_u8 = pub_int_t LI.U8 \ntype pub_i8 = pub_int_t LI.S8\ntype pub_u16 = pub_int_t LI.U16\ntype pub_i16 = pub_int_t LI.S16\ntype pub_u32 = pub_int_t LI.U32\ntype pub_i32 = pub_int_t LI.S32\ntype pub_u64 = pub_int_t LI.U64\ntype pub_i64=  pub_int_t LI.S64\ntype pub_u128 = pub_int_t LI.U128\ntype pub_i128 = pub_int_t LI.S128\n\ntype usize = pub_int_t usize_inttype\ntype isize = pub_int_t isize_inttype\n\nlet minint (t:LI.inttype) =\n  if unsigned t then 0 else -(pow2 (bits t - 1))\nlet maxint (t:LI.inttype) =\n  if unsigned t then pow2 (bits t) - 1\n  else pow2 (bits t - 1) - 1\nlet modulus (t:LI.inttype) = pow2 (bits t)\n\nlet max_usize = maxint usize_inttype\nlet max_isize = maxint isize_inttype\n\n//let range_bits (n:int) (n:bits) : bool =\n//  minint t <= n && n <= maxint t\n\nlet range (n:int) (t:inttype) : bool =\n  minint t <= n && n <= maxint t\ntype range_t (t:inttype) = x:int{range x t}\n\n[@(strict_on_arguments [0])]\nlet v (#t:inttype) (#l) (x:int_t_l t l) : range_t t = LI.v #t #l x\n\n[@(strict_on_arguments [0])]\nval mk_int_l (#t:inttype) (#l:LI.secrecy_level) (n:range_t t) : int_t_l t l\n\n[@(strict_on_arguments [0])]\nlet mk_int (#t:inttype) (n:range_t t) : int_t t = mk_int_l n\n\n[@(strict_on_arguments [0])]\nlet mk_pub_int (#t:inttype) (n:range_t t) : pub_int_t t = mk_int_l n\n\n[@(strict_on_arguments [0])]\nval mk_int_equiv_lemma #t (n:range_t t) :\n    Lemma (\n    match t with\n    | LI.U8 -> mk_int_l #u8_inttype n == UInt8.uint_to_t n   \n    | LI.S8 -> mk_int_l #i8_inttype n == Int8.int_to_t n   \n    | LI.U16 -> mk_int_l #u16_inttype n == UInt16.uint_to_t n   \n    | LI.S16 -> mk_int_l #i16_inttype n == Int16.int_to_t n   \n    | LI.U32 -> mk_int_l #u32_inttype n == UInt32.uint_to_t n   \n    | LI.S32 -> mk_int_l #i32_inttype n == Int32.int_to_t n   \n    | LI.U64 -> mk_int_l #u64_inttype n == UInt64.uint_to_t n   \n    
| LI.S64 -> mk_int_l #i64_inttype n == Int64.int_to_t n   \n    | LI.U128 -> mk_int_l #u128_inttype n == UInt128.uint_to_t n   \n    | LI.S128 -> mk_int_l #i128_inttype n == Int128.int_to_t n  \n    | _ -> True)\n\nlet sz (n:range_t usize_inttype) : usize = mk_int_l n\nlet isz (n:range_t isize_inttype) : isize = mk_int_l n\n\nval mk_int_v_lemma: #t:inttype -> #l:LI.secrecy_level -> a:int_t_l t l -> Lemma\n  (mk_int_l #t (v #t #l a) == a)\n  [SMTPat (mk_int_l #t #l (v #t #l a))]\n\nval v_mk_int_lemma: #t:inttype -> #l:LI.secrecy_level -> n:range_t t -> Lemma\n  (v #t #l (mk_int_l #t #l n) == n)\n  [SMTPat (v #t #l (mk_int_l #t #l n))]\n\n(* Wrap-around modulo: wraps into [-p/2; p/2[ *)\nlet op_At_Percent (v:int) (p:int{p>0/\\ p%2=0}) : Tot int =\n  let m = v % p in if m >= p/2 then m - p else m \n\n[@(strict_on_arguments [0])]\nlet op_At_Percent_Dot x t : range_t t =\n  if unsigned t then x % modulus t\n  else x @% modulus t\n\nlet cast (#t:inttype) (#t':inttype) (#l:LI.secrecy_level)\n    (u1:int_t_l t l{range (v u1) t'}) =\n    mk_int_l #t' #l (v u1)\nlet cast_mod (#t:inttype) (#t':inttype) (#l:LI.secrecy_level)\n    (u1:int_t_l t l) = \n    mk_int_l #t' #l (v u1 @%. t')\n\nlet classify #t #l (#l':LI.secrecy_level{can_flow l l'})\n    (a:int_t_l t l)    \n    : int_t_l t l' =\n    match l,l' with\n    | LI.PUB, LI.SEC -> LI.secret #t a\n    | LI.PUB, LI.PUB -> a\n    | LI.SEC, LI.SEC -> a\n\n(* NOTE: Use with extreme care, and clearly document each use case *)\nval declassify #t #l #l'\n    (a:int_t_l t l)    \n    : int_t_l t l'\n\n\n/// Arithmetic operations\n/// \nlet add_mod (#t:inttype) \n            (#l #l':LI.secrecy_level) \n            (a:int_t_l t l) (b:int_t_l t l') =\n    mk_int_l #t #(meet l l') ((v a + v b) @%. 
t)\n\nval add_mod_equiv_lemma: #t:uinttype \n  -> #l:LI.secrecy_level\n  -> #l':LI.secrecy_level\n  -> a:int_t_l t l\n  -> b:int_t_l t l'\n  -> Lemma\n    (add_mod a b == LI.add_mod #_ #(meet l l') (classify a) (classify b))\n\nlet add (#t:inttype) (#l #l':LI.secrecy_level) (a:int_t_l t l)\n        (b:int_t_l t l'{range (v a + v b) t}) =\n    mk_int_l #t #(meet l l') (v a + v b)\n\nval add_equiv_lemma: #t:uinttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level\n  -> a:int_t_l t l\n  -> b:int_t_l t l'{range (v a + v b) t}\n  -> Lemma\n    (add a b == LI.add #t #(meet l l') (classify a) (classify b))\n\nlet incr (#t:inttype) (#l:LI.secrecy_level) (a:int_t_l t l{v a < maxint t}) =\n    mk_int_l #t #l (v a + 1)\n\nval incr_equiv_lemma: #t:inttype -> #l:LI.secrecy_level\n  -> a:int_t_l t l{v a < maxint t}\n  -> Lemma (incr a == LI.incr a)\n\nlet mul_mod (#t:inttype) (#l #l':LI.secrecy_level)\n            (a:int_t_l t l)\n            (b:int_t_l t l') =\n            mk_int_l #t #(meet l l') (v a * v b @%. t)\n\nval mul_mod_equiv_lemma: #t:uinttype{not (LI.U128? t)}\n  -> #l:LI.secrecy_level\n  -> #l':LI.secrecy_level\n  -> a:int_t_l t l\n  -> b:int_t_l t l'\n  -> Lemma (mul_mod a b == LI.mul_mod #t #(meet l l') (classify a) (classify b))\n\nlet mul (#t:inttype) (#l #l':LI.secrecy_level)\n        (a:int_t_l t l)\n        (b:int_t_l t l'{range (v a * v b) t}) =\n        mk_int_l #t #(meet l l') (v a * v b)\n\nval mul_equiv_lemma: #t:uinttype{not (LI.U128? t)} \n  -> #l:LI.secrecy_level\n  -> #l':LI.secrecy_level\n  -> a:int_t_l t l\n  -> b:int_t_l t l'{range (v a * v b) t}\n  -> Lemma (mul a b == LI.mul #t #(meet l l') (classify a) (classify b))\n\nlet sub_mod (#t:inttype) (#l #l':LI.secrecy_level)\n  (a:int_t_l t l) (b:int_t_l t l') =\n    mk_int_l #t #(meet l l') ((v a - v b) @%. 
t)\n\nval sub_mod_equiv_lemma: #t:uinttype\n  -> #l:LI.secrecy_level\n  -> #l':LI.secrecy_level\n  -> a:int_t_l t l\n  -> b:int_t_l t l'\n  -> Lemma\n    (sub_mod a b == LI.sub_mod #_ #(meet l l') (classify a) (classify b))\n\nlet sub (#t:inttype) (#l #l':LI.secrecy_level)\n        (a:int_t_l t l)\n        (b:int_t_l t l'{range (v a - v b) t}) =\n    mk_int_l #t #(meet l l') (v a - v b) \n\nval sub_equiv_lemma: #t:uinttype\n  -> #l:LI.secrecy_level\n  -> #l':LI.secrecy_level\n  -> a:int_t_l t l\n  -> b:int_t_l t l'{range (v a - v b) t}\n  -> Lemma\n    (sub a b == LI.sub #t #(meet l l') (classify a) (classify b))\n\nlet decr (#t:inttype) (#l:LI.secrecy_level) (a:int_t_l t l{minint t < v a}) =\n    mk_int_l #t #l (v a - 1)\n\nval decr_equiv_lemma: #t:inttype -> #l:LI.secrecy_level\n  -> a:int_t_l t l{minint t < v a}\n  -> Lemma (decr a == LI.decr  a)\n\nlet div (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB{v b <> 0}) =\n  assume(unsigned t \\/ range (v a / v b) t);\n  mk_int_l #t #LI.PUB (v a / v b)\n  \nval div_equiv_lemma: #t:inttype{~(LI.U128? t) /\\ ~(LI.S128? t)}\n  -> a:int_t_l t LI.PUB\n  -> b:int_t_l t LI.PUB{v b <> 0 /\\ (unsigned t \\/ range FStar.Int.(v a / v b) t)}\n  -> Lemma (div a b == LI.div a b)\n\nlet mod (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB{v b <> 0}) =\n  mk_int_l #t #LI.PUB (v a % v b)\n\n\nval mod_equiv_lemma: #t:inttype{~(LI.U128? t) /\\ ~(LI.S128? 
t)}\n  -> a:int_t_l t LI.PUB\n  -> b:int_t_l t LI.PUB{v b <> 0 /\\ (unsigned t \\/ range FStar.Int.(v a / v b) t)}\n  -> Lemma (mod a b == LI.mod a b)\n  \n\n/// Comparison Operators\n/// \nlet eq (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB) = v a = v b\nlet ne (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB) = v a <> v b\nlet lt (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB) = v a < v b\nlet lte (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB) = v a <= v b\nlet gt (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB) = v a > v b\nlet gte (#t:inttype) (a:int_t_l t LI.PUB) (b:int_t_l t LI.PUB) = v a >= v b\n\n\n/// Bitwise Operations\n\n\nlet ones (#t:inttype) (#l:LI.secrecy_level) : n:int_t_l t l =\n  if unsigned t then mk_int_l #t #l (pow2 (bits t) - 1)\n  else mk_int_l #t #l (-1)\n\nlet zero (#t:inttype) (#l:LI.secrecy_level) : n:int_t_l t l =\n  mk_int_l #t #l 0\n\nval lognot: #t:inttype -> #l:LI.secrecy_level -> int_t_l t l -> int_t_l t l\nval lognot_lemma: #t:inttype -> #l:LI.secrecy_level -> a:int_t_l t l -> Lemma\n  (lognot a == LI.lognot  a /\\\n   lognot #t #l zero == ones /\\\n   lognot #t #l ones == zero /\\\n   lognot (lognot a) == a /\\\n   (signed t ==> v (lognot a) = -1 - v a) /\\\n   (unsigned t ==> v (lognot a)  = pow2 (bits t) - 1 - v a)\n   )\n\nval logxor: #t:inttype \n  -> #l:LI.secrecy_level\n  -> #l':LI.secrecy_level\n  -> int_t_l t l\n  -> int_t_l t l'\n  -> int_t_l t (meet l l')\n  \nval logxor_lemma: #t:inttype \n  -> #l:LI.secrecy_level\n  -> #l':LI.secrecy_level\n  -> a:int_t_l t l \n  -> b:int_t_l t l' -> Lemma\n  (logxor a b == LI.logxor #t #(meet l l') (classify a) (classify b) /\\\n   a `logxor` a == zero #t #l /\\\n   (a `logxor` b == zero #t #(meet l l') ==> v b == v a) /\\\n   v (a `logxor` (a `logxor` b)) == v b /\\\n   v (a `logxor` (b `logxor` a)) == v b /\\\n   zero #t #l' `logxor` a == classify a /\\\n   a `logxor` zero #t #l' == classify a /\\\n   v (ones #t #l' `logxor` a) == v (lognot a) 
/\\\n   v (a `logxor` ones #t #l') == v (lognot a))\n    \nval logand: #t:inttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level\n  -> int_t_l t l\n  -> int_t_l t l'\n  -> int_t_l t (meet l l')\n\nval logand_lemma: #t:inttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level -> a:int_t_l t l -> b:int_t_l t l' ->\n  Lemma (logand a b == LI.logand #t #(meet l l') (classify a) (classify b) /\\\n         v (logand a (zero #t #l')) == v (zero #t #l') /\\\n         v (logand (zero #t #l') a) == v (zero #t #l') /\\\n         v (logand a (ones #t #l')) == v a /\\\n         v (logand (ones #t #l') a) == v a /\\\n         (v a >= 0 ==> (v (logand a b) >= 0) /\\ (v (logand a b) <= v a)) /\\\n         (v b >= 0 ==> (v (logand a b) >= 0) /\\ (v (logand a b) <= v b)))\n\nval logand_mask_lemma: #t:inttype -> #l:LI.secrecy_level\n  -> a:int_t_l t l\n  -> m:nat{m < bits t} ->\n  Lemma (pow2 m < maxint t /\\\n         logand a (sub #t (mk_int_l #t #l (pow2 m)) (mk_int_l #t #l 1)) ==\n         mk_int_l #t #l (v a % pow2 m))\n  [SMTPat (logand #t a (sub #t (mk_int_l #t #l (pow2 m)) (mk_int_l #t #l 1)))]\n\nval logor: #t:inttype -> #l:LI.secrecy_level -> #l':LI.secrecy_level\n  -> int_t_l t l\n  -> int_t_l t l'\n  -> int_t_l t (meet l l')\n\nval logor_lemma: #t:inttype\n  -> #l:LI.secrecy_level\n  -> #l':LI.secrecy_level\n  -> a:int_t_l t l\n  -> b:int_t_l t l' ->\n  Lemma (logor a b == LI.logor #t #(meet l l') (classify a) (classify b) /\\\n         v (logor a (zero #t #l')) == v a /\\\n         v (logor a (ones #t #l')) == v (ones #t #l') /\\\n         v (logor (zero #t #l') a) == v a /\\\n         v (logor (ones #t #l') a) == v (ones #t #l') /\\\n         ((v a >= 0 /\\ v b >= 0) ==> (v (logor a b) >= v a /\\ v (logor a b) >= v b)))\n\nunfold type shiftval (t:inttype) (t':inttype) =\n     b:int_t_l t' LI.PUB{v b >= 0 /\\ v b < bits t}\nunfold type rotval (t:inttype) (t':inttype) =\n     b:int_t_l t' LI.PUB{v b > 0 /\\ v b < bits t}\n\nlet shift_right (#t:inttype) (#t':inttype) 
(#l:LI.secrecy_level)\n    (a:int_t_l t l) (b:shiftval t t') =\n    LI.shift_right_lemma  a (LI.size (v b));\n    mk_int_l #t #l (v a / pow2 (v b))\n\nval shift_right_equiv_lemma: #t:inttype -> #t':inttype -> #l:LI.secrecy_level\n  -> a:int_t_l t l -> b:shiftval t t'\n  -> Lemma\n    (v ((cast #t' #u32_inttype b <: LI.size_t)) < bits t /\\\n     shift_right #t #t' a b ==\n     LI.shift_right a (cast #t' #u32_inttype b <: LI.size_t))\n     \nlet shift_left (#t:inttype) (#t':inttype) (#l:LI.secrecy_level)\n    (a:int_t_l t l) (b:shiftval t t') =\n    let x:range_t t = (v a * pow2 (v b)) @%. t in\n    mk_int_l #t #l x\n\nval shift_left_equiv_lemma: #t:inttype -> #t':inttype -> #l:LI.secrecy_level\n  -> a:int_t_l t l -> b:shiftval t t'\n  -> Lemma\n    ((v a >= 0 /\\ range (v a * pow2 (v b)) t) ==>\n     (v (cast #_ #u32_inttype b) < bits t /\\\n      shift_left #t #t' a b ==\n      LI.shift_left  a (cast b)))\n\nval rotate_right: #t:uinttype -> #t':inttype -> #l:LI.secrecy_level\n  -> a:int_t_l t l\n  -> rotval t t'\n  -> int_t_l t l\n\nval rotate_right_equiv_lemma: #t:uinttype -> #t':inttype -> #l:LI.secrecy_level\n  -> a:int_t_l t l -> b:rotval t t'\n  -> Lemma (v (cast #_ #u32_inttype b) > 0 /\\ \n           rotate_right a b ==\n           LI.rotate_right  a (cast b))\n  \nval rotate_left: #t:uinttype -> #t':inttype -> #l:LI.secrecy_level\n  -> a:int_t_l t l\n  -> rotval t t'\n  -> int_t_l t l\n\nval rotate_left_equiv_lemma: #t:uinttype -> #t':inttype -> #l:LI.secrecy_level\n  -> a:int_t_l t l -> b:rotval t t'\n  -> Lemma (v (cast #_ #u32_inttype b) > 0 /\\ \n           rotate_left a b ==\n           LI.rotate_left  a (cast b))\n\nlet shift_right_i (#t:inttype) (#t':inttype) (#l:LI.secrecy_level) (s:shiftval t t') (u:int_t_l t l) : int_t_l t l = shift_right u s\n\nlet shift_left_i (#t:inttype) (#t':inttype) (#l:LI.secrecy_level) (s:shiftval t t') (u:int_t_l t l{v u >= 0}) : int_t_l t l = shift_left u s\n\nlet rotate_right_i (#t:uinttype) (#t':inttype) 
(#l:LI.secrecy_level) (s:rotval t t') (u:int_t_l t l) : int_t_l t l = rotate_right u s\n\nlet rotate_left_i (#t:uinttype) (#t':inttype) (#l:LI.secrecy_level) (s:rotval t t') (u:int_t_l t l) : int_t_l t l = rotate_left u s\n\nlet abs_int (#t:inttype) (#l:LI.secrecy_level) (a:int_t_l t l{minint t < v a}) =\n    mk_int_l #t #l (abs (v a))\n\nval abs_int_equiv_lemma: #t:inttype{signed t /\\ not (LI.S128? t)} \n  -> #l:LI.secrecy_level\n  -> a:int_t_l t l{minint t < v a}\n  -> Lemma (abs_int a == LI.ct_abs  a)\n\nlet neg (#t:inttype{signed t}) (#l:LI.secrecy_level) (a:int_t_l t l{range (0 - v a) t}) =\n    mk_int_l #t #l (0 - (v a))\n\nval neg_equiv_lemma: #t:inttype{signed t /\\ not (LI.S128? t)} -> #l:LI.secrecy_level\n  -> a:int_t_l t l{range (0 - v a) t}\n  -> Lemma (neg a == sub (mk_int_l #t #l 0) a /\\\n          (lognot a == sub (neg a) (mk_int_l #t #l 1)))\n\n\n///\n/// Operators available for all machine integers\n///\n\n// Strict: with precondition\nunfold\nlet (+!) #t #l #l' = add #t #l #l'\n\n// Wrapping: no precondition\nunfold\nlet (+.) #t #l #l' = add_mod #t #l #l'\n\nunfold\nlet ( *! ) #t #l #l' = mul #t #l #l'\n\nunfold\nlet ( *. ) #t #l #l' = mul_mod #t #l #l'\n\nunfold\nlet ( -! ) #t #l #l' = sub #t #l #l'\n\nunfold\nlet ( -. ) #t #l #l' = sub_mod #t #l #l'\n\nunfold\nlet ( >>! ) #t #t' #l = shift_right #t #t' #l\n\nunfold\nlet ( <<! ) #t #t' #l = shift_left #t #t' #l\n\nunfold\nlet ( >>>. ) #t #t' #l = rotate_right #t #t' #l\n\nunfold\nlet ( <<<. ) #t #t' #l = rotate_left #t #t' #l\n\nunfold\nlet ( ^. ) #t #l #l' = logxor #t #l #l'\n\nunfold\nlet ( |. ) #t #l #l' = logor #t #l #l'\n\nunfold\nlet ( &. ) #t #l #l' = logand #t #l #l'\n\nunfold\nlet ( ~. ) #t #l = lognot #t #l\n\nunfold\nlet (/!) #t = div #t\n\nunfold\nlet (%!) #t = mod #t\n\nunfold\nlet (=.) #t = eq #t\n\nunfold\nlet (<>.) #t = ne #t\n\nunfold\nlet (<.) #t = lt #t\n\nunfold\nlet (<=.) #t = lte #t\n\nunfold\nlet (>.) #t = gt #t\n\nunfold\nlet (>=.) 
#t = gte #t\n\n\ntype bit = n: nat {n < 2}\n\n/// Mathematical `get_bit` definition on `nat`s\nlet get_bit_nat (x: nat) (nth: nat): bit\n  = (x / pow2 nth) % 2\n\n/// `get_bit` definition for machine integer of any size and signedness\n[@\"opaque_to_smt\"]\nlet get_bit (#t: inttype) (#l:LI.secrecy_level)\n            (x: int_t_l t l) (nth: usize {v nth < bits t}): bit\n  = if v x >= 0 then get_bit_nat (v x) (v nth)\n               else // two's complement\n                    get_bit_nat (pow2 (bits t) + v x) (v nth)\n\nunfold let bit_and (x y: bit): bit = match x, y with | (1, 1) -> 1 | _ -> 0\nunfold let bit_or  (x y: bit): bit = (x + y) % 2\n\n/// Bit-wise semantics for `&.`\nval get_bit_and #t #l (x y: int_t_l t l) (i: usize {v i < bits t})\n  : Lemma (get_bit (x &. y) i == get_bit x i `bit_and` get_bit y i)\n          [SMTPat (get_bit (x &. y) i)]\n\n/// Bit-wise semantics for `|.`\nval get_bit_or #t #l (x y: int_t_l t l) (i: usize {v i < bits t})\n  : Lemma (get_bit (x |. y) i == get_bit x i `bit_or` get_bit y i)\n          [SMTPat (get_bit (x |. y) i)]\n\n/// Bit-wise semantics for `<<!`\nval get_bit_shl #t #u #l (x: int_t_l t l) (y: int_t_l u LI.PUB) (i: usize {v i < bits t})\n  : Lemma (requires v y >= 0 /\\ v y < bits t)\n          (ensures get_bit (x <<! y) i \n                == (if v i < v y then 0 else get_bit x (mk_int_l (v i - v y))))\n    [SMTPat (get_bit (x <<! y) i)]\n\n/// Bit-wise semantics for `>>!`\nval get_bit_shr #t #u #l (x: int_t_l t l) (y: int_t_l u LI.PUB) (i: usize {v i < bits t})\n  : Lemma (requires v y >= 0 /\\ v y < bits t)\n          (ensures get_bit (x >>! y) i \n                == (if v i < bits t - v y\n                    then get_bit x (mk_int_l (v i + v y))\n                    else if signed t\n                         then get_bit x (mk_int_l (bits t - 1))\n                         else 0))\n    [SMTPat (get_bit (x >>! 
y) i)]\n\n// TODO: check for neg numbers\n/// Bit-wise semantics of integer casts\nval get_bit_cast #t #u #l\n  (x: int_t_l t l) (nth: usize)\n  : Lemma (requires v nth < bits u /\\ v nth < bits t)\n          (ensures get_bit (cast_mod #t #u x) nth == get_bit x nth)\n          [SMTPat (get_bit (cast_mod #t #u x) nth)]\n\nval get_bit_cast_extend #t #u #l\n  (x: int_t_l t l) (nth: usize)\n  : Lemma (requires bits t < bits u /\\ v nth >= bits t /\\ v nth < bits u)\n          (ensures get_bit (cast_mod #t #u x) nth == 0)\n          [SMTPat (get_bit (cast_mod #t #u x) nth)]\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.Iterators.fsti",
    "content": "module Rust_primitives.Iterators\n\nopen Rust_primitives\nopen Core.Ops.Range\nopen FStar.Mul\n\nval foldi_range  (#n:inttype) (#acc_t:Type)\n                 (#inv:(acc_t -> i:pub_int_t n -> Type))\n                 (r: t_Range (pub_int_t n){r.f_start <=. r.f_end}) \n                 (acc:acc_t{inv acc r.f_start})\n                 (f: (acc:acc_t -> i:pub_int_t n{i >=. r.f_start /\\ i <. r.f_end /\\ inv acc i}\n                       -> acc':acc_t{inv acc' (i +! mk_pub_int 1)}))\n                 : res:acc_t{inv res r.f_end}\n\nval foldi_range_step_by  (#n:inttype) (#acc_t:Type)\n                 (#inv:(acc_t -> i:pub_int_t n -> Type))\n                 (r: t_Range (pub_int_t n){r.f_start <=. r.f_end}) \n                 (step: usize{v step > 0 /\\ range (v step) n /\\ range (v r.f_end + v step) n})\n                 (acc:acc_t{inv acc r.f_start})\n                 (f: (acc:acc_t -> i:pub_int_t n{i >=. r.f_start /\\ i <. r.f_end /\\ \n                                            (v i - v r.f_start) % (v step) == 0 /\\ inv acc i}\n                       -> acc':acc_t{inv acc' (i +! mk_int #n (v step))}))\n                 : res:acc_t{inv res r.f_end}\n\n\nval foldi_chunks_exact\n                 (#t:Type) (#acc_t:Type)\n                 (#inv:(acc_t -> usize -> Type))\n                 (s:t_Slice t)\n                 (chunk_len:usize{v chunk_len > 0})\n                 (acc:acc_t{inv acc (sz 0)})\n                 (f: (acc:acc_t -> it:(usize & t_Array t chunk_len){\n                                  let (i,item) = it in\n                                  v i >= 0 /\\\n                                  v i < Seq.length s / v chunk_len /\\\n                                  inv acc i}\n                       -> acc':acc_t{inv acc' (fst it +! sz 1)}))\n                 : res:acc_t{inv res (length s /! 
chunk_len)}\n\nval fold_chunks_exact\n                 (#t:Type) (#acc_t:Type)\n                 (#inv:(acc_t -> Type))\n                 (s:t_Slice t)\n                 (chunk_len:usize{v chunk_len > 0}) // /\\ Seq.length s % v chunk_len == 0})\n                 (acc:acc_t{inv acc})\n                 (f: (acc:acc_t -> it:t_Array t chunk_len{inv acc}\n                       -> acc':acc_t{inv acc'}))\n                 : res:acc_t{inv res}\n\n\nval foldi_slice  (#t:Type) (#acc_t:Type)\n                 (#inv:(acc_t -> usize -> Type))\n                 (sl: t_Slice t)\n                 (acc:acc_t{inv acc (sz 0)})\n                 (f: (acc:acc_t -> it:(usize & t){\n                                  let (i,item) = it in\n                                  v i >= 0 /\\\n                                  v i < Seq.length sl /\\\n                                  Seq.index sl (v i) == item /\\\n                                  inv acc i}\n                       -> acc':acc_t{inv acc' (fst it +! sz 1)}))\n                 : res:acc_t{inv res (length sl)}\n\n"
  },
  {
    "path": "hax-lib/proof-libs/fstar-secret-integers/rust_primitives/Rust_primitives.fst",
    "content": "module Rust_primitives\n\ninclude Rust_primitives.Integers\ninclude Rust_primitives.Arrays\ninclude Rust_primitives.BitVectors\n\nclass cast_tc a b = {\n  cast: a -> b; \n}\n\n/// Rust's casts operations on integers are non-panicking\ninstance cast_tc_integers (t:inttype) (t':inttype) (l:Lib.IntTypes.secrecy_level)\n  : cast_tc (int_t_l t l) (int_t_l t' l)\n  = { cast = (fun x -> Rust_primitives.Integers.cast_mod #t #t' x) }\n\nclass unsize_tc source = {\n  output: Type;\n  unsize: source -> output;\n}\n\ninstance array_to_slice_unsize t n: unsize_tc (t_Array t n) = {\n  output = t_Slice t;\n  unsize = (fun (arr: t_Array t n) -> \n            arr <: t_Slice t);\n}\n\n\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/Array/Lemmas.lean",
    "content": "\nattribute [grind =] Array.size_extract\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/BitVec/Basic.lean",
    "content": "\nattribute [grind =] BitVec.toNat_ofNat\nattribute [grind] BitVec.umulOverflow\nattribute [grind] BitVec.uaddOverflow\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/Int/DivMod/Lemmas.lean",
    "content": "attribute [grind <-] Int.tmod_lt_of_pos\nattribute [grind <-] Int.lt_tmod_of_pos\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/Nat/Div/Basic.lean",
    "content": "attribute [grind =] Nat.mod_eq_of_lt\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/Nat/MinMax.lean",
    "content": "attribute [grind =] Nat.min_eq_left\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/SInt/Basic.lean",
    "content": "import Hax.MissingLean.Init.Data.SInt.Basic_Int128\nimport Hax.MissingLean.Init.Data.UInt.Basic\n\nopen Lean in\nset_option hygiene false in\nmacro \"additional_int_decls\" typeName:ident width:term : command => do `(\n  namespace $typeName\n\n  def addOverflow (a b : $typeName) : Bool :=\n    BitVec.saddOverflow a.toBitVec b.toBitVec\n\n  def subOverflow (a b : $typeName) : Bool :=\n    BitVec.ssubOverflow a.toBitVec b.toBitVec\n\n  def mulOverflow (a b : $typeName) : Bool :=\n    BitVec.smulOverflow a.toBitVec b.toBitVec\n\n  @[grind .]\n  theorem addOverflow_iff {a b : $typeName} : addOverflow a b ↔\n      a.toInt + b.toInt ≥ 2 ^ ($width - 1) ∨ a.toInt + b.toInt < - 2 ^ ($width - 1) := by\n    simp [addOverflow, BitVec.saddOverflow] <;> rfl\n\n  @[grind .]\n  theorem subOverflow_iff {a b : $typeName} : subOverflow a b ↔\n      a.toInt - b.toInt ≥ 2 ^ ($width - 1) ∨ a.toInt - b.toInt < - 2 ^ ($width - 1) := by\n    simp [subOverflow, BitVec.ssubOverflow] <;> rfl\n\n  @[grind .]\n  theorem mulOverflow_iff {a b : $typeName} : mulOverflow a b ↔\n      a.toInt * b.toInt ≥ 2 ^ ($width - 1) ∨ a.toInt * b.toInt < - 2 ^ ($width - 1) := by\n    simp [mulOverflow, BitVec.smulOverflow] <;> rfl\n\n  @[grind =]\n  theorem toInt_add_of_not_addOverflow {x y : $typeName} (h : ¬ addOverflow x y) :\n      (x + y).toInt = x.toInt + y.toInt := BitVec.toInt_add_of_not_saddOverflow h\n\n  @[grind =]\n  theorem toInt_sub_of_not_subOverflow {x y : $typeName} (h : ¬ subOverflow x y) :\n      (x - y).toInt = x.toInt - y.toInt := BitVec.toInt_sub_of_not_ssubOverflow h\n\n  @[grind =]\n  theorem toInt_mul_of_not_mulOverflow {x y : $typeName} (h : ¬ mulOverflow x y) :\n      (x * y).toInt = x.toInt * y.toInt := BitVec.toInt_mul_of_not_smulOverflow h\n\n  end $typeName\n)\n\nadditional_int_decls Int8 8\nadditional_int_decls Int16 16\nadditional_int_decls Int32 32\nadditional_int_decls Int64 64\nadditional_int_decls Int128 128\nadditional_int_decls ISize 
System.Platform.numBits\n\nopen Lean in\nset_option hygiene false in\nmacro \"declare_missing_int_conversions\" : command => do\n  let mut cmds := #[]\n  let src : List (Name × Nat) := [\n    (`Int8, 8),\n    (`Int16, 16),\n    (`Int32, 32),\n    (`Int64, 64),\n    (`Int128, 128),\n    (`ISize, 0)\n  ]\n  let dst : List (Name × Nat) := [\n    (`UInt8, 8),\n    (`UInt16, 16),\n    (`UInt32, 32),\n    (`UInt64, 64),\n    (`UInt128, 128),\n    (`USize, 0),\n  ]\n  for (srcName, srcIdx) in src do\n    for (dstName, dstIdx) in dst do\n      let srcIdent := mkIdent srcName\n      let dstIdent := mkIdent dstName\n      if srcIdx != dstIdx then\n        cmds := cmds.push $ ← `(\n          def $(mkIdent (srcName ++ dstName.appendBefore \"to\")) (x : $srcIdent) : $dstIdent :=\n            $(mkIdent (dstName ++ `ofInt)) x.toInt\n        )\n  return ⟨mkNullNode cmds⟩\n\ndeclare_missing_int_conversions\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/SInt/Basic_Int128.lean",
    "content": "import Hax.MissingLean.Init.Prelude\nimport Lean.Meta.Tactic.Simp.BuiltinSimprocs.SInt\n\nset_option autoImplicit true\n\n-- Adapted from Init/Data/SInt/Basic.lean from the Lean v4.29.0-rc1 source code\n\nstructure Int128 where\n  ofUInt128 :: toUInt128 : UInt128\n\nabbrev Int128.size : Nat := 340282366920938463463374607431768211456\n\n@[inline] def Int128.toBitVec (x : Int128) : BitVec 128 := x.toUInt128.toBitVec\n\ntheorem Int128.toBitVec.inj : {x y : Int128} → x.toBitVec = y.toBitVec → x = y\n  | ⟨⟨_⟩⟩, ⟨⟨_⟩⟩, rfl => rfl\n\n@[inline] def UInt128.toInt128 (i : UInt128) : Int128 := Int128.ofUInt128 i\n\ndef Int128.ofInt (i : @& Int) : Int128 := ⟨⟨BitVec.ofInt 128 i⟩⟩\n\ndef Int128.ofNat (n : @& Nat) : Int128 := ⟨⟨BitVec.ofNat 128 n⟩⟩\n\nabbrev Int.toInt128 := Int128.ofInt\n\nabbrev Nat.toInt128 := Int128.ofNat\n\ndef Int128.toInt (i : Int128) : Int := i.toBitVec.toInt\n\n@[suggest_for Int128.toNat, inline] def Int128.toNatClampNeg (i : Int128) : Nat := i.toInt.toNat\n\n@[inline] def Int128.ofBitVec (b : BitVec 128) : Int128 := ⟨⟨b⟩⟩\n\ndef Int128.toInt8 (a : Int128) : Int8 := ⟨⟨a.toBitVec.signExtend 8⟩⟩\n\ndef Int128.toInt16 (a : Int128) : Int16 := ⟨⟨a.toBitVec.signExtend 16⟩⟩\n\ndef Int128.toInt32 (a : Int128) : Int32 := ⟨⟨a.toBitVec.signExtend 32⟩⟩\n\ndef Int128.toInt64 (a : Int128) : Int64 := ⟨⟨a.toBitVec.signExtend 64⟩⟩\n\ndef Int8.toInt128 (a : Int8) : Int128 := ⟨⟨a.toBitVec.signExtend 128⟩⟩\n\ndef Int16.toInt128 (a : Int16) : Int128 := ⟨⟨a.toBitVec.signExtend 128⟩⟩\n\ndef Int32.toInt128 (a : Int32) : Int128 := ⟨⟨a.toBitVec.signExtend 128⟩⟩\n\ndef Int64.toInt128 (a : Int64) : Int128 := ⟨⟨a.toBitVec.signExtend 128⟩⟩\n\ndef Int128.neg (i : Int128) : Int128 := ⟨⟨-i.toBitVec⟩⟩\n\ninstance : ToString Int128 where\n  toString i := toString i.toInt\ninstance : Repr Int128 where\n  reprPrec i prec := reprPrec i.toInt prec\ninstance : ReprAtom Int128 := ⟨⟩\n\ninstance : Hashable Int128 where\n  hash i := UInt64.ofInt i.toInt\n\ninstance 
Int128.instOfNat : OfNat Int128 n := ⟨Int128.ofNat n⟩\ninstance Int128.instNeg : Neg Int128 where\n  neg := Int128.neg\n\nabbrev Int128.maxValue : Int128 := 170141183460469231731687303715884105727\nabbrev Int128.minValue : Int128 := -170141183460469231731687303715884105728\n\n@[inline]\ndef Int128.ofIntLE (i : Int) (_hl : Int128.minValue.toInt ≤ i) (_hr : i ≤ Int128.maxValue.toInt) : Int128 :=\n  Int128.ofInt i\n\ndef Int128.ofIntTruncate (i : Int) : Int128 :=\n  if hl : Int128.minValue.toInt ≤ i then\n    if hr : i ≤ Int128.maxValue.toInt then\n      Int128.ofIntLE i hl hr\n    else\n      Int128.minValue\n  else\n    Int128.minValue\n\nprotected def Int128.add (a b : Int128) : Int128 := ⟨⟨a.toBitVec + b.toBitVec⟩⟩\n\nprotected def Int128.sub (a b : Int128) : Int128 := ⟨⟨a.toBitVec - b.toBitVec⟩⟩\n\nprotected def Int128.mul (a b : Int128) : Int128 := ⟨⟨a.toBitVec * b.toBitVec⟩⟩\n\nprotected def Int128.div (a b : Int128) : Int128 := ⟨⟨BitVec.sdiv a.toBitVec b.toBitVec⟩⟩\n\nprotected def Int128.pow (x : Int128) (n : Nat) : Int128 :=\n  match n with\n  | 0 => 1\n  | n + 1 => Int128.mul (Int128.pow x n) x\n\nprotected def Int128.mod (a b : Int128) : Int128 := ⟨⟨BitVec.srem a.toBitVec b.toBitVec⟩⟩\n\nprotected def Int128.land (a b : Int128) : Int128 := ⟨⟨a.toBitVec &&& b.toBitVec⟩⟩\n\nprotected def Int128.lor (a b : Int128) : Int128 := ⟨⟨a.toBitVec ||| b.toBitVec⟩⟩\n\nprotected def Int128.xor (a b : Int128) : Int128 := ⟨⟨a.toBitVec ^^^ b.toBitVec⟩⟩\n\nprotected def Int128.shiftLeft (a b : Int128) : Int128 := ⟨⟨a.toBitVec <<< (b.toBitVec.smod 128)⟩⟩\n\nprotected def Int128.shiftRight (a b : Int128) : Int128 := ⟨⟨BitVec.sshiftRight' a.toBitVec (b.toBitVec.smod 128)⟩⟩\n\nprotected def Int128.complement (a : Int128) : Int128 := ⟨⟨~~~a.toBitVec⟩⟩\n\nprotected def Int128.abs (a : Int128) : Int128 := ⟨⟨a.toBitVec.abs⟩⟩\n\ndef Int128.decEq (a b : Int128) : Decidable (a = b) :=\n  match a, b with\n  | ⟨n⟩, ⟨m⟩ =>\n    if h : n = m then\n      isTrue <| h ▸ rfl\n    else\n      
isFalse (fun h' => Int128.noConfusion h' (fun h' => absurd h' h))\n\nprotected def Int128.lt (a b : Int128) : Prop := a.toBitVec.slt b.toBitVec\n\nprotected def Int128.le (a b : Int128) : Prop := a.toBitVec.sle b.toBitVec\n\ninstance : Inhabited Int128 where\n  default := 0\n\ninstance : Add Int128         := ⟨Int128.add⟩\ninstance : Sub Int128         := ⟨Int128.sub⟩\ninstance : Mul Int128         := ⟨Int128.mul⟩\ninstance : Pow Int128 Nat     := ⟨Int128.pow⟩\ninstance : Mod Int128         := ⟨Int128.mod⟩\ninstance : Div Int128         := ⟨Int128.div⟩\ninstance : LT Int128          := ⟨Int128.lt⟩\ninstance : LE Int128          := ⟨Int128.le⟩\ninstance : Complement Int128  := ⟨Int128.complement⟩\ninstance : AndOp Int128       := ⟨Int128.land⟩\ninstance : OrOp Int128        := ⟨Int128.lor⟩\ninstance : XorOp Int128         := ⟨Int128.xor⟩\ninstance : ShiftLeft Int128   := ⟨Int128.shiftLeft⟩\ninstance : ShiftRight Int128  := ⟨Int128.shiftRight⟩\ninstance : DecidableEq Int128 := Int128.decEq\n\ndef Bool.toInt128 (b : Bool) : Int128 := if b then 1 else 0\n\ndef Int128.decLt (a b : Int128) : Decidable (a < b) :=\n  inferInstanceAs (Decidable (a.toBitVec.slt b.toBitVec))\n\ndef Int128.decLe (a b : Int128) : Decidable (a ≤ b) :=\n  inferInstanceAs (Decidable (a.toBitVec.sle b.toBitVec))\n\nattribute [instance_reducible, instance] Int128.decLt Int128.decLe\n\ninstance : Max Int128 := maxOfLe\ninstance : Min Int128 := minOfLe\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/SInt/Lemmas.lean",
    "content": "import Hax.MissingLean.Init.Data.SInt.Lemmas_Int128\n\nattribute [grind =_] Int8.ofNat_le_iff_le\nattribute [grind =_] Int16.ofNat_le_iff_le\nattribute [grind =_] Int32.ofNat_le_iff_le\nattribute [grind =_] Int64.ofNat_le_iff_le\n\nattribute [grind =] Int8.ofNat_toNatClampNeg\nattribute [grind =] Int16.ofNat_toNatClampNeg\nattribute [grind =] Int32.ofNat_toNatClampNeg\nattribute [grind =] Int64.ofNat_toNatClampNeg\n\nopen Lean in\nset_option hygiene false in\nmacro \"additional_int_lemmas\" typeName:ident width:term : command => do `(\n  namespace $typeName\n\n    theorem toInt_neg_of_ne_intMin {x : $typeName} (hx : x ≠ minValue) :\n        (-x).toInt = -(x.toInt) := by\n      have : x.toBitVec ≠ BitVec.intMin $width := by\n        refine fun h => hx ?_\n        rw [← toBitVec_inj, h, BitVec.intMin_eq_neg_two_pow]\n        rfl\n      simp only [toInt, minValue, toBitVec_neg, BitVec.toInt_neg_of_ne_intMin this] at *\n\n      theorem ofInt_eq_of_toInt_eq {a : Int} {b : $typeName} (h : b.toInt = a) : ofInt a = b := by\n        subst_vars; exact (ofInt_toInt b)\n\n  end $typeName\n)\n\nadditional_int_lemmas Int8 8\nadditional_int_lemmas Int16 16\nadditional_int_lemmas Int32 32\nadditional_int_lemmas Int64 64\nadditional_int_lemmas Int128 128\nadditional_int_lemmas ISize System.Platform.numBits\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/SInt/Lemmas_Int128.lean",
    "content": "import Hax.MissingLean.Init.Data.SInt.Basic_Int128\nimport Hax.MissingLean.Init.Data.UInt.Lemmas_UInt128\nimport Hax.MissingLean.Lean.Tactic.Simp.BuiltinSimpProcs.SInt\nimport Hax.MissingLean.Lean.Tactic.Simp.BuiltinSimpProcs.UInt\n\n-- Adapted from Init/Data/SInt/Lemmas.lean from the Lean v4.29.0-rc1 source code\n\ndeclare_int_theorems Int128 128\n\ntheorem Int128.toInt.inj {x y : Int128} (h : x.toInt = y.toInt) : x = y := Int128.toBitVec.inj (BitVec.eq_of_toInt_eq h)\ntheorem Int128.toInt_inj {x y : Int128} : x.toInt = y.toInt ↔ x = y := ⟨Int128.toInt.inj, fun h => h ▸ rfl⟩\n\n@[simp, int_toBitVec] theorem Int128.toBitVec_neg (x : Int128) : (-x).toBitVec = -x.toBitVec := (rfl)\n@[simp] theorem Int128.toBitVec_zero : toBitVec 0 = 0#128 := (rfl)\n\ntheorem Int128.toBitVec_one : (1 : Int128).toBitVec = 1#128 := (rfl)\n\n@[simp, int_toBitVec] theorem Int128.toBitVec_ofInt (i : Int) : (ofInt i).toBitVec = BitVec.ofInt _ i := (rfl)\n\n@[simp] protected theorem Int128.neg_zero : -(0 : Int128) = 0 := (rfl)\n\n@[simp] theorem Int128.toInt_ofInt {n : Int} : toInt (ofInt n) = n.bmod Int128.size := by\n  rw [toInt, toBitVec_ofInt, BitVec.toInt_ofInt]\n\n@[simp] theorem Int128.toInt_ofNat' {n : Nat} : toInt (ofNat n) = (n : Int).bmod Int128.size := by\n  rw [toInt, toBitVec_ofNat', BitVec.toInt_ofNat']\n\ntheorem Int128.toInt_ofNat {n : Nat} : toInt (no_index (OfNat.ofNat n)) = (n : Int).bmod Int128.size := by\n  rw [toInt, toBitVec_ofNat, BitVec.toInt_ofNat]\n\ntheorem Int128.toInt_ofInt_of_le {n : Int} (hn : -2^127 ≤ n) (hn' : n < 2^127) : toInt (ofInt n) = n := by\n  rw [toInt, toBitVec_ofInt, BitVec.toInt_ofInt_eq_self (by decide) hn hn']\n\ntheorem Int128.neg_ofInt {n : Int} : -ofInt n = ofInt (-n) :=\n  toBitVec.inj (by simp [BitVec.ofInt_neg])\n\ntheorem Int128.ofInt_eq_ofNat {n : Nat} : ofInt n = ofNat n := toBitVec.inj (by simp)\n\ntheorem Int128.neg_ofNat {n : Nat} : -ofNat n = ofInt (-n) := by\n  rw [← neg_ofInt, ofInt_eq_ofNat]\n\ntheorem 
Int128.toNatClampNeg_ofNat_of_lt {n : Nat} (h : n < 2 ^ 127) : toNatClampNeg (ofNat n) = n := by\n  rw [toNatClampNeg, ← ofInt_eq_ofNat, toInt_ofInt_of_le (by omega) (by omega), Int.toNat_natCast]\n\ntheorem Int128.toInt_ofNat_of_lt {n : Nat} (h : n < 2 ^ 127) : toInt (ofNat n) = n := by\n  rw [← ofInt_eq_ofNat, toInt_ofInt_of_le (by omega) (by omega)]\n\n\ntheorem Int128.toInt_neg_ofNat_of_le {n : Nat} (h : n ≤ 2^127) : toInt (-ofNat n) = -n := by\n  rw [← ofInt_eq_ofNat, neg_ofInt, toInt_ofInt_of_le (by omega) (by omega)]\n\ntheorem Int128.toInt_zero : toInt 0 = 0 := by simp\n\ntheorem Int128.toInt_minValue : Int128.minValue.toInt = -2^127 := (rfl)\n\ntheorem Int128.toInt_maxValue : Int128.maxValue.toInt = 2 ^ 127 - 1 := (rfl)\n\n@[simp] theorem Int128.toNatClampNeg_minValue : Int128.minValue.toNatClampNeg = 0 := (rfl)\n\n@[simp, int_toBitVec] theorem UInt128.toBitVec_toInt128 (x : UInt128) : x.toInt128.toBitVec = x.toBitVec := (rfl)\n\n@[simp] theorem Int128.ofBitVec_uInt128ToBitVec (x : UInt128) : Int128.ofBitVec x.toBitVec = x.toInt128 := (rfl)\n\n@[simp] theorem UInt128.toUInt128_toInt128 (x : UInt128) : x.toInt128.toUInt128 = x := (rfl)\n\n@[simp] theorem Int128.toNat_toInt (x : Int128) : x.toInt.toNat = x.toNatClampNeg := (rfl)\n\n@[simp] theorem Int128.toInt_toBitVec (x : Int128) : x.toBitVec.toInt = x.toInt := (rfl)\n\n@[simp, int_toBitVec] theorem Int8.toBitVec_toInt128 (x : Int8) : x.toInt128.toBitVec = x.toBitVec.signExtend 128 := (rfl)\n@[simp, int_toBitVec] theorem Int16.toBitVec_toInt128 (x : Int16) : x.toInt128.toBitVec = x.toBitVec.signExtend 128 := (rfl)\n@[simp, int_toBitVec] theorem Int32.toBitVec_toInt128 (x : Int32) : x.toInt128.toBitVec = x.toBitVec.signExtend 128 := (rfl)\n@[simp, int_toBitVec] theorem Int128.toBitVec_toInt8 (x : Int128) : x.toInt8.toBitVec = x.toBitVec.signExtend 8 := (rfl)\n@[simp, int_toBitVec] theorem Int128.toBitVec_toInt16 (x : Int128) : x.toInt16.toBitVec = x.toBitVec.signExtend 16 := (rfl)\n@[simp, int_toBitVec] 
theorem Int128.toBitVec_toInt32 (x : Int128) : x.toInt32.toBitVec = x.toBitVec.signExtend 32 := (rfl)\n-- @[simp, int_toBitVec] theorem Int128.toBitVec_toISize (x : Int128) : x.toISize.toBitVec = x.toBitVec.signExtend System.Platform.numBits := (rfl)\n-- @[simp, int_toBitVec] theorem ISize.toBitVec_toInt128 (x : ISize) : x.toInt128.toBitVec = x.toBitVec.signExtend 128 := (rfl)\ntheorem Int128.toInt_lt (x : Int128) : x.toInt < 2 ^ 127 := Int.lt_of_mul_lt_mul_left BitVec.two_mul_toInt_lt (by decide)\ntheorem Int128.le_toInt (x : Int128) : -2 ^ 127 ≤ x.toInt := Int.le_of_mul_le_mul_left BitVec.le_two_mul_toInt (by decide)\ntheorem Int128.toInt_le (x : Int128) : x.toInt ≤ Int128.maxValue.toInt := Int.le_of_lt_add_one x.toInt_lt\ntheorem Int128.minValue_le_toInt (x : Int128) : Int128.minValue.toInt ≤ x.toInt := x.le_toInt\n\ntheorem ISize.int128MinValue_le_toInt (x : ISize) : Int128.minValue.toInt ≤ x.toInt :=\n  Int.le_trans (by decide) x.le_toInt\ntheorem Int128.toNatClampNeg_lt (x : Int128) : x.toNatClampNeg < 2 ^ 127 := (Int.toNat_lt' (by decide)).2 x.toInt_lt\n@[simp] theorem Int8.toInt_toInt128 (x : Int8) : x.toInt128.toInt = x.toInt :=\n  x.toBitVec.toInt_signExtend_of_le (by decide)\n@[simp] theorem Int16.toInt_toInt128 (x : Int16) : x.toInt128.toInt = x.toInt :=\n  x.toBitVec.toInt_signExtend_of_le (by decide)\n@[simp] theorem Int32.toInt_toInt128 (x : Int32) : x.toInt128.toInt = x.toInt :=\n  x.toBitVec.toInt_signExtend_of_le (by decide)\n@[simp] theorem Int64.toInt_toInt128 (x : Int64) : x.toInt128.toInt = x.toInt :=\n  x.toBitVec.toInt_signExtend_of_le (by decide)\n\n@[simp] theorem Int128.toInt_toInt8 (x : Int128) : x.toInt8.toInt = x.toInt.bmod (2 ^ 8) :=\n  x.toBitVec.toInt_signExtend_eq_toInt_bmod_of_le (by decide)\n@[simp] theorem Int128.toInt_toInt16 (x : Int128) : x.toInt16.toInt = x.toInt.bmod (2 ^ 16) :=\n  x.toBitVec.toInt_signExtend_eq_toInt_bmod_of_le (by decide)\n@[simp] theorem Int128.toInt_toInt32 (x : Int128) : x.toInt32.toInt = x.toInt.bmod 
(2 ^ 32) :=\n  x.toBitVec.toInt_signExtend_eq_toInt_bmod_of_le (by decide)\n-- @[simp] theorem Int128.toInt_toISize (x : Int128) : x.toISize.toInt = x.toInt.bmod (2 ^ System.Platform.numBits) :=\n--   x.toBitVec.toInt_signExtend_eq_toInt_bmod_of_le (by cases System.Platform.numBits_eq <;> simp_all)\n\n-- @[simp] theorem ISize.toInt_toInt128 (x : ISize) : x.toInt128.toInt = x.toInt :=\n--   x.toBitVec.toInt_signExtend_of_le (by cases System.Platform.numBits_eq <;> simp_all)\n\n@[simp] theorem Int8.toNatClampNeg_toInt128 (x : Int8) : x.toInt128.toNatClampNeg = x.toNatClampNeg :=\n  congrArg Int.toNat x.toInt_toInt128\n\n@[simp] theorem Int16.toNatClampNeg_toInt128 (x : Int16) : x.toInt128.toNatClampNeg = x.toNatClampNeg :=\n  congrArg Int.toNat x.toInt_toInt128\n\n@[simp] theorem Int32.toNatClampNeg_toInt128 (x : Int32) : x.toInt128.toNatClampNeg = x.toNatClampNeg :=\n  congrArg Int.toNat x.toInt_toInt128\n\n@[simp] theorem Int64.toNatClampNeg_toInt128 (x : Int64) : x.toInt128.toNatClampNeg = x.toNatClampNeg :=\n  congrArg Int.toNat x.toInt_toInt128\n\n-- @[simp] theorem ISize.toNatClampNeg_toInt128 (x : ISize) : x.toInt128.toNatClampNeg = x.toNatClampNeg :=\n--   congrArg Int.toNat x.toInt_toInt128\n\n@[simp] theorem Int128.toInt128_toUInt128 (x : Int128) : x.toUInt128.toInt128 = x := (rfl)\n\ntheorem Int128.toNat_toBitVec (x : Int128) : x.toBitVec.toNat = x.toUInt128.toNat := (rfl)\n\ntheorem Int128.toNat_toBitVec_of_le {x : Int128} (hx : 0 ≤ x) : x.toBitVec.toNat = x.toNatClampNeg :=\n  (x.toBitVec.toNat_toInt_of_sle hx).symm\n\ntheorem Int128.toNat_toUInt128_of_le {x : Int128} (hx : 0 ≤ x) : x.toUInt128.toNat = x.toNatClampNeg := by\n  rw [← toNat_toBitVec, toNat_toBitVec_of_le hx]\n\ntheorem Int128.toFin_toBitVec (x : Int128) : x.toBitVec.toFin = x.toUInt128.toFin := (rfl)\n@[simp, int_toBitVec] theorem Int128.toBitVec_toUInt128 (x : Int128) : x.toUInt128.toBitVec = x.toBitVec := (rfl)\n@[simp] theorem UInt128.ofBitVec_int128ToBitVec (x : Int128) : 
UInt128.ofBitVec x.toBitVec = x.toUInt128 := (rfl)\n@[simp] theorem Int128.ofBitVec_toBitVec (x : Int128) : Int128.ofBitVec x.toBitVec = x := (rfl)\n@[simp] theorem Int8.ofBitVec_int128ToBitVec (x : Int128) : Int8.ofBitVec (x.toBitVec.signExtend 8) = x.toInt8 := (rfl)\n@[simp] theorem Int16.ofBitVec_int128ToBitVec (x : Int128) : Int16.ofBitVec (x.toBitVec.signExtend 16) = x.toInt16 := (rfl)\n@[simp] theorem Int32.ofBitVec_int128ToBitVec (x : Int128) : Int32.ofBitVec (x.toBitVec.signExtend 32) = x.toInt32 := (rfl)\n@[simp] theorem Int64.ofBitVec_int128ToBitVec (x : Int128) : Int64.ofBitVec (x.toBitVec.signExtend 64) = x.toInt64 := (rfl)\n@[simp] theorem Int128.ofBitVec_int8ToBitVec (x : Int8) : Int128.ofBitVec (x.toBitVec.signExtend 128) = x.toInt128 := (rfl)\n@[simp] theorem Int128.ofBitVec_int16ToBitVec (x : Int16) : Int128.ofBitVec (x.toBitVec.signExtend 128) = x.toInt128 := (rfl)\n@[simp] theorem Int128.ofBitVec_int32ToBitVec (x : Int32) : Int128.ofBitVec (x.toBitVec.signExtend 128) = x.toInt128 := (rfl)\n@[simp] theorem Int128.ofBitVec_int64ToBitVec (x : Int64) : Int128.ofBitVec (x.toBitVec.signExtend 128) = x.toInt128 := (rfl)\n-- @[simp] theorem Int128.ofBitVec_iSizeToBitVec (x : ISize) : Int128.ofBitVec (x.toBitVec.signExtend 128) = x.toInt128 := (rfl)\n-- @[simp] theorem ISize.ofBitVec_int128ToBitVec (x : Int128) : ISize.ofBitVec (x.toBitVec.signExtend System.Platform.numBits) = x.toISize := (rfl)\n@[simp] theorem Int128.toBitVec_ofIntLE (x : Int) (h₁ h₂) : (Int128.ofIntLE x h₁ h₂).toBitVec = BitVec.ofInt 128 x := (rfl)\n@[simp] theorem Int128.toInt_bmod (x : Int128) : x.toInt.bmod 340282366920938463463374607431768211456 = x.toInt := Int.bmod_eq_of_le x.le_toInt x.toInt_lt\n\n-- @[simp] theorem Int128.toInt_bmod_18446744073709551616 (x : Int128) : x.toInt.bmod 18446744073709551616 = x.toInt :=\n--   Int.bmod_eq_of_le (Int.le_trans (by decide) x.le_toInt) (Int.lt_of_lt_of_le x.toInt_lt (by decide))\n@[simp] theorem BitVec.ofInt_int128ToInt (x : Int128) : 
BitVec.ofInt 128 x.toInt = x.toBitVec := BitVec.eq_of_toInt_eq (by simp)\n@[simp] theorem Int128.ofIntLE_toInt (x : Int128) : Int128.ofIntLE x.toInt x.minValue_le_toInt x.toInt_le = x := Int128.toBitVec.inj (by simp)\ntheorem Int8.ofIntLE_int128ToInt (x : Int128) {h₁ h₂} : Int8.ofIntLE x.toInt h₁ h₂ = x.toInt8 := (rfl)\ntheorem Int16.ofIntLE_int128ToInt (x : Int128) {h₁ h₂} : Int16.ofIntLE x.toInt h₁ h₂ = x.toInt16 := (rfl)\ntheorem Int32.ofIntLE_int128ToInt (x : Int128) {h₁ h₂} : Int32.ofIntLE x.toInt h₁ h₂ = x.toInt32 := (rfl)\ntheorem Int64.ofIntLE_int128ToInt (x : Int128) {h₁ h₂} : Int64.ofIntLE x.toInt h₁ h₂ = x.toInt64 := (rfl)\n\n@[simp] theorem Int128.ofIntLE_int8ToInt (x : Int8) :\n    Int128.ofIntLE x.toInt (Int.le_trans (by decide) x.minValue_le_toInt) (Int.le_trans x.toInt_le (by decide)) = x.toInt128 := (rfl)\n@[simp] theorem Int128.ofIntLE_int16ToInt (x : Int16) :\n    Int128.ofIntLE x.toInt (Int.le_trans (by decide) x.minValue_le_toInt) (Int.le_trans x.toInt_le (by decide)) = x.toInt128 := (rfl)\n@[simp] theorem Int128.ofIntLE_int32ToInt (x : Int32) :\n    Int128.ofIntLE x.toInt (Int.le_trans (by decide) x.minValue_le_toInt) (Int.le_trans x.toInt_le (by decide)) = x.toInt128 := (rfl)\n@[simp] theorem Int128.ofIntLE_int64ToInt (x : Int64) :\n    Int128.ofIntLE x.toInt (Int.le_trans (by decide) x.minValue_le_toInt) (Int.le_trans x.toInt_le (by decide)) = x.toInt128 := (rfl)\n-- @[simp] theorem Int128.ofIntLE_iSizeToInt (x : ISize) :\n--     Int128.ofIntLE x.toInt x.int128MinValue_le_toInt x.toInt_le_int128MaxValue = x.toInt128 := (rfl)\n-- theorem ISize.ofIntLE_int128ToInt (x : Int128) {h₁ h₂} : ISize.ofIntLE x.toInt h₁ h₂ = x.toISize := (rfl)\n@[simp] theorem Int128.ofInt_toInt (x : Int128) : Int128.ofInt x.toInt = x := Int128.toBitVec.inj (by simp)\n@[simp] theorem Int8.ofInt_int128ToInt (x : Int128) : Int8.ofInt x.toInt = x.toInt8 := (rfl)\n@[simp] theorem Int16.ofInt_int128ToInt (x : Int128) : Int16.ofInt x.toInt = x.toInt16 := (rfl)\n@[simp] 
theorem Int32.ofInt_int128ToInt (x : Int128) : Int32.ofInt x.toInt = x.toInt32 := (rfl)\n@[simp] theorem Int64.ofInt_int128ToInt (x : Int128) : Int64.ofInt x.toInt = x.toInt64 := (rfl)\n@[simp] theorem Int128.ofInt_int8ToInt (x : Int8) : Int128.ofInt x.toInt = x.toInt128 := (rfl)\n@[simp] theorem Int128.ofInt_int16ToInt (x : Int16) : Int128.ofInt x.toInt = x.toInt128 := (rfl)\n@[simp] theorem Int128.ofInt_int32ToInt (x : Int32) : Int128.ofInt x.toInt = x.toInt128 := (rfl)\n@[simp] theorem Int128.ofInt_int64ToInt (x : Int64) : Int128.ofInt x.toInt = x.toInt128 := (rfl)\n-- @[simp] theorem Int128.ofInt_iSizeToInt (x : ISize) : Int128.ofInt x.toInt = x.toInt128 := (rfl)\n-- @[simp] theorem ISize.ofInt_int128ToInt (x : Int128) : ISize.ofInt x.toInt = x.toISize := (rfl)\n@[simp] theorem Int128.toInt_ofIntLE {x : Int} {h₁ h₂} : (ofIntLE x h₁ h₂).toInt = x := by\n  rw [ofIntLE, toInt_ofInt_of_le h₁ (Int.lt_of_le_sub_one h₂)]\ntheorem Int128.ofIntLE_eq_ofIntTruncate {x : Int} {h₁ h₂} : (ofIntLE x h₁ h₂) = ofIntTruncate x := by\n  rw [ofIntTruncate, dif_pos h₁, dif_pos h₂]\ntheorem Int128.ofIntLE_eq_ofInt {n : Int} (h₁ h₂) : Int128.ofIntLE n h₁ h₂ = Int128.ofInt n := (rfl)\ntheorem Int128.toInt_ofIntTruncate {x : Int} (h₁ : Int128.minValue.toInt ≤ x)\n    (h₂ : x ≤ Int128.maxValue.toInt) : (Int128.ofIntTruncate x).toInt = x := by\n  rw [← ofIntLE_eq_ofIntTruncate (h₁ := h₁) (h₂ := h₂), toInt_ofIntLE]\n@[simp] theorem Int128.ofIntTruncate_toInt (x : Int128) : Int128.ofIntTruncate x.toInt = x :=\n  Int128.toInt.inj (toInt_ofIntTruncate x.minValue_le_toInt x.toInt_le)\n@[simp] theorem Int128.ofIntTruncate_int8ToInt (x : Int8) : Int128.ofIntTruncate x.toInt = x.toInt128 :=\n  Int128.toInt.inj (by\n    rw [toInt_ofIntTruncate, Int8.toInt_toInt128]\n    · exact Int.le_trans (by decide) x.minValue_le_toInt\n    · exact Int.le_trans x.toInt_le (by decide))\n@[simp] theorem Int128.ofIntTruncate_int16ToInt (x : Int16) : Int128.ofIntTruncate x.toInt = x.toInt128 :=\n  Int128.toInt.inj 
(by\n    rw [toInt_ofIntTruncate, Int16.toInt_toInt128]\n    · exact Int.le_trans (by decide) x.minValue_le_toInt\n    · exact Int.le_trans x.toInt_le (by decide))\n@[simp] theorem Int128.ofIntTruncate_int32ToInt (x : Int32) : Int128.ofIntTruncate x.toInt = x.toInt128 :=\n  Int128.toInt.inj (by\n    rw [toInt_ofIntTruncate, Int32.toInt_toInt128]\n    · exact Int.le_trans (by decide) x.minValue_le_toInt\n    · exact Int.le_trans x.toInt_le (by decide))\n@[simp] theorem Int128.ofIntTruncate_int64ToInt (x : Int64) : Int128.ofIntTruncate x.toInt = x.toInt128 :=\n  Int128.toInt.inj (by\n    rw [toInt_ofIntTruncate, Int64.toInt_toInt128]\n    · exact Int.le_trans (by decide) x.minValue_le_toInt\n    · exact Int.le_trans x.toInt_le (by decide))\n-- @[simp] theorem Int128.ofIntTruncate_iSizeToInt (x : ISize) : Int128.ofIntTruncate x.toInt = x.toInt128 :=\n--   Int128.toInt.inj (by\n--     rw [toInt_ofIntTruncate, ISize.toInt_toInt128]\n--     · exact x.int128MinValue_le_toInt\n--     · exact x.toInt_le_int128MaxValue)\ntheorem Int128.le_iff_toInt_le {x y : Int128} : x ≤ y ↔ x.toInt ≤ y.toInt := BitVec.sle_iff_toInt_le\ntheorem Int128.lt_iff_toInt_lt {x y : Int128} : x < y ↔ x.toInt < y.toInt := BitVec.slt_iff_toInt_lt\ntheorem Int128.cast_toNatClampNeg (x : Int128) (hx : 0 ≤ x) : x.toNatClampNeg = x.toInt := by\n  rw [toNatClampNeg, toInt, Int.toNat_of_nonneg (by simpa using le_iff_toInt_le.1 hx)]\ntheorem Int128.ofNat_toNatClampNeg (x : Int128) (hx : 0 ≤ x) : Int128.ofNat x.toNatClampNeg = x :=\n  Int128.toInt.inj (by rw [Int128.toInt_ofNat_of_lt x.toNatClampNeg_lt, cast_toNatClampNeg _ hx])\ntheorem Int128.ofNat_int8ToNatClampNeg (x : Int8) (hx : 0 ≤ x) : Int128.ofNat x.toNatClampNeg = x.toInt128 :=\n  Int128.toInt.inj (by rw [Int128.toInt_ofNat_of_lt (Nat.lt_of_lt_of_le x.toNatClampNeg_lt (by decide)),\n    Int8.cast_toNatClampNeg _ hx, Int8.toInt_toInt128])\ntheorem Int128.ofNat_int16ToNatClampNeg (x : Int16) (hx : 0 ≤ x) : Int128.ofNat x.toNatClampNeg = x.toInt128 
:=\n  Int128.toInt.inj (by rw [Int128.toInt_ofNat_of_lt (Nat.lt_of_lt_of_le x.toNatClampNeg_lt (by decide)),\n    Int16.cast_toNatClampNeg _ hx, Int16.toInt_toInt128])\ntheorem Int128.ofNat_int32ToNatClampNeg (x : Int32) (hx : 0 ≤ x) : Int128.ofNat x.toNatClampNeg = x.toInt128 :=\n  Int128.toInt.inj (by rw [Int128.toInt_ofNat_of_lt (Nat.lt_of_lt_of_le x.toNatClampNeg_lt (by decide)),\n    Int32.cast_toNatClampNeg _ hx, Int32.toInt_toInt128])\n@[simp] theorem Int8.toInt8_toInt128 (n : Int8) : n.toInt128.toInt8 = n :=\n  Int8.toInt.inj (by simp)\n@[simp] theorem Int8.toInt16_toInt128 (n : Int8) : n.toInt128.toInt16 = n.toInt16 :=\n  Int16.toInt.inj (by simp)\n@[simp] theorem Int8.toInt32_toInt128 (n : Int8) : n.toInt128.toInt32 = n.toInt32 :=\n  Int32.toInt.inj (by simp)\n@[simp] theorem Int8.toInt128_toInt16 (n : Int8) : n.toInt16.toInt128 = n.toInt128 :=\n  Int128.toInt.inj (by simp)\n@[simp] theorem Int8.toInt128_toInt32 (n : Int8) : n.toInt32.toInt128 = n.toInt128 :=\n  Int128.toInt.inj (by simp)\n-- @[simp] theorem Int8.toInt128_toISize (n : Int8) : n.toISize.toInt128 = n.toInt128 :=\n--   Int128.toInt.inj (by simp)\n-- @[simp] theorem Int8.toISize_toInt128 (n : Int8) : n.toInt128.toISize = n.toISize :=\n--   ISize.toInt.inj (by simp)\n@[simp] theorem Int16.toInt8_toInt128 (n : Int16) : n.toInt128.toInt8 = n.toInt8 :=\n  Int8.toInt.inj (by simp)\n@[simp] theorem Int16.toInt16_toInt128 (n : Int16) : n.toInt128.toInt16 = n :=\n  Int16.toInt.inj (by simp)\n\n@[simp] theorem Int16.toInt32_toInt128 (n : Int16) : n.toInt128.toInt32 = n.toInt32 :=\n  Int32.toInt.inj (by simp)\n\n@[simp] theorem Int16.toInt128_toInt32 (n : Int16) : n.toInt32.toInt128 = n.toInt128 :=\n  Int128.toInt.inj (by simp)\n-- @[simp] theorem Int16.toInt128_toISize (n : Int16) : n.toISize.toInt128 = n.toInt128 :=\n--   Int128.toInt.inj (by simp)\n\n-- @[simp] theorem Int16.toISize_toInt128 (n : Int16) : n.toInt128.toISize = n.toISize :=\n--   ISize.toInt.inj (by simp)\n@[simp] theorem 
Int32.toInt8_toInt128 (n : Int32) : n.toInt128.toInt8 = n.toInt8 :=\n  Int8.toInt.inj (by simp)\n\n@[simp] theorem Int32.toInt16_toInt128 (n : Int32) : n.toInt128.toInt16 = n.toInt16 :=\n  Int16.toInt.inj (by simp)\n\n@[simp] theorem Int32.toInt32_toInt128 (n : Int32) : n.toInt128.toInt32 = n :=\n  Int32.toInt.inj (by simp)\n\n-- @[simp] theorem Int32.toInt128_toISize (n : Int32) : n.toISize.toInt128 = n.toInt128 :=\n--   Int128.toInt.inj (by simp)\n\n-- @[simp] theorem Int32.toISize_toInt128 (n : Int32) : n.toInt128.toISize = n.toISize :=\n--   ISize.toInt.inj (by simp)\n\n@[simp] theorem Int64.toInt8_toInt128 (n : Int64) : n.toInt128.toInt8 = n.toInt8 :=\n  Int8.toInt.inj (by simp)\n\n@[simp] theorem Int64.toInt16_toInt128 (n : Int64) : n.toInt128.toInt16 = n.toInt16 :=\n  Int16.toInt.inj (by simp)\n\n-- @[simp] theorem Int64.toInt128_toISize (n : Int64) : n.toISize.toInt128 = n.toInt128 :=\n--   Int128.toInt.inj (by simp)\n\n-- @[simp] theorem Int64.toISize_toInt128 (n : Int64) : n.toInt128.toISize = n.toISize :=\n--   ISize.toInt.inj (by simp)\n\n@[simp] theorem Int128.toInt8_toInt16 (n : Int128) : n.toInt16.toInt8 = n.toInt8 :=\n  Int8.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))\n@[simp] theorem Int128.toInt8_toInt32 (n : Int128) : n.toInt32.toInt8 = n.toInt8 :=\n  Int8.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))\n-- @[simp] theorem Int128.toInt8_toInt64 (n : Int128) : n.toInt64.toInt8 = n.toInt8 :=\n--   Int8.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))\n-- @[simp] theorem Int128.toInt8_toISize (n : Int128) : n.toISize.toInt8 = n.toInt8 :=\n--   Int8.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by cases System.Platform.numBits_eq <;> simp_all))\n\n@[simp] theorem Int128.toInt16_toInt32 (n : Int128) : n.toInt32.toInt16 = n.toInt16 :=\n  Int16.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))\n-- @[simp] theorem Int128.toInt16_toISize (n : Int128) : n.toISize.toInt16 = n.toInt16 :=\n--   
Int16.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by cases System.Platform.numBits_eq <;> simp_all))\n\n-- @[simp] theorem Int128.toInt32_toISize (n : Int128) : n.toISize.toInt32 = n.toInt32 :=\n--   Int32.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by cases System.Platform.numBits_eq <;> simp_all))\n-- @[simp] theorem ISize.toInt8_toInt128 (n : ISize) : n.toInt128.toInt8 = n.toInt8 :=\n--   Int8.toInt.inj (by simp)\n-- @[simp] theorem ISize.toInt16_toInt128 (n : ISize) : n.toInt128.toInt16 = n.toInt16 :=\n--   Int16.toInt.inj (by simp)\n\n-- @[simp] theorem ISize.toInt32_toInt128 (n : ISize) : n.toInt128.toInt32 = n.toInt32 :=\n--   Int32.toInt.inj (by simp)\n\n-- @[simp] theorem ISize.toISize_toInt128 (n : ISize) : n.toInt128.toISize = n :=\n--   ISize.toInt.inj (by simp)\n-- theorem UInt128.toInt128_ofNatLT {n : Nat} (hn) : (UInt128.ofNatLT n hn).toInt128 = Int128.ofNat n :=\n--   Int128.toBitVec.inj (by simp [BitVec.ofNatLT_eq_ofNat])\n@[simp] theorem UInt128.toInt128_ofNat' {n : Nat} : (UInt128.ofNat n).toInt128 = Int128.ofNat n := (rfl)\n@[simp] theorem UInt128.toInt128_ofNat {n : Nat} : toInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n := (rfl)\n@[simp] theorem UInt128.toInt128_ofBitVec (b) : (UInt128.ofBitVec b).toInt128 = Int128.ofBitVec b := (rfl)\n@[simp, int_toBitVec] theorem Int128.toBitVec_ofBitVec (b) : (Int128.ofBitVec b).toBitVec = b := (rfl)\ntheorem Int128.toBitVec_ofIntTruncate {n : Int} (h₁ : Int128.minValue.toInt ≤ n) (h₂ : n ≤ Int128.maxValue.toInt) :\n    (Int128.ofIntTruncate n).toBitVec = BitVec.ofInt _ n := by\n  rw [← ofIntLE_eq_ofIntTruncate (h₁ := h₁) (h₂ := h₂), toBitVec_ofIntLE]\n@[simp] theorem Int128.toInt_ofBitVec (b) : (Int128.ofBitVec b).toInt = b.toInt := (rfl)\n@[simp] theorem Int128.toNatClampNeg_ofIntLE {n : Int} (h₁ h₂) : (Int128.ofIntLE n h₁ h₂).toNatClampNeg = n.toNat := by\n  rw [ofIntLE, toNatClampNeg, toInt_ofInt_of_le h₁ (Int.lt_of_le_sub_one h₂)]\n@[simp] theorem Int128.toNatClampNeg_ofBitVec (b) : 
(Int128.ofBitVec b).toNatClampNeg = b.toInt.toNat := (rfl)\ntheorem Int128.toNatClampNeg_ofInt_of_le {n : Int} (h₁ : -2 ^ 127 ≤ n) (h₂ : n < 2 ^ 127) :\n    (Int128.ofInt n).toNatClampNeg = n.toNat := by rw [toNatClampNeg, toInt_ofInt_of_le h₁ h₂]\ntheorem Int128.toNatClampNeg_ofIntTruncate_of_lt {n : Int} (h₁ : n < 2 ^ 63) :\n    (Int128.ofIntTruncate n).toNatClampNeg = n.toNat := by\n  rw [ofIntTruncate]\n  split\n  · rw [dif_pos (by rw [toInt_maxValue]; omega), toNatClampNeg_ofIntLE]\n  next h =>\n    rw [toNatClampNeg_minValue, eq_comm, Int.toNat_eq_zero]\n    rw [toInt_minValue] at h\n    omega\n@[simp] theorem Int128.toUInt128_ofBitVec (b) : (Int128.ofBitVec b).toUInt128 = UInt128.ofBitVec b := (rfl)\n@[simp] theorem Int128.toUInt128_ofNat' {n} : (Int128.ofNat n).toUInt128 = UInt128.ofNat n := (rfl)\n@[simp] theorem Int128.toUInt128_ofNat {n} : toUInt128 (OfNat.ofNat n) = OfNat.ofNat n := (rfl)\ntheorem Int128.toInt8_ofIntLE {n} (h₁ h₂) : (Int128.ofIntLE n h₁ h₂).toInt8 = Int8.ofInt n := Int8.toInt.inj (by simp)\n@[simp] theorem Int128.toInt8_ofBitVec (b) : (Int128.ofBitVec b).toInt8 = Int8.ofBitVec (b.signExtend _) := (rfl)\n@[simp] theorem Int128.toInt8_ofNat' {n} : (Int128.ofNat n).toInt8 = Int8.ofNat n :=\n  Int8.toBitVec.inj (by simp [BitVec.signExtend_eq_setWidth_of_le])\n@[simp] theorem Int128.toInt8_ofInt {n} : (Int128.ofInt n).toInt8 = Int8.ofInt n :=\n  Int8.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))\n@[simp] theorem Int128.toInt8_ofNat {n} : toInt8 (no_index (OfNat.ofNat n)) = OfNat.ofNat n := toInt8_ofNat'\ntheorem Int128.toInt8_ofIntTruncate {n : Int} (h₁ : -2 ^ 127 ≤ n) (h₂ : n < 2 ^ 127) :\n    (Int128.ofIntTruncate n).toInt8 = Int8.ofInt n := by\n  rw [← ofIntLE_eq_ofIntTruncate (h₁ := h₁) (h₂ := Int.le_of_lt_add_one h₂), toInt8_ofIntLE]\ntheorem Int128.toInt16_ofIntLE {n} (h₁ h₂) : (Int128.ofIntLE n h₁ h₂).toInt16 = Int16.ofInt n := Int16.toInt.inj (by simp)\n@[simp] theorem Int128.toInt16_ofBitVec (b) : (Int128.ofBitVec 
b).toInt16 = Int16.ofBitVec (b.signExtend _) := (rfl)\n@[simp] theorem Int128.toInt16_ofNat' {n} : (Int128.ofNat n).toInt16 = Int16.ofNat n :=\n  Int16.toBitVec.inj (by simp [BitVec.signExtend_eq_setWidth_of_le])\n@[simp] theorem Int128.toInt16_ofInt {n} : (Int128.ofInt n).toInt16 = Int16.ofInt n :=\n  Int16.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))\n@[simp] theorem Int128.toInt16_ofNat {n} : toInt16 (no_index (OfNat.ofNat n)) = OfNat.ofNat n := toInt16_ofNat'\ntheorem Int128.toInt16_ofIntTruncate {n : Int} (h₁ : -2 ^ 127 ≤ n) (h₂ : n < 2 ^ 127) :\n    (Int128.ofIntTruncate n).toInt16 = Int16.ofInt n := by\n  rw [← ofIntLE_eq_ofIntTruncate (h₁ := h₁) (h₂ := Int.le_of_lt_add_one h₂), toInt16_ofIntLE]\ntheorem Int128.toInt32_ofIntLE {n} (h₁ h₂) : (Int128.ofIntLE n h₁ h₂).toInt32 = Int32.ofInt n := Int32.toInt.inj (by simp)\n\n@[simp] theorem Int128.toInt32_ofBitVec (b) : (Int128.ofBitVec b).toInt32 = Int32.ofBitVec (b.signExtend _) := (rfl)\n\n@[simp] theorem Int128.toInt32_ofNat' {n} : (Int128.ofNat n).toInt32 = Int32.ofNat n :=\n  Int32.toBitVec.inj (by simp [BitVec.signExtend_eq_setWidth_of_le])\n\n@[simp] theorem Int128.toInt32_ofInt {n} : (Int128.ofInt n).toInt32 = Int32.ofInt n :=\n  Int32.toInt.inj (by simpa using Int.bmod_bmod_of_dvd (by decide))\n\n@[simp] theorem Int128.toInt32_ofNat {n} : toInt32 (no_index (OfNat.ofNat n)) = OfNat.ofNat n := toInt32_ofNat'\n\ntheorem Int128.toInt32_ofIntTruncate {n : Int} (h₁ : -2 ^ 127 ≤ n) (h₂ : n < 2 ^ 127) :\n    (Int128.ofIntTruncate n).toInt32 = Int32.ofInt n := by\n  rw [← ofIntLE_eq_ofIntTruncate (h₁ := h₁) (h₂ := Int.le_of_lt_add_one h₂), toInt32_ofIntLE]\n\n-- theorem Int128.toISize_ofIntLE {n} (h₁ h₂) : (Int128.ofIntLE n h₁ h₂).toISize = ISize.ofInt n :=\n--   ISize.toInt.inj (by simp [ISize.toInt_ofInt])\n\n-- @[simp] theorem Int128.toISize_ofBitVec (b) : (Int128.ofBitVec b).toISize = ISize.ofBitVec (b.signExtend _) := (rfl)\n\n-- @[simp] theorem Int128.toISize_ofNat' {n} : (Int128.ofNat 
n).toISize = ISize.ofNat n :=\n--   ISize.toBitVec.inj (by simp [BitVec.signExtend_eq_setWidth_of_le])\n\n-- @[simp] theorem Int128.toISize_ofInt {n} : (Int128.ofInt n).toISize = ISize.ofInt n :=\n--  ISize.toInt.inj (by simpa [ISize.toInt_ofInt] using Int.bmod_bmod_of_dvd USize.size_dvd_uInt128Size)\n\n-- @[simp] theorem Int128.toISize_ofNat {n} : toISize (no_index (OfNat.ofNat n)) = OfNat.ofNat n := toISize_ofNat'\n\n-- theorem Int128.toISize_ofIntTruncate {n : Int} (h₁ : -2 ^ 127 ≤ n) (h₂ : n < 2 ^ 127) :\n--     (Int128.ofIntTruncate n).toISize = ISize.ofInt n := by\n--   rw [← ofIntLE_eq_ofIntTruncate (h₁ := h₁) (h₂ := Int.le_of_lt_add_one h₂), toISize_ofIntLE]\n\n@[simp, int_toBitVec] theorem Int128.toBitVec_minValue : minValue.toBitVec = BitVec.intMin _ := (rfl)\n\n@[simp, int_toBitVec] theorem Int128.toBitVec_maxValue : maxValue.toBitVec = BitVec.intMax _ := (rfl)\n\n@[simp] theorem Int128.toInt8_neg (x : Int128) : (-x).toInt8 = -x.toInt8 := Int8.toBitVec.inj (by simp)\n@[simp] theorem Int128.toInt16_neg (x : Int128) : (-x).toInt16 = -x.toInt16 := Int16.toBitVec.inj (by simp)\n\n@[simp] theorem Int128.toInt32_neg (x : Int128) : (-x).toInt32 = -x.toInt32 := Int32.toBitVec.inj (by simp)\n\n-- @[simp] theorem Int128.toISize_neg (x : Int128) : (-x).toISize = -x.toISize := ISize.toBitVec.inj (by simp)\n\n@[simp] theorem Int8.toInt128_neg_of_ne {x : Int8} (hx : x ≠ -128) : (-x).toInt128 = -x.toInt128 :=\n  Int128.toBitVec.inj (BitVec.signExtend_neg_of_ne_intMin _ (fun h => hx (Int8.toBitVec.inj h)))\n@[simp] theorem Int16.toInt128_neg_of_ne {x : Int16} (hx : x ≠ -32768) : (-x).toInt128 = -x.toInt128 :=\n  Int128.toBitVec.inj (BitVec.signExtend_neg_of_ne_intMin _ (fun h => hx (Int16.toBitVec.inj h)))\n@[simp] theorem Int32.toInt128_neg_of_ne {x : Int32} (hx : x ≠ -2147483648) : (-x).toInt128 = -x.toInt128 :=\n  Int128.toBitVec.inj (BitVec.signExtend_neg_of_ne_intMin _  (fun h => hx (Int32.toBitVec.inj h)))\n@[simp] theorem Int64.toInt128_neg_of_ne {x : Int64} (hx 
: x ≠ -9223372036854775808) : (-x).toInt128 = -x.toInt128 :=\n  Int128.toBitVec.inj (BitVec.signExtend_neg_of_ne_intMin _  (fun h => hx (Int64.toBitVec.inj h)))\n-- @[simp] theorem ISize.toInt128_neg_of_ne {x : ISize} (hx : x ≠ minValue) : (-x).toInt128 = -x.toInt128 :=\n--   Int128.toBitVec.inj (BitVec.signExtend_neg_of_ne_intMin _\n--     (fun h => hx (ISize.toBitVec.inj (h.trans toBitVec_minValue.symm))))\n\ntheorem Int8.toInt128_ofIntLE {n : Int} (h₁ h₂) :\n    (Int8.ofIntLE n h₁ h₂).toInt128 = Int128.ofIntLE n (Int.le_trans (by decide) h₁) (Int.le_trans h₂ (by decide)) :=\n  Int128.toInt.inj (by simp)\n@[simp] theorem Int8.toInt128_ofBitVec (b) : (Int8.ofBitVec b).toInt128 = Int128.ofBitVec (b.signExtend _) := (rfl)\n@[simp] theorem Int8.toInt128_ofInt {n : Int} (h₁ : Int8.minValue.toInt ≤ n) (h₂ : n ≤ Int8.maxValue.toInt) :\n    (Int8.ofInt n).toInt128 = Int128.ofInt n := by rw [← Int8.ofIntLE_eq_ofInt h₁ h₂, toInt128_ofIntLE, Int128.ofIntLE_eq_ofInt]\n@[simp] theorem Int8.toInt128_ofNat' {n : Nat} (h : n ≤ Int8.maxValue.toInt) :\n    (Int8.ofNat n).toInt128 = Int128.ofNat n := by\n  rw [← ofInt_eq_ofNat, toInt128_ofInt (by simp) h, Int128.ofInt_eq_ofNat]\n@[simp] theorem Int8.toInt128_ofNat {n : Nat} (h : n ≤ 127) :\n    toInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n := Int8.toInt128_ofNat' (by rw [toInt_maxValue]; omega)\ntheorem Int16.toInt128_ofIntLE {n : Int} (h₁ h₂) :\n    (Int16.ofIntLE n h₁ h₂).toInt128 = Int128.ofIntLE n (Int.le_trans (by decide) h₁) (Int.le_trans h₂ (by decide)) :=\n  Int128.toInt.inj (by simp)\n@[simp] theorem Int16.toInt128_ofBitVec (b) : (Int16.ofBitVec b).toInt128 = Int128.ofBitVec (b.signExtend _) := (rfl)\n@[simp] theorem Int16.toInt128_ofInt {n : Int} (h₁ : Int16.minValue.toInt ≤ n) (h₂ : n ≤ Int16.maxValue.toInt) :\n    (Int16.ofInt n).toInt128 = Int128.ofInt n := by rw [← Int16.ofIntLE_eq_ofInt h₁ h₂, toInt128_ofIntLE, Int128.ofIntLE_eq_ofInt]\n@[simp] theorem Int16.toInt128_ofNat' {n : Nat} (h : n ≤ 
Int16.maxValue.toInt) :\n    (Int16.ofNat n).toInt128 = Int128.ofNat n := by\n  rw [← ofInt_eq_ofNat, toInt128_ofInt (by simp) h, Int128.ofInt_eq_ofNat]\n@[simp] theorem Int16.toInt128_ofNat {n : Nat} (h : n ≤ 32767) :\n    toInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n := Int16.toInt128_ofNat' (by rw [toInt_maxValue]; omega)\ntheorem Int32.toInt128_ofIntLE {n : Int} (h₁ h₂) :\n    (Int32.ofIntLE n h₁ h₂).toInt128 = Int128.ofIntLE n (Int.le_trans (by decide) h₁) (Int.le_trans h₂ (by decide)) :=\n  Int128.toInt.inj (by simp)\n\n@[simp] theorem Int32.toInt128_ofBitVec (b) : (Int32.ofBitVec b).toInt128 = Int128.ofBitVec (b.signExtend _) := (rfl)\n\n@[simp] theorem Int32.toInt128_ofInt {n : Int} (h₁ : Int32.minValue.toInt ≤ n) (h₂ : n ≤ Int32.maxValue.toInt) :\n    (Int32.ofInt n).toInt128 = Int128.ofInt n := by rw [← Int32.ofIntLE_eq_ofInt h₁ h₂, toInt128_ofIntLE, Int128.ofIntLE_eq_ofInt]\n\n@[simp] theorem Int32.toInt128_ofNat' {n : Nat} (h : n ≤ Int32.maxValue.toInt) :\n    (Int32.ofNat n).toInt128 = Int128.ofNat n := by\n  rw [← ofInt_eq_ofNat, toInt128_ofInt (by simp) h, Int128.ofInt_eq_ofNat]\n\n@[simp] theorem Int32.toInt128_ofNat {n : Nat} (h : n ≤ 2147483647) :\n    toInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n := Int32.toInt128_ofNat' (by rw [toInt_maxValue]; omega)\n\n-- theorem ISize.toInt128_ofIntLE {n : Int} (h₁ h₂) :\n--     (ISize.ofIntLE n h₁ h₂).toInt128 = Int128.ofIntLE n (Int.le_trans minValue.int128MinValue_le_toInt h₁)\n--       (Int.le_trans h₂ maxValue.toInt_le_int128MaxValue) :=\n--   Int128.toInt.inj (by simp)\n\n-- @[simp] theorem ISize.toInt128_ofBitVec (b) : (ISize.ofBitVec b).toInt128 = Int128.ofBitVec (b.signExtend _) := (rfl)\n\n-- @[simp] theorem ISize.toInt128_ofInt {n : Int} (h₁ : ISize.minValue.toInt ≤ n) (h₂ : n ≤ ISize.maxValue.toInt) :\n--     (ISize.ofInt n).toInt128 = Int128.ofInt n := by rw [← ISize.ofIntLE_eq_ofInt h₁ h₂, toInt128_ofIntLE, Int128.ofIntLE_eq_ofInt]\n\n-- @[simp] theorem ISize.toInt128_ofNat' {n : 
Nat} (h : n ≤ ISize.maxValue.toInt) :\n--     (ISize.ofNat n).toInt128 = Int128.ofNat n := by\n--   rw [← ofInt_eq_ofNat, toInt128_ofInt _ h, Int128.ofInt_eq_ofNat]\n--   refine Int.le_trans ?_ (Int.zero_le_ofNat _)\n--   cases System.Platform.numBits_eq <;> simp_all [ISize.toInt_minValue]\n\n-- @[simp] theorem ISize.toInt128_ofNat {n : Nat} (h : n ≤ 2147483647) :\n--     toInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=\n--   ISize.toInt128_ofNat' (by rw [toInt_maxValue]; cases System.Platform.numBits_eq <;> simp_all <;> omega)\n@[simp] theorem Int128.ofIntLE_bitVecToInt (n : BitVec 128) :\n    Int128.ofIntLE n.toInt (by exact n.le_toInt) (by exact n.toInt_le) = Int128.ofBitVec n :=\n  Int128.toBitVec.inj (by simp)\n\ntheorem Int128.ofBitVec_ofNatLT (n : Nat) (hn) : Int128.ofBitVec (BitVec.ofNatLT n hn) = Int128.ofNat n :=\n  Int128.toBitVec.inj (by simp [BitVec.ofNatLT_eq_ofNat hn])\n@[simp] theorem Int128.ofBitVec_ofNat (n : Nat) : Int128.ofBitVec (BitVec.ofNat 128 n) = Int128.ofNat n := (rfl)\n@[simp] theorem Int128.ofBitVec_ofInt (n : Int) : Int128.ofBitVec (BitVec.ofInt 128 n) = Int128.ofInt n := (rfl)\n@[simp] theorem Int128.ofNat_bitVecToNat (n : BitVec 128) : Int128.ofNat n.toNat = Int128.ofBitVec n :=\n  Int128.toBitVec.inj (by simp)\n@[simp] theorem Int128.ofInt_bitVecToInt (n : BitVec 128) : Int128.ofInt n.toInt = Int128.ofBitVec n :=\n  Int128.toBitVec.inj (by simp)\n@[simp] theorem Int128.ofIntTruncate_bitVecToInt (n : BitVec 128) : Int128.ofIntTruncate n.toInt = Int128.ofBitVec n :=\n  Int128.toBitVec.inj (by simp [toBitVec_ofIntTruncate (n.le_toInt) (n.toInt_le)])\n@[simp] theorem Int128.toInt_neg (n : Int128) : (-n).toInt = (-n.toInt).bmod (2 ^ 128) := BitVec.toInt_neg\n@[simp] theorem Int128.toNatClampNeg_eq_zero_iff {n : Int128} : n.toNatClampNeg = 0 ↔ n ≤ 0 := by\n  rw [toNatClampNeg, Int.toNat_eq_zero, le_iff_toInt_le, toInt_zero]\n@[simp] protected theorem Int128.not_le {n m : Int128} : ¬n ≤ m ↔ m < n := by simp [le_iff_toInt_le, 
lt_iff_toInt_lt]\n@[simp] theorem Int128.neg_nonpos_iff (n : Int128) : -n ≤ 0 ↔ n = minValue ∨ 0 ≤ n := by\n  rw [le_iff_toBitVec_sle, toBitVec_zero, toBitVec_neg, BitVec.neg_sle_zero (by decide)]\n  simp [← toBitVec_inj, le_iff_toBitVec_sle, BitVec.intMin_eq_neg_two_pow]\n@[simp] theorem Int128.toNatClampNeg_pos_iff (n : Int128) : 0 < n.toNatClampNeg ↔ 0 < n := by simp [Nat.pos_iff_ne_zero]\n@[simp] theorem Int128.toInt_div (a b : Int128) : (a / b).toInt = (a.toInt.tdiv b.toInt).bmod (2 ^ 128) := by\n  rw [← toInt_toBitVec, Int128.toBitVec_div, BitVec.toInt_sdiv, toInt_toBitVec, toInt_toBitVec]\ntheorem Int128.toInt_div_of_ne_left (a b : Int128) (h : a ≠ minValue) : (a / b).toInt = a.toInt.tdiv b.toInt := by\n  rw [← toInt_toBitVec, Int128.toBitVec_div, BitVec.toInt_sdiv_of_ne_or_ne, toInt_toBitVec, toInt_toBitVec]\n  exact Or.inl (by simpa [← toBitVec_inj] using h)\ntheorem Int128.toInt_div_of_ne_right (a b : Int128) (h : b ≠ -1) : (a / b).toInt = a.toInt.tdiv b.toInt := by\n  rw [← toInt_toBitVec, Int128.toBitVec_div, BitVec.toInt_sdiv_of_ne_or_ne, toInt_toBitVec, toInt_toBitVec]\n  exact Or.inr (by simpa [← toBitVec_inj] using h)\ntheorem Int8.toInt128_ne_minValue (a : Int8) : a.toInt128 ≠ Int128.minValue :=\n  have := a.le_toInt; by simp [← Int128.toInt_inj]; omega\ntheorem Int16.toInt128_ne_minValue (a : Int16) : a.toInt128 ≠ Int128.minValue :=\n  have := a.le_toInt; by simp [← Int128.toInt_inj]; omega\ntheorem Int32.toInt128_ne_minValue (a : Int32) : a.toInt128 ≠ Int128.minValue :=\n  have := a.le_toInt; by simp [← Int128.toInt_inj]; omega\n-- theorem ISize.toInt128_ne_minValue (a : ISize) (ha : a ≠ minValue) : a.toInt128 ≠ Int128.minValue := by\n--   have := a.minValue_le_toInt\n--   have : -2 ^ 127 ≤ minValue.toInt := minValue.le_toInt\n--   simp [← Int128.toInt_inj, ← ISize.toInt_inj] at *; omega\n\ntheorem Int8.toInt128_ne_neg_one (a : Int8) (ha : a ≠ -1) : a.toInt128 ≠ -1 :=\n  ne_of_apply_ne Int128.toInt8 (by simpa using ha)\ntheorem 
Int16.toInt128_ne_neg_one (a : Int16) (ha : a ≠ -1) : a.toInt128 ≠ -1 :=\n  ne_of_apply_ne Int128.toInt16 (by simpa using ha)\ntheorem Int32.toInt128_ne_neg_one (a : Int32) (ha : a ≠ -1) : a.toInt128 ≠ -1 :=\n  ne_of_apply_ne Int128.toInt32 (by simpa using ha)\n-- theorem ISize.toInt128_ne_neg_one (a : ISize) (ha : a ≠ -1) : a.toInt128 ≠ -1 :=\n--   ne_of_apply_ne Int128.toISize (by simpa using ha)\n\ntheorem Int8.toInt128_div_of_ne_left (a b : Int8) (ha : a ≠ minValue) : (a / b).toInt128 = a.toInt128 / b.toInt128 :=\n  Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_left _ _ ha,\n    Int128.toInt_div_of_ne_left _ _ a.toInt128_ne_minValue, toInt_toInt128, toInt_toInt128])\ntheorem Int16.toInt128_div_of_ne_left (a b : Int16) (ha : a ≠ minValue) : (a / b).toInt128 = a.toInt128 / b.toInt128 :=\n  Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_left _ _ ha,\n    Int128.toInt_div_of_ne_left _ _ a.toInt128_ne_minValue, toInt_toInt128, toInt_toInt128])\ntheorem Int32.toInt128_div_of_ne_left (a b : Int32) (ha : a ≠ minValue) : (a / b).toInt128 = a.toInt128 / b.toInt128 :=\n  Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_left _ _ ha,\n    Int128.toInt_div_of_ne_left _ _ a.toInt128_ne_minValue, toInt_toInt128, toInt_toInt128])\n-- theorem ISize.toInt128_div_of_ne_left (a b : ISize) (ha : a ≠ minValue) : (a / b).toInt128 = a.toInt128 / b.toInt128 :=\n--   Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_left _ _ ha,\n    -- Int128.toInt_div_of_ne_left _ _ (a.toInt128_ne_minValue ha), toInt_toInt128, toInt_toInt128])\ntheorem Int8.toInt128_div_of_ne_right (a b : Int8) (hb : b ≠ -1) : (a / b).toInt128 = a.toInt128 / b.toInt128 :=\n  Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_right _ _ hb,\n    Int128.toInt_div_of_ne_right _ _ (b.toInt128_ne_neg_one hb), toInt_toInt128, toInt_toInt128])\ntheorem Int16.toInt128_div_of_ne_right (a b : Int16) (hb : b ≠ -1) : (a / b).toInt128 = a.toInt128 / b.toInt128 :=\n  
Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_right _ _ hb,\n    Int128.toInt_div_of_ne_right _ _ (b.toInt128_ne_neg_one hb), toInt_toInt128, toInt_toInt128])\ntheorem Int32.toInt128_div_of_ne_right (a b : Int32) (hb : b ≠ -1) : (a / b).toInt128 = a.toInt128 / b.toInt128 :=\n  Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_right _ _ hb,\n    Int128.toInt_div_of_ne_right _ _ (b.toInt128_ne_neg_one hb), toInt_toInt128, toInt_toInt128])\n-- theorem ISize.toInt128_div_of_ne_right (a b : ISize) (hb : b ≠ -1) : (a / b).toInt128 = a.toInt128 / b.toInt128 :=\n--   Int128.toInt_inj.1 (by rw [toInt_toInt128, toInt_div_of_ne_right _ _ hb,\n--     Int128.toInt_div_of_ne_right _ _ (b.toInt128_ne_neg_one hb), toInt_toInt128, toInt_toInt128])\n@[simp] theorem Int128.minValue_div_neg_one : minValue / -1 = minValue := by decide\n@[simp] theorem Int128.toInt_add (a b : Int128) : (a + b).toInt = (a.toInt + b.toInt).bmod (2 ^ 128) := by\n  rw [← toInt_toBitVec, Int128.toBitVec_add, BitVec.toInt_add, toInt_toBitVec, toInt_toBitVec]\n@[simp] theorem Int128.toInt8_add (a b : Int128) : (a + b).toInt8 = a.toInt8 + b.toInt8 :=\n  Int8.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_add])\n@[simp] theorem Int128.toInt16_add (a b : Int128) : (a + b).toInt16 = a.toInt16 + b.toInt16 :=\n  Int16.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_add])\n@[simp] theorem Int128.toInt32_add (a b : Int128) : (a + b).toInt32 = a.toInt32 + b.toInt32 :=\n  Int32.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_add])\n-- @[simp] theorem Int128.toISize_add (a b : Int128) : (a + b).toISize = a.toISize + b.toISize :=\n--   ISize.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_add])\n@[simp] theorem Int128.toInt_mul (a b : Int128) : (a * b).toInt = (a.toInt * b.toInt).bmod (2 ^ 128) := by\n  rw [← toInt_toBitVec, Int128.toBitVec_mul, BitVec.toInt_mul, toInt_toBitVec, 
toInt_toBitVec]\n\n@[simp] theorem Int128.toInt8_mul (a b : Int128) : (a * b).toInt8 = a.toInt8 * b.toInt8 :=\n  Int8.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_mul])\n@[simp] theorem Int128.toInt16_mul (a b : Int128) : (a * b).toInt16 = a.toInt16 * b.toInt16 :=\n  Int16.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_mul])\n@[simp] theorem Int128.toInt32_mul (a b : Int128) : (a * b).toInt32 = a.toInt32 * b.toInt32 :=\n  Int32.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_mul])\n-- @[simp] theorem Int128.toISize_mul (a b : Int128) : (a * b).toISize = a.toISize * b.toISize :=\n--   ISize.toBitVec_inj.1 (by simp [BitVec.signExtend_eq_setWidth_of_le, BitVec.setWidth_mul])\nprotected theorem Int128.sub_eq_add_neg (a b : Int128) : a - b = a + -b := Int128.toBitVec.inj (by simp [BitVec.sub_eq_add_neg])\n@[simp] theorem Int128.toInt_sub (a b : Int128) : (a - b).toInt = (a.toInt - b.toInt).bmod (2 ^ 128) := by\n  simp [Int128.sub_eq_add_neg, Int.sub_eq_add_neg]\n\n@[simp] theorem Int128.toInt8_sub (a b : Int128) : (a - b).toInt8 = a.toInt8 - b.toInt8 := by\n  simp [Int128.sub_eq_add_neg, Int8.sub_eq_add_neg]\n@[simp] theorem Int128.toInt16_sub (a b : Int128) : (a - b).toInt16 = a.toInt16 - b.toInt16 := by\n  simp [Int128.sub_eq_add_neg, Int16.sub_eq_add_neg]\n@[simp] theorem Int128.toInt32_sub (a b : Int128) : (a - b).toInt32 = a.toInt32 - b.toInt32 := by\n  simp [Int128.sub_eq_add_neg, Int32.sub_eq_add_neg]\n-- @[simp] theorem Int128.toISize_sub (a b : Int128) : (a - b).toISize = a.toISize - b.toISize := by\n--   simp [Int128.sub_eq_add_neg, ISize.sub_eq_add_neg]\n@[simp] theorem Int8.toInt128_lt {a b : Int8} : a.toInt128 < b.toInt128 ↔ a < b := by\n  simp [lt_iff_toInt_lt, Int128.lt_iff_toInt_lt]\n@[simp] theorem Int16.toInt128_lt {a b : Int16} : a.toInt128 < b.toInt128 ↔ a < b := by\n  simp [lt_iff_toInt_lt, Int128.lt_iff_toInt_lt]\n@[simp] theorem Int32.toInt128_lt {a b : 
Int32} : a.toInt128 < b.toInt128 ↔ a < b := by\n  simp [lt_iff_toInt_lt, Int128.lt_iff_toInt_lt]\n-- @[simp] theorem ISize.toInt128_lt {a b : ISize} : a.toInt128 < b.toInt128 ↔ a < b := by\n--   simp [lt_iff_toInt_lt, Int128.lt_iff_toInt_lt]\n\n@[simp] theorem Int8.toInt128_le {a b : Int8} : a.toInt128 ≤ b.toInt128 ↔ a ≤ b := by\n  simp [le_iff_toInt_le, Int128.le_iff_toInt_le]\n@[simp] theorem Int16.toInt128_le {a b : Int16} : a.toInt128 ≤ b.toInt128 ↔ a ≤ b := by\n  simp [le_iff_toInt_le, Int128.le_iff_toInt_le]\n@[simp] theorem Int32.toInt128_le {a b : Int32} : a.toInt128 ≤ b.toInt128 ↔ a ≤ b := by\n  simp [le_iff_toInt_le, Int128.le_iff_toInt_le]\n-- @[simp] theorem ISize.toInt128_le {a b : ISize} : a.toInt128 ≤ b.toInt128 ↔ a ≤ b := by\n--   simp [le_iff_toInt_le, Int128.le_iff_toInt_le]\n@[simp] theorem Int128.ofBitVec_neg (a : BitVec 128) : Int128.ofBitVec (-a) = -Int128.ofBitVec a := (rfl)\n@[simp] theorem Int128.ofInt_neg (a : Int) : Int128.ofInt (-a) = -Int128.ofInt a := Int128.toInt_inj.1 (by simp)\ntheorem Int128.ofInt_eq_iff_bmod_eq_toInt (a : Int) (b : Int128) : Int128.ofInt a = b ↔ a.bmod (2 ^ 128) = b.toInt := by\n  simp [← Int128.toInt_inj]\n@[simp] theorem Int128.ofBitVec_add (a b : BitVec 128) : Int128.ofBitVec (a + b) = Int128.ofBitVec a + Int128.ofBitVec b := (rfl)\n@[simp] theorem Int128.ofInt_add (a b : Int) : Int128.ofInt (a + b) = Int128.ofInt a + Int128.ofInt b := by\n  simp [Int128.ofInt_eq_iff_bmod_eq_toInt]\n@[simp] theorem Int128.ofNat_add (a b : Nat) : Int128.ofNat (a + b) = Int128.ofNat a + Int128.ofNat b := by\n  simp [← Int128.ofInt_eq_ofNat]\ntheorem Int128.ofIntLE_add {a b : Int} {hab₁ hab₂} : Int128.ofIntLE (a + b) hab₁ hab₂ = Int128.ofInt a + Int128.ofInt b := by\n  simp [Int128.ofIntLE_eq_ofInt]\n@[simp] theorem Int128.ofBitVec_sub (a b : BitVec 128) : Int128.ofBitVec (a - b) = Int128.ofBitVec a - Int128.ofBitVec b := (rfl)\n@[simp] theorem Int128.ofInt_sub (a b : Int) : Int128.ofInt (a - b) = Int128.ofInt a - Int128.ofInt b 
:= by\n  simp [Int128.ofInt_eq_iff_bmod_eq_toInt]\n@[simp] theorem Int128.ofNat_sub (a b : Nat) (hab : b ≤ a) : Int128.ofNat (a - b) = Int128.ofNat a - Int128.ofNat b := by\n  simp [← Int128.ofInt_eq_ofNat, Int.ofNat_sub hab]\ntheorem Int128.ofIntLE_sub {a b : Int} {hab₁ hab₂} : Int128.ofIntLE (a - b) hab₁ hab₂ = Int128.ofInt a - Int128.ofInt b := by\n  simp [Int128.ofIntLE_eq_ofInt]\n@[simp] theorem Int128.ofBitVec_mul (a b : BitVec 128) : Int128.ofBitVec (a * b) = Int128.ofBitVec a * Int128.ofBitVec b := (rfl)\n@[simp] theorem Int128.ofInt_mul (a b : Int) : Int128.ofInt (a * b) = Int128.ofInt a * Int128.ofInt b := by\n  simp [Int128.ofInt_eq_iff_bmod_eq_toInt]\n@[simp] theorem Int128.ofNat_mul (a b : Nat) : Int128.ofNat (a * b) = Int128.ofNat a * Int128.ofNat b := by\n  simp [← Int128.ofInt_eq_ofNat]\ntheorem Int128.ofIntLE_mul {a b : Int} {hab₁ hab₂} : Int128.ofIntLE (a * b) hab₁ hab₂ = Int128.ofInt a * Int128.ofInt b := by\n  simp [Int128.ofIntLE_eq_ofInt]\ntheorem Int128.toInt_minValue_lt_zero : minValue.toInt < 0 := by decide\ntheorem Int128.toInt_maxValue_add_one : maxValue.toInt + 1 = 2 ^ 127 := (rfl)\n@[simp] theorem Int128.ofBitVec_sdiv (a b : BitVec 128) : Int128.ofBitVec (a.sdiv b) = Int128.ofBitVec a / Int128.ofBitVec b := (rfl)\ntheorem Int128.ofInt_tdiv {a b : Int} (ha₁ : minValue.toInt ≤ a) (ha₂ : a ≤ maxValue.toInt)\n    (hb₁ : minValue.toInt ≤ b) (hb₂ : b ≤ maxValue.toInt) : Int128.ofInt (a.tdiv b) = Int128.ofInt a / Int128.ofInt b := by\n  rw [Int128.ofInt_eq_iff_bmod_eq_toInt, toInt_div, toInt_ofInt, toInt_ofInt,\n    Int.bmod_eq_of_le (n := a), Int.bmod_eq_of_le (n := b)]\n  · exact hb₁\n  · exact Int.lt_of_le_sub_one hb₂\n  · exact ha₁\n  · exact Int.lt_of_le_sub_one ha₂\ntheorem Int128.ofInt_eq_ofIntLE_div {a b : Int} (ha₁ ha₂ hb₁ hb₂) :\n    Int128.ofInt (a.tdiv b) = Int128.ofIntLE a ha₁ ha₂ / Int128.ofIntLE b hb₁ hb₂ := by\n  rw [ofIntLE_eq_ofInt, ofIntLE_eq_ofInt, ofInt_tdiv ha₁ ha₂ hb₁ hb₂]\ntheorem Int128.ofNat_div {a b : Nat} (ha : a < 
2 ^ 127) (hb : b < 2 ^ 127) :\n    Int128.ofNat (a / b) = Int128.ofNat a / Int128.ofNat b := by\n  rw [← ofInt_eq_ofNat, ← ofInt_eq_ofNat, ← ofInt_eq_ofNat, Int.ofNat_tdiv,\n    ofInt_tdiv (by simp) _ (by simp)]\n  · exact Int.le_of_lt_add_one (Int.ofNat_le.2 hb)\n  · exact Int.le_of_lt_add_one (Int.ofNat_le.2 ha)\n@[simp] theorem Int128.ofBitVec_srem (a b : BitVec 128) : Int128.ofBitVec (a.srem b) = Int128.ofBitVec a % Int128.ofBitVec b := (rfl)\n@[simp] theorem Int128.toInt_bmod_size (a : Int128) : a.toInt.bmod size = a.toInt := BitVec.toInt_bmod_cancel _\ntheorem Int128.ofIntLE_le_iff_le {a b : Int} (ha₁ ha₂ hb₁ hb₂) :\n    Int128.ofIntLE a ha₁ ha₂ ≤ Int128.ofIntLE b hb₁ hb₂ ↔ a ≤ b := by simp [le_iff_toInt_le]\ntheorem Int128.ofInt_le_iff_le {a b : Int} (ha₁ : minValue.toInt ≤ a) (ha₂ : a ≤ maxValue.toInt)\n    (hb₁ : minValue.toInt ≤ b) (hb₂ : b ≤ maxValue.toInt) : Int128.ofInt a ≤ Int128.ofInt b ↔ a ≤ b := by\n  rw [← ofIntLE_eq_ofInt ha₁ ha₂, ← ofIntLE_eq_ofInt hb₁ hb₂, ofIntLE_le_iff_le]\ntheorem Int128.ofNat_le_iff_le {a b : Nat} (ha : a < 2 ^ 127) (hb : b < 2 ^ 127) :\n    Int128.ofNat a ≤ Int128.ofNat b ↔ a ≤ b := by\n  rw [← ofInt_eq_ofNat, ← ofInt_eq_ofNat, ofInt_le_iff_le (by simp) _ (by simp), Int.ofNat_le]\n  · exact Int.le_of_lt_add_one (Int.ofNat_le.2 hb)\n  · exact Int.le_of_lt_add_one (Int.ofNat_le.2 ha)\ntheorem Int128.ofBitVec_le_iff_sle (a b : BitVec 128) : Int128.ofBitVec a ≤ Int128.ofBitVec b ↔ a.sle b := Iff.rfl\ntheorem Int128.ofIntLE_lt_iff_lt {a b : Int} (ha₁ ha₂ hb₁ hb₂) :\n    Int128.ofIntLE a ha₁ ha₂ < Int128.ofIntLE b hb₁ hb₂ ↔ a < b := by simp [lt_iff_toInt_lt]\ntheorem Int128.ofInt_lt_iff_lt {a b : Int} (ha₁ : minValue.toInt ≤ a) (ha₂ : a ≤ maxValue.toInt)\n    (hb₁ : minValue.toInt ≤ b) (hb₂ : b ≤ maxValue.toInt) : Int128.ofInt a < Int128.ofInt b ↔ a < b := by\n  rw [← ofIntLE_eq_ofInt ha₁ ha₂, ← ofIntLE_eq_ofInt hb₁ hb₂, ofIntLE_lt_iff_lt]\ntheorem Int128.ofNat_lt_iff_lt {a b : Nat} (ha : a < 2 ^ 127) (hb : b < 2 ^ 127) :\n    
Int128.ofNat a < Int128.ofNat b ↔ a < b := by\n  rw [← ofInt_eq_ofNat, ← ofInt_eq_ofNat, ofInt_lt_iff_lt (by simp) _ (by simp), Int.ofNat_lt]\n  · exact Int.le_of_lt_add_one (Int.ofNat_lt.2 hb)\n  · exact Int.le_of_lt_add_one (Int.ofNat_lt.2 ha)\ntheorem Int128.ofBitVec_lt_iff_slt (a b : BitVec 128) : Int128.ofBitVec a < Int128.ofBitVec b ↔ a.slt b := Iff.rfl\ntheorem Int128.toNatClampNeg_one : (1 : Int128).toNatClampNeg = 1 := (rfl)\ntheorem Int128.toInt_one : (1 : Int128).toInt = 1 := (rfl)\ntheorem Int128.zero_lt_one : (0 : Int128) < 1 := by decide\ntheorem Int128.zero_ne_one : (0 : Int128) ≠ 1 := by decide\nprotected theorem Int128.add_assoc (a b c : Int128) : a + b + c = a + (b + c) :=\n  Int128.toBitVec_inj.1 (BitVec.add_assoc _ _ _)\ninstance : Std.Associative (α := Int128) (· + ·) := ⟨Int128.add_assoc⟩\nprotected theorem Int128.add_comm (a b : Int128) : a + b = b + a := Int128.toBitVec_inj.1 (BitVec.add_comm _ _)\ninstance : Std.Commutative (α := Int128) (· + ·) := ⟨Int128.add_comm⟩\n@[simp] protected theorem Int128.add_zero (a : Int128) : a + 0 = a := Int128.toBitVec_inj.1 (BitVec.add_zero _)\n@[simp] protected theorem Int128.zero_add (a : Int128) : 0 + a = a := Int128.toBitVec_inj.1 (BitVec.zero_add _)\ninstance : Std.LawfulIdentity (α := Int128) (· + ·) 0 where\n  left_id := Int128.zero_add\n  right_id := Int128.add_zero\n@[simp] protected theorem Int128.sub_zero (a : Int128) : a - 0 = a := Int128.toBitVec_inj.1 (BitVec.sub_zero _)\n@[simp] protected theorem Int128.zero_sub (a : Int128) : 0 - a = -a := Int128.toBitVec_inj.1 (BitVec.zero_sub _)\n@[simp] protected theorem Int128.sub_self (a : Int128) : a - a = 0 := Int128.toBitVec_inj.1 (BitVec.sub_self _)\nprotected theorem Int128.add_left_neg (a : Int128) : -a + a = 0 := Int128.toBitVec_inj.1 (BitVec.add_left_neg _)\nprotected theorem Int128.add_right_neg (a : Int128) : a + -a = 0 := Int128.toBitVec_inj.1 (BitVec.add_right_neg _)\n@[simp] protected theorem Int128.sub_add_cancel (a b : Int128) : a - b + b 
= a :=\n  Int128.toBitVec_inj.1 (BitVec.sub_add_cancel _ _)\nprotected theorem Int128.eq_sub_iff_add_eq {a b c : Int128} : a = c - b ↔ a + b = c := by\n  simpa [← Int128.toBitVec_inj] using BitVec.eq_sub_iff_add_eq\nprotected theorem Int128.sub_eq_iff_eq_add {a b c : Int128} : a - b = c ↔ a = c + b := by\n  simpa [← Int128.toBitVec_inj] using BitVec.sub_eq_iff_eq_add\n@[simp] protected theorem Int128.neg_neg {a : Int128} : - -a = a := Int128.toBitVec_inj.1 BitVec.neg_neg\n@[simp] protected theorem Int128.neg_inj {a b : Int128} : -a = -b ↔ a = b := by simp [← Int128.toBitVec_inj]\n@[simp] protected theorem Int128.neg_ne_zero {a : Int128} : -a ≠ 0 ↔ a ≠ 0 := by simp [← Int128.toBitVec_inj]\nprotected theorem Int128.neg_add {a b : Int128} : - (a + b) = -a - b := Int128.toBitVec_inj.1 BitVec.neg_add\n@[simp] protected theorem Int128.sub_neg {a b : Int128} : a - -b = a + b := Int128.toBitVec_inj.1 BitVec.sub_neg\n@[simp] protected theorem Int128.neg_sub {a b : Int128} : -(a - b) = b - a := by\n  rw [Int128.sub_eq_add_neg, Int128.neg_add, Int128.sub_neg, Int128.add_comm, ← Int128.sub_eq_add_neg]\nprotected theorem Int128.sub_sub (a b c : Int128) : a - b - c = a - (b + c) := by\n  simp [Int128.sub_eq_add_neg, Int128.add_assoc, Int128.neg_add]\n@[simp] protected theorem Int128.add_left_inj {a b : Int128} (c : Int128) : (a + c = b + c) ↔ a = b := by\n  simp [← Int128.toBitVec_inj]\n@[simp] protected theorem Int128.add_right_inj {a b : Int128} (c : Int128) : (c + a = c + b) ↔ a = b := by\n  simp [← Int128.toBitVec_inj]\n@[simp] protected theorem Int128.sub_left_inj {a b : Int128} (c : Int128) : (a - c = b - c) ↔ a = b := by\n  simp [← Int128.toBitVec_inj]\n@[simp] protected theorem Int128.sub_right_inj {a b : Int128} (c : Int128) : (c - a = c - b) ↔ a = b := by\n  simp [← Int128.toBitVec_inj]\n@[simp] theorem Int128.add_eq_right {a b : Int128} : a + b = b ↔ a = 0 := by\n  simp [← Int128.toBitVec_inj]\n@[simp] theorem Int128.add_eq_left {a b : Int128} : a + b = a ↔ b = 0 := 
by\n  simp [← Int128.toBitVec_inj]\n@[simp] theorem Int128.right_eq_add {a b : Int128} : b = a + b ↔ a = 0 := by\n  simp [← Int128.toBitVec_inj]\n@[simp] theorem Int128.left_eq_add {a b : Int128} : a = a + b ↔ b = 0 := by\n  simp [← Int128.toBitVec_inj]\nprotected theorem Int128.mul_comm (a b : Int128) : a * b = b * a := Int128.toBitVec_inj.1 (BitVec.mul_comm _ _)\ninstance : Std.Commutative (α := Int128) (· * ·) := ⟨Int128.mul_comm⟩\nprotected theorem Int128.mul_assoc (a b c : Int128) : a * b * c = a * (b * c) := Int128.toBitVec_inj.1 (BitVec.mul_assoc _ _ _)\ninstance : Std.Associative (α := Int128) (· * ·) := ⟨Int128.mul_assoc⟩\n@[simp] theorem Int128.mul_one (a : Int128) : a * 1 = a := Int128.toBitVec_inj.1 (BitVec.mul_one _)\n@[simp] theorem Int128.one_mul (a : Int128) : 1 * a = a := Int128.toBitVec_inj.1 (BitVec.one_mul _)\ninstance : Std.LawfulCommIdentity (α := Int128) (· * ·) 1 where\n  right_id := Int128.mul_one\n@[simp] theorem Int128.mul_zero {a : Int128} : a * 0 = 0 := Int128.toBitVec_inj.1 BitVec.mul_zero\n@[simp] theorem Int128.zero_mul {a : Int128} : 0 * a = 0 := Int128.toBitVec_inj.1 BitVec.zero_mul\n@[simp] protected theorem Int128.pow_zero (x : Int128) : x ^ 0 = 1 := (rfl)\nprotected theorem Int128.pow_succ (x : Int128) (n : Nat) : x ^ (n + 1) = x ^ n * x := (rfl)\nprotected theorem Int128.mul_add {a b c : Int128} : a * (b + c) = a * b + a * c :=\n    Int128.toBitVec_inj.1 BitVec.mul_add\nprotected theorem Int128.add_mul {a b c : Int128} : (a + b) * c = a * c + b * c := by\n  rw [Int128.mul_comm, Int128.mul_add, Int128.mul_comm a c, Int128.mul_comm c b]\nprotected theorem Int128.mul_succ {a b : Int128} : a * (b + 1) = a * b + a := by simp [Int128.mul_add]\nprotected theorem Int128.succ_mul {a b : Int128} : (a + 1) * b = a * b + b := by simp [Int128.add_mul]\nprotected theorem Int128.two_mul {a : Int128} : 2 * a = a + a := Int128.toBitVec_inj.1 BitVec.two_mul\nprotected theorem Int128.mul_two {a : Int128} : a * 2 = a + a := Int128.toBitVec_inj.1 
BitVec.mul_two\nprotected theorem Int128.neg_mul (a b : Int128) : -a * b = -(a * b) := Int128.toBitVec_inj.1 (BitVec.neg_mul _ _)\nprotected theorem Int128.mul_neg (a b : Int128) : a * -b = -(a * b) := Int128.toBitVec_inj.1 (BitVec.mul_neg _ _)\nprotected theorem Int128.neg_mul_neg (a b : Int128) : -a * -b = a * b := Int128.toBitVec_inj.1 (BitVec.neg_mul_neg _ _)\nprotected theorem Int128.neg_mul_comm (a b : Int128) : -a * b = a * -b := Int128.toBitVec_inj.1 (BitVec.neg_mul_comm _ _)\nprotected theorem Int128.mul_sub {a b c : Int128} : a * (b - c) = a * b - a * c := Int128.toBitVec_inj.1 BitVec.mul_sub\nprotected theorem Int128.sub_mul {a b c : Int128} : (a - b) * c = a * c - b * c := by\n  rw [Int128.mul_comm, Int128.mul_sub, Int128.mul_comm, Int128.mul_comm c]\ntheorem Int128.neg_add_mul_eq_mul_not {a b : Int128} : -(a + a * b) = a * ~~~b :=\n  Int128.toBitVec_inj.1 BitVec.neg_add_mul_eq_mul_not\ntheorem Int128.neg_mul_not_eq_add_mul {a b : Int128} : -(a * ~~~b) = a + a * b :=\n  Int128.toBitVec_inj.1 BitVec.neg_mul_not_eq_add_mul\nprotected theorem Int128.le_of_lt {a b : Int128} : a < b → a ≤ b := by\n  simpa [lt_iff_toInt_lt, le_iff_toInt_le] using Int.le_of_lt\nprotected theorem Int128.lt_of_le_of_ne {a b : Int128} : a ≤ b → a ≠ b → a < b := by\n  simpa [lt_iff_toInt_lt, le_iff_toInt_le, ← Int128.toInt_inj] using (Int.lt_iff_le_and_ne.2 ⟨·, ·⟩)\nprotected theorem Int128.lt_iff_le_and_ne {a b : Int128} : a < b ↔ a ≤ b ∧ a ≠ b := by\n  simpa [lt_iff_toInt_lt, le_iff_toInt_le, ← Int128.toInt_inj] using Int.lt_iff_le_and_ne\n@[simp] protected theorem Int128.lt_irrefl {a : Int128} : ¬a < a := by simp [lt_iff_toInt_lt]\nprotected theorem Int128.lt_of_le_of_lt {a b c : Int128} : a ≤ b → b < c → a < c := by\n  simpa [le_iff_toInt_le, lt_iff_toInt_lt] using Int.lt_of_le_of_lt\nprotected theorem Int128.lt_of_lt_of_le {a b c : Int128} : a < b → b ≤ c → a < c := by\n  simpa [le_iff_toInt_le, lt_iff_toInt_lt] using Int.lt_of_lt_of_le\n@[simp] theorem Int128.minValue_le (a 
: Int128) : minValue ≤ a := by simpa [le_iff_toInt_le] using a.minValue_le_toInt\n@[simp] theorem Int128.le_maxValue (a : Int128) : a ≤ maxValue := by simpa [le_iff_toInt_le] using a.toInt_le\n@[simp] theorem Int128.not_lt_minValue {a : Int128} : ¬a < minValue :=\n  fun h => Int128.lt_irrefl (Int128.lt_of_le_of_lt a.minValue_le h)\n@[simp] theorem Int128.not_maxValue_lt {a : Int128} : ¬maxValue < a :=\n  fun h => Int128.lt_irrefl (Int128.lt_of_lt_of_le h a.le_maxValue)\n@[simp] protected theorem Int128.le_refl (a : Int128) : a ≤ a := by simp [Int128.le_iff_toInt_le]\nprotected theorem Int128.le_rfl {a : Int128} : a ≤ a := Int128.le_refl _\nprotected theorem Int128.le_antisymm_iff {a b : Int128} : a = b ↔ a ≤ b ∧ b ≤ a :=\n  ⟨by rintro rfl; simp, by simpa [← Int128.toInt_inj, le_iff_toInt_le] using Int.le_antisymm⟩\nprotected theorem Int128.le_antisymm {a b : Int128} : a ≤ b → b ≤ a → a = b := by simpa using Int128.le_antisymm_iff.2\n@[simp] theorem Int128.le_minValue_iff {a : Int128} : a ≤ minValue ↔ a = minValue :=\n  ⟨fun h => Int128.le_antisymm h a.minValue_le, by rintro rfl; simp⟩\n@[simp] theorem Int128.maxValue_le_iff {a : Int128} : maxValue ≤ a ↔ a = maxValue :=\n  ⟨fun h => Int128.le_antisymm a.le_maxValue h, by rintro rfl; simp⟩\nset_option maxRecDepth 1000 in\n@[simp] protected theorem Int128.zero_div {a : Int128} : 0 / a = 0 := Int128.toBitVec_inj.1 BitVec.zero_sdiv\n@[simp] protected theorem Int128.div_zero {a : Int128} : a / 0 = 0 := Int128.toBitVec_inj.1 BitVec.sdiv_zero\n@[simp] protected theorem Int128.div_one {a : Int128} : a / 1 = a := Int128.toBitVec_inj.1 BitVec.sdiv_one\nprotected theorem Int128.div_self {a : Int128} : a / a = if a = 0 then 0 else 1 := by\n  simp [← Int128.toBitVec_inj, apply_ite]\n@[simp] protected theorem Int128.mod_zero {a : Int128} : a % 0 = a := Int128.toBitVec_inj.1 BitVec.srem_zero\nset_option maxRecDepth 1000 in\n@[simp] protected theorem Int128.zero_mod {a : Int128} : 0 % a = 0 := Int128.toBitVec_inj.1 
BitVec.zero_srem\n@[simp] protected theorem Int128.mod_one {a : Int128} : a % 1 = 0 := Int128.toBitVec_inj.1 BitVec.srem_one\n@[simp] protected theorem Int128.mod_self {a : Int128} : a % a = 0 := Int128.toBitVec_inj.1 BitVec.srem_self\n@[simp] protected theorem Int128.not_lt {a b : Int128} : ¬ a < b ↔ b ≤ a := by\n  simp [lt_iff_toBitVec_slt, le_iff_toBitVec_sle, BitVec.sle_eq_not_slt]\nprotected theorem Int128.le_trans {a b c : Int128} : a ≤ b → b ≤ c → a ≤ c := by\n  simpa [le_iff_toInt_le] using Int.le_trans\nprotected theorem Int128.lt_trans {a b c : Int128} : a < b → b < c → a < c := by\n  simpa [lt_iff_toInt_lt] using Int.lt_trans\nprotected theorem Int128.le_total (a b : Int128) : a ≤ b ∨ b ≤ a := by\n  simpa [le_iff_toInt_le] using Int.le_total _ _\nprotected theorem Int128.lt_asymm {a b : Int128} : a < b → ¬b < a :=\n  fun hab hba => Int128.lt_irrefl (Int128.lt_trans hab hba)\n\nopen Std in\ninstance Int128.instIsLinearOrder : IsLinearOrder Int128 := by\n  apply IsLinearOrder.of_le\n  case le_antisymm => constructor; apply Int128.le_antisymm\n  case le_total => constructor; apply Int128.le_total\n  case le_trans => constructor; apply Int128.le_trans\n\nopen Std in\ninstance : LawfulOrderLT Int128 where\n  lt_iff := by\n    simp [← Int128.not_le, Decidable.imp_iff_not_or, Std.Total.total]\n\nprotected theorem Int128.add_neg_eq_sub {a b : Int128} : a + -b = a - b := Int128.toBitVec_inj.1 BitVec.add_neg_eq_sub\ntheorem Int128.neg_eq_neg_one_mul (a : Int128) : -a = -1 * a := Int128.toInt_inj.1 (by simp)\n@[simp] protected theorem Int128.add_sub_cancel (a b : Int128) : a + b - b = a := Int128.toBitVec_inj.1 (BitVec.add_sub_cancel _ _)\nprotected theorem Int128.lt_or_lt_of_ne {a b : Int128} : a ≠ b → a < b ∨ b < a := by\n  simp [lt_iff_toInt_lt, ← Int128.toInt_inj]; omega\nprotected theorem Int128.lt_or_le (a b : Int128) : a < b ∨ b ≤ a := by\n  simp [lt_iff_toInt_lt, le_iff_toInt_le]; omega\nprotected theorem Int128.le_or_lt (a b : Int128) : a ≤ b ∨ b < a := 
(b.lt_or_le a).symm\nprotected theorem Int128.le_of_eq {a b : Int128} : a = b → a ≤ b := (· ▸ Int128.le_rfl)\nprotected theorem Int128.le_iff_lt_or_eq {a b : Int128} : a ≤ b ↔ a < b ∨ a = b := by\n  simp [← Int128.toInt_inj, le_iff_toInt_le, lt_iff_toInt_lt]; omega\nprotected theorem Int128.lt_or_eq_of_le {a b : Int128} : a ≤ b → a < b ∨ a = b := Int128.le_iff_lt_or_eq.mp\ntheorem Int128.toInt_eq_toNatClampNeg {a : Int128} (ha : 0 ≤ a) : a.toInt = a.toNatClampNeg := by\n  simpa only [← toNat_toInt, Int.eq_natCast_toNat, le_iff_toInt_le] using ha\n@[simp] theorem UInt128.toInt128_add (a b : UInt128) : (a + b).toInt128 = a.toInt128 + b.toInt128 := (rfl)\n@[simp] theorem UInt128.toInt128_neg (a : UInt128) : (-a).toInt128 = -a.toInt128 := (rfl)\n@[simp] theorem UInt128.toInt128_sub (a b : UInt128) : (a - b).toInt128 = a.toInt128 - b.toInt128 := (rfl)\n@[simp] theorem UInt128.toInt128_mul (a b : UInt128) : (a * b).toInt128 = a.toInt128 * b.toInt128 := (rfl)\n@[simp] theorem Int128.toUInt128_add (a b : Int128) : (a + b).toUInt128 = a.toUInt128 + b.toUInt128 := (rfl)\n@[simp] theorem Int128.toUInt128_neg (a : Int128) : (-a).toUInt128 = -a.toUInt128 := (rfl)\n@[simp] theorem Int128.toUInt128_sub (a b : Int128) : (a - b).toUInt128 = a.toUInt128 - b.toUInt128 := (rfl)\n@[simp] theorem Int128.toUInt128_mul (a b : Int128) : (a * b).toUInt128 = a.toUInt128 * b.toUInt128 := (rfl)\ntheorem Int128.toNatClampNeg_le {a b : Int128} (hab : a ≤ b) : a.toNatClampNeg ≤ b.toNatClampNeg := by\n  rw [← Int128.toNat_toInt, ← Int128.toNat_toInt]\n  exact Int.toNat_le_toNat (Int128.le_iff_toInt_le.1 hab)\ntheorem Int128.toUInt128_le {a b : Int128} (ha : 0 ≤ a) (hab : a ≤ b) : a.toUInt128 ≤ b.toUInt128 := by\n  rw [UInt128.le_iff_toNat_le, toNat_toUInt128_of_le ha, toNat_toUInt128_of_le (Int128.le_trans ha hab)]\n  exact Int128.toNatClampNeg_le hab\ntheorem Int128.zero_le_ofNat_of_lt {a : Nat} (ha : a < 2 ^ 127) : 0 ≤ Int128.ofNat a := by\n  rw [le_iff_toInt_le, toInt_ofNat_of_lt ha, 
Int128.toInt_zero]\n  exact Int.natCast_nonneg _\nprotected theorem Int128.sub_nonneg_of_le {a b : Int128} (hb : 0 ≤ b) (hab : b ≤ a) : 0 ≤ a - b := by\n  rw [← ofNat_toNatClampNeg _ hb, ← ofNat_toNatClampNeg _ (Int128.le_trans hb hab),\n    ← ofNat_sub _ _ (Int128.toNatClampNeg_le hab)]\n  exact Int128.zero_le_ofNat_of_lt (Nat.sub_lt_of_lt a.toNatClampNeg_lt)\ntheorem Int128.toNatClampNeg_sub_of_le {a b : Int128} (hb : 0 ≤ b) (hab : b ≤ a) :\n    (a - b).toNatClampNeg = a.toNatClampNeg - b.toNatClampNeg := by\n  rw [← toNat_toUInt128_of_le (Int128.sub_nonneg_of_le hb hab), toUInt128_sub,\n    UInt128.toNat_sub_of_le _ _ (Int128.toUInt128_le hb hab),\n    ← toNat_toUInt128_of_le (Int128.le_trans hb hab), ← toNat_toUInt128_of_le hb]\ntheorem Int128.toInt_sub_of_le (a b : Int128) (hb : 0 ≤ b) (h : b ≤ a) :\n    (a - b).toInt = a.toInt - b.toInt := by\n  rw [Int128.toInt_eq_toNatClampNeg (Int128.sub_nonneg_of_le hb h),\n    Int128.toInt_eq_toNatClampNeg (Int128.le_trans hb h), Int128.toInt_eq_toNatClampNeg hb,\n    Int128.toNatClampNeg_sub_of_le hb h, Int.ofNat_sub]\n  exact Int128.toNatClampNeg_le h\nprotected theorem Int128.sub_le {a b : Int128} (hb : 0 ≤ b) (hab : b ≤ a) : a - b ≤ a := by\n  simp_all [le_iff_toInt_le, Int128.toInt_sub_of_le _ _ hb hab]; omega\nprotected theorem Int128.sub_lt {a b : Int128} (hb : 0 < b) (hab : b ≤ a) : a - b < a := by\n  simp_all [lt_iff_toInt_lt, Int128.toInt_sub_of_le _ _ (Int128.le_of_lt hb) hab]; omega\nprotected theorem Int128.ne_of_lt {a b : Int128} : a < b → a ≠ b := by\n  simpa [Int128.lt_iff_toInt_lt, ← Int128.toInt_inj] using Int.ne_of_lt\n@[simp] theorem Int128.toInt_mod (a b : Int128) : (a % b).toInt = a.toInt.tmod b.toInt := by\n  rw [← toInt_toBitVec, Int128.toBitVec_mod, BitVec.toInt_srem, toInt_toBitVec, toInt_toBitVec]\n@[simp] theorem Int8.toInt128_mod (a b : Int8) : (a % b).toInt128 = a.toInt128 % b.toInt128 := Int128.toInt.inj (by simp)\n@[simp] theorem Int16.toInt128_mod (a b : Int16) : (a % b).toInt128 = 
a.toInt128 % b.toInt128 := Int128.toInt.inj (by simp)\n@[simp] theorem Int32.toInt128_mod (a b : Int32) : (a % b).toInt128 = a.toInt128 % b.toInt128 := Int128.toInt.inj (by simp)\n@[simp] theorem Int64.toInt128_mod (a b : Int64) : (a % b).toInt128 = a.toInt128 % b.toInt128 := Int128.toInt.inj (by simp)\n-- @[simp] theorem ISize.toInt128_mod (a b : ISize) : (a % b).toInt128 = a.toInt128 % b.toInt128 := Int128.toInt.inj (by simp)\ntheorem Int128.ofInt_tmod {a b : Int} (ha₁ : minValue.toInt ≤ a) (ha₂ : a ≤ maxValue.toInt)\n    (hb₁ : minValue.toInt ≤ b) (hb₂ : b ≤ maxValue.toInt) : Int128.ofInt (a.tmod b) = Int128.ofInt a % Int128.ofInt b := by\n  rw [Int128.ofInt_eq_iff_bmod_eq_toInt, ← toInt_bmod_size, toInt_mod, toInt_ofInt, toInt_ofInt,\n    Int.bmod_eq_of_le (n := a), Int.bmod_eq_of_le (n := b)]\n  · exact hb₁\n  · exact Int.lt_of_le_sub_one hb₂\n  · exact ha₁\n  · exact Int.lt_of_le_sub_one ha₂\ntheorem Int128.ofInt_eq_ofIntLE_mod {a b : Int} (ha₁ ha₂ hb₁ hb₂) :\n    Int128.ofInt (a.tmod b) = Int128.ofIntLE a ha₁ ha₂ % Int128.ofIntLE b hb₁ hb₂ := by\n  rw [ofIntLE_eq_ofInt, ofIntLE_eq_ofInt, ofInt_tmod ha₁ ha₂ hb₁ hb₂]\ntheorem Int128.ofNat_mod {a b : Nat} (ha : a < 2 ^ 127) (hb : b < 2 ^ 127) :\n    Int128.ofNat (a % b) = Int128.ofNat a % Int128.ofNat b := by\n  rw [← ofInt_eq_ofNat, ← ofInt_eq_ofNat, ← ofInt_eq_ofNat, Int.ofNat_tmod,\n    ofInt_tmod (by simp) _ (by simp)]\n  · exact Int.le_of_lt_add_one (Int.ofNat_le.2 hb)\n  · exact Int.le_of_lt_add_one (Int.ofNat_le.2 ha)\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/UInt/Basic.lean",
    "content": "import Hax.MissingLean.Init.Data.UInt.BasicAux\n\n@[inline] def UInt128.ofFin (a : Fin UInt128.size) : UInt128 := ⟨⟨a⟩⟩\n\ndef UInt128.ofInt (x : Int) : UInt128 := UInt128.ofNat (x % 2 ^ 128).toNat\n\nprotected def UInt128.add (a b : UInt128) : UInt128 := ⟨a.toBitVec + b.toBitVec⟩\nprotected def UInt128.sub (a b : UInt128) : UInt128 := ⟨a.toBitVec - b.toBitVec⟩\nprotected def UInt128.mul (a b : UInt128) : UInt128 := ⟨a.toBitVec * b.toBitVec⟩\nprotected def UInt128.div (a b : UInt128) : UInt128 := ⟨BitVec.udiv a.toBitVec b.toBitVec⟩\nprotected def UInt128.pow (x : UInt128) (n : Nat) : UInt128 :=\n  match n with\n  | 0 => 1\n  | n + 1 => UInt128.mul (UInt128.pow x n) x\nprotected def UInt128.mod (a b : UInt128) : UInt128 := ⟨BitVec.umod a.toBitVec b.toBitVec⟩\n\nset_option linter.missingDocs false in\n@[deprecated UInt128.mod (since := \"2024-09-23\")]\nprotected def UInt128.modn (a : UInt128) (n : Nat) : UInt128 := ⟨Fin.modn a.toFin n⟩\n\nprotected def UInt128.land (a b : UInt128) : UInt128 := ⟨a.toBitVec &&& b.toBitVec⟩\nprotected def UInt128.lor (a b : UInt128) : UInt128 := ⟨a.toBitVec ||| b.toBitVec⟩\nprotected def UInt128.xor (a b : UInt128) : UInt128 := ⟨a.toBitVec ^^^ b.toBitVec⟩\nprotected def UInt128.shiftLeft (a b : UInt128) : UInt128 := ⟨a.toBitVec <<< (UInt128.mod b 128).toBitVec⟩\nprotected def UInt128.shiftRight (a b : UInt128) : UInt128 := ⟨a.toBitVec >>> (UInt128.mod b 128).toBitVec⟩\nprotected def UInt128.lt (a b : UInt128) : Prop := a.toBitVec < b.toBitVec\nprotected def UInt128.le (a b : UInt128) : Prop := a.toBitVec ≤ b.toBitVec\n\ninstance : Add UInt128       := ⟨UInt128.add⟩\ninstance : Sub UInt128       := ⟨UInt128.sub⟩\ninstance : Mul UInt128       := ⟨UInt128.mul⟩\ninstance : Pow UInt128 Nat   := ⟨UInt128.pow⟩\ninstance : Mod UInt128       := ⟨UInt128.mod⟩\n\nset_option linter.deprecated false in\ninstance : HMod UInt128 Nat UInt128 := ⟨UInt128.modn⟩\n\ninstance : Div UInt128       := ⟨UInt128.div⟩\ninstance : LT UInt128       
 := ⟨UInt128.lt⟩\ninstance : LE UInt128        := ⟨UInt128.le⟩\n\nprotected def UInt128.complement (a : UInt128) : UInt128 := ⟨~~~a.toBitVec⟩\nprotected def UInt128.neg (a : UInt128) : UInt128 := ⟨-a.toBitVec⟩\n\ninstance : Complement UInt128 := ⟨UInt128.complement⟩\ninstance : Neg UInt128 := ⟨UInt128.neg⟩\ninstance : AndOp UInt128     := ⟨UInt128.land⟩\ninstance : OrOp UInt128      := ⟨UInt128.lor⟩\ninstance : XorOp UInt128       := ⟨UInt128.xor⟩\ninstance : ShiftLeft UInt128  := ⟨UInt128.shiftLeft⟩\ninstance : ShiftRight UInt128 := ⟨UInt128.shiftRight⟩\n\ndef Bool.toUInt128 (b : Bool) : UInt128 := if b then 1 else 0\n\ndef UInt128.decLt (a b : UInt128) : Decidable (a < b) :=\n  inferInstanceAs (Decidable (a.toBitVec < b.toBitVec))\n\ndef UInt128.decLe (a b : UInt128) : Decidable (a ≤ b) :=\n  inferInstanceAs (Decidable (a.toBitVec ≤ b.toBitVec))\n\nattribute [instance_reducible, instance] UInt128.decLt UInt128.decLe\n\ninstance : Max UInt128 := maxOfLe\ninstance : Min UInt128 := minOfLe\n\nopen Lean in\nset_option hygiene false in\nmacro \"additional_uint_decls\" typeName:ident width:term : command => do\n  let mut cmds := ← Syntax.getArgs <$> `(\n    namespace $typeName\n\n    theorem toNat_add_of_lt {x y : $typeName} (h : x.toNat + y.toNat < 2 ^ $width) :\n        (x + y).toNat = x.toNat + y.toNat := BitVec.toNat_add_of_lt h\n\n    theorem toNat_sub_of_le' {x y : $typeName} (h : y.toNat ≤ x.toNat) :\n        (x - y).toNat = x.toNat - y.toNat := BitVec.toNat_sub_of_le h\n\n    theorem toNat_mul_of_lt {x y : $typeName} (h : x.toNat * y.toNat < 2 ^ $width) :\n        (x * y).toNat = x.toNat * y.toNat := BitVec.toNat_mul_of_lt h\n\n    def addOverflow (a b : $typeName) : Bool :=\n      BitVec.uaddOverflow a.toBitVec b.toBitVec\n\n    def subOverflow (a b : $typeName) : Bool :=\n      BitVec.usubOverflow a.toBitVec b.toBitVec\n\n    def mulOverflow (a b : $typeName) : Bool :=\n      BitVec.umulOverflow a.toBitVec b.toBitVec\n\n    @[grind .]\n    theorem 
addOverflow_iff {a b : $typeName} : addOverflow a b ↔ a.toNat + b.toNat ≥ 2 ^ $width :=\n      decide_eq_true_iff\n\n    @[grind .]\n    theorem subOverflow_iff {a b : $typeName} : subOverflow a b ↔ a.toNat < b.toNat :=\n      decide_eq_true_iff\n\n    @[grind .]\n    theorem mulOverflow_iff {a b : $typeName} : mulOverflow a b ↔ a.toNat * b.toNat ≥ 2 ^ $width :=\n      decide_eq_true_iff\n\n    end $typeName\n  )\n  return ⟨mkNullNode cmds⟩\n\nadditional_uint_decls UInt8 8\nadditional_uint_decls UInt16 16\nadditional_uint_decls UInt32 32\nadditional_uint_decls UInt64 64\nadditional_uint_decls UInt128 128\nadditional_uint_decls USize System.Platform.numBits\n\nopen Lean in\nset_option hygiene false in\nmacro \"declare_missing_uint_conversions\" : command => do\n  let mut cmds := #[]\n  let src : List (Name × Nat) := [\n    (`UInt8, 8),\n    (`UInt16, 16),\n    (`UInt32, 32),\n    (`UInt64, 64),\n    (`USize, 0),\n  ]\n  let dst : List (Name × Nat) := [\n    (`Int8, 8),\n    (`Int16, 16),\n    (`Int32, 32),\n    (`Int64, 64),\n    (`ISize, 0)\n  ]\n  for (srcName, srcIdx) in src do\n    for (dstName, dstIdx) in dst do\n      let srcIdent := mkIdent srcName\n      let dstIdent := mkIdent dstName\n      if srcIdx != dstIdx then\n        cmds := cmds.push $ ← `(\n          def $(mkIdent (srcName ++ dstName.appendBefore \"to\")) (x : $srcIdent) : $dstIdent :=\n            $(mkIdent (`Nat ++ dstName.appendBefore \"to\")) x.toNat\n        )\n  return ⟨mkNullNode cmds⟩\n\ndeclare_missing_uint_conversions\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/UInt/BasicAux.lean",
    "content": "import Hax.MissingLean.Init.Prelude\n\ndef UInt128.toFin (x : UInt128) : Fin UInt128.size := x.toBitVec.toFin\n\ndef UInt128.ofNat (n : @& Nat) : UInt128 := ⟨BitVec.ofNat 128 n⟩\n\ndef UInt128.ofNatTruncate (n : Nat) : UInt128 :=\n  if h : n < UInt128.size then\n    UInt128.ofNatLT n h\n  else\n    UInt128.ofNatLT (UInt128.size - 1) (by decide)\n\nabbrev Nat.toUInt128 := UInt128.ofNat\n\ndef UInt128.toNat (n : UInt128) : Nat := n.toBitVec.toNat\n\ndef UInt128.toUInt8 (a : UInt128) : UInt8 := a.toNat.toUInt8\ndef UInt128.toUInt16 (a : UInt128) : UInt16 := a.toNat.toUInt16\ndef UInt128.toUInt32 (a : UInt128) : UInt32 := a.toNat.toUInt32\ndef UInt128.toUInt64 (a : UInt128) : UInt64 := a.toNat.toUInt64\ndef UInt128.toUSize (a : UInt128) : USize := a.toNat.toUSize\n\ndef UInt8.toUInt128 (a : UInt8) : UInt128 := ⟨BitVec.ofNat 128 a.toNat⟩\ndef UInt16.toUInt128 (a : UInt16) : UInt128 := ⟨BitVec.ofNat 128 a.toNat⟩\ndef UInt32.toUInt128 (a : UInt32) : UInt128 := ⟨BitVec.ofNat 128 a.toNat⟩\ndef UInt64.toUInt128 (a : UInt64) : UInt128 := ⟨BitVec.ofNat 128 a.toNat⟩\ndef USize.toUInt128 (a : USize) : UInt128 := ⟨BitVec.ofNat 128 a.toNat⟩\n\ninstance UInt128.instOfNat (n : Nat) : OfNat UInt128 n := ⟨UInt128.ofNat n⟩\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/UInt/Lemmas.lean",
    "content": "import Hax.MissingLean.Init.Data.UInt.Lemmas_UInt128\nimport Hax.MissingLean.Init.GrindInstances.Ring.UInt\n\nattribute [grind =_] UInt8.le_ofNat_iff\nattribute [grind =_] UInt16.le_ofNat_iff\nattribute [grind =_] UInt32.le_ofNat_iff\nattribute [grind =_] UInt64.le_ofNat_iff\nattribute [grind =_] UInt128.le_ofNat_iff\n\ntheorem UInt64.le_self_add {a b : UInt64} (h : a.toNat + b.toNat < 2 ^ 64) :\n    a ≤ a + b := by\n  rw [le_iff_toNat_le, UInt64.toNat_add_of_lt h]\n  exact Nat.le_add_right a.toNat b.toNat\n\ntheorem UInt64.succ_le_of_lt {a b : UInt64} (h : a < b) :\n    a + 1 ≤ b := by grind\n\ntheorem UInt64.add_le_of_le {a b c : UInt64} (habc : a + b ≤ c) (hab : a.toNat + b.toNat < 2 ^ 64):\n    a ≤ c := by\n  rw [UInt64.le_iff_toNat_le, UInt64.toNat_add_of_lt hab] at *\n  omega\n\nopen Lean in\nset_option hygiene false in\nmacro \"additional_uint_lemmas\" typeName:ident _width:term : command => do\n  let tyDot (n : Name) := mkIdent (typeName.getId ++ n)\n  let tyRw (n : Name) : TSyntax `Lean.Parser.Tactic.rwRule := .mk\n    (Syntax.node .none ``Lean.Parser.Tactic.rwRule #[mkNullNode, tyDot n])\n  `(\n    namespace $typeName\n\n      theorem ofNat_eq_of_toNat_eq {a : Nat} {b : $typeName} (h : b.toNat = a) : ofNat a = b := by\n        subst_vars; exact $(mkIdent (typeName.getId ++ `ofNat_toNat))\n\n      theorem sub_add_eq {a b c : $typeName} : a - (b + c) = a - b - c := by grind\n\n      theorem sub_succ_lt_self (a b : $typeName) (h : a < b) :\n          (b - (a + 1)).toNat < (b - a).toNat := by\n        rw [sub_add_eq]\n        rw [$(tyRw `toNat_sub_of_le)]\n        try simp only [USize.toNat_one]\n        apply Nat.sub_one_lt_of_lt\n        · change (0 : $typeName).toNat < (b - a).toNat\n          rw [← lt_iff_toNat_lt]\n          grind\n        · grind\n\n    end $typeName\n  )\n\nadditional_uint_lemmas UInt8 8\nadditional_uint_lemmas UInt16 16\nadditional_uint_lemmas UInt32 32\nadditional_uint_lemmas UInt64 64\nadditional_uint_lemmas UInt128 
128\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/UInt/Lemmas_UInt128.lean",
    "content": "import Hax.MissingLean.Lean.Tactic.Simp.BuiltinSimpProcs.UInt\n\n-- Adapted from Init/Data/UInt/Lemmas.lean from the Lean v4.29.0-rc1 source code\n\nset_option autoImplicit true\nopen Std\n\ndeclare_uint_theorems UInt128 128\n@[simp] theorem UInt128.toNat_toUInt64 (x : UInt128) : x.toUInt64.toNat = x.toNat % 2 ^ 64 := (rfl)\n\ntheorem UInt128.ofNat_mod_size : ofNat (x % 2 ^ 128) = ofNat x := by\n  simp [ofNat, BitVec.ofNat, Fin.ofNat]\n\ntheorem UInt128.ofNat_size : ofNat size = 0 := by decide\n\ntheorem UInt128.lt_ofNat_iff {n : UInt128} {m : Nat} (h : m < size) : n < ofNat m ↔ n.toNat < m := by\n  rw [lt_iff_toNat_lt, toNat_ofNat_of_lt' h]\ntheorem UInt128.ofNat_lt_iff {n : UInt128} {m : Nat} (h : m < size) : ofNat m < n ↔ m < n.toNat := by\n  rw [lt_iff_toNat_lt, toNat_ofNat_of_lt' h]\ntheorem UInt128.le_ofNat_iff {n : UInt128} {m : Nat} (h : m < size) : n ≤ ofNat m ↔ n.toNat ≤ m := by\n  rw [le_iff_toNat_le, toNat_ofNat_of_lt' h]\ntheorem UInt128.ofNat_le_iff {n : UInt128} {m : Nat} (h : m < size) : ofNat m ≤ n ↔ m ≤ n.toNat := by\n  rw [le_iff_toNat_le, toNat_ofNat_of_lt' h]\n\nprotected theorem UInt128.mod_eq_of_lt {a b : UInt128} (h : a < b) : a % b = a := UInt128.toNat_inj.1 (Nat.mod_eq_of_lt h)\n\n@[simp] theorem UInt128.toNat_lt (n : UInt128) : n.toNat < 2 ^ 128 := n.toFin.isLt\n\ntheorem USize.size_le_uint128Size : USize.size ≤ UInt128.size := by\n  cases USize.size_eq <;> simp_all +decide\n\ntheorem USize.size_dvd_uInt128Size : USize.size ∣ UInt128.size := by cases USize.size_eq <;> simp_all +decide\n\n@[simp] theorem mod_uInt128Size_uSizeSize (n : Nat) : n % UInt128.size % USize.size = n % USize.size :=\n  Nat.mod_mod_of_dvd _ USize.size_dvd_uInt128Size\n\n@[simp] theorem UInt128.size_sub_one_mod_uSizeSize : 340282366920938463463374607431768211455 % USize.size = USize.size - 1 := by\n  cases USize.size_eq <;> simp_all +decide\n\n@[simp] theorem UInt8.toNat_mod_uInt128Size (n : UInt8) : n.toNat % UInt128.size = n.toNat := 
Nat.mod_eq_of_lt (Nat.lt_trans n.toNat_lt (by decide))\n@[simp] theorem UInt16.toNat_mod_uInt128Size (n : UInt16) : n.toNat % UInt128.size = n.toNat := Nat.mod_eq_of_lt (Nat.lt_trans n.toNat_lt (by decide))\n@[simp] theorem UInt32.toNat_mod_uInt128Size (n : UInt32) : n.toNat % UInt128.size = n.toNat := Nat.mod_eq_of_lt (Nat.lt_trans n.toNat_lt (by decide))\n@[simp] theorem UInt64.toNat_mod_uInt128Size (n : UInt64) : n.toNat % UInt128.size = n.toNat := Nat.mod_eq_of_lt (Nat.lt_trans n.toNat_lt (by decide))\n@[simp] theorem UInt128.toNat_mod_size (n : UInt128) : n.toNat % UInt128.size = n.toNat := Nat.mod_eq_of_lt n.toNat_lt\n@[simp] theorem USize.toNat_mod_uInt128Size (n : USize) : n.toNat % UInt128.size = n.toNat := Nat.mod_eq_of_lt (Nat.lt_trans n.toNat_lt (by decide))\n\n-- @[simp] theorem UInt8.toUInt128_mod_256 (n : UInt8) : n.toUInt128 % 256 = n.toUInt128 := UInt128.toNat.inj (by simp)\n-- @[simp] theorem UInt16.toUInt128_mod_65536 (n : UInt16) : n.toUInt128 % 65536 = n.toUInt128 := UInt128.toNat.inj (by simp)\n-- @[simp] theorem UInt32.toUInt128_mod_4294967296 (n : UInt32) : n.toUInt128 % 4294967296 = n.toUInt128 := UInt128.toNat.inj (by simp)\n\n@[simp] theorem Fin.mk_uInt128ToNat (n : UInt128) : Fin.mk n.toNat (by exact n.toFin.isLt) = n.toFin := (rfl)\n\n@[simp] theorem BitVec.ofNatLT_uInt128ToNat (n : UInt128) : BitVec.ofNatLT n.toNat (by exact n.toFin.isLt) = n.toBitVec := (rfl)\n\n@[simp] theorem BitVec.ofFin_uInt128ToFin (n : UInt128) : BitVec.ofFin n.toFin = n.toBitVec := (rfl)\n\n-- @[simp] theorem UInt8.toFin_toUInt128 (n : UInt8) : n.toUInt128.toFin = n.toFin.castLE (by decide) := (rfl)\n-- @[simp] theorem UInt16.toFin_toUInt128 (n : UInt16) : n.toUInt128.toFin = n.toFin.castLE (by decide) := (rfl)\n-- @[simp] theorem UInt32.toFin_toUInt128 (n : UInt32) : n.toUInt128.toFin = n.toFin.castLE (by decide) := (rfl)\n-- @[simp] theorem USize.toFin_toUInt128 (n : USize) : n.toUInt128.toFin = n.toFin.castLE size_le_uint128Size := (rfl)\n\n@[simp, 
int_toBitVec] theorem UInt128.toBitVec_toUInt8 (n : UInt128) : n.toUInt8.toBitVec = n.toBitVec.setWidth 8 := (rfl)\n@[simp, int_toBitVec] theorem UInt128.toBitVec_toUInt16 (n : UInt128) : n.toUInt16.toBitVec = n.toBitVec.setWidth 16 := (rfl)\n@[simp, int_toBitVec] theorem UInt128.toBitVec_toUInt32 (n : UInt128) : n.toUInt32.toBitVec = n.toBitVec.setWidth 32 := (rfl)\n\n-- @[simp, int_toBitVec] theorem UInt8.toBitVec_toUInt128 (n : UInt8) : n.toUInt128.toBitVec = n.toBitVec.setWidth 128 := (rfl)\n-- @[simp, int_toBitVec] theorem UInt16.toBitVec_toUInt128 (n : UInt16) : n.toUInt128.toBitVec = n.toBitVec.setWidth 128 := (rfl)\n-- @[simp, int_toBitVec] theorem UInt32.toBitVec_toUInt128 (n : UInt32) : n.toUInt128.toBitVec = n.toBitVec.setWidth 128 := (rfl)\n-- @[simp, int_toBitVec] theorem USize.toBitVec_toUInt128 (n : USize) : n.toUInt128.toBitVec = n.toBitVec.setWidth 128 :=\n--   BitVec.eq_of_toNat_eq (by simp)\n\n@[simp, int_toBitVec] theorem UInt128.toBitVec_toUSize (n : UInt128) : n.toUSize.toBitVec = n.toBitVec.setWidth System.Platform.numBits :=\n  BitVec.eq_of_toNat_eq (by simp)\n\n-- @[simp] theorem UInt128.ofNatLT_uInt8ToNat (n : UInt8) : UInt128.ofNatLT n.toNat (Nat.lt_trans n.toNat_lt (by decide)) = n.toUInt128 := (rfl)\n-- @[simp] theorem UInt128.ofNatLT_uInt16ToNat (n : UInt16) : UInt128.ofNatLT n.toNat (Nat.lt_trans n.toNat_lt (by decide)) = n.toUInt128 := (rfl)\n-- @[simp] theorem UInt128.ofNatLT_uInt32ToNat (n : UInt32) : UInt128.ofNatLT n.toNat (Nat.lt_trans n.toNat_lt (by decide)) = n.toUInt128 := (rfl)\n-- @[simp] theorem UInt128.ofNatLT_toNat (n : UInt128) : UInt128.ofNatLT n.toNat n.toNat_lt = n := (rfl)\n-- @[simp] theorem UInt128.ofNatLT_uSizeToNat (n : USize) : UInt128.ofNatLT n.toNat (Nat.lt_trans n.toNat_lt (by decide)) = n.toUInt128 := (rfl)\n\ntheorem UInt8.ofNatLT_uInt128ToNat (n : UInt128) (h) : UInt8.ofNatLT n.toNat h = n.toUInt8 :=\n  UInt8.toNat.inj (by simp [Nat.mod_eq_of_lt h])\ntheorem UInt16.ofNatLT_uInt128ToNat (n : UInt128) (h) : 
UInt16.ofNatLT n.toNat h = n.toUInt16 :=\n  UInt16.toNat.inj (by simp [Nat.mod_eq_of_lt h])\ntheorem UInt32.ofNatLT_uInt128ToNat (n : UInt128) (h) : UInt32.ofNatLT n.toNat h = n.toUInt32 :=\n  UInt32.toNat.inj (by simp [Nat.mod_eq_of_lt h])\ntheorem USize.ofNatLT_uInt128ToNat (n : UInt128) (h) : USize.ofNatLT n.toNat h = n.toUSize :=\n  USize.toNat.inj (by simp [Nat.mod_eq_of_lt h])\n\n@[simp] theorem UInt128.ofFin_toFin (n : UInt128) : UInt128.ofFin n.toFin = n := (rfl)\n\n@[simp] theorem UInt128.toFin_ofFin (n : Fin UInt128.size) : (UInt128.ofFin n).toFin = n := (rfl)\n\n-- @[simp] theorem UInt128.ofFin_uint8ToFin (n : UInt8) : UInt128.ofFin (n.toFin.castLE (by decide)) = n.toUInt128 := (rfl)\n-- @[simp] theorem UInt128.ofFin_uint16ToFin (n : UInt16) : UInt128.ofFin (n.toFin.castLE (by decide)) = n.toUInt128 := (rfl)\n-- @[simp] theorem UInt128.ofFin_uint32ToFin (n : UInt32) : UInt128.ofFin (n.toFin.castLE (by decide)) = n.toUInt128 := (rfl)\n\n@[simp] theorem Nat.toUInt128_eq {n : Nat} : n.toUInt128 = UInt128.ofNat n := (rfl)\n\n@[simp] theorem UInt8.ofBitVec_uInt128ToBitVec (n : UInt128) :\n    UInt8.ofBitVec (n.toBitVec.setWidth 8) = n.toUInt8 := (rfl)\n@[simp] theorem UInt16.ofBitVec_uInt128ToBitVec (n : UInt128) :\n    UInt16.ofBitVec (n.toBitVec.setWidth 16) = n.toUInt16 := (rfl)\n@[simp] theorem UInt32.ofBitVec_uInt128ToBitVec (n : UInt128) :\n    UInt32.ofBitVec (n.toBitVec.setWidth 32) = n.toUInt32 := (rfl)\n\n-- @[simp] theorem UInt128.ofBitVec_uInt8ToBitVec (n : UInt8) :\n--     UInt128.ofBitVec (n.toBitVec.setWidth 128) = n.toUInt128 := (rfl)\n-- @[simp] theorem UInt128.ofBitVec_uInt16ToBitVec (n : UInt16) :\n--     UInt128.ofBitVec (n.toBitVec.setWidth 128) = n.toUInt128 := (rfl)\n-- @[simp] theorem UInt128.ofBitVec_uInt32ToBitVec (n : UInt32) :\n--     UInt128.ofBitVec (n.toBitVec.setWidth 128) = n.toUInt128 := (rfl)\n-- @[simp] theorem UInt128.ofBitVec_uSizeToBitVec (n : USize) :\n--     UInt128.ofBitVec (n.toBitVec.setWidth 128) = n.toUInt128 
:=\n--   UInt128.toNat.inj (by simp)\n\n@[simp] theorem USize.ofBitVec_uInt128ToBitVec (n : UInt128) :\n    USize.ofBitVec (n.toBitVec.setWidth System.Platform.numBits) = n.toUSize :=\n  USize.toNat.inj (by simp)\n\n@[simp] theorem UInt8.ofNat_uInt128ToNat (n : UInt128) : UInt8.ofNat n.toNat = n.toUInt8 := (rfl)\n@[simp] theorem UInt16.ofNat_uInt128ToNat (n : UInt128) : UInt16.ofNat n.toNat = n.toUInt16 := (rfl)\n@[simp] theorem UInt32.ofNat_uInt128ToNat (n : UInt128) : UInt32.ofNat n.toNat = n.toUInt32 := (rfl)\n\n-- @[simp] theorem UInt128.ofNat_uInt8ToNat (n : UInt8) : UInt128.ofNat n.toNat = n.toUInt128 :=\n--   UInt128.toNat.inj (by simp)\n-- @[simp] theorem UInt128.ofNat_uInt16ToNat (n : UInt16) : UInt128.ofNat n.toNat = n.toUInt128 :=\n--   UInt128.toNat.inj (by simp)\n-- @[simp] theorem UInt128.ofNat_uInt32ToNat (n : UInt32) : UInt128.ofNat n.toNat = n.toUInt128 :=\n--   UInt128.toNat.inj (by simp)\n-- @[simp] theorem UInt128.ofNat_uSizeToNat (n : USize) : UInt128.ofNat n.toNat = n.toUInt128 :=\n--   UInt128.toNat.inj (by simp)\n\n@[simp] theorem USize.ofNat_uInt128ToNat (n : UInt128) : USize.ofNat n.toNat = n.toUSize :=\n  USize.toNat.inj (by simp)\n\ntheorem UInt128.ofNatLT_eq_ofNat (n : Nat) {h} : UInt128.ofNatLT n h = UInt128.ofNat n :=\n  UInt128.toNat.inj (by simp [Nat.mod_eq_of_lt h])\n\ntheorem UInt128.ofNatTruncate_eq_ofNat (n : Nat) (hn : n < UInt128.size) :\n    UInt128.ofNatTruncate n = UInt128.ofNat n := by\n  simp [ofNatTruncate, hn, UInt128.ofNatLT_eq_ofNat]\n\n-- @[simp] theorem UInt128.ofNatTruncate_uInt8ToNat (n : UInt8) : UInt128.ofNatTruncate n.toNat = n.toUInt128 := by\n--   rw [UInt128.ofNatTruncate_eq_ofNat, ofNat_uInt8ToNat]\n--   exact Nat.lt_trans (n.toNat_lt) (by decide)\n-- @[simp] theorem UInt128.ofNatTruncate_uInt16ToNat (n : UInt16) : UInt128.ofNatTruncate n.toNat = n.toUInt128 := by\n--   rw [UInt128.ofNatTruncate_eq_ofNat, ofNat_uInt16ToNat]\n--   exact Nat.lt_trans (n.toNat_lt) (by decide)\n-- @[simp] theorem 
UInt128.ofNatTruncate_uInt32ToNat (n : UInt32) : UInt128.ofNatTruncate n.toNat = n.toUInt128 := by\n--   rw [UInt128.ofNatTruncate_eq_ofNat, ofNat_uInt32ToNat]\n--   exact Nat.lt_trans (n.toNat_lt) (by decide)\n-- @[simp] theorem UInt128.ofNatTruncate_uInt64ToNat (n : UInt64) : UInt128.ofNatTruncate n.toNat = n.toUInt128 := by\n--   rw [UInt128.ofNatTruncate_eq_ofNat, ofNat_uInt64ToNat]\n--   exact Nat.lt_trans n.toNat_lt (by norm_num [UInt64.size, UInt128.size])\n@[simp] theorem UInt128.ofNatTruncate_toNat (n : UInt128) : UInt128.ofNatTruncate n.toNat = n := by\n  rw [UInt128.ofNatTruncate_eq_ofNat] <;> simp [n.toNat_lt]\n\n-- @[simp] theorem UInt8.toUInt8_toUInt128 (n : UInt8) : n.toUInt128.toUInt8 = n :=\n--   UInt8.toNat.inj (by simp)\n-- @[simp] theorem UInt8.toUInt16_toUInt128 (n : UInt8) : n.toUInt128.toUInt16 = n.toUInt16 :=\n--   UInt16.toNat.inj (by simp)\n-- @[simp] theorem UInt8.toUInt32_toUInt128 (n : UInt8) : n.toUInt128.toUInt32 = n.toUInt32 :=\n--   UInt32.toNat.inj (by simp)\n-- @[simp] theorem UInt8.toUInt64_toUInt128 (n : UInt8) : n.toUInt128.toUInt64 = n.toUInt64 :=\n--   UInt64.toNat.inj (by simp)\n@[simp] theorem UInt8.toUInt128_toUInt16 (n : UInt8) : n.toUInt16.toUInt128 = n.toUInt128 := (rfl)\n@[simp] theorem UInt8.toUInt128_toUInt32 (n : UInt8) : n.toUInt32.toUInt128 = n.toUInt128 := (rfl)\n@[simp] theorem UInt8.toUInt128_toUInt64 (n : UInt8) : n.toUInt64.toUInt128 = n.toUInt128 := (rfl)\n\n-- @[simp] theorem UInt16.toUInt8_toUInt128 (n : UInt16) : n.toUInt128.toUInt8 = n.toUInt8 := (rfl)\n-- @[simp] theorem UInt16.toUInt16_toUInt128 (n : UInt16) : n.toUInt128.toUInt16 = n :=\n--   UInt16.toNat.inj (by simp)\n-- @[simp] theorem UInt16.toUInt32_toUInt128 (n : UInt16) : n.toUInt128.toUInt32 = n.toUInt32 :=\n--   UInt32.toNat.inj (by simp)\n-- @[simp] theorem UInt16.toUInt64_toUInt128 (n : UInt16) : n.toUInt128.toUInt64 = n.toUInt64 :=\n--   UInt64.toNat.inj (by simp)\n-- @[simp] theorem UInt16.toUInt128_toUInt8 (n : UInt16) : 
n.toUInt8.toUInt128 = n.toUInt128 % 256 := (rfl)\n@[simp] theorem UInt16.toUInt128_toUInt32 (n : UInt16) : n.toUInt32.toUInt128 = n.toUInt128 := (rfl)\n@[simp] theorem UInt16.toUInt128_toUInt64 (n : UInt16) : n.toUInt64.toUInt128 = n.toUInt128 := (rfl)\n\n-- @[simp] theorem UInt32.toUInt8_toUInt128 (n : UInt32) : n.toUInt128.toUInt8 = n.toUInt8 := (rfl)\n-- @[simp] theorem UInt32.toUInt16_toUInt128 (n : UInt32) : n.toUInt128.toUInt16 = n.toUInt16 := (rfl)\n-- @[simp] theorem UInt32.toUInt32_toUInt128 (n : UInt32) : n.toUInt128.toUInt32 = n :=\n--   UInt32.toNat.inj (by simp)\n-- @[simp] theorem UInt32.toUInt64_toUInt128 (n : UInt32) : n.toUInt128.toUInt64 = n.toUInt64 :=\n--   UInt64.toNat.inj (by simp)\n-- @[simp] theorem UInt32.toUInt128_toUInt8 (n : UInt32) : n.toUInt8.toUInt128 = n.toUInt128 % 256 := (rfl)\n-- @[simp] theorem UInt32.toUInt128_toUInt16 (n : UInt32) : n.toUInt16.toUInt128 = n.toUInt128 % 65536 := (rfl)\n@[simp] theorem UInt32.toUInt128_toUInt64 (n : UInt32) : n.toUInt64.toUInt128 = n.toUInt128 := (rfl)\n\n-- @[simp] theorem UInt64.toUInt8_toUInt128 (n : UInt64) : n.toUInt128.toUInt8 = n.toUInt8 := (rfl)\n-- @[simp] theorem UInt64.toUInt16_toUInt128 (n : UInt64) : n.toUInt128.toUInt16 = n.toUInt16 := (rfl)\n-- @[simp] theorem UInt64.toUInt32_toUInt128 (n : UInt64) : n.toUInt128.toUInt32 = n.toUInt32 := (rfl)\n-- @[simp] theorem UInt64.toUInt64_toUInt128 (n : UInt64) : n.toUInt128.toUInt64 = n :=\n--   UInt64.toNat.inj (by simp)\n-- @[simp] theorem UInt64.toUInt128_toUInt8 (n : UInt64) : n.toUInt8.toUInt128 = n.toUInt128 % 256 := (rfl)\n-- @[simp] theorem UInt64.toUInt128_toUInt16 (n : UInt64) : n.toUInt16.toUInt128 = n.toUInt128 % 65536 := (rfl)\n-- @[simp] theorem UInt64.toUInt128_toUInt32 (n : UInt64) : n.toUInt32.toUInt128 = n.toUInt128 % 4294967296 := (rfl)\n\n@[simp] theorem UInt128.toUInt8_toUInt16 (n : UInt128) : n.toUInt16.toUInt8 = n.toUInt8 :=\n  UInt8.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt8_toUInt32 (n : UInt128) : 
n.toUInt32.toUInt8 = n.toUInt8 :=\n  UInt8.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt8_toUInt64 (n : UInt128) : n.toUInt64.toUInt8 = n.toUInt8 :=\n  UInt8.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt16_toUInt8 (n : UInt128) : n.toUInt8.toUInt16 = n.toUInt16 % 256 :=\n  UInt16.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt16_toUInt32 (n : UInt128) : n.toUInt32.toUInt16 = n.toUInt16 :=\n  UInt16.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt16_toUInt64 (n : UInt128) : n.toUInt64.toUInt16 = n.toUInt16 :=\n  UInt16.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt32_toUInt8 (n : UInt128) : n.toUInt8.toUInt32 = n.toUInt32 % 256 :=\n  UInt32.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt32_toUInt16 (n : UInt128) : n.toUInt16.toUInt32 = n.toUInt32 % 65536 :=\n  UInt32.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt32_toUInt64 (n : UInt128) : n.toUInt64.toUInt32 = n.toUInt32 :=\n  UInt32.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt64_toUInt8 (n : UInt128) : n.toUInt8.toUInt64 = n.toUInt64 % 256 :=\n  UInt64.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt64_toUInt16 (n : UInt128) : n.toUInt16.toUInt64 = n.toUInt64 % 65536 :=\n  UInt64.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt64_toUInt32 (n : UInt128) : n.toUInt32.toUInt64 = n.toUInt64 % 4294967296 :=\n  UInt64.toNat.inj (by simp)\n-- @[simp] theorem UInt128.toUInt128_toUInt8 (n : UInt128) : n.toUInt8.toUInt128 = n % 256 := (rfl)\n-- @[simp] theorem UInt128.toUInt128_toUInt16 (n : UInt128) : n.toUInt16.toUInt128 = n % 65536 := (rfl)\n-- @[simp] theorem UInt128.toUInt128_toUInt32 (n : UInt128) : n.toUInt32.toUInt128 = n % 4294967296 := (rfl)\n-- @[simp] theorem UInt128.toUInt128_toUInt64 (n : UInt128) : n.toUInt64.toUInt128 = n % 18446744073709551616 :=\n--   UInt128.toNat.inj (by simp)\n\n@[simp] theorem UInt128.toNat_ofFin (x : Fin UInt128.size) : (UInt128.ofFin x).toNat = x.val := (rfl)\n\ntheorem UInt128.toNat_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) 
:\n    (UInt128.ofNatTruncate n).toNat = n := by rw [UInt128.ofNatTruncate, dif_pos hn, toNat_ofNatLT]\n\ntheorem UInt128.toNat_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) :\n    (UInt128.ofNatTruncate n).toNat = UInt128.size - 1 := by rw [ofNatTruncate, dif_neg (by omega), toNat_ofNatLT]\n\ntheorem UInt128.toFin_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) :\n    (UInt128.ofNatTruncate n).toFin = ⟨n, hn⟩ :=\n  Fin.val_inj.1 (by simp [toNat_ofNatTruncate_of_lt hn])\n\ntheorem UInt128.toFin_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) :\n    (UInt128.ofNatTruncate n).toFin = ⟨UInt128.size - 1, by decide⟩ :=\n  Fin.val_inj.1 (by simp [toNat_ofNatTruncate_of_le hn])\n\ntheorem UInt128.toBitVec_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) :\n    (UInt128.ofNatTruncate n).toBitVec = BitVec.ofNatLT n hn :=\n  BitVec.eq_of_toNat_eq (by simp [toNat_ofNatTruncate_of_lt hn])\n\ntheorem UInt128.toBitVec_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) :\n    (UInt128.ofNatTruncate n).toBitVec = BitVec.ofNatLT (UInt128.size - 1) (by decide) :=\n  BitVec.eq_of_toNat_eq (by simp [toNat_ofNatTruncate_of_le hn])\n\n-- theorem UInt128.toUInt8_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) :\n--     (UInt128.ofNatTruncate n).toUInt8 = UInt8.ofNat n := by rw [ofNatTruncate, dif_pos hn, toUInt8_ofNatLT]\n\ntheorem UInt128.toUInt8_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) :\n    (UInt128.ofNatTruncate n).toUInt8 = UInt8.ofNatLT (UInt8.size - 1) (by decide) :=\n  UInt8.toNat.inj (by simp [toNat_ofNatTruncate_of_le hn])\n\n-- theorem UInt128.toUInt16_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) :\n--     (UInt128.ofNatTruncate n).toUInt16 = UInt16.ofNat n := by rw [ofNatTruncate, dif_pos hn, toUInt16_ofNatLT]\n\ntheorem UInt128.toUInt16_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) :\n    (UInt128.ofNatTruncate n).toUInt16 = UInt16.ofNatLT (UInt16.size - 1) (by decide) :=\n  UInt16.toNat.inj (by simp 
[toNat_ofNatTruncate_of_le hn])\n\n-- theorem UInt128.toUInt32_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) :\n--     (UInt128.ofNatTruncate n).toUInt32 = UInt32.ofNat n := by rw [ofNatTruncate, dif_pos hn, toUInt32_ofNatLT]\n\ntheorem UInt128.toUInt32_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) :\n    (UInt128.ofNatTruncate n).toUInt32 = UInt32.ofNatLT (UInt32.size - 1) (by decide) :=\n  UInt32.toNat.inj (by simp [toNat_ofNatTruncate_of_le hn])\n\n-- theorem UInt128.toUSize_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt128.size) :\n--     (UInt128.ofNatTruncate n).toUSize = USize.ofNat n := by rw [ofNatTruncate, dif_pos hn, toUSize_ofNatLT]\n\ntheorem UInt128.toUSize_ofNatTruncate_of_le {n : Nat} (hn : UInt128.size ≤ n) :\n    (UInt128.ofNatTruncate n).toUSize = USize.ofNatLT (USize.size - 1) (by cases USize.size_eq <;> simp_all) :=\n  USize.toNat.inj (by simp [toNat_ofNatTruncate_of_le hn])\n\n-- theorem UInt16.toUInt128_ofNatLT {n : Nat} (h) :\n--     (UInt16.ofNatLT n h).toUInt128 = UInt128.ofNatLT n (Nat.lt_of_lt_of_le h (by decide)) := (rfl)\n-- theorem UInt16.toUInt128_ofFin {n} :\n--   (UInt16.ofFin n).toUInt128 = UInt128.ofNatLT n.val (Nat.lt_of_lt_of_le n.isLt (by decide)) := (rfl)\n-- @[simp] theorem UInt16.toUInt128_ofBitVec {b} : (UInt16.ofBitVec b).toUInt128 = UInt128.ofBitVec (b.setWidth _) := (rfl)\n-- theorem UInt16.toUInt128_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt16.size) :\n--     (UInt16.ofNatTruncate n).toUInt128 = UInt128.ofNatLT n (Nat.lt_of_lt_of_le hn (by decide)) :=\n--   UInt128.toNat.inj (by simp [toNat_ofNatTruncate_of_lt hn])\n-- theorem UInt16.toUInt128_ofNatTruncate_of_le {n : Nat} (hn : UInt16.size ≤ n) :\n--     (UInt16.ofNatTruncate n).toUInt128 = UInt128.ofNatLT (UInt16.size - 1) (by decide) :=\n--   UInt128.toNat.inj (by simp [toNat_ofNatTruncate_of_le hn])\n-- theorem UInt32.toUInt128_ofNatLT {n : Nat} (h) :\n--     (UInt32.ofNatLT n h).toUInt128 = UInt128.ofNatLT n (Nat.lt_of_lt_of_le h (by decide)) := 
(rfl)\n-- theorem UInt32.toUInt128_ofFin {n} :\n--   (UInt32.ofFin n).toUInt128 = UInt128.ofNatLT n.val (Nat.lt_of_lt_of_le n.isLt (by decide)) := (rfl)\n-- @[simp] theorem UInt32.toUInt128_ofBitVec {b} : (UInt32.ofBitVec b).toUInt128 = UInt128.ofBitVec (b.setWidth _) := (rfl)\n-- theorem UInt32.toUInt128_ofNatTruncate_of_lt {n : Nat} (hn : n < UInt32.size) :\n--     (UInt32.ofNatTruncate n).toUInt128 = UInt128.ofNatLT n (Nat.lt_of_lt_of_le hn (by decide)) :=\n--   UInt128.toNat.inj (by simp [toNat_ofNatTruncate_of_lt hn])\n-- theorem UInt32.toUInt128_ofNatTruncate_of_le {n : Nat} (hn : UInt32.size ≤ n) :\n--     (UInt32.ofNatTruncate n).toUInt128 = UInt128.ofNatLT (UInt32.size - 1) (by decide) :=\n--   UInt128.toNat.inj (by simp [toNat_ofNatTruncate_of_le hn])\n-- theorem USize.toUInt128_ofNatLT {n : Nat} (h) :\n--     (USize.ofNatLT n h).toUInt128 = UInt128.ofNatLT n (Nat.lt_of_lt_of_le h size_le_uint128Size) := (rfl)\n\n-- theorem USize.toUInt128_ofFin {n} :\n--   (USize.ofFin n).toUInt128 = UInt128.ofNatLT n.val (Nat.lt_of_lt_of_le n.isLt size_le_uint128Size) := (rfl)\n\n-- @[simp] theorem USize.toUInt128_ofBitVec {b} : (USize.ofBitVec b).toUInt128 = UInt128.ofBitVec (b.setWidth _) :=\n--   UInt128.toBitVec_inj.1 (by simp)\n\n-- theorem USize.toUInt128_ofNatTruncate_of_lt {n : Nat} (hn : n < USize.size) :\n--     (USize.ofNatTruncate n).toUInt128 = UInt128.ofNatLT n (Nat.lt_of_lt_of_le hn size_le_uint128Size) :=\n--   UInt128.toNat.inj (by simp [toNat_ofNatTruncate_of_lt hn])\n\n-- theorem USize.toUInt128_ofNatTruncate_of_le {n : Nat} (hn : USize.size ≤ n) :\n--     (USize.ofNatTruncate n).toUInt128 = UInt128.ofNatLT (USize.size - 1) (by cases USize.size_eq <;> simp_all +decide) :=\n--   UInt128.toNat.inj (by simp [toNat_ofNatTruncate_of_le hn])\n-- @[simp] theorem UInt8.toUInt128_ofNat' {n : Nat} (hn : n < UInt8.size) : (UInt8.ofNat n).toUInt128 = UInt128.ofNat n := by\n--   rw [← UInt8.ofNatLT_eq_ofNat (h := hn), toUInt128_ofNatLT, 
UInt128.ofNatLT_eq_ofNat]\n-- @[simp] theorem UInt16.toUInt128_ofNat' {n : Nat} (hn : n < UInt16.size) : (UInt16.ofNat n).toUInt128 = UInt128.ofNat n := by\n  -- rw [← UInt16.ofNatLT_eq_ofNat (h := hn), toUInt128_ofNatLT, UInt128.ofNatLT_eq_ofNat]\n\n-- @[simp] theorem UInt32.toUInt128_ofNat' {n : Nat} (hn : n < UInt32.size) : (UInt32.ofNat n).toUInt128 = UInt128.ofNat n := by\n--   rw [← UInt32.ofNatLT_eq_ofNat (h := hn), toUInt128_ofNatLT, UInt128.ofNatLT_eq_ofNat]\n\n-- @[simp] theorem USize.toUInt128_ofNat' {n : Nat} (hn : n < USize.size) : (USize.ofNat n).toUInt128 = UInt128.ofNat n := by\n--   rw [← USize.ofNatLT_eq_ofNat (h := hn), toUInt128_ofNatLT, UInt128.ofNatLT_eq_ofNat]\n-- @[simp] theorem UInt8.toUInt128_ofNat {n : Nat} (hn : n < 256) : toUInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=\n--   UInt8.toUInt128_ofNat' hn\n-- @[simp] theorem UInt16.toUInt128_ofNat {n : Nat} (hn : n < 65536) : toUInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=\n--   UInt16.toUInt128_ofNat' hn\n\n-- @[simp] theorem UInt32.toUInt128_ofNat {n : Nat} (hn : n < 4294967296) : toUInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=\n--   UInt32.toUInt128_ofNat' hn\n\n-- @[simp] theorem USize.toUInt128_ofNat {n : Nat} (hn : n < 4294967296) : toUInt128 (no_index (OfNat.ofNat n)) = OfNat.ofNat n :=\n--   USize.toUInt128_ofNat' (Nat.lt_of_lt_of_le hn UInt32.size_le_usizeSize)\n\n@[simp] theorem UInt128.ofNatLT_finVal (n : Fin UInt128.size) : UInt128.ofNatLT n.val n.isLt = UInt128.ofFin n := (rfl)\n@[simp] theorem UInt128.ofNatLT_bitVecToNat (n : BitVec 128) : UInt128.ofNatLT n.toNat n.isLt = UInt128.ofBitVec n := (rfl)\n@[simp] theorem UInt128.ofNat_finVal (n : Fin UInt128.size) : UInt128.ofNat n.val = UInt128.ofFin n := by\n  rw [← ofNatLT_eq_ofNat (h := n.isLt), ofNatLT_finVal]\n@[simp] theorem UInt128.ofNat_bitVecToNat (n : BitVec 128) : UInt128.ofNat n.toNat = UInt128.ofBitVec n := by\n  rw [← ofNatLT_eq_ofNat (h := n.isLt), ofNatLT_bitVecToNat]\n@[simp] theorem 
UInt128.ofNatTruncate_finVal (n : Fin UInt128.size) : UInt128.ofNatTruncate n.val = UInt128.ofFin n := by\n  rw [ofNatTruncate_eq_ofNat _ n.isLt, UInt128.ofNat_finVal]\n@[simp] theorem UInt128.ofNatTruncate_bitVecToNat (n : BitVec 128) : UInt128.ofNatTruncate n.toNat = UInt128.ofBitVec n := by\n  rw [ofNatTruncate_eq_ofNat _ n.isLt, ofNat_bitVecToNat]\n@[simp] theorem UInt128.ofFin_mk {n : Nat} (hn) : UInt128.ofFin (Fin.mk n hn) = UInt128.ofNatLT n hn := (rfl)\n@[simp] theorem UInt128.ofFin_bitVecToFin (n : BitVec 128) : UInt128.ofFin n.toFin = UInt128.ofBitVec n := (rfl)\n@[simp] theorem UInt128.ofBitVec_ofNatLT {n : Nat} (hn) : UInt128.ofBitVec (BitVec.ofNatLT n hn) = UInt128.ofNatLT n hn := (rfl)\n@[simp] theorem UInt128.ofBitVec_ofFin (n) : UInt128.ofBitVec (BitVec.ofFin n) = UInt128.ofFin n := (rfl)\n@[simp] theorem BitVec.ofNat_uInt128ToNat (n : UInt128) : BitVec.ofNat 128 n.toNat = n.toBitVec :=\n  BitVec.eq_of_toNat_eq (by simp)\ntheorem UInt128.toUInt8_div (a b : UInt128) (ha : a < 256) (hb : b < 256) : (a / b).toUInt8 = a.toUInt8 / b.toUInt8 :=\n  UInt8.toNat.inj (by simpa using Nat.div_mod_eq_mod_div_mod ha hb)\n\ntheorem UInt128.toUInt16_div (a b : UInt128) (ha : a < 65536) (hb : b < 65536) : (a / b).toUInt16 = a.toUInt16 / b.toUInt16 :=\n  UInt16.toNat.inj (by simpa using Nat.div_mod_eq_mod_div_mod ha hb)\n\ntheorem UInt128.toUInt32_div (a b : UInt128) (ha : a < 4294967296) (hb : b < 4294967296) : (a / b).toUInt32 = a.toUInt32 / b.toUInt32 :=\n  UInt32.toNat.inj (by simpa using Nat.div_mod_eq_mod_div_mod ha hb)\n\ntheorem UInt128.toUSize_div (a b : UInt128) (ha : a < 4294967296) (hb : b < 4294967296) : (a / b).toUSize = a.toUSize / b.toUSize :=\n  USize.toNat.inj (Nat.div_mod_eq_mod_div_mod (Nat.lt_of_lt_of_le ha UInt32.size_le_usizeSize) (Nat.lt_of_lt_of_le hb UInt32.size_le_usizeSize))\n\ntheorem UInt128.toUSize_div_of_toNat_lt (a b : UInt128) (ha : a.toNat < USize.size) (hb : b.toNat < USize.size) :\n    (a / b).toUSize = a.toUSize / b.toUSize :=\n  
USize.toNat.inj (by simpa using Nat.div_mod_eq_mod_div_mod ha hb)\n\ntheorem UInt128.toUInt8_mod (a b : UInt128) (ha : a < 256) (hb : b < 256) : (a % b).toUInt8 = a.toUInt8 % b.toUInt8 :=\n  UInt8.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod ha hb)\n\ntheorem UInt128.toUInt16_mod (a b : UInt128) (ha : a < 65536) (hb : b < 65536) : (a % b).toUInt16 = a.toUInt16 % b.toUInt16 :=\n  UInt16.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod ha hb)\n\ntheorem UInt128.toUInt32_mod (a b : UInt128) (ha : a < 4294967296) (hb : b < 4294967296) : (a % b).toUInt32 = a.toUInt32 % b.toUInt32 :=\n  UInt32.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod ha hb)\n\ntheorem UInt128.toUSize_mod (a b : UInt128) (ha : a < 4294967296) (hb : b < 4294967296) : (a % b).toUSize = a.toUSize % b.toUSize :=\n  USize.toNat.inj (Nat.mod_mod_eq_mod_mod_mod (Nat.lt_of_lt_of_le ha UInt32.size_le_usizeSize) (Nat.lt_of_lt_of_le hb UInt32.size_le_usizeSize))\n\ntheorem UInt128.toUSize_mod_of_toNat_lt (a b : UInt128) (ha : a.toNat < USize.size) (hb : b.toNat < USize.size) : (a % b).toUSize = a.toUSize % b.toUSize :=\n  USize.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod ha hb)\n\ntheorem UInt128.toUInt8_mod_of_dvd (a b : UInt128) (hb : b.toNat ∣ 256) : (a % b).toUInt8 = a.toUInt8 % b.toUInt8 :=\n  UInt8.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod_of_dvd hb)\n\ntheorem UInt128.toUInt16_mod_of_dvd (a b : UInt128)(hb : b.toNat ∣ 65536) : (a % b).toUInt16 = a.toUInt16 % b.toUInt16 :=\n  UInt16.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod_of_dvd hb)\n\ntheorem UInt128.toUInt32_mod_of_dvd (a b : UInt128) (hb : b.toNat ∣ 4294967296) : (a % b).toUInt32 = a.toUInt32 % b.toUInt32 :=\n  UInt32.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod_of_dvd hb)\n\ntheorem UInt128.toUSize_mod_of_dvd (a b : UInt128) (hb : b.toNat ∣ 4294967296) : (a % b).toUSize = a.toUSize % b.toUSize :=\n  USize.toNat.inj (Nat.mod_mod_eq_mod_mod_mod_of_dvd (Nat.dvd_trans hb 
UInt32.size_dvd_usizeSize))\n\ntheorem UInt128.toUSize_mod_of_dvd_usizeSize (a b : UInt128) (hb : b.toNat ∣ USize.size) : (a % b).toUSize = a.toUSize % b.toUSize :=\n  USize.toNat.inj (by simpa using Nat.mod_mod_eq_mod_mod_mod_of_dvd hb)\n\n@[simp] protected theorem UInt128.toFin_add (a b : UInt128) : (a + b).toFin = a.toFin + b.toFin := (rfl)\n@[simp] theorem UInt128.toUInt8_add (a b : UInt128) : (a + b).toUInt8 = a.toUInt8 + b.toUInt8 := UInt8.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt16_add (a b : UInt128) : (a + b).toUInt16 = a.toUInt16 + b.toUInt16 := UInt16.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt32_add (a b : UInt128) : (a + b).toUInt32 = a.toUInt32 + b.toUInt32 := UInt32.toNat.inj (by simp)\n\n@[simp] theorem UInt128.toUSize_add (a b : UInt128) : (a + b).toUSize = a.toUSize + b.toUSize := USize.toNat.inj (by simp)\n\n-- @[simp] theorem UInt8.toUInt128_add (a b : UInt8) : (a + b).toUInt128 = (a.toUInt128 + b.toUInt128) % 256 := UInt128.toNat.inj (by simp)\n-- @[simp] theorem UInt16.toUInt128_add (a b : UInt16) : (a + b).toUInt128 = (a.toUInt128 + b.toUInt128) % 65536 := UInt128.toNat.inj (by simp)\n\n-- @[simp] theorem UInt32.toUInt128_add (a b : UInt32) : (a + b).toUInt128 = (a.toUInt128 + b.toUInt128) % 4294967296 := UInt128.toNat.inj (by simp)\n\n@[simp] protected theorem UInt128.toFin_sub (a b : UInt128) : (a - b).toFin = a.toFin - b.toFin := (rfl)\n@[simp] protected theorem UInt128.toFin_mul (a b : UInt128) : (a * b).toFin = a.toFin * b.toFin := (rfl)\n@[simp] theorem UInt128.toUInt8_mul (a b : UInt128) : (a * b).toUInt8 = a.toUInt8 * b.toUInt8 := UInt8.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt16_mul (a b : UInt128) : (a * b).toUInt16 = a.toUInt16 * b.toUInt16 := UInt16.toNat.inj (by simp)\n@[simp] theorem UInt128.toUInt32_mul (a b : UInt128) : (a * b).toUInt32 = a.toUInt32 * b.toUInt32 := UInt32.toNat.inj (by simp)\n@[simp] theorem UInt128.toUSize_mul (a b : UInt128) : (a * b).toUSize = a.toUSize * b.toUSize := 
USize.toNat.inj (by simp)\n-- @[simp] theorem UInt8.toUInt128_mul (a b : UInt8) : (a * b).toUInt128 = (a.toUInt128 * b.toUInt128) % 256 := UInt128.toNat.inj (by simp)\n-- @[simp] theorem UInt16.toUInt128_mul (a b : UInt16) : (a * b).toUInt128 = (a.toUInt128 * b.toUInt128) % 65536 := UInt128.toNat.inj (by simp)\n-- @[simp] theorem UInt32.toUInt128_mul (a b : UInt32) : (a * b).toUInt128 = (a.toUInt128 * b.toUInt128) % 4294967296 := UInt128.toNat.inj (by simp)\n\n\n-- theorem UInt128.toUInt8_eq (a b : UInt128) : a.toUInt8 = b.toUInt8 ↔ a % 256 = b % 256 := by\n--   simp [← UInt8.toNat_inj, ← UInt128.toNat_inj]\n\n-- theorem UInt128.toUInt16_eq (a b : UInt128) : a.toUInt16 = b.toUInt16 ↔ a % 65536 = b % 65536 := by\n--   simp [← UInt16.toNat_inj, ← UInt128.toNat_inj]\n\n-- theorem UInt128.toUInt32_eq (a b : UInt128) : a.toUInt32 = b.toUInt32 ↔ a % 4294967296 = b % 4294967296 := by\n--   simp [← UInt32.toNat_inj, ← UInt128.toNat_inj]\n\n-- theorem UInt8.toUInt128_eq_mod_256_iff (a : UInt8) (b : UInt128) : a.toUInt128 = b % 256 ↔ a = b.toUInt8 := by\n--   simp [← UInt8.toNat_inj, ← UInt128.toNat_inj]\n\n-- theorem UInt16.toUInt128_eq_mod_65536_iff (a : UInt16) (b : UInt128) : a.toUInt128 = b % 65536 ↔ a = b.toUInt16 := by\n--   simp [← UInt16.toNat_inj, ← UInt128.toNat_inj]\n\n-- theorem UInt32.toUInt128_eq_mod_4294967296_iff (a : UInt32) (b : UInt128) : a.toUInt128 = b % 4294967296 ↔ a = b.toUInt32 := by\n--   simp [← UInt32.toNat_inj, ← UInt128.toNat_inj]\n\n-- theorem UInt64.toUInt128_eq_mod_4294967296_iff (a : UInt64) (b : UInt128) : a.toUInt128 = b % 4294967296 ↔ a = b.toUInt32 := by\n--   simp [← UInt64.toNat_inj, ← UInt128.toNat_inj]\n\n-- theorem UInt8.toUInt128_inj {a b : UInt8} : a.toUInt128 = b.toUInt128 ↔ a = b :=\n--   ⟨fun h => by rw [← toUInt8_toUInt128 a, h, toUInt8_toUInt128], by rintro rfl; rfl⟩\n\n-- theorem UInt16.toUInt128_inj {a b : UInt16} : a.toUInt128 = b.toUInt128 ↔ a = b :=\n--   ⟨fun h => by rw [← toUInt16_toUInt128 a, h, toUInt16_toUInt128], 
by rintro rfl; rfl⟩\n\n-- theorem UInt32.toUInt128_inj {a b : UInt32} : a.toUInt128 = b.toUInt128 ↔ a = b :=\n--   ⟨fun h => by rw [← toUInt32_toUInt128 a, h, toUInt32_toUInt128], by rintro rfl; rfl⟩\n\n-- theorem UInt64.toUInt128_inj {a b : UInt64} : a.toUInt128 = b.toUInt128 ↔ a = b :=\n--   ⟨fun h => by rw [← toUInt64_toUInt128 a, h, toUInt64_toUInt128], by rintro rfl; rfl⟩\n\ntheorem UInt128.lt_iff_toFin_lt {a b : UInt128} : a < b ↔ a.toFin < b.toFin := Iff.rfl\n\ntheorem UInt128.le_iff_toFin_le {a b : UInt128} : a ≤ b ↔ a.toFin ≤ b.toFin := Iff.rfl\n\n-- @[simp] theorem UInt8.toUInt128_lt {a b : UInt8} : a.toUInt128 < b.toUInt128 ↔ a < b := by\n--   simp [lt_iff_toNat_lt, UInt128.lt_iff_toNat_lt]\n\n-- @[simp] theorem UInt16.toUInt128_lt {a b : UInt16} : a.toUInt128 < b.toUInt128 ↔ a < b := by\n--   simp [lt_iff_toNat_lt, UInt128.lt_iff_toNat_lt]\n\n-- @[simp] theorem UInt32.toUInt128_lt {a b : UInt32} : a.toUInt128 < b.toUInt128 ↔ a < b := by\n--   simp [lt_iff_toNat_lt, UInt128.lt_iff_toNat_lt]\n\n-- @[simp] theorem UInt64.toUInt128_lt {a b : UInt64} : a.toUInt128 < b.toUInt128 ↔ a < b := by\n--   simp [lt_iff_toNat_lt, UInt128.lt_iff_toNat_lt]\n\n-- @[simp] theorem UInt8.toUInt128_le {a b : UInt8} : a.toUInt128 ≤ b.toUInt128 ↔ a ≤ b := by\n--   simp [le_iff_toNat_le, UInt128.le_iff_toNat_le]\n\n-- @[simp] theorem UInt16.toUInt128_le {a b : UInt16} : a.toUInt128 ≤ b.toUInt128 ↔ a ≤ b := by\n--   simp [le_iff_toNat_le, UInt128.le_iff_toNat_le]\n\n-- @[simp] theorem UInt32.toUInt128_le {a b : UInt32} : a.toUInt128 ≤ b.toUInt128 ↔ a ≤ b := by\n--   simp [le_iff_toNat_le, UInt128.le_iff_toNat_le]\n\n@[simp] theorem UInt128.toUInt8_le {a b : UInt128} : a.toUInt8 ≤ b.toUInt8 ↔ a % 256 ≤ b % 256 := by\n  simp [le_iff_toNat_le, UInt8.le_iff_toNat_le]\n\n@[simp] theorem UInt128.toUInt16_le {a b : UInt128} : a.toUInt16 ≤ b.toUInt16 ↔ a % 65536 ≤ b % 65536 := by\n  simp [le_iff_toNat_le, UInt16.le_iff_toNat_le]\n\n@[simp] theorem UInt128.toUInt32_le {a b : UInt128} : 
a.toUInt32 ≤ b.toUInt32 ↔ a % 4294967296 ≤ b % 4294967296 := by\n  simp [le_iff_toNat_le, UInt32.le_iff_toNat_le]\n\n@[simp] theorem UInt128.toUInt8_neg (a : UInt128) : (-a).toUInt8 = -a.toUInt8 := UInt8.toBitVec_inj.1 (by simp)\n@[simp] theorem UInt128.toUInt16_neg (a : UInt128) : (-a).toUInt16 = -a.toUInt16 := UInt16.toBitVec_inj.1 (by simp)\n@[simp] theorem UInt128.toUInt32_neg (a : UInt128) : (-a).toUInt32 = -a.toUInt32 := UInt32.toBitVec_inj.1 (by simp)\n\n-- @[simp] theorem UInt8.toUInt128_neg (a : UInt8) : (-a).toUInt128 = -a.toUInt128 % 256 := by\n--   simp [UInt8.toUInt128_eq_mod_256_iff]\n\n-- @[simp] theorem UInt16.toUInt128_neg (a : UInt16) : (-a).toUInt128 = -a.toUInt128 % 65536 := by\n--   simp [UInt16.toUInt128_eq_mod_65536_iff]\n\n-- @[simp] theorem UInt32.toUInt128_neg (a : UInt32) : (-a).toUInt128 = -a.toUInt128 % 4294967296 := by\n--   simp [UInt32.toUInt128_eq_mod_4294967296_iff]\n\n@[simp] theorem UInt128.toNat_neg (a : UInt128) : (-a).toNat = (UInt128.size - a.toNat) % UInt128.size := (rfl)\n\nprotected theorem UInt128.sub_eq_add_neg (a b : UInt128) : a - b = a + (-b) := UInt128.toBitVec_inj.1 (BitVec.sub_eq_add_neg _ _)\n\nprotected theorem UInt128.add_neg_eq_sub {a b : UInt128} : a + -b = a - b := UInt128.toBitVec_inj.1 BitVec.add_neg_eq_sub\n\ntheorem UInt128.neg_one_eq : (-1 : UInt128) = 340282366920938463463374607431768211455 := (rfl)\n\ntheorem UInt128.toBitVec_zero : toBitVec 0 = 0#128 := (rfl)\n\ntheorem UInt128.toBitVec_one : toBitVec 1 = 1#128 := (rfl)\n\ntheorem UInt128.neg_eq_neg_one_mul (a : UInt128) : -a = -1 * a := by\n  apply UInt128.toBitVec_inj.1\n  rw [UInt128.toBitVec_neg, UInt128.toBitVec_mul, UInt128.toBitVec_neg, UInt128.toBitVec_one, BitVec.neg_eq_neg_one_mul]\n\ntheorem UInt128.sub_eq_add_mul (a b : UInt128) : a - b = a + 340282366920938463463374607431768211455 * b := by\n  rw [UInt128.sub_eq_add_neg, neg_eq_neg_one_mul, neg_one_eq]\n\ntheorem UInt128.ofNat_eq_iff_mod_eq_toNat (a : Nat) (b : UInt128) : UInt128.ofNat a 
= b ↔ a % 2 ^ 128 = b.toNat := by\n  simp [← UInt128.toNat_inj]\n\n-- theorem UInt128.ofNat_sub {a b : Nat} (hab : b ≤ a) : UInt128.ofNat (a - b) = UInt128.ofNat a - UInt128.ofNat b := by\n--   rw [(Nat.sub_add_cancel hab ▸ UInt128.ofNat_add (a - b) b :), UInt128.add_sub_cancel]\n\n-- theorem UInt128.ofNatLT_sub {a b : Nat} (ha : a < 2 ^ 128) (hab : b ≤ a) :\n--     UInt128.ofNatLT (a - b) (Nat.sub_lt_of_lt ha) = UInt128.ofNatLT a ha - UInt128.ofNatLT b (Nat.lt_of_le_of_lt hab ha) := by\n--   simp [UInt128.ofNatLT_eq_ofNat, UInt128.ofNat_sub hab]\n\n-- @[simp] theorem UInt8.toUInt128_sub (a b : UInt8) : (a - b).toUInt128 = (a.toUInt128 - b.toUInt128) % 256 := by\n--   simp [UInt8.toUInt128_eq_mod_256_iff]\n-- @[simp] theorem UInt16.toUInt128_sub (a b : UInt16) : (a - b).toUInt128 = (a.toUInt128 - b.toUInt128) % 65536 := by\n--   simp [UInt16.toUInt128_eq_mod_65536_iff]\n-- @[simp] theorem UInt32.toUInt128_sub (a b : UInt32) : (a - b).toUInt128 = (a.toUInt128 - b.toUInt128) % 4294967296 := by\n--   simp [UInt32.toUInt64_eq_mod_4294967296_iff]\n-- @[simp] theorem UInt64.toUInt128_sub (a b : UInt64) : (a - b).toUInt128 = (a.toUInt128 - b.toUInt128) % 4294967296 := by\n--   simp [UInt64.toUInt64_eq_mod_4294967296_iff]\n\n@[simp] theorem UInt128.ofBitVec_neg (b : BitVec 128) : UInt128.ofBitVec (-b) = -UInt128.ofBitVec b := (rfl)\n@[simp] theorem UInt128.ofFin_div (a b : Fin UInt128.size) : UInt128.ofFin (a / b) = UInt128.ofFin a / UInt128.ofFin b := (rfl)\n@[simp] theorem UInt128.ofBitVec_div (a b : BitVec 128) : UInt128.ofBitVec (a / b) = UInt128.ofBitVec a / UInt128.ofBitVec b := (rfl)\n@[simp] theorem UInt128.ofFin_mod (a b : Fin UInt128.size) : UInt128.ofFin (a % b) = UInt128.ofFin a % UInt128.ofFin b := (rfl)\n@[simp] theorem UInt128.ofBitVec_mod (a b : BitVec 128) : UInt128.ofBitVec (a % b) = UInt128.ofBitVec a % UInt128.ofBitVec b := (rfl)\n-- theorem UInt128.ofNat_eq_iff_mod_eq_toNat (a : Nat) (b : UInt128) : UInt128.ofNat a = b ↔ a % 2 ^ 128 = b.toNat := by\n-- 
  simp [← UInt128.toNat_inj]\n@[simp] theorem UInt128.ofNat_div {a b : Nat} (ha : a < 2 ^ 128) (hb : b < 2 ^ 128) :\n    UInt128.ofNat (a / b) = UInt128.ofNat a / UInt128.ofNat b := by\n  simp [UInt128.ofNat_eq_iff_mod_eq_toNat, Nat.div_mod_eq_mod_div_mod ha hb]\n@[simp] theorem UInt128.ofNatLT_div {a b : Nat} (ha : a < 2 ^ 128) (hb : b < 2 ^ 128) :\n    UInt128.ofNatLT (a / b) (Nat.div_lt_of_lt ha) = UInt128.ofNatLT a ha / UInt128.ofNatLT b hb := by\n  simp [UInt128.ofNatLT_eq_ofNat, UInt128.ofNat_div ha hb]\n@[simp] theorem UInt128.ofNat_mod {a b : Nat} (ha : a < 2 ^ 128) (hb : b < 2 ^ 128) :\n    UInt128.ofNat (a % b) = UInt128.ofNat a % UInt128.ofNat b := by\n  simp [UInt128.ofNat_eq_iff_mod_eq_toNat, Nat.mod_mod_eq_mod_mod_mod ha hb]\n@[simp] theorem UInt128.ofNatLT_mod {a b : Nat} (ha : a < 2 ^ 128) (hb : b < 2 ^ 128) :\n    UInt128.ofNatLT (a % b) (Nat.mod_lt_of_lt ha) = UInt128.ofNatLT a ha % UInt128.ofNatLT b hb := by\n  simp [UInt128.ofNatLT_eq_ofNat, UInt128.ofNat_mod ha hb]\n@[simp] theorem UInt128.ofInt_one : ofInt 1 = 1 := (rfl)\n@[simp] theorem UInt128.ofInt_neg_one : ofInt (-1) = -1 := (rfl)\n@[simp] theorem UInt128.ofNat_add (a b : Nat) : UInt128.ofNat (a + b) = UInt128.ofNat a + UInt128.ofNat b := by\n  simp [UInt128.ofNat_eq_iff_mod_eq_toNat]\n@[simp] theorem UInt128.ofInt_add (x y : Int) : UInt128.ofInt (x + y) = UInt128.ofInt x + UInt128.ofInt y := by\n  dsimp only [UInt128.ofInt]\n  rw [Int.add_emod]\n  have h₁ : 0 ≤ x % 2 ^ 128 := Int.emod_nonneg _ (by decide)\n  have h₂ : 0 ≤ y % 2 ^ 128 := Int.emod_nonneg _ (by decide)\n  have h₃ : 0 ≤ x % 2 ^ 128 + y % 2 ^ 128 := Int.add_nonneg h₁ h₂\n  rw [Int.toNat_emod h₃ (by decide), Int.toNat_add h₁ h₂]\n  have : (2 ^ 128 : Int).toNat = 2 ^ 128 := (rfl)\n  rw [this, UInt128.ofNat_mod_size, UInt128.ofNat_add]\n\n@[simp] theorem UInt128.ofNatLT_add {a b : Nat} (hab : a + b < 2 ^ 128) :\n    UInt128.ofNatLT (a + b) hab = UInt128.ofNatLT a (Nat.lt_of_add_right_lt hab) + UInt128.ofNatLT b 
(Nat.lt_of_add_left_lt hab) := by\n  simp [UInt128.ofNatLT_eq_ofNat]\n@[simp] theorem UInt128.ofFin_add (a b : Fin UInt128.size) : UInt128.ofFin (a + b) = UInt128.ofFin a + UInt128.ofFin b := (rfl)\n@[simp] theorem UInt128.ofBitVec_add (a b : BitVec 128) : UInt128.ofBitVec (a + b) = UInt128.ofBitVec a + UInt128.ofBitVec b := (rfl)\n@[simp] theorem UInt128.ofFin_sub (a b : Fin UInt128.size) : UInt128.ofFin (a - b) = UInt128.ofFin a - UInt128.ofFin b := (rfl)\n@[simp] theorem UInt128.ofBitVec_sub (a b : BitVec 128) : UInt128.ofBitVec (a - b) = UInt128.ofBitVec a - UInt128.ofBitVec b := (rfl)\n@[simp] protected theorem UInt128.add_sub_cancel (a b : UInt128) : a + b - b = a := UInt128.toBitVec_inj.1 (BitVec.add_sub_cancel _ _)\ntheorem UInt128.ofNat_sub {a b : Nat} (hab : b ≤ a) : UInt128.ofNat (a - b) = UInt128.ofNat a - UInt128.ofNat b := by\n  rw [(Nat.sub_add_cancel hab ▸ UInt128.ofNat_add (a - b) b :), UInt128.add_sub_cancel]\ntheorem UInt128.ofNatLT_sub {a b : Nat} (ha : a < 2 ^ 128) (hab : b ≤ a) :\n    UInt128.ofNatLT (a - b) (Nat.sub_lt_of_lt ha) = UInt128.ofNatLT a ha - UInt128.ofNatLT b (Nat.lt_of_le_of_lt hab ha) := by\n  simp [UInt128.ofNatLT_eq_ofNat, UInt128.ofNat_sub hab]\n@[simp] theorem UInt128.ofNat_mul (a b : Nat) : UInt128.ofNat (a * b) = UInt128.ofNat a * UInt128.ofNat b := by\n  simp [UInt128.ofNat_eq_iff_mod_eq_toNat]\n@[simp] theorem UInt128.ofInt_mul (x y : Int) : ofInt (x * y) = ofInt x * ofInt y := by\n  dsimp only [UInt128.ofInt]\n  rw [Int.mul_emod]\n  have h₁ : 0 ≤ x % 2 ^ 128 := Int.emod_nonneg _ (by decide)\n  have h₂ : 0 ≤ y % 2 ^ 128 := Int.emod_nonneg _ (by decide)\n  have h₃ : 0 ≤ (x % 2 ^ 128) * (y % 2 ^ 128) := Int.mul_nonneg h₁ h₂\n  rw [Int.toNat_emod h₃ (by decide), Int.toNat_mul h₁ h₂]\n  have : (2 ^ 128 : Int).toNat = 2 ^ 128 := (rfl)\n  rw [this, UInt128.ofNat_mod_size, UInt128.ofNat_mul]\n@[simp] theorem UInt128.ofNatLT_mul {a b : Nat} (ha : a < 2 ^ 128) (hb : b < 2 ^ 128) (hab : a * b < 2 ^ 128) :\n    UInt128.ofNatLT (a * 
b) hab = UInt128.ofNatLT a ha * UInt128.ofNatLT b hb := by\n  simp [UInt128.ofNatLT_eq_ofNat]\n@[simp] theorem UInt128.ofFin_mul (a b : Fin UInt128.size) : UInt128.ofFin (a * b) = UInt128.ofFin a * UInt128.ofFin b := (rfl)\n@[simp] theorem UInt128.ofBitVec_mul (a b : BitVec 128) : UInt128.ofBitVec (a * b) = UInt128.ofBitVec a * UInt128.ofBitVec b := (rfl)\n\ntheorem UInt128.ofFin_lt_iff_lt {a b : Fin UInt128.size} : UInt128.ofFin a < UInt128.ofFin b ↔ a < b := Iff.rfl\n\ntheorem UInt128.ofFin_le_iff_le {a b : Fin UInt128.size} : UInt128.ofFin a ≤ UInt128.ofFin b ↔ a ≤ b := Iff.rfl\n\ntheorem UInt128.ofBitVec_lt_iff_lt {a b : BitVec 128} : UInt128.ofBitVec a < UInt128.ofBitVec b ↔ a < b := Iff.rfl\n\ntheorem UInt128.ofBitVec_le_iff_le {a b : BitVec 128} : UInt128.ofBitVec a ≤ UInt128.ofBitVec b ↔ a ≤ b := Iff.rfl\n\ntheorem UInt128.ofNatLT_lt_iff_lt {a b : Nat} (ha : a < UInt128.size) (hb : b < UInt128.size) :\n    UInt128.ofNatLT a ha < UInt128.ofNatLT b hb ↔ a < b := Iff.rfl\n\ntheorem UInt128.ofNatLT_le_iff_le {a b : Nat} (ha : a < UInt128.size) (hb : b < UInt128.size) :\n    UInt128.ofNatLT a ha ≤ UInt128.ofNatLT b hb ↔ a ≤ b := Iff.rfl\n\ntheorem UInt128.ofNat_lt_iff_lt {a b : Nat} (ha : a < UInt128.size) (hb : b < UInt128.size) :\n    UInt128.ofNat a < UInt128.ofNat b ↔ a < b := by\n  rw [← ofNatLT_eq_ofNat (h := ha), ← ofNatLT_eq_ofNat (h := hb), ofNatLT_lt_iff_lt]\n\ntheorem UInt128.ofNat_le_iff_le {a b : Nat} (ha : a < UInt128.size) (hb : b < UInt128.size) :\n    UInt128.ofNat a ≤ UInt128.ofNat b ↔ a ≤ b := by\n  rw [← ofNatLT_eq_ofNat (h := ha), ← ofNatLT_eq_ofNat (h := hb), ofNatLT_le_iff_le]\n\ntheorem UInt128.toNat_one : (1 : UInt128).toNat = 1 := (rfl)\n\n-- theorem UInt128.zero_lt_one : (0 : UInt128) < 1 := by simp\n\n-- theorem UInt128.zero_ne_one : (0 : UInt128) ≠ 1 := by simp\n\nprotected theorem UInt128.add_assoc (a b c : UInt128) : a + b + c = a + (b + c) :=\n  UInt128.toBitVec_inj.1 (BitVec.add_assoc _ _ _)\n\ninstance : Std.Associative (α := 
UInt128) (· + ·) := ⟨UInt128.add_assoc⟩\n\nprotected theorem UInt128.add_comm (a b : UInt128) : a + b = b + a := UInt128.toBitVec_inj.1 (BitVec.add_comm _ _)\n\ninstance : Std.Commutative (α := UInt128) (· + ·) := ⟨UInt128.add_comm⟩\n@[simp] protected theorem UInt128.add_zero (a : UInt128) : a + 0 = a := UInt128.toBitVec_inj.1 (BitVec.add_zero _)\n@[simp] protected theorem UInt128.zero_add (a : UInt128) : 0 + a = a := UInt128.toBitVec_inj.1 (BitVec.zero_add _)\ninstance : Std.LawfulIdentity (α := UInt128) (· + ·) 0 where\n  left_id := UInt128.zero_add\n  right_id := UInt128.add_zero\n@[simp] protected theorem UInt128.sub_zero (a : UInt128) : a - 0 = a := UInt128.toBitVec_inj.1 (BitVec.sub_zero _)\n@[simp] protected theorem UInt128.zero_sub (a : UInt128) : 0 - a = -a := UInt128.toBitVec_inj.1 (BitVec.zero_sub _)\n@[simp] protected theorem UInt128.sub_self (a : UInt128) : a - a = 0 := UInt128.toBitVec_inj.1 (BitVec.sub_self _)\n\nprotected theorem UInt128.add_left_neg (a : UInt128) : -a + a = 0 := UInt128.toBitVec_inj.1 (BitVec.add_left_neg _)\n\nprotected theorem UInt128.add_right_neg (a : UInt128) : a + -a = 0 := UInt128.toBitVec_inj.1 (BitVec.add_right_neg _)\n\nprotected theorem UInt128.eq_sub_iff_add_eq {a b c : UInt128} : a = c - b ↔ a + b = c := by\n  simpa [← UInt128.toBitVec_inj] using BitVec.eq_sub_iff_add_eq\n\nprotected theorem UInt128.sub_eq_iff_eq_add {a b c : UInt128} : a - b = c ↔ a = c + b := by\n  simpa [← UInt128.toBitVec_inj] using BitVec.sub_eq_iff_eq_add\n@[simp] protected theorem UInt128.neg_neg {a : UInt128} : - -a = a := UInt128.toBitVec_inj.1 BitVec.neg_neg\n@[simp] protected theorem UInt128.neg_inj {a b : UInt128} : -a = -b ↔ a = b := by simp [← UInt128.toBitVec_inj]\n@[simp] protected theorem UInt128.neg_ne_zero {a : UInt128} : -a ≠ 0 ↔ a ≠ 0 := by simp [← UInt128.toBitVec_inj]\nprotected theorem UInt128.neg_add {a b : UInt128} : - (a + b) = -a - b := UInt128.toBitVec_inj.1 BitVec.neg_add\n\n@[simp] protected theorem UInt128.sub_neg {a b : 
UInt128} : a - -b = a + b := UInt128.toBitVec_inj.1 BitVec.sub_neg\n@[simp] protected theorem UInt128.neg_sub {a b : UInt128} : -(a - b) = b - a := by\n  rw [UInt128.sub_eq_add_neg, UInt128.neg_add, UInt128.sub_neg, UInt128.add_comm, ← UInt128.sub_eq_add_neg]\n@[simp] protected theorem UInt128.ofInt_neg (x : Int) : ofInt (-x) = -ofInt x := by\n  rw [Int.neg_eq_neg_one_mul, ofInt_mul, ofInt_neg_one, ← UInt128.neg_eq_neg_one_mul]\n@[simp] protected theorem UInt128.add_left_inj {a b : UInt128} (c : UInt128) : (a + c = b + c) ↔ a = b := by\n  simp [← UInt128.toBitVec_inj]\n@[simp] protected theorem UInt128.add_right_inj {a b : UInt128} (c : UInt128) : (c + a = c + b) ↔ a = b := by\n  simp [← UInt128.toBitVec_inj]\n@[simp] protected theorem UInt128.sub_left_inj {a b : UInt128} (c : UInt128) : (a - c = b - c) ↔ a = b := by\n  simp [← UInt128.toBitVec_inj]\n@[simp] protected theorem UInt128.sub_right_inj {a b : UInt128} (c : UInt128) : (c - a = c - b) ↔ a = b := by\n  simp [← UInt128.toBitVec_inj]\n@[simp] theorem UInt128.add_eq_right {a b : UInt128} : a + b = b ↔ a = 0 := by\n  simp [← UInt128.toBitVec_inj]\n@[simp] theorem UInt128.add_eq_left {a b : UInt128} : a + b = a ↔ b = 0 := by\n  simp [← UInt128.toBitVec_inj]\n@[simp] theorem UInt128.right_eq_add {a b : UInt128} : b = a + b ↔ a = 0 := by\n  simp [← UInt128.toBitVec_inj]\n@[simp] theorem UInt128.left_eq_add {a b : UInt128} : a = a + b ↔ b = 0 := by\n  simp [← UInt128.toBitVec_inj]\n\nprotected theorem UInt128.mul_comm (a b : UInt128) : a * b = b * a := UInt128.toBitVec_inj.1 (BitVec.mul_comm _ _)\n\ninstance : Std.Commutative (α := UInt128) (· * ·) := ⟨UInt128.mul_comm⟩\n\nprotected theorem UInt128.mul_assoc (a b c : UInt128) : a * b * c = a * (b * c) := UInt128.toBitVec_inj.1 (BitVec.mul_assoc _ _ _)\n\ninstance : Std.Associative (α := UInt128) (· * ·) := ⟨UInt128.mul_assoc⟩\n@[simp] theorem UInt128.mul_one (a : UInt128) : a * 1 = a := UInt128.toBitVec_inj.1 (BitVec.mul_one _)\n@[simp] theorem UInt128.one_mul (a 
: UInt128) : 1 * a = a := UInt128.toBitVec_inj.1 (BitVec.one_mul _)\ninstance : Std.LawfulCommIdentity (α := UInt128) (· * ·) 1 where\n  right_id := UInt128.mul_one\n@[simp] theorem UInt128.mul_zero {a : UInt128} : a * 0 = 0 := UInt128.toBitVec_inj.1 BitVec.mul_zero\n@[simp] theorem UInt128.zero_mul {a : UInt128} : 0 * a = 0 := UInt128.toBitVec_inj.1 BitVec.zero_mul\n@[simp] protected theorem UInt128.pow_zero (x : UInt128) : x ^ 0 = 1 := (rfl)\nprotected theorem UInt128.pow_succ (x : UInt128) (n : Nat) : x ^ (n + 1) = x ^ n * x := (rfl)\n\n\nprotected theorem UInt128.mul_add {a b c : UInt128} : a * (b + c) = a * b + a * c :=\n    UInt128.toBitVec_inj.1 BitVec.mul_add\n\nprotected theorem UInt128.add_mul {a b c : UInt128} : (a + b) * c = a * c + b * c := by\n  rw [UInt128.mul_comm, UInt128.mul_add, UInt128.mul_comm a c, UInt128.mul_comm c b]\n\n-- protected theorem UInt128.mul_succ {a b : UInt128} : a * (b + 1) = a * b + a := by simp [UInt128.mul_add]\n\n-- protected theorem UInt128.succ_mul {a b : UInt128} : (a + 1) * b = a * b + b := by simp [UInt128.add_mul]\n\nprotected theorem UInt128.two_mul {a : UInt128} : 2 * a = a + a := UInt128.toBitVec_inj.1 BitVec.two_mul\n\nprotected theorem UInt128.mul_two {a : UInt128} : a * 2 = a + a := UInt128.toBitVec_inj.1 BitVec.mul_two\n\nprotected theorem UInt128.neg_mul (a b : UInt128) : -a * b = -(a * b) := UInt128.toBitVec_inj.1 (BitVec.neg_mul _ _)\n\nprotected theorem UInt128.mul_neg (a b : UInt128) : a * -b = -(a * b) := UInt128.toBitVec_inj.1 (BitVec.mul_neg _ _)\n\nprotected theorem UInt128.neg_mul_neg (a b : UInt128) : -a * -b = a * b := UInt128.toBitVec_inj.1 (BitVec.neg_mul_neg _ _)\n\nprotected theorem UInt128.neg_mul_comm (a b : UInt128) : -a * b = a * -b := UInt128.toBitVec_inj.1 (BitVec.neg_mul_comm _ _)\n\nprotected theorem UInt128.mul_sub {a b c : UInt128} : a * (b - c) = a * b - a * c := UInt128.toBitVec_inj.1 BitVec.mul_sub\n\nprotected theorem UInt128.sub_mul {a b c : UInt128} : (a - b) * c = a * c - b * c 
:= by\n  rw [UInt128.mul_comm, UInt128.mul_sub, UInt128.mul_comm, UInt128.mul_comm c]\n\ntheorem UInt128.neg_add_mul_eq_mul_not {a b : UInt128} : -(a + a * b) = a * ~~~b :=\n  UInt128.toBitVec_inj.1 BitVec.neg_add_mul_eq_mul_not\n\ntheorem UInt128.neg_mul_not_eq_add_mul {a b : UInt128} : -(a * ~~~b) = a + a * b :=\n  UInt128.toBitVec_inj.1 BitVec.neg_mul_not_eq_add_mul\n\nprotected theorem UInt128.le_of_lt {a b : UInt128} : a < b → a ≤ b := by\n  simpa [lt_iff_toNat_lt, le_iff_toNat_le] using Nat.le_of_lt\n\nprotected theorem UInt128.lt_of_le_of_ne {a b : UInt128} : a ≤ b → a ≠ b → a < b := by\n  simpa [lt_iff_toNat_lt, le_iff_toNat_le, ← UInt128.toNat_inj] using Nat.lt_of_le_of_ne\n\nprotected theorem UInt128.lt_iff_le_and_ne {a b : UInt128} : a < b ↔ a ≤ b ∧ a ≠ b := by\n  simpa [lt_iff_toNat_lt, le_iff_toNat_le, ← UInt128.toNat_inj] using Nat.lt_iff_le_and_ne\n\nprotected theorem UInt128.div_self {a : UInt128} : a / a = if a = 0 then 0 else 1 := by\n  simp [← UInt128.toBitVec_inj, apply_ite]\n\n-- protected theorem UInt128.pos_iff_ne_zero {a : UInt128} : 0 < a ↔ a ≠ 0 := by simp [UInt128.lt_iff_le_and_ne, Eq.comm]\n\nprotected theorem UInt128.lt_of_le_of_lt {a b c : UInt128} : a ≤ b → b < c → a < c := by\n  simpa [le_iff_toNat_le, lt_iff_toNat_lt] using Nat.lt_of_le_of_lt\n\nprotected theorem UInt128.lt_of_lt_of_le {a b c : UInt128} : a < b → b ≤ c → a < c := by\n  simpa [le_iff_toNat_le, lt_iff_toNat_lt] using Nat.lt_of_lt_of_le\n\nprotected theorem UInt128.lt_or_lt_of_ne {a b : UInt128} : a ≠ b → a < b ∨ b < a := by\n  simpa [lt_iff_toNat_lt, ← UInt128.toNat_inj] using Nat.lt_or_lt_of_ne\n\nprotected theorem UInt128.lt_or_le (a b : UInt128) : a < b ∨ b ≤ a := by\n  simp [lt_iff_toNat_lt, le_iff_toNat_le]; omega\n\nprotected theorem UInt128.le_or_lt (a b : UInt128) : a ≤ b ∨ b < a := (b.lt_or_le a).symm\n\nprotected theorem UInt128.le_of_eq {a b : UInt128} : a = b → a ≤ b := (· ▸ UInt128.le_rfl)\n\nprotected theorem UInt128.le_iff_lt_or_eq {a b : UInt128} : a ≤ 
b ↔ a < b ∨ a = b := by\n  simpa [← UInt128.toNat_inj, le_iff_toNat_le, lt_iff_toNat_lt] using Nat.le_iff_lt_or_eq\n\nprotected theorem UInt128.lt_or_eq_of_le {a b : UInt128} : a ≤ b → a < b ∨ a = b := UInt128.le_iff_lt_or_eq.mp\n\nprotected theorem UInt128.sub_le {a b : UInt128} (hab : b ≤ a) : a - b ≤ a := by\n  simp [le_iff_toNat_le, UInt128.toNat_sub_of_le _ _ hab]\n\nprotected theorem UInt128.sub_lt {a b : UInt128} (hb : 0 < b) (hab : b ≤ a) : a - b < a := by\n  rw [lt_iff_toNat_lt, UInt128.toNat_sub_of_le _ _ hab]\n  refine Nat.sub_lt ?_ (UInt128.lt_iff_toNat_lt.1 hb)\n  exact UInt128.lt_iff_toNat_lt.1 (UInt128.lt_of_lt_of_le hb hab)\n\ntheorem UInt128.lt_add_one {c : UInt128} (h : c ≠ -1) : c < c + 1 :=\n  UInt128.lt_iff_toBitVec_lt.2 (BitVec.lt_add_one (by simpa [← UInt128.toBitVec_inj] using h))\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Data/Vector/Basic.lean",
    "content": "\nattribute [grind =] Vector.size_toArray\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/GrindInstances/Ring/SInt.lean",
    "content": "import Hax.MissingLean.Init.GrindInstances.ToInt\n\nopen Lean Grind\n\n@[expose, instance_reducible]\ndef Int128.natCast : NatCast Int128 where\n  natCast x := Int128.ofNat x\n\n@[expose, instance_reducible]\ndef Int128.intCast : IntCast Int128 where\n  intCast x := Int128.ofInt x\n\nattribute [local instance] Int128.intCast in\ntheorem Int128.intCast_neg (i : Int) : ((-i : Int) : Int128) = -(i : Int128) :=\n  Int128.ofInt_neg _\n\nattribute [local instance] Int128.intCast in\ntheorem Int128.intCast_ofNat (x : Nat) : (OfNat.ofNat (α := Int) x : Int128) = OfNat.ofNat x := Int128.ofInt_eq_ofNat\n\nattribute [local instance] Int128.natCast Int128.intCast in\ninstance : CommRing Int128 where\n  nsmul := ⟨(· * ·)⟩\n  zsmul := ⟨(· * ·)⟩\n  add_assoc := Int128.add_assoc\n  add_comm := Int128.add_comm\n  add_zero := Int128.add_zero\n  neg_add_cancel := Int128.add_left_neg\n  mul_assoc := Int128.mul_assoc\n  mul_comm := Int128.mul_comm\n  mul_one := Int128.mul_one\n  one_mul := Int128.one_mul\n  left_distrib _ _ _ := Int128.mul_add\n  right_distrib _ _ _ := Int128.add_mul\n  zero_mul _ := Int128.zero_mul\n  mul_zero _ := Int128.mul_zero\n  sub_eq_add_neg := Int128.sub_eq_add_neg\n  pow_zero := Int128.pow_zero\n  pow_succ := Int128.pow_succ\n  ofNat_succ x := Int128.ofNat_add x 1\n  intCast_neg := Int128.ofInt_neg\n  neg_zsmul i x := by\n    change (-i : Int) * x = - (i * x)\n    simp [Int128.intCast_neg, Int128.neg_mul]\n  zsmul_natCast_eq_nsmul n a := congrArg (· * a) (Int128.intCast_ofNat _)\n\ninstance : IsCharP Int128 (2 ^ 128) := IsCharP.mk' _ _\n  (ofNat_eq_zero_iff := fun x => by\n    have : OfNat.ofNat x = Int128.ofInt x := rfl\n    rw [this]\n    simp [Int128.ofInt_eq_iff_bmod_eq_toInt,\n      ← Int.dvd_iff_bmod_eq_zero, ← Nat.dvd_iff_mod_eq_zero, Int.ofNat_dvd_right])\n\n-- Verify we can derive the instances showing how `toInt` interacts with operations:\nexample : ToInt.Add Int128 (.sint 128) := inferInstance\nexample : ToInt.Neg Int128 (.sint 
128) := inferInstance\nexample : ToInt.Sub Int128 (.sint 128) := inferInstance\n\ninstance : ToInt.Pow Int128 (.sint 128) := ToInt.pow_of_semiring (by simp)\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/GrindInstances/Ring/UInt.lean",
    "content": "import Hax.MissingLean.Init.GrindInstances.ToInt\n\nopen Lean Grind\n\nset_option autoImplicit true\n\nnamespace UInt128\n\n/-- Variant of `UInt128.ofNat_mod_size` replacing `2 ^ 128` with `340282366920938463463374607431768211456`.-/\ntheorem ofNat_mod_size' : ofNat (x % 340282366920938463463374607431768211456) = ofNat x := ofNat_mod_size\n\n@[expose, instance_reducible]\ndef natCast : NatCast UInt128 where\n  natCast x := UInt128.ofNat x\n\n@[expose, instance_reducible]\ndef intCast : IntCast UInt128 where\n  intCast x := UInt128.ofInt x\n\nattribute [local instance] natCast intCast\n\ntheorem intCast_neg (x : Int) : ((-x : Int) : UInt128) = - (x : UInt128) := by\n  simp only [Int.cast, IntCast.intCast, UInt128.ofInt_neg]\n\ntheorem intCast_ofNat (x : Nat) : (OfNat.ofNat (α := Int) x : UInt128) = OfNat.ofNat x := by\n    -- A better proof would be welcome!\n    simp only [Int.cast, IntCast.intCast]\n    rw [UInt128.ofInt]\n    rw [Int.toNat_emod (Int.zero_le_ofNat x) (by decide)]\n    erw [Int.toNat_natCast]\n    rw [Int.toNat_pow_of_nonneg (by decide)]\n    simp +instances only [ofNat, BitVec.ofNat, Fin.Internal.ofNat_eq_ofNat, Fin.ofNat, Int.reduceToNat, Nat.dvd_refl,\n      Nat.mod_mod_of_dvd, instOfNat]\n    try rfl\n\nend UInt128\n\n\nattribute [local instance] UInt128.natCast UInt128.intCast in\ninstance : CommRing UInt128 where\n  nsmul := ⟨(· * ·)⟩\n  zsmul := ⟨(· * ·)⟩\n  add_assoc := UInt128.add_assoc\n  add_comm := UInt128.add_comm\n  add_zero := UInt128.add_zero\n  neg_add_cancel := UInt128.add_left_neg\n  mul_assoc := UInt128.mul_assoc\n  mul_comm := UInt128.mul_comm\n  mul_one := UInt128.mul_one\n  one_mul := UInt128.one_mul\n  left_distrib _ _ _ := UInt128.mul_add\n  right_distrib _ _ _ := UInt128.add_mul\n  zero_mul _ := UInt128.zero_mul\n  mul_zero _ := UInt128.mul_zero\n  sub_eq_add_neg := UInt128.sub_eq_add_neg\n  pow_zero := UInt128.pow_zero\n  pow_succ := UInt128.pow_succ\n  ofNat_succ x := UInt128.ofNat_add x 1\n  intCast_neg 
:= UInt128.ofInt_neg\n  intCast_ofNat := UInt128.intCast_ofNat\n  neg_zsmul i a := by\n    change (-i : Int) * a = - (i * a)\n    simp [UInt128.intCast_neg, UInt128.neg_mul]\n  zsmul_natCast_eq_nsmul n a := congrArg (· * a) (UInt128.intCast_ofNat _)\n\ninstance : IsCharP UInt128 340282366920938463463374607431768211456 := IsCharP.mk' _ _\n  (ofNat_eq_zero_iff := fun x => by\n    have : OfNat.ofNat x = UInt128.ofNat x := rfl\n    simp [this, UInt128.ofNat_eq_iff_mod_eq_toNat])\n\n-- Verify we can derive the instances showing how `toInt` interacts with operations:\nexample : ToInt.Add UInt128 (.uint 128) := inferInstance\nexample : ToInt.Neg UInt128 (.uint 128) := inferInstance\nexample : ToInt.Sub UInt128 (.uint 128) := inferInstance\n\ninstance : ToInt.Pow UInt128 (.uint 128) := ToInt.pow_of_semiring (by simp)\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/GrindInstances/ToInt.lean",
    "content": "import Hax.MissingLean.Init.Data.SInt.Lemmas_Int128\nimport Hax.MissingLean.Init.Data.UInt.Lemmas_UInt128\n\nopen Lean.Grind\n\ninstance : ToInt UInt128 (.uint 128) where\n  toInt x := (x.toNat : Int)\n  toInt_inj x y w := private UInt128.toNat_inj.mp (Int.ofNat_inj.mp w)\n  toInt_mem x := by simpa using Int.lt_toNat.mp (UInt128.toNat_lt x)\n\n@[simp] theorem toInt_uint128 (x : UInt128) : ToInt.toInt x = (x.toNat : Int) := rfl\n\ninstance : ToInt.Zero UInt128 (.uint 128) where\n  toInt_zero := by simp\n\ninstance : ToInt.OfNat UInt128 (.uint 128) where\n  toInt_ofNat x := by simp; rfl\n\ninstance : ToInt.Add UInt128 (.uint 128) where\n  toInt_add x y := by simp\n\ninstance : ToInt.Mul UInt128 (.uint 128) where\n  toInt_mul x y := by simp\n\n-- The `ToInt.Pow` instance is defined in `Init.GrindInstances.Ring.UInt`,\n-- as it is convenient to use the ring structure.\n\ninstance : ToInt.Mod UInt128 (.uint 128) where\n  toInt_mod x y := by simp\n\ninstance : ToInt.Div UInt128 (.uint 128) where\n  toInt_div x y := by simp\n\ninstance : ToInt.LE UInt128 (.uint 128) where\n  le_iff x y := by simpa using UInt128.le_iff_toBitVec_le\n\ninstance : ToInt.LT UInt128 (.uint 128) where\n  lt_iff x y := by simpa using UInt128.lt_iff_toBitVec_lt\n\n\ninstance : ToInt Int128 (.sint 128) where\n  toInt x := x.toInt\n  toInt_inj x y w := private Int128.toInt_inj.mp w\n  toInt_mem x := by simp; exact ⟨Int128.le_toInt x, Int128.toInt_lt x⟩\n\n@[simp] theorem toInt_int128 (x : Int128) : ToInt.toInt x = (x.toInt : Int) := rfl\n\ninstance : ToInt.Zero Int128 (.sint 128) where\n  toInt_zero := by\n    -- simp -- FIXME: succeeds, but generates a `(kernel) application type mismatch` error!\n    change (0 : Int128).toInt = _\n    rw [Int128.toInt_zero]\n\ninstance : ToInt.OfNat Int128 (.sint 128) where\n  toInt_ofNat x := by\n    rw [toInt_int128, Int128.toInt_ofNat, Int128.size, Int.bmod_eq_emod, IntInterval.wrap]\n    simp\n    split <;> omega\n\ninstance : ToInt.Add Int128 
(.sint 128) where\n  toInt_add x y := by\n    simp [Int.bmod_eq_emod]\n    split <;> · simp; omega\n\ninstance : ToInt.Mul Int128 (.sint 128) where\n  toInt_mul x y := by\n    simp [Int.bmod_eq_emod]\n    split <;> · simp; omega\n\n-- The `ToInt.Pow` instance is defined in `Init.GrindInstances.Ring.SInt`,\n-- as it is convenient to use the ring structure.\n\ninstance : ToInt.LE Int128 (.sint 128) where\n  le_iff x y := by simpa using Int128.le_iff_toInt_le\n\ninstance : ToInt.LT Int128 (.sint 128) where\n  lt_iff x y := by simpa using Int128.lt_iff_toInt_lt\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/Prelude.lean",
    "content": "\n-- Adapted from Init/Prelude.lean from the Lean v4.29.0-rc1 source code\n\nabbrev UInt128.size : Nat := 340282366920938463463374607431768211456\n\nstructure UInt128 where\n  ofBitVec ::\n  toBitVec : BitVec 128\n\ndef UInt128.ofNatLT (n : @& Nat) (h : LT.lt n UInt128.size) : UInt128 where\n  toBitVec := BitVec.ofNatLT n h\n\ndef UInt128.decEq (a b : UInt128) : Decidable (Eq a b) :=\n  match a, b with\n  | ⟨n⟩, ⟨m⟩ =>\n    dite (Eq n m)\n      (fun h => isTrue (h ▸ rfl))\n      (fun h => isFalse (fun h' => UInt128.noConfusion h' (fun h' => absurd h' h)))\n\ninstance : DecidableEq UInt128 := UInt128.decEq\n\ninstance : Inhabited UInt128 where\n  default := UInt128.ofNatLT 0 (of_decide_eq_true rfl)\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Init/While.lean",
    "content": "import Std.Do\n\nnamespace Lean\n\nopen Order\nopen Std.Do\n\nuniverse u v\n\n/-- Runs one iteration of a loop and continues with `l`. -/\ndef Loop.loopCombinator {β : Type u} {m : Type u → Type v} [Monad m]\n    (f : Unit → β → m (ForInStep β)) (l : β → m β) (b : β) := do\n  match ← f () b with\n    | ForInStep.done b => pure b\n    | ForInStep.yield b => l b\n\n/-- A monad function must implement this type class to be able to use loops based on\n`partial_fixpoint`. -/\nclass Loop.MonoLoopCombinator\n    {β : Type u} {m : Type u → Type v} [Monad m] [∀ α, CCPO (m α)]\n    (f : Unit → β → m (ForInStep β)) where\n  mono : monotone (loopCombinator f) := by unfold Lean.Loop.loopCombinator <;> monotonicity\n\n/-- Our own copy of `Loop.forIn` because the original one is `partial` and thus we cannot reason\nabout it. -/\n@[inline]\ndef Loop.MonoLoopCombinator.forIn {β : Type u} {m : Type u → Type v} [Monad m] [∀ α, CCPO (m α)]\n    (_ : Loop) (init : β) (f : Unit → β → m (ForInStep β)) [MonoLoopCombinator f] :\n    m β :=\n  let rec @[specialize] loop [MonoLoopCombinator f] (b : β) : m β :=\n    loopCombinator f loop b\n  partial_fixpoint monotonicity MonoLoopCombinator.mono\n  loop init\n\n/-- A while loop based on `Loop.MonoLoopCombinator.forIn`. -/\ndef Loop.MonoLoopCombinator.while_loop  {m} {ps : PostShape} {β: Type}\n    [Monad m] [∀ α, Order.CCPO (m α)] [WPMonad m ps]\n    (loop : Loop)\n    (cond: β → Bool)\n    (init : β)\n    (body : β -> m β)\n    [∀ f : Unit → β → m (ForInStep β), Loop.MonoLoopCombinator f] : m β :=\n  Loop.MonoLoopCombinator.forIn loop init fun () s => do\n    if cond s then\n      let s ← body s\n      pure (.yield s)\n    else\n      pure (.done s)\n\nend Lean\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Lean/Tactic/Simp/BuiltinSimpProcs/SInt.lean",
    "content": "import Lean\nimport Hax.MissingLean.Lean.ToExpr\n\n\nnamespace Int128\n\nopen Lean Meta Simp\n\ndef fromExpr (e : Expr) : SimpM (Option Int128) := do\n  if let some (n, _) ← getOfNatValue? e ``Int128 then\n    return some (ofNat n)\n  let_expr Neg.neg _ _ a ← e | return none\n  let some (n, _) ← getOfNatValue? a ``Int128 | return none\n  return some (ofInt (- n))\n\n@[inline] def reduceBin (declName : Name) (arity : Nat) (op : Int128 → Int128 → Int128) (e : Expr) : SimpM DStep := do\n  unless e.isAppOfArity declName arity do return .continue\n  let some n ← (fromExpr e.appFn!.appArg!) | return .continue\n  let some m ← (fromExpr e.appArg!) | return .continue\n  return .done <| toExpr (op n m)\n\n@[inline] def reduceBinPred (declName : Name) (arity : Nat) (op : Int128 → Int128 → Bool) (e : Expr) : SimpM Step := do\n  unless e.isAppOfArity declName arity do return .continue\n  let some n ← (fromExpr e.appFn!.appArg!) | return .continue\n  let some m ← (fromExpr e.appArg!) | return .continue\n  evalPropStep e (op n m)\n\n@[inline] def reduceBoolPred (declName : Name) (arity : Nat) (op : Int128 → Int128 → Bool) (e : Expr) : SimpM DStep := do\n  unless e.isAppOfArity declName arity do return .continue\n  let some n ← (fromExpr e.appFn!.appArg!) | return .continue\n  let some m ← (fromExpr e.appArg!) 
| return .continue\n  return .done <| toExpr (op n m)\n\nopen Lean Meta Simp in\ndsimproc [simp, seval] reduceNeg ((- _ : Int128)) := fun e => do\n  let_expr Neg.neg _ _ arg ← e | return .continue\n  if arg.isAppOfArity ``OfNat.ofNat 3 then\n    -- We return .done to ensure `Neg.neg` is not unfolded even when `ground := true`.\n    return .done e\n  else\n    let some v ← (fromExpr arg) | return .continue\n    return .done <| toExpr (- v)\n\ndsimproc [simp, seval] reduceAdd ((_ + _ : Int128)) := reduceBin ``HAdd.hAdd 6 (· + ·)\ndsimproc [simp, seval] reduceMul ((_ * _ : Int128)) := reduceBin ``HMul.hMul 6 (· * ·)\ndsimproc [simp, seval] reduceSub ((_ - _ : Int128)) := reduceBin ``HSub.hSub 6 (· - ·)\ndsimproc [simp, seval] reduceDiv ((_ / _ : Int128)) := reduceBin ``HDiv.hDiv 6 (· / ·)\ndsimproc [simp, seval] reduceMod ((_ % _ : Int128)) := reduceBin ``HMod.hMod 6 (· % ·)\n\nsimproc [simp, seval] reduceLT  (( _ : Int128) < _)  := reduceBinPred ``LT.lt 4 (. < .)\nsimproc [simp, seval] reduceLE  (( _ : Int128) ≤ _)  := reduceBinPred ``LE.le 4 (. ≤ .)\nsimproc [simp, seval] reduceGT  (( _ : Int128) > _)  := reduceBinPred ``GT.gt 4 (. > .)\nsimproc [simp, seval] reduceGE  (( _ : Int128) ≥ _)  := reduceBinPred ``GE.ge 4 (. ≥ .)\nsimproc [simp, seval] reduceEq  (( _ : Int128) = _)  := reduceBinPred ``Eq 3 (. = .)\nsimproc [simp, seval] reduceNe  (( _ : Int128) ≠ _)  := reduceBinPred ``Ne 3 (. ≠ .)\ndsimproc [simp, seval] reduceBEq  (( _ : Int128) == _)  := reduceBoolPred ``BEq.beq 4 (. == .)\ndsimproc [simp, seval] reduceBNe  (( _ : Int128) != _)  := reduceBoolPred ``bne 4 (. != .)\n\ndsimproc [simp, seval] reduceOfIntLE (ofIntLE _ _ _) := fun e => do\n  unless e.isAppOfArity ``ofIntLE 3 do return .continue\n  let some value ← Int.fromExpr? e.appFn!.appFn!.appArg! 
| return .continue\n  let value := ofInt value\n  return .done <| toExpr value\n\ndsimproc [simp, seval] reduceOfNat (ofNat _) := fun e => do\n  unless e.isAppOfArity ``ofNat 1 do return .continue\n  let some value ← Nat.fromExpr? e.appArg! | return .continue\n  let value := ofNat value\n  return .done <| toExpr value\n\ndsimproc [simp, seval] reduceOfInt (ofInt _) := fun e => do\n  unless e.isAppOfArity ``ofInt 1 do return .continue\n  let some value ← Int.fromExpr? e.appArg! | return .continue\n  let value := ofInt value\n  return .done <| toExpr value\n\ndsimproc [simp, seval] reduceToInt (toInt _) := fun e => do\n  unless e.isAppOfArity ``toInt 1 do return .continue\n  let some v ← (fromExpr e.appArg!) | return .continue\n  let n := toInt v\n  return .done <| toExpr n\n\ndsimproc [simp, seval] reduceToNatClampNeg (toNatClampNeg _) := fun e => do\n  unless e.isAppOfArity ``toNatClampNeg 1 do return .continue\n  let some v ← (fromExpr e.appArg!) | return .continue\n  let n := toNatClampNeg v\n  return .done <| toExpr n\n\n/-- Return `.done` for Int values. We don't want to unfold in the symbolic evaluator. -/\ndsimproc [seval] isValue ((OfNat.ofNat _ : Int128)) := fun e => do\n  unless (e.isAppOfArity ``OfNat.ofNat 3) do return .continue\n  return .done e\n\nend Int128\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Lean/Tactic/Simp/BuiltinSimpProcs/UInt.lean",
    "content": "import Lean\nimport Hax.MissingLean.Lean.ToExpr\n\nnamespace UInt128\n\nopen Lean Meta Simp\n\ndef fromExpr (e : Expr) : SimpM (Option UInt128) := do\n  let some (n, _) ← getOfNatValue? e ``UInt128 | return none\n  return ofNat n\n\n@[inline] def reduceBin (declName : Name) (arity : Nat) (op : UInt128 → UInt128 → UInt128) (e : Expr) : SimpM DStep := do\n  unless e.isAppOfArity declName arity do return .continue\n  let some n ← (fromExpr e.appFn!.appArg!) | return .continue\n  let some m ← (fromExpr e.appArg!) | return .continue\n  return .done <| toExpr (op n m)\n\n@[inline] def reduceBinPred (declName : Name) (arity : Nat) (op : UInt128 → UInt128 → Bool) (e : Expr) : SimpM Step := do\n  unless e.isAppOfArity declName arity do return .continue\n  let some n ← (fromExpr e.appFn!.appArg!) | return .continue\n  let some m ← (fromExpr e.appArg!) | return .continue\n  evalPropStep e (op n m)\n\n@[inline] def reduceBoolPred (declName : Name) (arity : Nat) (op : UInt128 → UInt128 → Bool) (e : Expr) : SimpM DStep := do\n  unless e.isAppOfArity declName arity do return .continue\n  let some n ← (fromExpr e.appFn!.appArg!) | return .continue\n  let some m ← (fromExpr e.appArg!) | return .continue\n  return .done <| toExpr (op n m)\n\ndsimproc [simp, seval] reduceAdd ((_ + _ : UInt128)) := reduceBin ``HAdd.hAdd 6 (· + ·)\ndsimproc [simp, seval] reduceMul ((_ * _ : UInt128)) := reduceBin ``HMul.hMul 6 (· * ·)\ndsimproc [simp, seval] reduceSub ((_ - _ : UInt128)) := reduceBin ``HSub.hSub 6 (· - ·)\ndsimproc [simp, seval] reduceDiv ((_ / _ : UInt128)) := reduceBin ``HDiv.hDiv 6 (· / ·)\ndsimproc [simp, seval] reduceMod ((_ % _ : UInt128)) := reduceBin ``HMod.hMod 6 (· % ·)\n\nsimproc [simp, seval] reduceLT  (( _ : UInt128) < _)  := reduceBinPred ``LT.lt 4 (. < .)\nsimproc [simp, seval] reduceLE  (( _ : UInt128) ≤ _)  := reduceBinPred ``LE.le 4 (. ≤ .)\nsimproc [simp, seval] reduceGT  (( _ : UInt128) > _)  := reduceBinPred ``GT.gt 4 (. 
> .)\nsimproc [simp, seval] reduceGE  (( _ : UInt128) ≥ _)  := reduceBinPred ``GE.ge 4 (. ≥ .)\nsimproc [simp, seval] reduceEq  (( _ : UInt128) = _)  := reduceBinPred ``Eq 3 (. = .)\nsimproc [simp, seval] reduceNe  (( _ : UInt128) ≠ _)  := reduceBinPred ``Ne 3 (. ≠ .)\ndsimproc [simp, seval] reduceBEq  (( _ : UInt128) == _)  := reduceBoolPred ``BEq.beq 4 (. == .)\ndsimproc [simp, seval] reduceBNe  (( _ : UInt128) != _)  := reduceBoolPred ``bne 4 (. != .)\n\ndsimproc [simp, seval] reduceOfNatLT (ofNatLT _ _) := fun e => do\n  unless e.isAppOfArity ``ofNatLT 2 do return .continue\n  let some value ← Nat.fromExpr? e.appFn!.appArg! | return .continue\n  let value := ofNat value\n  return .done <| toExpr value\n\ndsimproc [simp, seval] reduceOfNat (ofNat _) := fun e => do\n  unless e.isAppOfArity ``ofNat 1 do return .continue\n  let some value ← Nat.fromExpr? e.appArg! | return .continue\n  let value := ofNat value\n  return .done <| toExpr value\n\ndsimproc [simp, seval] reduceToNat (toNat _) := fun e => do\n  unless e.isAppOfArity ``toNat 1 do return .continue\n  let some v ← (fromExpr e.appArg!) | return .continue\n  let n := toNat v\n  return .done <| toExpr n\n\n/-- Return `.done` for UInt values. We don't want to unfold in the symbolic evaluator. -/\ndsimproc [seval] isValue ((OfNat.ofNat _ : UInt128)) := fun e => do\n  unless (e.isAppOfArity ``OfNat.ofNat 3) do return .continue\n  return .done e\n\nend UInt128\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Lean/ToExpr.lean",
    "content": "import Lean\nimport Hax.MissingLean.Init.Data.UInt.Basic\nimport Hax.MissingLean.Init.Data.SInt.Basic_Int128\n\nopen Lean in\ninstance : ToExpr UInt128 where\n  toTypeExpr := mkConst ``UInt128\n  toExpr a :=\n    let r := mkRawNatLit a.toNat\n    mkApp3 (.const ``OfNat.ofNat [0]) (mkConst ``UInt128) r\n      (.app (.const ``UInt128.instOfNat []) r)\n\nopen Lean in\ninstance : ToExpr Int128 where\n  toTypeExpr := mkConst ``Int128\n  toExpr i := if 0 ≤ i then\n    mkNat i.toNatClampNeg\n  else\n    mkApp3 (.const ``Neg.neg [0]) (.const ``Int128 []) (.const ``Int128.instNeg [])\n      (mkNat (-(i.toInt)).toNat)\nwhere\n  mkNat (n : Nat) : Expr :=\n    let r := mkRawNatLit n\n    mkApp3 (.const ``OfNat.ofNat [0]) (.const ``Int128 []) r\n        (.app (.const ``Int128.instOfNat []) r)\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Std/Do/PostCond.lean",
    "content": "import Std.Do.PostCond\n\nnamespace Std.Do\nuniverse u\nvariable {ps : PostShape.{u}} {α σ ε : Type u}\n\ntheorem PostCond.entails.of_left_entails\n    {p q : α → Assertion ps} {x : ExceptConds ps}  (h : ∀ a, p a ⊢ₛ q a) :\n    (p, x) ⊢ₚ (q, x) := by simp [h]\n\nend Std.Do\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Std/Do/Triple/Basic.lean",
    "content": "import Std.Do.Triple.Basic\n\nnamespace Std.Do\n\ntheorem Triple.of_entails_left {m} {ps : PostShape} {β: Type} [Monad m] [WPMonad m ps]\n    (P Q : Assertion ps) (R : PostCond β ps) (x : m β) (hPR : ⦃P⦄ x ⦃R⦄) (hPQ : Q ⊢ₛ P) : ⦃Q⦄ x ⦃R⦄ :=\n  SPred.entails.trans hPQ hPR\n\ntheorem Triple.of_entails_right {m} {ps : PostShape} {β: Type} [Monad m] [WPMonad m ps]\n    (P : Assertion ps) (Q R : PostCond β ps) (x : m β) (hPR : ⦃P⦄ x ⦃Q⦄) (hPQ : Q ⊢ₚ R) : ⦃P⦄ x ⦃R⦄ :=\n  SPred.entails.trans hPR (PredTrans.mono _ _ _ hPQ)\n\ntheorem Triple.map {m} {ps : PostShape} {α β} [Monad m] [WPMonad m ps] (f : α → β)\n    (x : m α) (P : Assertion ps) (Q : PostCond β ps) :\n    ⦃P⦄ (f <$> x) ⦃Q⦄ ↔ ⦃P⦄ x ⦃(fun a => Q.fst (f a), Q.snd)⦄ := by rw [Triple, WP.map]; rfl\n\nend Std.Do\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean/Std/Do/Triple/SpecLemmas.lean",
    "content": "import Std.Do.Triple.Basic\nimport Hax.MissingLean.Std.Do.Triple.Basic\nimport Hax.MissingLean.Init.While\nimport Hax.MissingLean.Std.Do.PostCond\n\nnamespace Std.Do\nopen Lean\n\n@[spec]\ntheorem Spec.forIn_monoLoopCombinator {m} {ps : PostShape} {β: Type}\n    [Monad m] [∀ α, Order.CCPO (m α)] [WPMonad m ps]\n    (loop : Loop)\n    (init : β)\n    (f : Unit → β → m (ForInStep β)) [Loop.MonoLoopCombinator f]\n    (inv : β → Prop)\n    (termination : β -> Nat)\n    (post : β → Prop)\n    (step : ∀ b,\n      ⦃⌜ inv b ⌝⦄\n        f () b\n      ⦃⇓ r => match r with\n        | .yield b' => spred(⌜ termination b' < termination b ⌝ ∧ ⌜ inv b' ⌝)\n        | .done b' => ⌜ post b' ⌝⦄) :\n    ⦃⌜ inv init ⌝⦄ Loop.MonoLoopCombinator.forIn loop init f ⦃⇓ b => ⌜ post b ⌝⦄ := by\n  unfold Loop.MonoLoopCombinator.forIn Loop.MonoLoopCombinator.forIn.loop Loop.loopCombinator\n  apply Triple.bind\n  · apply step\n  · rintro (b | b)\n    · refine Triple.pure b ?_\n      exact SPred.entails.refl _\n    · apply SPred.imp_elim\n      apply SPred.pure_elim'\n      intro h\n      rw [SPred.entails_true_intro]\n      apply Spec.forIn_monoLoopCombinator loop _ f inv termination post step\ntermination_by termination init\ndecreasing_by exact h\n\n@[spec]\ntheorem Spec.MonoLoopCombinator.while_loop {m} {ps : PostShape} {β: Type}\n    [Monad m] [∀ α, Order.CCPO (m α)] [WPMonad m ps]\n    [∀ f : Unit → β → m (ForInStep β), Loop.MonoLoopCombinator f]\n    (init : β)\n    (loop : Loop)\n    (cond: β → Bool)\n    (body : β → m β)\n    (inv: β → Prop)\n    (termination : β → Nat)\n    (step :\n      ∀ (b : β), cond b →\n        ⦃⌜ inv b ⌝⦄\n          body b\n        ⦃⇓ b' => spred(⌜ termination b' < termination b ⌝ ∧ ⌜ inv b' ⌝)⦄ ) :\n    ⦃⌜ inv init ⌝⦄\n      Loop.MonoLoopCombinator.while_loop loop cond init body\n    ⦃⇓ b => ⌜ inv b ∧ ¬ cond b ⌝⦄ := by\n  apply Spec.forIn_monoLoopCombinator\n  intro b\n  by_cases hb : cond b\n  · simpa [hb, Triple.map] using step b hb\n  · simp 
[hb, Triple.pure]\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/MissingLean.lean",
    "content": "import Hax.MissingLean.Init.Data.Array.Lemmas\nimport Hax.MissingLean.Init.Data.BitVec.Basic\nimport Hax.MissingLean.Init.Data.Nat.Div.Basic\nimport Hax.MissingLean.Init.Data.UInt.Basic\nimport Hax.MissingLean.Init.Data.UInt.Lemmas\nimport Hax.MissingLean.Init.Data.SInt.Basic\nimport Hax.MissingLean.Init.Data.SInt.Lemmas\nimport Hax.MissingLean.Init.Data.Vector.Basic\nimport Hax.MissingLean.Init.Data.Nat.MinMax\nimport Hax.MissingLean.Init.Data.Int.DivMod.Lemmas\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/Tactic/HaxBVDecide.lean",
    "content": "import Std.Tactic.BVDecide\n\nmacro \"hax_bv_decide\" c:Lean.Parser.Tactic.optConfig : tactic => `(tactic| (\n  simp only [hax_bv_decide] at *; bv_decide $c\n))\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/Tactic/HaxConstructPure.lean",
    "content": "import Hax.Tactic.HaxZify\nimport Hax.Tactic.HaxMvcgen\nimport Qq\n\nopen Lean Elab Tactic Meta Qq Std.Do\n\n/-- This tactic is supposed to be run on results of `mvcgen` where the postcondition is of the form\n`⇓ r => r = ?mvar`. This tactic will analyse the goals produced by `mvcgen` and instantiate the\nmetavariable accordingly.\n\nFor example, `mvcgen` might produce a goal of the form\n```\nx r : Int32\nh : r.toInt = x.toInt + x.toInt\n⊢ ((r.toInt == 0) = true) = ?mvar\n```\nThen this tactic should instantiate `?mvar` with `((x.toInt + x.toInt == 0) = true)`\n-/\ndef haxConstructPure (mvarId : MVarId) : TacticM Unit := do\n  -- Find goals that contain `mvar`\n  let allGoals ← getGoals\n  let goals ← allGoals.filterM\n    fun goal => do pure ((← goal.getType).findMVar? (· == mvarId)).isSome\n  if (goals.length > 1) then\n    throwError m!\"hax_construct_pure: `mvcgen generated more than one goal containing the \\\n      metavariable. This is currently unsupported. Try to remove if-then-else and match-constructs.\"\n  let [goal] := goals\n    | throwError m!\"hax_construct_pure: No goal contains the metavariable.\"\n\n  goal.withContext do\n    -- Zify:\n    let zifyVars ← collectZifyVars\n    let goal ← haxZify goal (fun decl => zifyVars.contains decl.fvarId)\n    trace `Hax.hax_construct_pure fun () => m!\"Goal after `zify`: {goal}\"\n    -- Subst:\n    let goal ← substVars goal\n    trace `Hax.hax_construct_pure fun () => m!\"Goal after `subst`: {goal}\"\n    -- Assign the meta-variable by reflexivity\n    withAssignableSyntheticOpaque goal.applyRfl\n    pruneSolvedGoals\nwhere\n  /-- Collect all machine integer variables that should be converted into integers. We want to\n  collect all variables `x` with a hypothesis of the form `x.toInt = ...` here. Then,\n  `hax_zify` will convert this into a hypothesis of the form `y = ...` for a new integer variable\n  `y`, which we can ultimately eliminate using `subst_vars`. 
-/\n  collectZifyVars : MetaM (Std.HashSet FVarId) := do\n    let lctx ← getLCtx\n    let mut zifyVars := Std.HashSet.emptyWithCapacity lctx.size\n    for decl in lctx do\n      if !decl.type.isEq then continue\n      let lhs := decl.type.getArg! 1\n      if !haxZifyTypes.any (fun (_, toInt, _) => lhs.isAppOfArity toInt 1) then continue\n      let some fvarId := (lhs.getArg! 0).fvarId?\n        | continue\n      zifyVars := zifyVars.insert fvarId\n    return zifyVars\n\n/-- The `hax_construct_pure` tactic should be applied to goals of the form\n```\n { p // ⦃⌜ ... ⌝⦄ ... ⦃⇓ r => ⌜r = p⌝⦄ }\n```\nUnder the hood, it will use `hax_mvcgen` to generate verification conditions for the given Hoare\ntriple and then generate a suitable value for `p`. The default call to `hax_mvcgen` can be replaced\nvia the syntax `hax_construct_pure => custom_tactics`.\n -/\nsyntax (name := hax_construct_pure) \"hax_construct_pure\" (\" => \" tacticSeq)? : tactic\n\n@[tactic hax_construct_pure]\ndef elabHaxConstructPure : Tactic := fun stx => do\n  let tac ← match stx with\n  | `(tactic| hax_construct_pure => $tac:tacticSeq) => pure tac\n  | `(tactic| hax_construct_pure) => `(tacticSeq| hax_mvcgen -trivial <;> intros)\n  | _ => throwUnsupportedSyntax\n\n  let goal ← getMainGoal\n  let goalType ← goal.getType\n\n  unless goalType.isAppOf ``Subtype do\n    throwError m!\"hax_construct_pure: Goal must be of the form `\\{ p // ... }` (Subtype), \\\n      but got:\\n{goalType}\"\n\n  let u ← mkFreshLevelMVar\n  let type : Q(Type) ← mkFreshExprMVar (mkSort u) MetavarKind.natural Name.anonymous\n  let mvarP : Q($type → Prop) ← mkFreshExprMVar q($type → Prop)\n  let mvarVal : Q($type) ← mkFreshExprSyntheticOpaqueMVar type\n  replaceMainGoal (← goal.apply q(@Subtype.mk $type $mvarP $mvarVal))\n  evalTactic (← `(tactic| intros))\n  evalTactic tac\n  let goals ← getGoals\n  trace `Hax.hax_construct_pure fun () => m!\"Goals after `mvcgen`: {goals}\"\n  haxConstructPure mvarVal.mvarId!\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/Tactic/HaxMvcgen.lean",
    "content": "import Hax.Tactic.SpecSet\nimport Hax.Tactic.Init\n\nnamespace Hax.HaxMvcgen\n\nopen Lean Elab Syntax Parser Tactic\n\ndef mkMvcgenCall (args: Array Name) (cfgStx : Syntax) (argStx : Syntax) : CoreM Syntax := do\n  let cfgStx : TSyntax `Lean.Parser.Tactic.optConfig := .mk cfgStx\n  let mut elems := argStx[1].getArgs.getSepElems\n  for arg in args do\n    elems := elems.push\n      (Syntax.node .none ``Lean.Parser.Tactic.simpLemma #[mkNullNode, mkNullNode, mkIdent arg])\n  let argStx : TSepArray _ _ := Syntax.TSepArray.ofElems (elems.map .mk)\n  let tac := ← `(tactic| mvcgen $cfgStx [$argStx,*])\n  pure tac\n\nsyntax (name := hax_mvcgen) \"hax_mvcgen\" optConfig\n  (\" [\" withoutPosition((simpStar <|> simpErase <|> simpLemma),*,?) \"] \")? : tactic\n\n/-- A customized version of the `mvcgen` tactic. It provides `mvcgen` with additional lemmas\ngathered from `@[specset X]` annotations, where `X` is the current setting of\n`set_option hax_mvcgen.specset`. -/\n@[tactic hax_mvcgen]\ndef elabHaxMvcgen : Tactic := fun stx => do\n  let specset := hax_mvcgen.specset.get (← getOptions)\n  let cfgStx := stx[1]\n  let argStx := stx[2]\n  let extState := specSetExt.getState (← getEnv)\n  let decls := (extState.getD specset.toName {}).toArray\n  let tac ← mkMvcgenCall decls cfgStx argStx\n  Tactic.evalTactic tac\n\nend  Hax.HaxMvcgen\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/Tactic/HaxSpec.lean",
    "content": "import Lean\nimport Hax.rust_primitives.Spec\n\nnamespace Hax.Tactic.HaxSpec\n\nopen Lean Meta\n\nprivate def addContractSpec (declName : Name) (attrKind : AttributeKind) : MetaM Unit := do\n  let cinfo ← getConstInfo declName\n  let type ← instantiateMVars cinfo.type\n  forallTelescope type fun xs bodyType => do\n    let bodyType ← whnf bodyType\n    unless bodyType.isAppOf ``Spec do\n      throwError \"@[hax_spec]: expected a definition of type `Spec`, got{indentExpr bodyType}\"\n    let us := cinfo.levelParams.map mkLevelParam\n    let app := mkAppN (mkConst declName us) xs\n    let contractVal := mkProj ``Spec 2 app\n    let contractType ← inferType contractVal\n    let contractType ← deltaExpand contractType (· == declName)\n    let closedVal ← mkLambdaFVars xs contractVal\n    let closedType ← mkForallFVars xs contractType\n    let contractDeclName := declName ++ `contract\n    addDecl (.thmDecl {\n      name := contractDeclName\n      levelParams := cinfo.levelParams\n      type := closedType\n      value := closedVal\n    })\n    let specStx := mkNode ``Lean.Parser.Attr.simple #[mkIdent `spec, mkNullNode]\n    Attribute.add contractDeclName `spec specStx attrKind\n\ninitialize registerBuiltinAttribute {\n  name := `hax_spec\n  descr := \"Registers a `Spec` definition for use with `mvcgen`.\"\n  applicationTime := .afterCompilation\n  add := fun declName _stx attrKind => do\n    discard <| (addContractSpec declName attrKind).run {} {}\n}\n\nend Hax.Tactic.HaxSpec\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/Tactic/HaxZify.lean",
    "content": "import Lean\nimport Hax.rust_primitives.USize64\n\nopen Lean Elab Tactic Meta\n\n/-- List of types supported by `hax_zify` -/\ndef haxZifyTypes := [\n  (``Int8, ``Int8.toInt, ``Int8.ofInt_eq_of_toInt_eq),\n  (``Int16, ``Int16.toInt, ``Int16.ofInt_eq_of_toInt_eq),\n  (``Int32, ``Int32.toInt, ``Int32.ofInt_eq_of_toInt_eq),\n  (``Int64, ``Int64.toInt, ``Int64.ofInt_eq_of_toInt_eq),\n  (``UInt8, ``UInt8.toNat, ``UInt8.ofNat_eq_of_toNat_eq),\n  (``UInt16, ``UInt16.toNat, ``UInt16.ofNat_eq_of_toNat_eq),\n  (``UInt64, ``UInt64.toNat, ``UInt64.ofNat_eq_of_toNat_eq),\n  (``USize64, ``USize64.toNat, ``USize64.ofNat_eq_of_toNat_eq),\n]\n\n/--\nReplaces a variable of machine integer type by a variable of integer type. This roughly corresponds\nto the application of the following tactics:\n```\ngeneralize h : var.toInt = x at *\nreplace h := Int32.ofInt_eq_of_toInt_eq h\nsubst h\n```\n-/\ndef haxZifySingle (mvarId : MVarId) (var : FVarId) (toInt ofInt_eq_of_toInt_eq : Name) : MetaM MVarId:= do\n  mvarId.withContext do\n    -- Generalize:\n    let arg := {expr := ← mkAppM toInt #[mkFVar var], hName? := `h}\n    let (_, newVars, mvarId) ← mvarId.generalizeHyp #[arg] ((← getLocalHyps).map (·.fvarId!))\n    mvarId.withContext do\n      unless newVars.size == 2 do\n        Lean.Meta.throwTacticEx `hax_zify mvarId (m!\"expected two variables, got {newVars.size}\")\n      -- Replace:\n      let {mvarId, fvarId, ..} ← mvarId.replace newVars[1]! (← mkAppM ofInt_eq_of_toInt_eq #[mkFVar newVars[1]!])\n      -- Subst:\n      let (_, mvarId) ← substCore mvarId fvarId (symm := true)\n      pure mvarId\n\n/-- Replaces all variables of machine integer type by variables of integer type. 
-/\ndef haxZify (mvarId : MVarId) (declFilter : LocalDecl → Bool := fun _ => true) : MetaM MVarId := do\n  mvarId.withContext do\n    let mut mvarId := mvarId\n    let lctx ← getLCtx\n    for decl in lctx do\n      if decl.isImplementationDetail then continue\n      if !declFilter decl then continue\n      let some (_, toInt, ofInt_eq_of_toInt_eq) ←\n          haxZifyTypes.findM? fun (ty, _, _) => (isDefEq decl.type (mkConst ty))\n        | continue\n      let var := decl.fvarId\n      mvarId ← haxZifySingle mvarId var toInt ofInt_eq_of_toInt_eq\n    pure mvarId\n\n/-- Replaces all variables of machine integer type in the current goal by variables of integer type. -/\nelab \"hax_zify\" : tactic =>\n  withMainContext do\n    replaceMainGoal [(← haxZify (← getMainGoal))]\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/Tactic/Init.lean",
    "content": "import Lean\n\ninitialize do pure () <*\n  Lean.Meta.registerSimpAttr `hax_bv_decide \"simp rules for hax-specific bv_decide preprocessing\"\n\ninitialize Lean.registerTraceClass `Hax.hax_construct_pure\n\n\nregister_option hax_mvcgen.specset : String := {\n  defValue := \"bv\"\n  descr    := \"Identifier of the set of specs used for `hax_mvcgen`\"\n}\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/Tactic/SpecSet.lean",
    "content": "import Lean\n\nopen Lean Elab Std\n\nabbrev SpecSetMap := HashMap Name (HashSet Name)\n\nstructure SpecSetEntry where\n  specSet : Name\n  decl : Name\n\n/-- Environment extension to store spec sets, i.e., sets of declarations to use with\n`hax_mvcgen`. -/\ninitialize specSetExt : SimplePersistentEnvExtension SpecSetEntry SpecSetMap ←\n  registerSimplePersistentEnvExtension {\n    name := `specSetExt\n    addEntryFn := fun state {specSet, decl} =>\n      let set := state.getD specSet {}\n      state.insert specSet (set.insert decl)\n    addImportedFn := fun states =>\n      states.foldl\n        (fun acc st =>\n          st.foldl\n            (fun acc {specSet, decl} =>\n              let merged := (acc.getD specSet {}).insert decl\n              acc.insert specSet merged)\n            acc)\n        {}\n  }\n\ninitialize\n  registerBuiltinAttribute {\n    name  := `specset\n    descr := \"Add a declaration to a given spec set for `hax_mvcgen`. The spec set can be activated\n      via `set_option hax_mvcgen.specset`\"\n    add   := fun decl stx kind => do\n      setEnv $ specSetExt.addEntry (← getEnv) {specSet := stx[1][0].getId, decl}\n  }\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/Tactic.lean",
    "content": "import Hax.Tactic.HaxBVDecide\nimport Hax.Tactic.HaxConstructPure\nimport Hax.Tactic.HaxMvcgen\nimport Hax.Tactic.HaxSpec\nimport Hax.Tactic.HaxZify\nimport Hax.Tactic.Init\nimport Hax.Tactic.SpecSet\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/core_models.lean",
    "content": "\n-- Experimental lean backend for Hax\n-- The Hax prelude library can be found in hax/proof-libs/lean\nimport Hax.core_models.prologue\nimport Hax.Tactic.HaxSpec\nimport Std.Tactic.Do\nimport Std.Do.Triple\nimport Std.Tactic.Do.Syntax\nopen Std.Do\nopen Std.Tactic\n\nset_option mvcgen.warning false\nset_option linter.unusedVariables false\n\n\nnamespace core_models.array\n\nstructure TryFromSliceError where\n  -- no fields\n\n@[spec]\ndef Impl_23.as_slice (T : Type) (N : usize) (s : (RustArray T N)) :\n    RustM (RustSlice T) := do\n  (rust_primitives.slice.array_as_slice T (N) s)\n\nend core_models.array\n\n\nnamespace core_models.array.iter\n\nstructure IntoIter (T : Type) (N : usize) where\n  _0 : (rust_primitives.sequence.Seq T)\n\nend core_models.array.iter\n\n\nnamespace core_models.borrow\n\nclass Borrow.AssociatedTypes (Self : Type) (Borrowed : Type) where\n\nclass Borrow (Self : Type) (Borrowed : Type)\n  [associatedTypes : outParam (Borrow.AssociatedTypes (Self : Type) (Borrowed :\n      Type))]\n  where\n  borrow (Self) (Borrowed) : (Self -> RustM Borrowed)\n\nend core_models.borrow\n\n\nnamespace core_models.clone\n\nclass Clone.AssociatedTypes (Self : Type) where\n\nclass Clone (Self : Type)\n  [associatedTypes : outParam (Clone.AssociatedTypes (Self : Type))]\n  where\n  clone (Self) : (Self -> RustM Self)\n\n@[reducible] instance Impl.AssociatedTypes (T : Type) :\n  Clone.AssociatedTypes T\n  where\n\ninstance Impl (T : Type) : Clone T where\n  clone := fun (self : T) => do (pure self)\n\nend core_models.clone\n\n\nnamespace core_models.cmp\n\nclass PartialEq.AssociatedTypes (Self : Type) (Rhs : Type) where\n\nclass PartialEq (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (PartialEq.AssociatedTypes (Self : Type) (Rhs :\n      Type))]\n  where\n  eq (Self) (Rhs) : (Self -> Rhs -> RustM Bool)\n\nclass Eq.AssociatedTypes (Self : Type) where\n  [trait_constr_Eq_i0 : PartialEq.AssociatedTypes Self Self]\n\nattribute 
[instance_reducible, instance] Eq.AssociatedTypes.trait_constr_Eq_i0\n\nclass Eq (Self : Type)\n  [associatedTypes : outParam (Eq.AssociatedTypes (Self : Type))]\n  where\n  [trait_constr_Eq_i0 : PartialEq Self Self]\n\nattribute [instance_reducible, instance] Eq.trait_constr_Eq_i0\n\ninductive Ordering : Type\n| Less : Ordering\n| Equal : Ordering\n| Greater : Ordering\n\ndef Ordering.Less.AnonConst : isize := (-1 : isize)\n\ndef Ordering.Equal.AnonConst : isize := (0 : isize)\n\ndef Ordering.Greater.AnonConst : isize := (1 : isize)\n\n@[spec]\ndef Ordering_cast_to_repr (x : Ordering) : RustM isize := do\n  match x with\n    | (Ordering.Less ) => do (pure Ordering.Less.AnonConst)\n    | (Ordering.Equal ) => do (pure Ordering.Equal.AnonConst)\n    | (Ordering.Greater ) => do (pure Ordering.Greater.AnonConst)\n\nclass Neq.AssociatedTypes (Self : Type) (Rhs : Type) where\n\nclass Neq (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (Neq.AssociatedTypes (Self : Type) (Rhs : Type))]\n  where\n  neq (Self) (Rhs) : (Self -> Rhs -> RustM Bool)\n\n@[reducible] instance Impl.AssociatedTypes\n  (T : Type)\n  [trait_constr_Impl_associated_type_i0 : PartialEq.AssociatedTypes T T]\n  [trait_constr_Impl_i0 : PartialEq T T ] :\n  Neq.AssociatedTypes T T\n  where\n\ninstance Impl\n  (T : Type)\n  [trait_constr_Impl_associated_type_i0 : PartialEq.AssociatedTypes T T]\n  [trait_constr_Impl_i0 : PartialEq T T ] :\n  Neq T T\n  where\n  neq := fun (self : T) (y : T) => do ((← (PartialEq.eq T T self y)) ==? 
false)\n\nstructure Reverse (T : Type) where\n  _0 : T\n\n@[reducible] instance Impl_3.AssociatedTypes\n  (T : Type)\n  [trait_constr_Impl_3_associated_type_i0 : PartialEq.AssociatedTypes T T]\n  [trait_constr_Impl_3_i0 : PartialEq T T ] :\n  PartialEq.AssociatedTypes (Reverse T) (Reverse T)\n  where\n\ninstance Impl_3\n  (T : Type)\n  [trait_constr_Impl_3_associated_type_i0 : PartialEq.AssociatedTypes T T]\n  [trait_constr_Impl_3_i0 : PartialEq T T ] :\n  PartialEq (Reverse T) (Reverse T)\n  where\n  eq := fun (self : (Reverse T)) (other : (Reverse T)) => do\n    (PartialEq.eq T T (Reverse._0 other) (Reverse._0 self))\n\n@[reducible] instance Impl_4.AssociatedTypes\n  (T : Type)\n  [trait_constr_Impl_4_associated_type_i0 : Eq.AssociatedTypes T]\n  [trait_constr_Impl_4_i0 : Eq T ] :\n  Eq.AssociatedTypes (Reverse T)\n  where\n\ninstance Impl_4\n  (T : Type)\n  [trait_constr_Impl_4_associated_type_i0 : Eq.AssociatedTypes T]\n  [trait_constr_Impl_4_i0 : Eq T ] :\n  Eq (Reverse T)\n  where\n\n@[reducible] instance Impl_6.AssociatedTypes :\n  PartialEq.AssociatedTypes u8 u8\n  where\n\ninstance Impl_6 : PartialEq u8 u8 where\n  eq := fun (self : u8) (other : u8) => do (self ==? other)\n\n@[reducible] instance Impl_7.AssociatedTypes : Eq.AssociatedTypes u8 where\n\ninstance Impl_7 : Eq u8 where\n\n@[reducible] instance Impl_8.AssociatedTypes :\n  PartialEq.AssociatedTypes i8 i8\n  where\n\ninstance Impl_8 : PartialEq i8 i8 where\n  eq := fun (self : i8) (other : i8) => do (self ==? other)\n\n@[reducible] instance Impl_9.AssociatedTypes : Eq.AssociatedTypes i8 where\n\ninstance Impl_9 : Eq i8 where\n\n@[reducible] instance Impl_10.AssociatedTypes :\n  PartialEq.AssociatedTypes u16 u16\n  where\n\ninstance Impl_10 : PartialEq u16 u16 where\n  eq := fun (self : u16) (other : u16) => do (self ==? 
other)\n\n@[reducible] instance Impl_11.AssociatedTypes : Eq.AssociatedTypes u16 where\n\ninstance Impl_11 : Eq u16 where\n\n@[reducible] instance Impl_12.AssociatedTypes :\n  PartialEq.AssociatedTypes i16 i16\n  where\n\ninstance Impl_12 : PartialEq i16 i16 where\n  eq := fun (self : i16) (other : i16) => do (self ==? other)\n\n@[reducible] instance Impl_13.AssociatedTypes : Eq.AssociatedTypes i16 where\n\ninstance Impl_13 : Eq i16 where\n\n@[reducible] instance Impl_14.AssociatedTypes :\n  PartialEq.AssociatedTypes u32 u32\n  where\n\ninstance Impl_14 : PartialEq u32 u32 where\n  eq := fun (self : u32) (other : u32) => do (self ==? other)\n\n@[reducible] instance Impl_15.AssociatedTypes : Eq.AssociatedTypes u32 where\n\ninstance Impl_15 : Eq u32 where\n\n@[reducible] instance Impl_16.AssociatedTypes :\n  PartialEq.AssociatedTypes i32 i32\n  where\n\ninstance Impl_16 : PartialEq i32 i32 where\n  eq := fun (self : i32) (other : i32) => do (self ==? other)\n\n@[reducible] instance Impl_17.AssociatedTypes : Eq.AssociatedTypes i32 where\n\ninstance Impl_17 : Eq i32 where\n\n@[reducible] instance Impl_18.AssociatedTypes :\n  PartialEq.AssociatedTypes u64 u64\n  where\n\ninstance Impl_18 : PartialEq u64 u64 where\n  eq := fun (self : u64) (other : u64) => do (self ==? other)\n\n@[reducible] instance Impl_19.AssociatedTypes : Eq.AssociatedTypes u64 where\n\ninstance Impl_19 : Eq u64 where\n\n@[reducible] instance Impl_20.AssociatedTypes :\n  PartialEq.AssociatedTypes i64 i64\n  where\n\ninstance Impl_20 : PartialEq i64 i64 where\n  eq := fun (self : i64) (other : i64) => do (self ==? other)\n\n@[reducible] instance Impl_21.AssociatedTypes : Eq.AssociatedTypes i64 where\n\ninstance Impl_21 : Eq i64 where\n\n@[reducible] instance Impl_22.AssociatedTypes :\n  PartialEq.AssociatedTypes u128 u128\n  where\n\ninstance Impl_22 : PartialEq u128 u128 where\n  eq := fun (self : u128) (other : u128) => do (self ==? 
other)\n\n@[reducible] instance Impl_23.AssociatedTypes : Eq.AssociatedTypes u128 where\n\ninstance Impl_23 : Eq u128 where\n\n@[reducible] instance Impl_24.AssociatedTypes :\n  PartialEq.AssociatedTypes i128 i128\n  where\n\ninstance Impl_24 : PartialEq i128 i128 where\n  eq := fun (self : i128) (other : i128) => do (self ==? other)\n\n@[reducible] instance Impl_25.AssociatedTypes : Eq.AssociatedTypes i128 where\n\ninstance Impl_25 : Eq i128 where\n\n@[reducible] instance Impl_26.AssociatedTypes :\n  PartialEq.AssociatedTypes usize usize\n  where\n\ninstance Impl_26 : PartialEq usize usize where\n  eq := fun (self : usize) (other : usize) => do (self ==? other)\n\n@[reducible] instance Impl_27.AssociatedTypes : Eq.AssociatedTypes usize where\n\ninstance Impl_27 : Eq usize where\n\n@[reducible] instance Impl_28.AssociatedTypes :\n  PartialEq.AssociatedTypes isize isize\n  where\n\ninstance Impl_28 : PartialEq isize isize where\n  eq := fun (self : isize) (other : isize) => do (self ==? other)\n\n@[reducible] instance Impl_29.AssociatedTypes : Eq.AssociatedTypes isize where\n\ninstance Impl_29 : Eq isize where\n\nend core_models.cmp\n\n\nnamespace core_models.convert\n\nclass Into.AssociatedTypes (Self : Type) (T : Type) where\n\nclass Into (Self : Type) (T : Type)\n  [associatedTypes : outParam (Into.AssociatedTypes (Self : Type) (T : Type))]\n  where\n  into (Self) (T) : (Self -> RustM T)\n\nclass From.AssociatedTypes (Self : Type) (T : Type) where\n\nclass From (Self : Type) (T : Type)\n  [associatedTypes : outParam (From.AssociatedTypes (Self : Type) (T : Type))]\n  where\n  _from (Self) (T) : (T -> RustM Self)\n\n@[reducible] instance Impl.AssociatedTypes\n  (T : Type)\n  (U : Type)\n  [trait_constr_Impl_associated_type_i0 : From.AssociatedTypes U T]\n  [trait_constr_Impl_i0 : From U T ] :\n  Into.AssociatedTypes T U\n  where\n\ninstance Impl\n  (T : Type)\n  (U : Type)\n  [trait_constr_Impl_associated_type_i0 : From.AssociatedTypes U T]\n  
[trait_constr_Impl_i0 : From U T ] :\n  Into T U\n  where\n  into := fun (self : T) => do (From._from U T self)\n\nstructure Infallible where\n  -- no fields\n\n@[reducible] instance Impl_3.AssociatedTypes (T : Type) :\n  From.AssociatedTypes T T\n  where\n\ninstance Impl_3 (T : Type) : From T T where\n  _from := fun (x : T) => do (pure x)\n\nclass AsRef.AssociatedTypes (Self : Type) (T : Type) where\n\nclass AsRef (Self : Type) (T : Type)\n  [associatedTypes : outParam (AsRef.AssociatedTypes (Self : Type) (T : Type))]\n  where\n  as_ref (Self) (T) : (Self -> RustM T)\n\n@[reducible] instance Impl_4.AssociatedTypes (T : Type) :\n  AsRef.AssociatedTypes T T\n  where\n\ninstance Impl_4 (T : Type) : AsRef T T where\n  as_ref := fun (self : T) => do (pure self)\n\nend core_models.convert\n\n\nnamespace core_models.default\n\nclass Default.AssociatedTypes (Self : Type) where\n\nclass Default (Self : Type)\n  [associatedTypes : outParam (Default.AssociatedTypes (Self : Type))]\n  where\n  default (Self) : (rust_primitives.hax.Tuple0 -> RustM Self)\n\nend core_models.default\n\n\nnamespace core_models.f32\n\nopaque Impl.abs (x : f64) : RustM f64\n\nend core_models.f32\n\n\nnamespace core_models.fmt\n\nstructure Error where\n  -- no fields\n\nstructure Formatter where\n  -- no fields\n\nstructure Arguments where\n  _0 : rust_primitives.hax.Tuple0\n\nend core_models.fmt\n\n\nnamespace core_models.fmt.rt\n\nopaque ArgumentType : Type\n\nstructure Argument where\n  ty : ArgumentType\n\nopaque Impl.new_display (T : Type) (x : T) : RustM Argument\n\nopaque Impl.new_debug (T : Type) (x : T) : RustM Argument\n\nopaque Impl.new_lower_hex (T : Type) (x : T) : RustM Argument\n\nopaque Impl_1.new_binary (T : Type) (x : T) : RustM Argument\n\nopaque Impl_1.new_const (T : Type) (U : Type) (x : T) (y : U) :\n    RustM core_models.fmt.Arguments\n\nopaque Impl_1.new_v1 (T : Type) (U : Type) (V : Type) (W : Type)\n    (x : T)\n    (y : U)\n    (z : V)\n    (t : W) :\n    RustM 
core_models.fmt.Arguments\n\n@[spec]\ndef Impl_1.none (_ : rust_primitives.hax.Tuple0) :\n    RustM (RustArray Argument 0) := do\n  (pure (RustArray.ofVec #v[]))\n\nopaque Impl_1.new_v1_formatted (T : Type) (U : Type) (V : Type)\n    (x : T)\n    (y : U)\n    (z : V) :\n    RustM core_models.fmt.Arguments\n\ninductive Count : Type\n| Is : u16 -> Count\n| Param : u16 -> Count\n| Implied : Count\n\nstructure Placeholder where\n  position : usize\n  flags : u32\n  precision : Count\n  width : Count\n\nstructure UnsafeArg where\n  -- no fields\n\nend core_models.fmt.rt\n\n\nnamespace core_models.hash\n\nclass Hasher.AssociatedTypes (Self : Type) where\n\nclass Hasher (Self : Type)\n  [associatedTypes : outParam (Hasher.AssociatedTypes (Self : Type))]\n  where\n\nclass Hash.AssociatedTypes (Self : Type) where\n\nclass Hash (Self : Type)\n  [associatedTypes : outParam (Hash.AssociatedTypes (Self : Type))]\n  where\n  hash (Self)\n    (H : Type)\n    [trait_constr_hash_associated_type_i1 : Hasher.AssociatedTypes H]\n    [trait_constr_hash_i1 : Hasher H ] :\n    (Self -> H -> RustM H)\n\nend core_models.hash\n\n\nnamespace core_models.hint\n\ndef black_box (T : Type) (dummy : T) : RustM T := do (pure dummy)\n\nset_option hax_mvcgen.specset \"bv\" in\n@[hax_spec]\ndef black_box.spec (T : Type) (dummy : T) :\n    Spec\n      (requires := do pure True)\n      (ensures := fun res => do (hax_lib.prop.Impl.from_bool true))\n      (black_box (T : Type) (dummy : T)) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [black_box] <;> bv_decide\n}\n\ndef must_use (T : Type) (value : T) : RustM T := do (pure value)\n\nset_option hax_mvcgen.specset \"bv\" in\n@[hax_spec]\ndef must_use.spec (T : Type) (value : T) :\n    Spec\n      (requires := do pure True)\n      (ensures := fun res => do (hax_lib.prop.Impl.from_bool true))\n      (must_use (T : Type) (value : T)) := {\n  pureRequires := by 
hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [must_use] <;> bv_decide\n}\n\nend core_models.hint\n\n\nnamespace core_models.iter.adapters.enumerate\n\nstructure Enumerate (I : Type) where\n  iter : I\n  count : usize\n\n@[spec]\ndef Impl.new (I : Type) (iter : I) : RustM (Enumerate I) := do\n  (pure (Enumerate.mk (iter := iter) (count := (0 : usize))))\n\nend core_models.iter.adapters.enumerate\n\n\nnamespace core_models.iter.adapters.step_by\n\nstructure StepBy (I : Type) where\n  iter : I\n  step : usize\n\n@[spec]\ndef Impl.new (I : Type) (iter : I) (step : usize) : RustM (StepBy I) := do\n  (pure (StepBy.mk (iter := iter) (step := step)))\n\nend core_models.iter.adapters.step_by\n\n\nnamespace core_models.iter.adapters.map\n\nstructure Map (I : Type) (F : Type) where\n  iter : I\n  f : F\n\n@[spec]\ndef Impl.new (I : Type) (F : Type) (iter : I) (f : F) : RustM (Map I F) := do\n  (pure (Map.mk (iter := iter) (f := f)))\n\nend core_models.iter.adapters.map\n\n\nnamespace core_models.iter.adapters.take\n\nstructure Take (I : Type) where\n  iter : I\n  n : usize\n\n@[spec]\ndef Impl.new (I : Type) (iter : I) (n : usize) : RustM (Take I) := do\n  (pure (Take.mk (iter := iter) (n := n)))\n\nend core_models.iter.adapters.take\n\n\nnamespace core_models.iter.adapters.zip\n\nstructure Zip (I1 : Type) (I2 : Type) where\n  it1 : I1\n  it2 : I2\n\nend core_models.iter.adapters.zip\n\n\nnamespace core_models.marker\n\nclass Copy.AssociatedTypes (Self : Type) where\n  [trait_constr_Copy_i0 : core_models.clone.Clone.AssociatedTypes Self]\n\nattribute [instance_reducible, instance]\n  Copy.AssociatedTypes.trait_constr_Copy_i0\n\nclass Copy (Self : Type)\n  [associatedTypes : outParam (Copy.AssociatedTypes (Self : Type))]\n  where\n  [trait_constr_Copy_i0 : core_models.clone.Clone Self]\n\nattribute [instance_reducible, instance] Copy.trait_constr_Copy_i0\n\nclass Send.AssociatedTypes (Self : Type) 
where\n\nclass Send (Self : Type)\n  [associatedTypes : outParam (Send.AssociatedTypes (Self : Type))]\n  where\n\nclass Sync.AssociatedTypes (Self : Type) where\n\nclass Sync (Self : Type)\n  [associatedTypes : outParam (Sync.AssociatedTypes (Self : Type))]\n  where\n\nclass Sized.AssociatedTypes (Self : Type) where\n\nclass Sized (Self : Type)\n  [associatedTypes : outParam (Sized.AssociatedTypes (Self : Type))]\n  where\n\nclass StructuralPartialEq.AssociatedTypes (Self : Type) where\n\nclass StructuralPartialEq (Self : Type)\n  [associatedTypes : outParam (StructuralPartialEq.AssociatedTypes (Self :\n      Type))]\n  where\n\n@[reducible] instance Impl.AssociatedTypes (T : Type) :\n  Send.AssociatedTypes T\n  where\n\ninstance Impl (T : Type) : Send T where\n\n@[reducible] instance Impl_1.AssociatedTypes (T : Type) :\n  Sync.AssociatedTypes T\n  where\n\ninstance Impl_1 (T : Type) : Sync T where\n\n@[reducible] instance Impl_2.AssociatedTypes (T : Type) :\n  Sized.AssociatedTypes T\n  where\n\ninstance Impl_2 (T : Type) : Sized T where\n\n@[reducible] instance Impl_3.AssociatedTypes\n  (T : Type)\n  [trait_constr_Impl_3_associated_type_i0 :\n    core_models.clone.Clone.AssociatedTypes\n    T]\n  [trait_constr_Impl_3_i0 : core_models.clone.Clone T ] :\n  Copy.AssociatedTypes T\n  where\n\ninstance Impl_3\n  (T : Type)\n  [trait_constr_Impl_3_associated_type_i0 :\n    core_models.clone.Clone.AssociatedTypes\n    T]\n  [trait_constr_Impl_3_i0 : core_models.clone.Clone T ] :\n  Copy T\n  where\n\nstructure PhantomData (T : Type) where\n\nend core_models.marker\n\n\nnamespace core_models.mem\n\nopaque forget (T : Type) (t : T) : RustM rust_primitives.hax.Tuple0\n\nopaque forget_unsized (T : Type) (t : T) : RustM rust_primitives.hax.Tuple0\n\nopaque size_of (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM usize\n\nopaque size_of_val (T : Type) (val : T) : RustM usize\n\nopaque min_align_of (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM usize\n\nopaque 
min_align_of_val (T : Type) (val : T) : RustM usize\n\nopaque align_of (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM usize\n\nopaque align_of_val (T : Type) (val : T) : RustM usize\n\nopaque align_of_val_raw (T : Type) (val : T) : RustM usize\n\nopaque needs_drop (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM Bool\n\nopaque uninitialized (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM T\n\nopaque swap (T : Type) (x : T) (y : T) : RustM (rust_primitives.hax.Tuple2 T T)\n\nopaque replace (T : Type) (dest : T) (src : T) :\n    RustM (rust_primitives.hax.Tuple2 T T)\n\nopaque drop (T : Type) (_x : T) : RustM rust_primitives.hax.Tuple0\n\n@[spec]\ndef copy\n    (T : Type)\n    [trait_constr_copy_associated_type_i0 :\n      core_models.marker.Copy.AssociatedTypes\n      T]\n    [trait_constr_copy_i0 : core_models.marker.Copy T ]\n    (x : T) :\n    RustM T := do\n  (rust_primitives.mem.copy T x)\n\nopaque take (T : Type) (x : T) : RustM (rust_primitives.hax.Tuple2 T T)\n\nopaque transmute_copy (Src : Type) (Dst : Type) (src : Src) : RustM Dst\n\nopaque variant_count (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM usize\n\nopaque zeroed (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM T\n\nopaque transmute (Src : Type) (Dst : Type) (src : Src) : RustM Dst\n\nend core_models.mem\n\n\nnamespace core_models.mem.manually_drop\n\nstructure ManuallyDrop (T : Type) where\n  value : T\n\nend core_models.mem.manually_drop\n\n\nnamespace core_models.num.error\n\nstructure TryFromIntError where\n  _0 : rust_primitives.hax.Tuple0\n\nstructure IntErrorKind where\n  -- no fields\n\nstructure ParseIntError where\n  kind : IntErrorKind\n\nend core_models.num.error\n\n\nnamespace core_models.num\n\n@[spec]\ndef Impl_6.wrapping_add (x : u8) (y : u8) : RustM u8 := do\n  (rust_primitives.arithmetic.wrapping_add_u8 x y)\n\n@[spec]\ndef Impl_6.wrapping_sub (x : u8) (y : u8) : RustM u8 := do\n  (rust_primitives.arithmetic.wrapping_sub_u8 x y)\n\n@[spec]\ndef 
Impl_6.wrapping_mul (x : u8) (y : u8) : RustM u8 := do\n  (rust_primitives.arithmetic.wrapping_mul_u8 x y)\n\n@[spec]\ndef Impl_6.pow (x : u8) (exp : u32) : RustM u8 := do\n  (rust_primitives.arithmetic.pow_u8 x exp)\n\nopaque Impl_6.leading_zeros (x : u8) : RustM u32\n\nopaque Impl_6.ilog2 (x : u8) : RustM u32\n\n@[spec]\ndef Impl_7.wrapping_add (x : u16) (y : u16) : RustM u16 := do\n  (rust_primitives.arithmetic.wrapping_add_u16 x y)\n\n@[spec]\ndef Impl_7.wrapping_sub (x : u16) (y : u16) : RustM u16 := do\n  (rust_primitives.arithmetic.wrapping_sub_u16 x y)\n\n@[spec]\ndef Impl_7.wrapping_mul (x : u16) (y : u16) : RustM u16 := do\n  (rust_primitives.arithmetic.wrapping_mul_u16 x y)\n\n@[spec]\ndef Impl_7.pow (x : u16) (exp : u32) : RustM u16 := do\n  (rust_primitives.arithmetic.pow_u16 x exp)\n\nopaque Impl_7.leading_zeros (x : u16) : RustM u32\n\nopaque Impl_7.ilog2 (x : u16) : RustM u32\n\n@[spec]\ndef Impl_8.wrapping_add (x : u32) (y : u32) : RustM u32 := do\n  (rust_primitives.arithmetic.wrapping_add_u32 x y)\n\n@[spec]\ndef Impl_8.wrapping_sub (x : u32) (y : u32) : RustM u32 := do\n  (rust_primitives.arithmetic.wrapping_sub_u32 x y)\n\n@[spec]\ndef Impl_8.wrapping_mul (x : u32) (y : u32) : RustM u32 := do\n  (rust_primitives.arithmetic.wrapping_mul_u32 x y)\n\n@[spec]\ndef Impl_8.pow (x : u32) (exp : u32) : RustM u32 := do\n  (rust_primitives.arithmetic.pow_u32 x exp)\n\nopaque Impl_8.leading_zeros (x : u32) : RustM u32\n\nopaque Impl_8.ilog2 (x : u32) : RustM u32\n\n@[spec]\ndef Impl_9.wrapping_add (x : u64) (y : u64) : RustM u64 := do\n  (rust_primitives.arithmetic.wrapping_add_u64 x y)\n\n@[spec]\ndef Impl_9.wrapping_sub (x : u64) (y : u64) : RustM u64 := do\n  (rust_primitives.arithmetic.wrapping_sub_u64 x y)\n\n@[spec]\ndef Impl_9.wrapping_mul (x : u64) (y : u64) : RustM u64 := do\n  (rust_primitives.arithmetic.wrapping_mul_u64 x y)\n\n@[spec]\ndef Impl_9.pow (x : u64) (exp : u32) : RustM u64 := do\n  (rust_primitives.arithmetic.pow_u64 x 
exp)\n\nopaque Impl_9.leading_zeros (x : u64) : RustM u32\n\nopaque Impl_9.ilog2 (x : u64) : RustM u32\n\n@[spec]\ndef Impl_10.wrapping_add (x : u128) (y : u128) : RustM u128 := do\n  (rust_primitives.arithmetic.wrapping_add_u128 x y)\n\n@[spec]\ndef Impl_10.wrapping_sub (x : u128) (y : u128) : RustM u128 := do\n  (rust_primitives.arithmetic.wrapping_sub_u128 x y)\n\n@[spec]\ndef Impl_10.wrapping_mul (x : u128) (y : u128) : RustM u128 := do\n  (rust_primitives.arithmetic.wrapping_mul_u128 x y)\n\n@[spec]\ndef Impl_10.pow (x : u128) (exp : u32) : RustM u128 := do\n  (rust_primitives.arithmetic.pow_u128 x exp)\n\nopaque Impl_10.leading_zeros (x : u128) : RustM u32\n\nopaque Impl_10.ilog2 (x : u128) : RustM u32\n\n@[spec]\ndef Impl_11.wrapping_add (x : usize) (y : usize) : RustM usize := do\n  (rust_primitives.arithmetic.wrapping_add_usize x y)\n\n@[spec]\ndef Impl_11.wrapping_sub (x : usize) (y : usize) : RustM usize := do\n  (rust_primitives.arithmetic.wrapping_sub_usize x y)\n\n@[spec]\ndef Impl_11.wrapping_mul (x : usize) (y : usize) : RustM usize := do\n  (rust_primitives.arithmetic.wrapping_mul_usize x y)\n\n@[spec]\ndef Impl_11.pow (x : usize) (exp : u32) : RustM usize := do\n  (rust_primitives.arithmetic.pow_usize x exp)\n\nopaque Impl_11.leading_zeros (x : usize) : RustM u32\n\nopaque Impl_11.ilog2 (x : usize) : RustM u32\n\n@[spec]\ndef Impl_12.wrapping_add (x : i8) (y : i8) : RustM i8 := do\n  (rust_primitives.arithmetic.wrapping_add_i8 x y)\n\n@[spec]\ndef Impl_12.wrapping_sub (x : i8) (y : i8) : RustM i8 := do\n  (rust_primitives.arithmetic.wrapping_sub_i8 x y)\n\n@[spec]\ndef Impl_12.wrapping_mul (x : i8) (y : i8) : RustM i8 := do\n  (rust_primitives.arithmetic.wrapping_mul_i8 x y)\n\n@[spec]\ndef Impl_12.pow (x : i8) (exp : u32) : RustM i8 := do\n  (rust_primitives.arithmetic.pow_i8 x exp)\n\nopaque Impl_12.leading_zeros (x : i8) : RustM u32\n\nopaque Impl_12.ilog2 (x : i8) : RustM u32\n\n@[spec]\ndef Impl_13.wrapping_add (x : i16) (y : i16) : RustM i16 
:= do\n  (rust_primitives.arithmetic.wrapping_add_i16 x y)\n\n@[spec]\ndef Impl_13.wrapping_sub (x : i16) (y : i16) : RustM i16 := do\n  (rust_primitives.arithmetic.wrapping_sub_i16 x y)\n\n@[spec]\ndef Impl_13.wrapping_mul (x : i16) (y : i16) : RustM i16 := do\n  (rust_primitives.arithmetic.wrapping_mul_i16 x y)\n\n@[spec]\ndef Impl_13.pow (x : i16) (exp : u32) : RustM i16 := do\n  (rust_primitives.arithmetic.pow_i16 x exp)\n\nopaque Impl_13.leading_zeros (x : i16) : RustM u32\n\nopaque Impl_13.ilog2 (x : i16) : RustM u32\n\n@[spec]\ndef Impl_14.wrapping_add (x : i32) (y : i32) : RustM i32 := do\n  (rust_primitives.arithmetic.wrapping_add_i32 x y)\n\n@[spec]\ndef Impl_14.wrapping_sub (x : i32) (y : i32) : RustM i32 := do\n  (rust_primitives.arithmetic.wrapping_sub_i32 x y)\n\n@[spec]\ndef Impl_14.wrapping_mul (x : i32) (y : i32) : RustM i32 := do\n  (rust_primitives.arithmetic.wrapping_mul_i32 x y)\n\n@[spec]\ndef Impl_14.pow (x : i32) (exp : u32) : RustM i32 := do\n  (rust_primitives.arithmetic.pow_i32 x exp)\n\nopaque Impl_14.leading_zeros (x : i32) : RustM u32\n\nopaque Impl_14.ilog2 (x : i32) : RustM u32\n\n@[spec]\ndef Impl_15.wrapping_add (x : i64) (y : i64) : RustM i64 := do\n  (rust_primitives.arithmetic.wrapping_add_i64 x y)\n\n@[spec]\ndef Impl_15.wrapping_sub (x : i64) (y : i64) : RustM i64 := do\n  (rust_primitives.arithmetic.wrapping_sub_i64 x y)\n\n@[spec]\ndef Impl_15.wrapping_mul (x : i64) (y : i64) : RustM i64 := do\n  (rust_primitives.arithmetic.wrapping_mul_i64 x y)\n\n@[spec]\ndef Impl_15.pow (x : i64) (exp : u32) : RustM i64 := do\n  (rust_primitives.arithmetic.pow_i64 x exp)\n\nopaque Impl_15.leading_zeros (x : i64) : RustM u32\n\nopaque Impl_15.ilog2 (x : i64) : RustM u32\n\n@[spec]\ndef Impl_16.wrapping_add (x : i128) (y : i128) : RustM i128 := do\n  (rust_primitives.arithmetic.wrapping_add_i128 x y)\n\n@[spec]\ndef Impl_16.wrapping_sub (x : i128) (y : i128) : RustM i128 := do\n  (rust_primitives.arithmetic.wrapping_sub_i128 x 
y)\n\n@[spec]\ndef Impl_16.wrapping_mul (x : i128) (y : i128) : RustM i128 := do\n  (rust_primitives.arithmetic.wrapping_mul_i128 x y)\n\n@[spec]\ndef Impl_16.pow (x : i128) (exp : u32) : RustM i128 := do\n  (rust_primitives.arithmetic.pow_i128 x exp)\n\nopaque Impl_16.leading_zeros (x : i128) : RustM u32\n\nopaque Impl_16.ilog2 (x : i128) : RustM u32\n\n@[spec]\ndef Impl_17.wrapping_add (x : isize) (y : isize) : RustM isize := do\n  (rust_primitives.arithmetic.wrapping_add_isize x y)\n\n@[spec]\ndef Impl_17.wrapping_sub (x : isize) (y : isize) : RustM isize := do\n  (rust_primitives.arithmetic.wrapping_sub_isize x y)\n\n@[spec]\ndef Impl_17.wrapping_mul (x : isize) (y : isize) : RustM isize := do\n  (rust_primitives.arithmetic.wrapping_mul_isize x y)\n\n@[spec]\ndef Impl_17.pow (x : isize) (exp : u32) : RustM isize := do\n  (rust_primitives.arithmetic.pow_isize x exp)\n\nopaque Impl_17.leading_zeros (x : isize) : RustM u32\n\nopaque Impl_17.ilog2 (x : isize) : RustM u32\n\n@[reducible] instance Impl_18.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes u8\n  where\n\ninstance Impl_18 : core_models.default.Default u8 where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : u8))\n\n@[reducible] instance Impl_19.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes u16\n  where\n\ninstance Impl_19 : core_models.default.Default u16 where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : u16))\n\n@[reducible] instance Impl_20.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes u32\n  where\n\ninstance Impl_20 : core_models.default.Default u32 where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : u32))\n\n@[reducible] instance Impl_21.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes u64\n  where\n\ninstance Impl_21 : core_models.default.Default u64 where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : u64))\n\n@[reducible] instance 
Impl_22.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes u128\n  where\n\ninstance Impl_22 : core_models.default.Default u128 where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : u128))\n\n@[reducible] instance Impl_23.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes usize\n  where\n\ninstance Impl_23 : core_models.default.Default usize where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : usize))\n\n@[reducible] instance Impl_24.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes i8\n  where\n\ninstance Impl_24 : core_models.default.Default i8 where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : i8))\n\n@[reducible] instance Impl_25.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes i16\n  where\n\ninstance Impl_25 : core_models.default.Default i16 where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : i16))\n\n@[reducible] instance Impl_26.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes i32\n  where\n\ninstance Impl_26 : core_models.default.Default i32 where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : i32))\n\n@[reducible] instance Impl_27.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes i64\n  where\n\ninstance Impl_27 : core_models.default.Default i64 where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : i64))\n\n@[reducible] instance Impl_28.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes i128\n  where\n\ninstance Impl_28 : core_models.default.Default i128 where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : i128))\n\n@[reducible] instance Impl_29.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes isize\n  where\n\ninstance Impl_29 : core_models.default.Default isize where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : isize))\n\nend core_models.num\n\n\nnamespace 
core_models.ops.arith\n\nclass AddAssign.AssociatedTypes (Self : Type) (Rhs : Type) where\n\nclass AddAssign (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (AddAssign.AssociatedTypes (Self : Type) (Rhs :\n      Type))]\n  where\n  add_assign (Self) (Rhs) : (Self -> Rhs -> RustM Self)\n\nclass SubAssign.AssociatedTypes (Self : Type) (Rhs : Type) where\n\nclass SubAssign (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (SubAssign.AssociatedTypes (Self : Type) (Rhs :\n      Type))]\n  where\n  sub_assign (Self) (Rhs) : (Self -> Rhs -> RustM Self)\n\nclass MulAssign.AssociatedTypes (Self : Type) (Rhs : Type) where\n\nclass MulAssign (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (MulAssign.AssociatedTypes (Self : Type) (Rhs :\n      Type))]\n  where\n  mul_assign (Self) (Rhs) : (Self -> Rhs -> RustM Self)\n\nclass DivAssign.AssociatedTypes (Self : Type) (Rhs : Type) where\n\nclass DivAssign (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (DivAssign.AssociatedTypes (Self : Type) (Rhs :\n      Type))]\n  where\n  div_assign (Self) (Rhs) : (Self -> Rhs -> RustM Self)\n\nclass RemAssign.AssociatedTypes (Self : Type) (Rhs : Type) where\n\nclass RemAssign (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (RemAssign.AssociatedTypes (Self : Type) (Rhs :\n      Type))]\n  where\n  rem_assign (Self) (Rhs) : (Self -> Rhs -> RustM Self)\n\nend core_models.ops.arith\n\n\nnamespace core_models.ops.control_flow\n\ninductive ControlFlow (B : Type) (C : Type) : Type\n| Continue : C -> ControlFlow (B : Type) (C : Type)\n| Break : B -> ControlFlow (B : Type) (C : Type)\n\nend core_models.ops.control_flow\n\n\nnamespace core_models.ops.try_trait\n\nclass FromResidual.AssociatedTypes (Self : Type) (R : Type) where\n\nclass FromResidual (Self : Type) (R : Type)\n  [associatedTypes : outParam (FromResidual.AssociatedTypes (Self : Type) (R :\n      Type))]\n  where\n  from_residual (Self) (R) : (R -> RustM Self)\n\nend 
core_models.ops.try_trait\n\n\nnamespace core_models.ops.drop\n\nclass Drop.AssociatedTypes (Self : Type) where\n\nclass Drop (Self : Type)\n  [associatedTypes : outParam (Drop.AssociatedTypes (Self : Type))]\n  where\n  drop (Self) : (Self -> RustM Self)\n\nend core_models.ops.drop\n\n\nnamespace core_models.ops.range\n\nstructure RangeTo (T : Type) where\n  _end : T\n\nstructure RangeFrom (T : Type) where\n  start : T\n\nstructure Range (T : Type) where\n  start : T\n  _end : T\n\nstructure RangeFull where\n  -- no fields\n\nend core_models.ops.range\n\n\nnamespace core_models.option\n\ninductive Option (T : Type) : Type\n| Some : T -> Option (T : Type)\n| None : Option (T : Type)\n\nend core_models.option\n\n\nnamespace core_models.cmp\n\nclass PartialOrd.AssociatedTypes (Self : Type) (Rhs : Type) where\n  [trait_constr_PartialOrd_i0 : PartialEq.AssociatedTypes Self Rhs]\n\nattribute [instance_reducible, instance]\n  PartialOrd.AssociatedTypes.trait_constr_PartialOrd_i0\n\nclass PartialOrd (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (PartialOrd.AssociatedTypes (Self : Type) (Rhs :\n      Type))]\n  where\n  [trait_constr_PartialOrd_i0 : PartialEq Self Rhs]\n  partial_cmp (Self) (Rhs) :\n    (Self -> Rhs -> RustM (core_models.option.Option Ordering))\n\nattribute [instance_reducible, instance] PartialOrd.trait_constr_PartialOrd_i0\n\nclass PartialOrdDefaults.AssociatedTypes (Self : Type) (Rhs : Type) where\n\nclass PartialOrdDefaults (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (PartialOrdDefaults.AssociatedTypes (Self : Type)\n      (Rhs : Type))]\n  where\n  lt (Self) (Rhs)\n    [trait_constr_lt_associated_type_i1 : PartialOrd.AssociatedTypes Self Rhs]\n    [trait_constr_lt_i1 : PartialOrd Self Rhs ] :\n    (Self -> Rhs -> RustM Bool)\n  le (Self) (Rhs)\n    [trait_constr_le_associated_type_i1 : PartialOrd.AssociatedTypes Self Rhs]\n    [trait_constr_le_i1 : PartialOrd Self Rhs ] :\n    (Self -> Rhs -> RustM Bool)\n  gt (Self) (Rhs)\n 
   [trait_constr_gt_associated_type_i1 : PartialOrd.AssociatedTypes Self Rhs]\n    [trait_constr_gt_i1 : PartialOrd Self Rhs ] :\n    (Self -> Rhs -> RustM Bool)\n  ge (Self) (Rhs)\n    [trait_constr_ge_associated_type_i1 : PartialOrd.AssociatedTypes Self Rhs]\n    [trait_constr_ge_i1 : PartialOrd Self Rhs ] :\n    (Self -> Rhs -> RustM Bool)\n\n@[reducible] instance Impl_1.AssociatedTypes\n  (T : Type)\n  [trait_constr_Impl_1_associated_type_i0 : PartialOrd.AssociatedTypes T T]\n  [trait_constr_Impl_1_i0 : PartialOrd T T ] :\n  PartialOrdDefaults.AssociatedTypes T T\n  where\n\ninstance Impl_1\n  (T : Type)\n  [trait_constr_Impl_1_associated_type_i0 : PartialOrd.AssociatedTypes T T]\n  [trait_constr_Impl_1_i0 : PartialOrd T T ] :\n  PartialOrdDefaults T T\n  where\n  lt :=\n    fun\n      [trait_constr_lt_associated_type_i1 : PartialOrd.AssociatedTypes T T]\n      [trait_constr_lt_i1 : PartialOrd T T ] (self : T) (y : T) => do\n    match (← (PartialOrd.partial_cmp T T self y)) with\n      | (core_models.option.Option.Some  (Ordering.Less )) => do (pure true)\n      | _ => do (pure false)\n  le :=\n    fun\n      [trait_constr_le_associated_type_i1 : PartialOrd.AssociatedTypes T T]\n      [trait_constr_le_i1 : PartialOrd T T ] (self : T) (y : T) => do\n    match (← (PartialOrd.partial_cmp T T self y)) with\n      | (core_models.option.Option.Some  (Ordering.Less )) |\n        (core_models.option.Option.Some  (Ordering.Equal )) => do\n        (pure true)\n      | _ => do (pure false)\n  gt :=\n    fun\n      [trait_constr_gt_associated_type_i1 : PartialOrd.AssociatedTypes T T]\n      [trait_constr_gt_i1 : PartialOrd T T ] (self : T) (y : T) => do\n    match (← (PartialOrd.partial_cmp T T self y)) with\n      | (core_models.option.Option.Some  (Ordering.Greater )) => do (pure true)\n      | _ => do (pure false)\n  ge :=\n    fun\n      [trait_constr_ge_associated_type_i1 : PartialOrd.AssociatedTypes T T]\n      [trait_constr_ge_i1 : PartialOrd T T ] (self : T) (y : 
T) => do\n    match (← (PartialOrd.partial_cmp T T self y)) with\n      | (core_models.option.Option.Some  (Ordering.Greater )) |\n        (core_models.option.Option.Some  (Ordering.Equal )) => do\n        (pure true)\n      | _ => do (pure false)\n\nclass Ord.AssociatedTypes (Self : Type) where\n  [trait_constr_Ord_i0 : Eq.AssociatedTypes Self]\n  [trait_constr_Ord_i1 : PartialOrd.AssociatedTypes Self Self]\n\nattribute [instance_reducible, instance] Ord.AssociatedTypes.trait_constr_Ord_i0\n\nattribute [instance_reducible, instance] Ord.AssociatedTypes.trait_constr_Ord_i1\n\nclass Ord (Self : Type)\n  [associatedTypes : outParam (Ord.AssociatedTypes (Self : Type))]\n  where\n  [trait_constr_Ord_i0 : Eq Self]\n  [trait_constr_Ord_i1 : PartialOrd Self Self]\n  cmp (Self) : (Self -> Self -> RustM Ordering)\n\nattribute [instance_reducible, instance] Ord.trait_constr_Ord_i0\n\nattribute [instance_reducible, instance] Ord.trait_constr_Ord_i1\n\n@[spec]\ndef max\n    (T : Type)\n    [trait_constr_max_associated_type_i0 : Ord.AssociatedTypes T]\n    [trait_constr_max_i0 : Ord T ]\n    (v1 : T)\n    (v2 : T) :\n    RustM T := do\n  match (← (Ord.cmp T v1 v2)) with\n    | (Ordering.Greater ) => do (pure v1)\n    | _ => do (pure v2)\n\n@[spec]\ndef min\n    (T : Type)\n    [trait_constr_min_associated_type_i0 : Ord.AssociatedTypes T]\n    [trait_constr_min_i0 : Ord T ]\n    (v1 : T)\n    (v2 : T) :\n    RustM T := do\n  match (← (Ord.cmp T v1 v2)) with\n    | (Ordering.Greater ) => do (pure v2)\n    | _ => do (pure v1)\n\n@[reducible] instance Impl_2.AssociatedTypes\n  (T : Type)\n  [trait_constr_Impl_2_associated_type_i0 : PartialOrd.AssociatedTypes T T]\n  [trait_constr_Impl_2_i0 : PartialOrd T T ] :\n  PartialOrd.AssociatedTypes (Reverse T) (Reverse T)\n  where\n\ninstance Impl_2\n  (T : Type)\n  [trait_constr_Impl_2_associated_type_i0 : PartialOrd.AssociatedTypes T T]\n  [trait_constr_Impl_2_i0 : PartialOrd T T ] :\n  PartialOrd (Reverse T) (Reverse T)\n  where\n  
partial_cmp := fun (self : (Reverse T)) (other : (Reverse T)) => do\n    (PartialOrd.partial_cmp T T (Reverse._0 other) (Reverse._0 self))\n\n@[reducible] instance Impl_5.AssociatedTypes\n  (T : Type)\n  [trait_constr_Impl_5_associated_type_i0 : Ord.AssociatedTypes T]\n  [trait_constr_Impl_5_i0 : Ord T ] :\n  Ord.AssociatedTypes (Reverse T)\n  where\n\ninstance Impl_5\n  (T : Type)\n  [trait_constr_Impl_5_associated_type_i0 : Ord.AssociatedTypes T]\n  [trait_constr_Impl_5_i0 : Ord T ] :\n  Ord (Reverse T)\n  where\n  cmp := fun (self : (Reverse T)) (other : (Reverse T)) => do\n    (Ord.cmp T (Reverse._0 other) (Reverse._0 self))\n\n@[reducible] instance Impl_30.AssociatedTypes :\n  PartialOrd.AssociatedTypes u8 u8\n  where\n\ninstance Impl_30 : PartialOrd u8 u8 where\n  partial_cmp := fun (self : u8) (other : u8) => do\n    if (← (self <? other)) then do\n      (pure (core_models.option.Option.Some Ordering.Less))\n    else do\n      if (← (self >? other)) then do\n        (pure (core_models.option.Option.Some Ordering.Greater))\n      else do\n        (pure (core_models.option.Option.Some Ordering.Equal))\n\n@[reducible] instance Impl_31.AssociatedTypes : Ord.AssociatedTypes u8 where\n\ninstance Impl_31 : Ord u8 where\n  cmp := fun (self : u8) (other : u8) => do\n    if (← (self <? other)) then do\n      (pure Ordering.Less)\n    else do\n      if (← (self >? other)) then do\n        (pure Ordering.Greater)\n      else do\n        (pure Ordering.Equal)\n\n@[reducible] instance Impl_32.AssociatedTypes :\n  PartialOrd.AssociatedTypes i8 i8\n  where\n\ninstance Impl_32 : PartialOrd i8 i8 where\n  partial_cmp := fun (self : i8) (other : i8) => do\n    if (← (self <? other)) then do\n      (pure (core_models.option.Option.Some Ordering.Less))\n    else do\n      if (← (self >? 
other)) then do\n        (pure (core_models.option.Option.Some Ordering.Greater))\n      else do\n        (pure (core_models.option.Option.Some Ordering.Equal))\n\n@[reducible] instance Impl_33.AssociatedTypes : Ord.AssociatedTypes i8 where\n\ninstance Impl_33 : Ord i8 where\n  cmp := fun (self : i8) (other : i8) => do\n    if (← (self <? other)) then do\n      (pure Ordering.Less)\n    else do\n      if (← (self >? other)) then do\n        (pure Ordering.Greater)\n      else do\n        (pure Ordering.Equal)\n\n@[reducible] instance Impl_34.AssociatedTypes :\n  PartialOrd.AssociatedTypes u16 u16\n  where\n\ninstance Impl_34 : PartialOrd u16 u16 where\n  partial_cmp := fun (self : u16) (other : u16) => do\n    if (← (self <? other)) then do\n      (pure (core_models.option.Option.Some Ordering.Less))\n    else do\n      if (← (self >? other)) then do\n        (pure (core_models.option.Option.Some Ordering.Greater))\n      else do\n        (pure (core_models.option.Option.Some Ordering.Equal))\n\n@[reducible] instance Impl_35.AssociatedTypes : Ord.AssociatedTypes u16 where\n\ninstance Impl_35 : Ord u16 where\n  cmp := fun (self : u16) (other : u16) => do\n    if (← (self <? other)) then do\n      (pure Ordering.Less)\n    else do\n      if (← (self >? other)) then do\n        (pure Ordering.Greater)\n      else do\n        (pure Ordering.Equal)\n\n@[reducible] instance Impl_36.AssociatedTypes :\n  PartialOrd.AssociatedTypes i16 i16\n  where\n\ninstance Impl_36 : PartialOrd i16 i16 where\n  partial_cmp := fun (self : i16) (other : i16) => do\n    if (← (self <? other)) then do\n      (pure (core_models.option.Option.Some Ordering.Less))\n    else do\n      if (← (self >? 
other)) then do\n        (pure (core_models.option.Option.Some Ordering.Greater))\n      else do\n        (pure (core_models.option.Option.Some Ordering.Equal))\n\n@[reducible] instance Impl_37.AssociatedTypes : Ord.AssociatedTypes i16 where\n\ninstance Impl_37 : Ord i16 where\n  cmp := fun (self : i16) (other : i16) => do\n    if (← (self <? other)) then do\n      (pure Ordering.Less)\n    else do\n      if (← (self >? other)) then do\n        (pure Ordering.Greater)\n      else do\n        (pure Ordering.Equal)\n\n@[reducible] instance Impl_38.AssociatedTypes :\n  PartialOrd.AssociatedTypes u32 u32\n  where\n\ninstance Impl_38 : PartialOrd u32 u32 where\n  partial_cmp := fun (self : u32) (other : u32) => do\n    if (← (self <? other)) then do\n      (pure (core_models.option.Option.Some Ordering.Less))\n    else do\n      if (← (self >? other)) then do\n        (pure (core_models.option.Option.Some Ordering.Greater))\n      else do\n        (pure (core_models.option.Option.Some Ordering.Equal))\n\n@[reducible] instance Impl_39.AssociatedTypes : Ord.AssociatedTypes u32 where\n\ninstance Impl_39 : Ord u32 where\n  cmp := fun (self : u32) (other : u32) => do\n    if (← (self <? other)) then do\n      (pure Ordering.Less)\n    else do\n      if (← (self >? other)) then do\n        (pure Ordering.Greater)\n      else do\n        (pure Ordering.Equal)\n\n@[reducible] instance Impl_40.AssociatedTypes :\n  PartialOrd.AssociatedTypes i32 i32\n  where\n\ninstance Impl_40 : PartialOrd i32 i32 where\n  partial_cmp := fun (self : i32) (other : i32) => do\n    if (← (self <? other)) then do\n      (pure (core_models.option.Option.Some Ordering.Less))\n    else do\n      if (← (self >? 
other)) then do\n        (pure (core_models.option.Option.Some Ordering.Greater))\n      else do\n        (pure (core_models.option.Option.Some Ordering.Equal))\n\n@[reducible] instance Impl_41.AssociatedTypes : Ord.AssociatedTypes i32 where\n\ninstance Impl_41 : Ord i32 where\n  cmp := fun (self : i32) (other : i32) => do\n    if (← (self <? other)) then do\n      (pure Ordering.Less)\n    else do\n      if (← (self >? other)) then do\n        (pure Ordering.Greater)\n      else do\n        (pure Ordering.Equal)\n\n@[reducible] instance Impl_42.AssociatedTypes :\n  PartialOrd.AssociatedTypes u64 u64\n  where\n\ninstance Impl_42 : PartialOrd u64 u64 where\n  partial_cmp := fun (self : u64) (other : u64) => do\n    if (← (self <? other)) then do\n      (pure (core_models.option.Option.Some Ordering.Less))\n    else do\n      if (← (self >? other)) then do\n        (pure (core_models.option.Option.Some Ordering.Greater))\n      else do\n        (pure (core_models.option.Option.Some Ordering.Equal))\n\n@[reducible] instance Impl_43.AssociatedTypes : Ord.AssociatedTypes u64 where\n\ninstance Impl_43 : Ord u64 where\n  cmp := fun (self : u64) (other : u64) => do\n    if (← (self <? other)) then do\n      (pure Ordering.Less)\n    else do\n      if (← (self >? other)) then do\n        (pure Ordering.Greater)\n      else do\n        (pure Ordering.Equal)\n\n@[reducible] instance Impl_44.AssociatedTypes :\n  PartialOrd.AssociatedTypes i64 i64\n  where\n\ninstance Impl_44 : PartialOrd i64 i64 where\n  partial_cmp := fun (self : i64) (other : i64) => do\n    if (← (self <? other)) then do\n      (pure (core_models.option.Option.Some Ordering.Less))\n    else do\n      if (← (self >? 
other)) then do\n        (pure (core_models.option.Option.Some Ordering.Greater))\n      else do\n        (pure (core_models.option.Option.Some Ordering.Equal))\n\n@[reducible] instance Impl_45.AssociatedTypes : Ord.AssociatedTypes i64 where\n\ninstance Impl_45 : Ord i64 where\n  cmp := fun (self : i64) (other : i64) => do\n    if (← (self <? other)) then do\n      (pure Ordering.Less)\n    else do\n      if (← (self >? other)) then do\n        (pure Ordering.Greater)\n      else do\n        (pure Ordering.Equal)\n\n@[reducible] instance Impl_46.AssociatedTypes :\n  PartialOrd.AssociatedTypes u128 u128\n  where\n\ninstance Impl_46 : PartialOrd u128 u128 where\n  partial_cmp := fun (self : u128) (other : u128) => do\n    if (← (self <? other)) then do\n      (pure (core_models.option.Option.Some Ordering.Less))\n    else do\n      if (← (self >? other)) then do\n        (pure (core_models.option.Option.Some Ordering.Greater))\n      else do\n        (pure (core_models.option.Option.Some Ordering.Equal))\n\n@[reducible] instance Impl_47.AssociatedTypes : Ord.AssociatedTypes u128 where\n\ninstance Impl_47 : Ord u128 where\n  cmp := fun (self : u128) (other : u128) => do\n    if (← (self <? other)) then do\n      (pure Ordering.Less)\n    else do\n      if (← (self >? other)) then do\n        (pure Ordering.Greater)\n      else do\n        (pure Ordering.Equal)\n\n@[reducible] instance Impl_48.AssociatedTypes :\n  PartialOrd.AssociatedTypes i128 i128\n  where\n\ninstance Impl_48 : PartialOrd i128 i128 where\n  partial_cmp := fun (self : i128) (other : i128) => do\n    if (← (self <? other)) then do\n      (pure (core_models.option.Option.Some Ordering.Less))\n    else do\n      if (← (self >? 
other)) then do\n        (pure (core_models.option.Option.Some Ordering.Greater))\n      else do\n        (pure (core_models.option.Option.Some Ordering.Equal))\n\n@[reducible] instance Impl_49.AssociatedTypes : Ord.AssociatedTypes i128 where\n\ninstance Impl_49 : Ord i128 where\n  cmp := fun (self : i128) (other : i128) => do\n    if (← (self <? other)) then do\n      (pure Ordering.Less)\n    else do\n      if (← (self >? other)) then do\n        (pure Ordering.Greater)\n      else do\n        (pure Ordering.Equal)\n\n@[reducible] instance Impl_50.AssociatedTypes :\n  PartialOrd.AssociatedTypes usize usize\n  where\n\ninstance Impl_50 : PartialOrd usize usize where\n  partial_cmp := fun (self : usize) (other : usize) => do\n    if (← (self <? other)) then do\n      (pure (core_models.option.Option.Some Ordering.Less))\n    else do\n      if (← (self >? other)) then do\n        (pure (core_models.option.Option.Some Ordering.Greater))\n      else do\n        (pure (core_models.option.Option.Some Ordering.Equal))\n\n@[reducible] instance Impl_51.AssociatedTypes : Ord.AssociatedTypes usize where\n\ninstance Impl_51 : Ord usize where\n  cmp := fun (self : usize) (other : usize) => do\n    if (← (self <? other)) then do\n      (pure Ordering.Less)\n    else do\n      if (← (self >? other)) then do\n        (pure Ordering.Greater)\n      else do\n        (pure Ordering.Equal)\n\n@[reducible] instance Impl_52.AssociatedTypes :\n  PartialOrd.AssociatedTypes isize isize\n  where\n\ninstance Impl_52 : PartialOrd isize isize where\n  partial_cmp := fun (self : isize) (other : isize) => do\n    if (← (self <? other)) then do\n      (pure (core_models.option.Option.Some Ordering.Less))\n    else do\n      if (← (self >? 
other)) then do\n        (pure (core_models.option.Option.Some Ordering.Greater))\n      else do\n        (pure (core_models.option.Option.Some Ordering.Equal))\n\n@[reducible] instance Impl_53.AssociatedTypes : Ord.AssociatedTypes isize where\n\ninstance Impl_53 : Ord isize where\n  cmp := fun (self : isize) (other : isize) => do\n    if (← (self <? other)) then do\n      (pure Ordering.Less)\n    else do\n      if (← (self >? other)) then do\n        (pure Ordering.Greater)\n      else do\n        (pure Ordering.Equal)\n\nend core_models.cmp\n\n\nnamespace core_models.iter.adapters.flat_map\n\nstructure FlatMap (I : Type) (U : Type) (F : Type) where\n  it : I\n  f : F\n  current : (core_models.option.Option U)\n\nend core_models.iter.adapters.flat_map\n\n\nnamespace core_models.option\n\n@[spec]\ndef Impl.as_ref (T : Type) (self : (Option T)) : RustM (Option T) := do\n  match self with\n    | (Option.Some  x) => do (pure (Option.Some x))\n    | (Option.None ) => do (pure Option.None)\n\n@[spec]\ndef Impl.unwrap_or (T : Type) (self : (Option T)) (default : T) : RustM T := do\n  match self with\n    | (Option.Some  x) => do (pure x)\n    | (Option.None ) => do (pure default)\n\n@[spec]\ndef Impl.unwrap_or_default\n    (T : Type)\n    [trait_constr_unwrap_or_default_associated_type_i0 :\n      core_models.default.Default.AssociatedTypes\n      T]\n    [trait_constr_unwrap_or_default_i0 : core_models.default.Default T ]\n    (self : (Option T)) :\n    RustM T := do\n  match self with\n    | (Option.Some  x) => do (pure x)\n    | (Option.None ) => do\n      (core_models.default.Default.default T rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef Impl.take (T : Type) (self : (Option T)) :\n    RustM (rust_primitives.hax.Tuple2 (Option T) (Option T)) := do\n  (pure (rust_primitives.hax.Tuple2.mk Option.None self))\n\ndef Impl.is_some (T : Type) (self : (Option T)) : RustM Bool := do\n  match self with | (Option.Some  _) => do (pure true) | _ => do (pure false)\n\nset_option 
hax_mvcgen.specset \"bv\" in\n@[hax_spec]\ndef Impl.is_some.spec (T : Type) (self : (Option T)) :\n    Spec\n      (requires := do pure True)\n      (ensures := fun\n          res => do\n          (hax_lib.prop.constructors.implies\n            (← (hax_lib.prop.constructors.from_bool res))\n            (← (hax_lib.prop.Impl.from_bool true))))\n      (Impl.is_some (T : Type) (self : (Option T))) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [Impl.is_some] <;> bv_decide\n}\n\n@[spec]\ndef Impl.is_none (T : Type) (self : (Option T)) : RustM Bool := do\n  ((← (Impl.is_some T self)) ==? false)\n\nend core_models.option\n\n\nnamespace core_models.panicking\n\nopaque panic_explicit (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Never\n\nopaque panic (_msg : String) : RustM rust_primitives.hax.Never\n\nopaque panic_fmt (_fmt : core_models.fmt.Arguments) :\n    RustM rust_primitives.hax.Never\n\nend core_models.panicking\n\n\nnamespace core_models.panicking.internal\n\nopaque panic (T : Type) (_ : rust_primitives.hax.Tuple0) : RustM T\n\nend core_models.panicking.internal\n\n\nnamespace core_models.hash\n\n@[reducible] instance Impl.AssociatedTypes (T : Type) :\n  Hash.AssociatedTypes T\n  where\n\ninstance Impl (T : Type) : Hash T where\n  hash :=\n    fun\n      (H : Type)\n      [trait_constr_hash_associated_type_i0 : Hasher.AssociatedTypes H]\n      [trait_constr_hash_i0 : Hasher H ] (self : T) (h : H) => do\n    (core_models.panicking.internal.panic H rust_primitives.hax.Tuple0.mk)\n\nend core_models.hash\n\n\nnamespace core_models.result\n\ninductive Result (T : Type) (E : Type) : Type\n| Ok : T -> Result (T : Type) (E : Type)\n| Err : E -> Result (T : Type) (E : Type)\n\nend core_models.result\n\n\nnamespace core_models.fmt\n\nabbrev Result :\n  Type :=\n  (core_models.result.Result rust_primitives.hax.Tuple0 Error)\n\nclass Display.AssociatedTypes 
(Self : Type) where\n\nclass Display (Self : Type)\n  [associatedTypes : outParam (Display.AssociatedTypes (Self : Type))]\n  where\n  fmt (Self) :\n    (Self ->\n    Formatter ->\n    RustM (rust_primitives.hax.Tuple2\n      Formatter\n      (core_models.result.Result rust_primitives.hax.Tuple0 Error)))\n\nclass Debug.AssociatedTypes (Self : Type) where\n\nclass Debug (Self : Type)\n  [associatedTypes : outParam (Debug.AssociatedTypes (Self : Type))]\n  where\n  dbg_fmt (Self) :\n    (Self ->\n    Formatter ->\n    RustM (rust_primitives.hax.Tuple2\n      Formatter\n      (core_models.result.Result rust_primitives.hax.Tuple0 Error)))\n\nend core_models.fmt\n\n\nnamespace core_models.error\n\nclass Error.AssociatedTypes (Self : Type) where\n  [trait_constr_Error_i0 : core_models.fmt.Display.AssociatedTypes Self]\n  [trait_constr_Error_i1 : core_models.fmt.Debug.AssociatedTypes Self]\n\nattribute [instance_reducible, instance]\n  Error.AssociatedTypes.trait_constr_Error_i0\n\nattribute [instance_reducible, instance]\n  Error.AssociatedTypes.trait_constr_Error_i1\n\nclass Error (Self : Type)\n  [associatedTypes : outParam (Error.AssociatedTypes (Self : Type))]\n  where\n  [trait_constr_Error_i0 : core_models.fmt.Display Self]\n  [trait_constr_Error_i1 : core_models.fmt.Debug Self]\n\nattribute [instance_reducible, instance] Error.trait_constr_Error_i0\n\nattribute [instance_reducible, instance] Error.trait_constr_Error_i1\n\nend core_models.error\n\n\nnamespace core_models.fmt\n\n@[reducible] instance Impl.AssociatedTypes (T : Type) :\n  Debug.AssociatedTypes T\n  where\n\ninstance Impl (T : Type) : Debug T where\n  dbg_fmt := fun (self : T) (f : Formatter) => do\n    let\n      hax_temp_output : (core_models.result.Result\n        rust_primitives.hax.Tuple0\n        Error) :=\n      (core_models.result.Result.Ok rust_primitives.hax.Tuple0.mk);\n    (pure (rust_primitives.hax.Tuple2.mk f hax_temp_output))\n\n@[spec]\ndef Impl_11.write_fmt (f : Formatter) (args : 
Arguments) :\n    RustM\n    (rust_primitives.hax.Tuple2\n      Formatter\n      (core_models.result.Result rust_primitives.hax.Tuple0 Error))\n    := do\n  let\n    hax_temp_output : (core_models.result.Result\n      rust_primitives.hax.Tuple0\n      Error) :=\n    (core_models.result.Result.Ok rust_primitives.hax.Tuple0.mk);\n  (pure (rust_primitives.hax.Tuple2.mk f hax_temp_output))\n\nend core_models.fmt\n\n\nnamespace core_models.num\n\nopaque Impl_6.from_str_radix (src : String) (radix : u32) :\n    RustM (core_models.result.Result u8 core_models.num.error.ParseIntError)\n\nopaque Impl_7.from_str_radix (src : String) (radix : u32) :\n    RustM (core_models.result.Result u16 core_models.num.error.ParseIntError)\n\nopaque Impl_8.from_str_radix (src : String) (radix : u32) :\n    RustM (core_models.result.Result u32 core_models.num.error.ParseIntError)\n\nopaque Impl_9.from_str_radix (src : String) (radix : u32) :\n    RustM (core_models.result.Result u64 core_models.num.error.ParseIntError)\n\nopaque Impl_10.from_str_radix (src : String) (radix : u32) :\n    RustM (core_models.result.Result u128 core_models.num.error.ParseIntError)\n\nopaque Impl_11.from_str_radix (src : String) (radix : u32) :\n    RustM (core_models.result.Result usize core_models.num.error.ParseIntError)\n\nopaque Impl_12.from_str_radix (src : String) (radix : u32) :\n    RustM (core_models.result.Result i8 core_models.num.error.ParseIntError)\n\nopaque Impl_13.from_str_radix (src : String) (radix : u32) :\n    RustM (core_models.result.Result i16 core_models.num.error.ParseIntError)\n\nopaque Impl_14.from_str_radix (src : String) (radix : u32) :\n    RustM (core_models.result.Result i32 core_models.num.error.ParseIntError)\n\nopaque Impl_15.from_str_radix (src : String) (radix : u32) :\n    RustM (core_models.result.Result i64 core_models.num.error.ParseIntError)\n\nopaque Impl_16.from_str_radix (src : String) (radix : u32) :\n    RustM (core_models.result.Result i128 
core_models.num.error.ParseIntError)\n\nopaque Impl_17.from_str_radix (src : String) (radix : u32) :\n    RustM (core_models.result.Result isize core_models.num.error.ParseIntError)\n\nend core_models.num\n\n\nnamespace core_models.option\n\n@[spec]\ndef Impl.ok_or (T : Type) (E : Type) (self : (Option T)) (err : E) :\n    RustM (core_models.result.Result T E) := do\n  match self with\n    | (Option.Some  v) => do (pure (core_models.result.Result.Ok v))\n    | (Option.None ) => do (pure (core_models.result.Result.Err err))\n\nend core_models.option\n\n\nnamespace core_models.result\n\n@[spec]\ndef Impl.unwrap_or (T : Type) (E : Type) (self : (Result T E)) (default : T) :\n    RustM T := do\n  match self with\n    | (Result.Ok  t) => do (pure t)\n    | (Result.Err  _) => do (pure default)\n\n@[spec]\ndef Impl.is_ok (T : Type) (E : Type) (self : (Result T E)) : RustM Bool := do\n  match self with | (Result.Ok  _) => do (pure true) | _ => do (pure false)\n\n@[spec]\ndef Impl.ok (T : Type) (E : Type) (self : (Result T E)) :\n    RustM (core_models.option.Option T) := do\n  match self with\n    | (Result.Ok  x) => do (pure (core_models.option.Option.Some x))\n    | (Result.Err  _) => do (pure core_models.option.Option.None)\n\nend core_models.result\n\n\nnamespace core_models.slice.iter\n\nstructure Chunks (T : Type) where\n  cs : usize\n  elements : (RustSlice T)\n\n@[spec]\ndef Impl.new (T : Type) (cs : usize) (elements : (RustSlice T)) :\n    RustM (Chunks T) := do\n  (pure (Chunks.mk (cs := cs) (elements := elements)))\n\nstructure ChunksExact (T : Type) where\n  cs : usize\n  elements : (RustSlice T)\n\n@[spec]\ndef Impl_1.new (T : Type) (cs : usize) (elements : (RustSlice T)) :\n    RustM (ChunksExact T) := do\n  (pure (ChunksExact.mk (cs := cs) (elements := elements)))\n\nstructure Iter (T : Type) where\n  _0 : (rust_primitives.sequence.Seq T)\n\nend core_models.slice.iter\n\n\nnamespace core_models.slice\n\n@[spec]\ndef Impl.len (T : Type) (s : (RustSlice T)) : 
RustM usize := do\n  (rust_primitives.slice.slice_length T s)\n\n@[spec]\ndef Impl.chunks (T : Type) (s : (RustSlice T)) (cs : usize) :\n    RustM (core_models.slice.iter.Chunks T) := do\n  (core_models.slice.iter.Impl.new T cs s)\n\n@[spec]\ndef Impl.iter (T : Type) (s : (RustSlice T)) :\n    RustM (core_models.slice.iter.Iter T) := do\n  (pure (core_models.slice.iter.Iter.mk\n    (← (rust_primitives.sequence.seq_from_slice T s))))\n\n@[spec]\ndef Impl.chunks_exact (T : Type) (s : (RustSlice T)) (cs : usize) :\n    RustM (core_models.slice.iter.ChunksExact T) := do\n  (core_models.slice.iter.Impl_1.new T cs s)\n\n@[spec]\ndef Impl.is_empty (T : Type) (s : (RustSlice T)) : RustM Bool := do\n  ((← (Impl.len T s)) ==? (0 : usize))\n\nopaque Impl.contains (T : Type) (s : (RustSlice T)) (v : T) : RustM Bool\n\nopaque Impl.copy_within\n    (T : Type)\n    (R : Type)\n    [trait_constr_copy_within_associated_type_i0 :\n      core.marker.Copy.AssociatedTypes\n      T]\n    [trait_constr_copy_within_i0 : core.marker.Copy T ]\n    (s : (RustSlice T))\n    (src : R)\n    (dest : usize) :\n    RustM (RustSlice T)\n\nopaque Impl.binary_search (T : Type) (s : (RustSlice T)) (x : T) :\n    RustM (core_models.result.Result usize usize)\n\ndef Impl.copy_from_slice\n    (T : Type)\n    [trait_constr_copy_from_slice_associated_type_i0 :\n      core_models.marker.Copy.AssociatedTypes\n      T]\n    [trait_constr_copy_from_slice_i0 : core_models.marker.Copy T ]\n    (s : (RustSlice T))\n    (src : (RustSlice T)) :\n    RustM (RustSlice T) := do\n  let ⟨tmp0, out⟩ ← (rust_primitives.mem.replace (RustSlice T) s src);\n  let s : (RustSlice T) := tmp0;\n  let _ := out;\n  (pure s)\n\nset_option hax_mvcgen.specset \"bv\" in\n@[hax_spec]\ndef\n      Impl.copy_from_slice.spec\n      (T : Type)\n      [trait_constr_copy_from_slice_associated_type_i0 :\n        core_models.marker.Copy.AssociatedTypes\n        T]\n      [trait_constr_copy_from_slice_i0 : core_models.marker.Copy T ]\n      (s : 
(RustSlice T))\n      (src : (RustSlice T)) :\n    Spec\n      (requires := do ((← (Impl.len T s)) ==? (← (Impl.len T src))))\n      (ensures := fun _ => pure True)\n      (Impl.copy_from_slice\n        (T : Type)\n        (s : (RustSlice T))\n        (src : (RustSlice T))) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [Impl.copy_from_slice] <;> bv_decide\n}\n\ndef Impl.clone_from_slice\n    (T : Type)\n    [trait_constr_clone_from_slice_associated_type_i0 :\n      core_models.clone.Clone.AssociatedTypes\n      T]\n    [trait_constr_clone_from_slice_i0 : core_models.clone.Clone T ]\n    (s : (RustSlice T))\n    (src : (RustSlice T)) :\n    RustM (RustSlice T) := do\n  let ⟨tmp0, out⟩ ← (rust_primitives.mem.replace (RustSlice T) s src);\n  let s : (RustSlice T) := tmp0;\n  let _ := out;\n  (pure s)\n\nset_option hax_mvcgen.specset \"bv\" in\n@[hax_spec]\ndef\n      Impl.clone_from_slice.spec\n      (T : Type)\n      [trait_constr_clone_from_slice_associated_type_i0 :\n        core_models.clone.Clone.AssociatedTypes\n        T]\n      [trait_constr_clone_from_slice_i0 : core_models.clone.Clone T ]\n      (s : (RustSlice T))\n      (src : (RustSlice T)) :\n    Spec\n      (requires := do ((← (Impl.len T s)) ==? 
(← (Impl.len T src))))\n      (ensures := fun _ => pure True)\n      (Impl.clone_from_slice\n        (T : Type)\n        (s : (RustSlice T))\n        (src : (RustSlice T))) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [Impl.clone_from_slice] <;> bv_decide\n}\n\ndef Impl.split_at (T : Type) (s : (RustSlice T)) (mid : usize) :\n    RustM (rust_primitives.hax.Tuple2 (RustSlice T) (RustSlice T)) := do\n  (rust_primitives.slice.slice_split_at T s mid)\n\nset_option hax_mvcgen.specset \"bv\" in\n@[hax_spec]\ndef Impl.split_at.spec (T : Type) (s : (RustSlice T)) (mid : usize) :\n    Spec\n      (requires := do (mid <=? (← (Impl.len T s))))\n      (ensures := fun _ => pure True)\n      (Impl.split_at (T : Type) (s : (RustSlice T)) (mid : usize)) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [Impl.split_at] <;> bv_decide\n}\n\n@[spec]\ndef Impl.split_at_checked (T : Type) (s : (RustSlice T)) (mid : usize) :\n    RustM\n    (core_models.option.Option\n      (rust_primitives.hax.Tuple2 (RustSlice T) (RustSlice T)))\n    := do\n  if (← (mid <=? 
(← (Impl.len T s)))) then do\n    (pure (core_models.option.Option.Some (← (Impl.split_at T s mid))))\n  else do\n    (pure core_models.option.Option.None)\n\nend core_models.slice\n\n\nnamespace core_models.str.error\n\nstructure Utf8Error where\n  -- no fields\n\nend core_models.str.error\n\n\nnamespace core_models.str.converts\n\nopaque from_utf8 (s : (RustSlice u8)) :\n    RustM (core_models.result.Result String core_models.str.error.Utf8Error)\n\nend core_models.str.converts\n\n\nnamespace core_models.str.iter\n\nstructure Split (T : Type) where\n  _0 : T\n\nend core_models.str.iter\n\n\nnamespace core_models.convert\n\nclass TryInto.AssociatedTypes (Self : Type) (T : Type) where\n  Error : Type\n\nattribute [reducible] TryInto.AssociatedTypes.Error\n\nabbrev TryInto.Error :=\n  TryInto.AssociatedTypes.Error\n\nclass TryInto (Self : Type) (T : Type)\n  [associatedTypes : outParam (TryInto.AssociatedTypes (Self : Type) (T :\n      Type))]\n  where\n  try_into (Self) (T) :\n    (Self -> RustM (core_models.result.Result T associatedTypes.Error))\n\nclass TryFrom.AssociatedTypes (Self : Type) (T : Type) where\n  Error : Type\n\nattribute [reducible] TryFrom.AssociatedTypes.Error\n\nabbrev TryFrom.Error :=\n  TryFrom.AssociatedTypes.Error\n\nclass TryFrom (Self : Type) (T : Type)\n  [associatedTypes : outParam (TryFrom.AssociatedTypes (Self : Type) (T :\n      Type))]\n  where\n  try_from (Self) (T) :\n    (T -> RustM (core_models.result.Result Self associatedTypes.Error))\n\nend core_models.convert\n\n\nnamespace core_models.iter.traits.iterator\n\nclass Iterator.AssociatedTypes (Self : Type) where\n  Item : Type\n\nattribute [reducible] Iterator.AssociatedTypes.Item\n\nabbrev Iterator.Item :=\n  Iterator.AssociatedTypes.Item\n\nclass Iterator (Self : Type)\n  [associatedTypes : outParam (Iterator.AssociatedTypes (Self : Type))]\n  where\n  next (Self) :\n    (Self ->\n    RustM (rust_primitives.hax.Tuple2\n      Self\n      (core_models.option.Option 
associatedTypes.Item)))\n\nend core_models.iter.traits.iterator\n\n\nnamespace core_models.iter.traits.collect\n\nclass IntoIterator.AssociatedTypes (Self : Type) where\n  IntoIter : Type\n\nattribute [reducible] IntoIterator.AssociatedTypes.IntoIter\n\nabbrev IntoIterator.IntoIter :=\n  IntoIterator.AssociatedTypes.IntoIter\n\nclass IntoIterator (Self : Type)\n  [associatedTypes : outParam (IntoIterator.AssociatedTypes (Self : Type))]\n  where\n  into_iter (Self) : (Self -> RustM associatedTypes.IntoIter)\n\nend core_models.iter.traits.collect\n\n\nnamespace core_models.ops.arith\n\nclass Add.AssociatedTypes (Self : Type) (Rhs : Type) where\n  Output : Type\n\nattribute [reducible] Add.AssociatedTypes.Output\n\nabbrev Add.Output :=\n  Add.AssociatedTypes.Output\n\nclass Add (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (Add.AssociatedTypes (Self : Type) (Rhs : Type))]\n  where\n  add (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output)\n\nclass Sub.AssociatedTypes (Self : Type) (Rhs : Type) where\n  Output : Type\n\nattribute [reducible] Sub.AssociatedTypes.Output\n\nabbrev Sub.Output :=\n  Sub.AssociatedTypes.Output\n\nclass Sub (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (Sub.AssociatedTypes (Self : Type) (Rhs : Type))]\n  where\n  sub (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output)\n\nclass Mul.AssociatedTypes (Self : Type) (Rhs : Type) where\n  Output : Type\n\nattribute [reducible] Mul.AssociatedTypes.Output\n\nabbrev Mul.Output :=\n  Mul.AssociatedTypes.Output\n\nclass Mul (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (Mul.AssociatedTypes (Self : Type) (Rhs : Type))]\n  where\n  mul (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output)\n\nclass Div.AssociatedTypes (Self : Type) (Rhs : Type) where\n  Output : Type\n\nattribute [reducible] Div.AssociatedTypes.Output\n\nabbrev Div.Output :=\n  Div.AssociatedTypes.Output\n\nclass Div (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam 
(Div.AssociatedTypes (Self : Type) (Rhs : Type))]\n  where\n  div (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output)\n\nclass Neg.AssociatedTypes (Self : Type) where\n  Output : Type\n\nattribute [reducible] Neg.AssociatedTypes.Output\n\nabbrev Neg.Output :=\n  Neg.AssociatedTypes.Output\n\nclass Neg (Self : Type)\n  [associatedTypes : outParam (Neg.AssociatedTypes (Self : Type))]\n  where\n  neg (Self) : (Self -> RustM associatedTypes.Output)\n\nclass Rem.AssociatedTypes (Self : Type) (Rhs : Type) where\n  Output : Type\n\nattribute [reducible] Rem.AssociatedTypes.Output\n\nabbrev Rem.Output :=\n  Rem.AssociatedTypes.Output\n\nclass Rem (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (Rem.AssociatedTypes (Self : Type) (Rhs : Type))]\n  where\n  rem (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output)\n\nend core_models.ops.arith\n\n\nnamespace core_models.ops.bit\n\nclass Shr.AssociatedTypes (Self : Type) (Rhs : Type) where\n  Output : Type\n\nattribute [reducible] Shr.AssociatedTypes.Output\n\nabbrev Shr.Output :=\n  Shr.AssociatedTypes.Output\n\nclass Shr (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (Shr.AssociatedTypes (Self : Type) (Rhs : Type))]\n  where\n  shr (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output)\n\nclass Shl.AssociatedTypes (Self : Type) (Rhs : Type) where\n  Output : Type\n\nattribute [reducible] Shl.AssociatedTypes.Output\n\nabbrev Shl.Output :=\n  Shl.AssociatedTypes.Output\n\nclass Shl (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (Shl.AssociatedTypes (Self : Type) (Rhs : Type))]\n  where\n  shl (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output)\n\nclass BitXor.AssociatedTypes (Self : Type) (Rhs : Type) where\n  Output : Type\n\nattribute [reducible] BitXor.AssociatedTypes.Output\n\nabbrev BitXor.Output :=\n  BitXor.AssociatedTypes.Output\n\nclass BitXor (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (BitXor.AssociatedTypes (Self : Type) (Rhs :\n      
Type))]\n  where\n  bitxor (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output)\n\nclass BitAnd.AssociatedTypes (Self : Type) (Rhs : Type) where\n  Output : Type\n\nattribute [reducible] BitAnd.AssociatedTypes.Output\n\nabbrev BitAnd.Output :=\n  BitAnd.AssociatedTypes.Output\n\nclass BitAnd (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (BitAnd.AssociatedTypes (Self : Type) (Rhs :\n      Type))]\n  where\n  bitand (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output)\n\nclass BitOr.AssociatedTypes (Self : Type) (Rhs : Type) where\n  Output : Type\n\nattribute [reducible] BitOr.AssociatedTypes.Output\n\nabbrev BitOr.Output :=\n  BitOr.AssociatedTypes.Output\n\nclass BitOr (Self : Type) (Rhs : Type)\n  [associatedTypes : outParam (BitOr.AssociatedTypes (Self : Type) (Rhs :\n      Type))]\n  where\n  bitor (Self) (Rhs) : (Self -> Rhs -> RustM associatedTypes.Output)\n\nend core_models.ops.bit\n\n\nnamespace core_models.ops.index\n\nclass Index.AssociatedTypes (Self : Type) (Idx : Type) where\n  Output : Type\n\nattribute [reducible] Index.AssociatedTypes.Output\n\nabbrev Index.Output :=\n  Index.AssociatedTypes.Output\n\nclass Index (Self : Type) (Idx : Type)\n  [associatedTypes : outParam (Index.AssociatedTypes (Self : Type) (Idx :\n      Type))]\n  where\n  index (Self) (Idx) : (Self -> Idx -> RustM associatedTypes.Output)\n\nend core_models.ops.index\n\n\nnamespace core_models.ops.function\n\nclass FnOnce.AssociatedTypes (Self : Type) (Args : Type) where\n  Output : Type\n\nattribute [reducible] FnOnce.AssociatedTypes.Output\n\nabbrev FnOnce.Output :=\n  FnOnce.AssociatedTypes.Output\n\nclass FnOnce (Self : Type) (Args : Type)\n  [associatedTypes : outParam (FnOnce.AssociatedTypes (Self : Type) (Args :\n      Type))]\n  where\n  call_once (Self) (Args) : (Self -> Args -> RustM associatedTypes.Output)\n\nend core_models.ops.function\n\n\nnamespace core_models.ops.try_trait\n\nclass Try.AssociatedTypes (Self : Type) where\n  Output : Type\n 
 Residual : Type\n\nattribute [reducible] Try.AssociatedTypes.Output\n\nattribute [reducible] Try.AssociatedTypes.Residual\n\nabbrev Try.Output :=\n  Try.AssociatedTypes.Output\n\nabbrev Try.Residual :=\n  Try.AssociatedTypes.Residual\n\nclass Try (Self : Type)\n  [associatedTypes : outParam (Try.AssociatedTypes (Self : Type))]\n  where\n  from_output (Self) : (associatedTypes.Output -> RustM Self)\n  branch (Self) :\n    (Self ->\n    RustM (core_models.ops.control_flow.ControlFlow\n      associatedTypes.Residual\n      associatedTypes.Output))\n\nend core_models.ops.try_trait\n\n\nnamespace core_models.ops.deref\n\nclass Deref.AssociatedTypes (Self : Type) where\n  Target : Type\n\nattribute [reducible] Deref.AssociatedTypes.Target\n\nabbrev Deref.Target :=\n  Deref.AssociatedTypes.Target\n\nclass Deref (Self : Type)\n  [associatedTypes : outParam (Deref.AssociatedTypes (Self : Type))]\n  where\n  deref (Self) : (Self -> RustM associatedTypes.Target)\n\nend core_models.ops.deref\n\n\nnamespace core_models.slice\n\nclass SliceIndex.AssociatedTypes (Self : Type) (T : Type) where\n  Output : Type\n\nattribute [reducible] SliceIndex.AssociatedTypes.Output\n\nabbrev SliceIndex.Output :=\n  SliceIndex.AssociatedTypes.Output\n\nclass SliceIndex (Self : Type) (T : Type)\n  [associatedTypes : outParam (SliceIndex.AssociatedTypes (Self : Type) (T :\n      Type))]\n  where\n  get (Self) (T) :\n    (Self -> T -> RustM (core_models.option.Option associatedTypes.Output))\n\nend core_models.slice\n\n\nnamespace core_models.str.traits\n\nclass FromStr.AssociatedTypes (Self : Type) where\n  Err : Type\n\nattribute [reducible] FromStr.AssociatedTypes.Err\n\nabbrev FromStr.Err :=\n  FromStr.AssociatedTypes.Err\n\nclass FromStr (Self : Type)\n  [associatedTypes : outParam (FromStr.AssociatedTypes (Self : Type))]\n  where\n  from_str (Self) :\n    (String -> RustM (core_models.result.Result Self associatedTypes.Err))\n\nend core_models.str.traits\n\n\nnamespace 
core_models.array\n\n@[spec]\ndef Impl_23.map\n    (T : Type)\n    (N : usize)\n    (F : Type)\n    (U : Type)\n    [trait_constr_map_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      T]\n    [trait_constr_map_i0 : core_models.ops.function.FnOnce\n      F\n      T\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := U})]\n    (s : (RustArray T N))\n    (f : (T -> RustM U)) :\n    RustM (RustArray U N) := do\n  (rust_primitives.slice.array_map T U (N) (T -> RustM U) s f)\n\n@[spec]\ndef from_fn\n    (T : Type)\n    (N : usize)\n    (F : Type)\n    [trait_constr_from_fn_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      usize]\n    [trait_constr_from_fn_i0 : core_models.ops.function.FnOnce\n      F\n      usize\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F usize\n        by infer_instance\n        with Output := T})]\n    (f : (usize -> RustM T)) :\n    RustM (RustArray T N) := do\n  (rust_primitives.slice.array_from_fn T (N) (usize -> RustM T) f)\n\nend core_models.array\n\n\nnamespace core_models.convert\n\n@[reducible] instance Impl_1.AssociatedTypes\n  (T : Type)\n  (U : Type)\n  [trait_constr_Impl_1_associated_type_i0 : From.AssociatedTypes U T]\n  [trait_constr_Impl_1_i0 : From U T ] :\n  TryFrom.AssociatedTypes U T\n  where\n  Error := Infallible\n\ninstance Impl_1\n  (T : Type)\n  (U : Type)\n  [trait_constr_Impl_1_associated_type_i0 : From.AssociatedTypes U T]\n  [trait_constr_Impl_1_i0 : From U T ] :\n  TryFrom U T\n  where\n  try_from := fun (x : T) => do\n    (pure (core_models.result.Result.Ok (← (From._from U T x))))\n\n@[reducible] instance Impl_2.AssociatedTypes\n  (T : Type)\n  (U : Type)\n  [trait_constr_Impl_2_associated_type_i0 : TryFrom.AssociatedTypes U T]\n  [trait_constr_Impl_2_i0 : TryFrom U T ] :\n  TryInto.AssociatedTypes 
T U\n  where\n  Error := (TryFrom.Error U T)\n\ninstance Impl_2\n  (T : Type)\n  (U : Type)\n  [trait_constr_Impl_2_associated_type_i0 : TryFrom.AssociatedTypes U T]\n  [trait_constr_Impl_2_i0 : TryFrom U T ] :\n  TryInto T U\n  where\n  try_into := fun (self : T) => do (TryFrom.try_from U T self)\n\nend core_models.convert\n\n\nnamespace core_models.iter.traits.iterator\n\n@[reducible] instance Impl_1.AssociatedTypes\n  (I : Type)\n  [trait_constr_Impl_1_associated_type_i0 : Iterator.AssociatedTypes I]\n  [trait_constr_Impl_1_i0 : Iterator I ] :\n  core_models.iter.traits.collect.IntoIterator.AssociatedTypes I\n  where\n  IntoIter := I\n\ninstance Impl_1\n  (I : Type)\n  [trait_constr_Impl_1_associated_type_i0 : Iterator.AssociatedTypes I]\n  [trait_constr_Impl_1_i0 : Iterator I ] :\n  core_models.iter.traits.collect.IntoIterator I\n  where\n  into_iter := fun (self : I) => do (pure self)\n\nend core_models.iter.traits.iterator\n\n\nnamespace core_models.iter.traits.collect\n\nclass FromIterator.AssociatedTypes (Self : Type) (A : Type) where\n\nclass FromIterator (Self : Type) (A : Type)\n  [associatedTypes : outParam (FromIterator.AssociatedTypes (Self : Type) (A :\n      Type))]\n  where\n  from_iter (Self) (A)\n    (T : Type)\n    [trait_constr_from_iter_associated_type_i1 : IntoIterator.AssociatedTypes T]\n    [trait_constr_from_iter_i1 : IntoIterator T ] :\n    (T -> RustM Self)\n\nend core_models.iter.traits.collect\n\n\nnamespace core_models.iter.adapters.enumerate\n\n@[reducible] instance Impl_1.AssociatedTypes\n  (I : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] :\n  core_models.iter.traits.iterator.Iterator.AssociatedTypes (Enumerate I)\n  where\n  Item := (rust_primitives.hax.Tuple2\n    usize\n    (core_models.iter.traits.iterator.Iterator.Item I))\n\ninstance Impl_1\n  (I : Type)\n  
[trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] :\n  core_models.iter.traits.iterator.Iterator (Enumerate I)\n  where\n  next := fun (self : (Enumerate I)) => do\n    let ⟨tmp0, out⟩ ←\n      (core_models.iter.traits.iterator.Iterator.next I (Enumerate.iter self));\n    let self : (Enumerate I) := {self with iter := tmp0};\n    let ⟨self, hax_temp_output⟩ ←\n      match out with\n        | (core_models.option.Option.Some  a) => do\n          let i : usize := (Enumerate.count self);\n          let _ ←\n            (hax_lib.assume\n              (← (hax_lib.prop.constructors.from_bool\n                (← ((Enumerate.count self) <? core.num.Impl_11.MAX)))));\n          let self : (Enumerate I) :=\n            {self with count := (← ((Enumerate.count self) +? (1 : usize)))};\n          (pure (rust_primitives.hax.Tuple2.mk\n            self\n            (core_models.option.Option.Some\n              (rust_primitives.hax.Tuple2.mk i a))))\n        | (core_models.option.Option.None ) => do\n          (pure (rust_primitives.hax.Tuple2.mk\n            self\n            core_models.option.Option.None));\n    (pure (rust_primitives.hax.Tuple2.mk self hax_temp_output))\n\nend core_models.iter.adapters.enumerate\n\n\nnamespace core_models.iter.adapters.step_by\n\n@[instance] opaque Impl_1.AssociatedTypes\n  (I : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] :\n  core_models.iter.traits.iterator.Iterator.AssociatedTypes (StepBy I) :=\n  by constructor <;> exact Inhabited.default\n\n@[instance] opaque Impl_1\n  (I : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Impl_1_i0 : 
core_models.iter.traits.iterator.Iterator I ] :\n  core_models.iter.traits.iterator.Iterator (StepBy I) :=\n  by constructor <;> exact Inhabited.default\n\nend core_models.iter.adapters.step_by\n\n\nnamespace core_models.iter.adapters.map\n\n@[reducible] instance Impl_1.AssociatedTypes\n  (I : Type)\n  (O : Type)\n  (F : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ]\n  [trait_constr_Impl_1_associated_type_i1 :\n    core_models.ops.function.FnOnce.AssociatedTypes\n    F\n    (core_models.iter.traits.iterator.Iterator.Item I)]\n  [trait_constr_Impl_1_i1 : core_models.ops.function.FnOnce\n    F\n    (core_models.iter.traits.iterator.Iterator.Item I)\n    (associatedTypes := {\n      show\n        core_models.ops.function.FnOnce.AssociatedTypes\n        F\n        (core_models.iter.traits.iterator.Iterator.Item I)\n      by infer_instance\n      with Output := O})] :\n  core_models.iter.traits.iterator.Iterator.AssociatedTypes (Map I F)\n  where\n  Item := O\n\ninstance Impl_1\n  (I : Type)\n  (O : Type)\n  (F : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ]\n  [trait_constr_Impl_1_associated_type_i1 :\n    core_models.ops.function.FnOnce.AssociatedTypes\n    F\n    (core_models.iter.traits.iterator.Iterator.Item I)]\n  [trait_constr_Impl_1_i1 : core_models.ops.function.FnOnce\n    F\n    (core_models.iter.traits.iterator.Iterator.Item I)\n    (associatedTypes := {\n      show\n        core_models.ops.function.FnOnce.AssociatedTypes\n        F\n        (core_models.iter.traits.iterator.Iterator.Item I)\n      by infer_instance\n      with Output := O})] :\n  core_models.iter.traits.iterator.Iterator (Map I F)\n  where\n  next := fun (self : (Map I F)) => do\n    let ⟨tmp0, 
out⟩ ←\n      (core_models.iter.traits.iterator.Iterator.next I (Map.iter self));\n    let self : (Map I F) := {self with iter := tmp0};\n    let hax_temp_output : (core_models.option.Option O) ←\n      match out with\n        | (core_models.option.Option.Some  v) => do\n          (pure (core_models.option.Option.Some\n            (← (core_models.ops.function.FnOnce.call_once\n              F\n              (core_models.iter.traits.iterator.Iterator.Item I)\n              (Map.f self)\n              v))))\n        | (core_models.option.Option.None ) => do\n          (pure core_models.option.Option.None);\n    (pure (rust_primitives.hax.Tuple2.mk self hax_temp_output))\n\nend core_models.iter.adapters.map\n\n\nnamespace core_models.iter.adapters.take\n\n@[reducible] instance Impl_1.AssociatedTypes\n  (I : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] :\n  core_models.iter.traits.iterator.Iterator.AssociatedTypes (Take I)\n  where\n  Item := (core_models.iter.traits.iterator.Iterator.Item I)\n\ninstance Impl_1\n  (I : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ] :\n  core_models.iter.traits.iterator.Iterator (Take I)\n  where\n  next := fun (self : (Take I)) => do\n    let ⟨self, hax_temp_output⟩ ←\n      if (← ((Take.n self) !=? (0 : usize))) then do\n        let self : (Take I) :=\n          {self with n := (← ((Take.n self) -? 
(1 : usize)))};\n        let ⟨tmp0, out⟩ ←\n          (core_models.iter.traits.iterator.Iterator.next I (Take.iter self));\n        let self : (Take I) := {self with iter := tmp0};\n        (pure (rust_primitives.hax.Tuple2.mk self out))\n      else do\n        (pure (rust_primitives.hax.Tuple2.mk\n          self\n          core_models.option.Option.None));\n    (pure (rust_primitives.hax.Tuple2.mk self hax_temp_output))\n\nend core_models.iter.adapters.take\n\n\nnamespace core_models.iter.adapters.flat_map\n\n@[spec]\ndef Impl.new\n    (I : Type)\n    (U : Type)\n    (F : Type)\n    [trait_constr_new_associated_type_i0 :\n      core_models.iter.traits.iterator.Iterator.AssociatedTypes\n      I]\n    [trait_constr_new_i0 : core_models.iter.traits.iterator.Iterator I ]\n    [trait_constr_new_associated_type_i1 :\n      core_models.iter.traits.iterator.Iterator.AssociatedTypes\n      U]\n    [trait_constr_new_i1 : core_models.iter.traits.iterator.Iterator U ]\n    [trait_constr_new_associated_type_i2 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      (core_models.iter.traits.iterator.Iterator.Item I)]\n    [trait_constr_new_i2 : core_models.ops.function.FnOnce\n      F\n      (core_models.iter.traits.iterator.Iterator.Item I)\n      (associatedTypes := {\n        show\n          core_models.ops.function.FnOnce.AssociatedTypes\n          F\n          (core_models.iter.traits.iterator.Iterator.Item I)\n        by infer_instance\n        with Output := U})]\n    (it : I)\n    (f : F) :\n    RustM (FlatMap I U F) := do\n  (pure (FlatMap.mk\n    (it := it)\n    (f := f)\n    (current := core_models.option.Option.None)))\n\n@[instance] opaque Impl_1.AssociatedTypes\n  (I : Type)\n  (U : Type)\n  (F : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ]\n  [trait_constr_Impl_1_associated_type_i1 :\n    
core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    U]\n  [trait_constr_Impl_1_i1 : core_models.iter.traits.iterator.Iterator U ]\n  [trait_constr_Impl_1_associated_type_i2 :\n    core_models.ops.function.FnOnce.AssociatedTypes\n    F\n    (core_models.iter.traits.iterator.Iterator.Item I)]\n  [trait_constr_Impl_1_i2 : core_models.ops.function.FnOnce\n    F\n    (core_models.iter.traits.iterator.Iterator.Item I)\n    (associatedTypes := {\n      show\n        core_models.ops.function.FnOnce.AssociatedTypes\n        F\n        (core_models.iter.traits.iterator.Iterator.Item I)\n      by infer_instance\n      with Output := U})] :\n  core_models.iter.traits.iterator.Iterator.AssociatedTypes (FlatMap I U F) :=\n  by constructor <;> exact Inhabited.default\n\n@[instance] opaque Impl_1\n  (I : Type)\n  (U : Type)\n  (F : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ]\n  [trait_constr_Impl_1_associated_type_i1 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    U]\n  [trait_constr_Impl_1_i1 : core_models.iter.traits.iterator.Iterator U ]\n  [trait_constr_Impl_1_associated_type_i2 :\n    core_models.ops.function.FnOnce.AssociatedTypes\n    F\n    (core_models.iter.traits.iterator.Iterator.Item I)]\n  [trait_constr_Impl_1_i2 : core_models.ops.function.FnOnce\n    F\n    (core_models.iter.traits.iterator.Iterator.Item I)\n    (associatedTypes := {\n      show\n        core_models.ops.function.FnOnce.AssociatedTypes\n        F\n        (core_models.iter.traits.iterator.Iterator.Item I)\n      by infer_instance\n      with Output := U})] :\n  core_models.iter.traits.iterator.Iterator (FlatMap I U F) :=\n  by constructor <;> exact Inhabited.default\n\nend core_models.iter.adapters.flat_map\n\n\nnamespace core_models.iter.adapters.flatten\n\nstructure Flatten\n  (I : Type)\n  
[trait_constr_Flatten_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Flatten_i0 : core_models.iter.traits.iterator.Iterator I ]\n  [trait_constr_Flatten_associated_type_i1 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    (core_models.iter.traits.iterator.Iterator.Item I)]\n  [trait_constr_Flatten_i1 : core_models.iter.traits.iterator.Iterator\n    (core_models.iter.traits.iterator.Iterator.Item I)\n    ]\n  where\n  it : I\n  current : (core_models.option.Option\n      (core_models.iter.traits.iterator.Iterator.Item I))\n\nend core_models.iter.adapters.flatten\n\n\nnamespace core_models.iter.traits.iterator\n\nclass IteratorMethods.AssociatedTypes (Self : Type) where\n  [trait_constr_IteratorMethods_i0 : Iterator.AssociatedTypes Self]\n\nattribute [instance_reducible, instance]\n  IteratorMethods.AssociatedTypes.trait_constr_IteratorMethods_i0\n\nclass IteratorMethods (Self : Type)\n  [associatedTypes : outParam (IteratorMethods.AssociatedTypes (Self : Type))]\n  where\n  [trait_constr_IteratorMethods_i0 : Iterator Self]\n  fold (Self)\n    (B : Type)\n    (F : Type)\n    [trait_constr_fold_associated_type_i1 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      (rust_primitives.hax.Tuple2 B (Iterator.Item Self))]\n    [trait_constr_fold_i1 : core_models.ops.function.FnOnce\n      F\n      (rust_primitives.hax.Tuple2 B (Iterator.Item Self))\n      (associatedTypes := {\n        show\n          core_models.ops.function.FnOnce.AssociatedTypes\n          F\n          (rust_primitives.hax.Tuple2 B (Iterator.Item Self))\n        by infer_instance\n        with Output := B})] :\n    (Self -> B -> F -> RustM B)\n  enumerate (Self) :\n    (Self -> RustM (core_models.iter.adapters.enumerate.Enumerate Self))\n  step_by (Self) :\n    (Self -> usize -> RustM (core_models.iter.adapters.step_by.StepBy Self))\n  map (Self)\n    (O : Type)\n    (F : Type)\n    
[trait_constr_map_associated_type_i1 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      (Iterator.Item Self)]\n    [trait_constr_map_i1 : core_models.ops.function.FnOnce\n      F\n      (Iterator.Item Self)\n      (associatedTypes := {\n        show\n          core_models.ops.function.FnOnce.AssociatedTypes\n          F\n          (Iterator.Item Self)\n        by infer_instance\n        with Output := O})] :\n    (Self -> F -> RustM (core_models.iter.adapters.map.Map Self F))\n  all (Self)\n    (F : Type)\n    [trait_constr_all_associated_type_i1 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      (Iterator.Item Self)]\n    [trait_constr_all_i1 : core_models.ops.function.FnOnce\n      F\n      (Iterator.Item Self)\n      (associatedTypes := {\n        show\n          core_models.ops.function.FnOnce.AssociatedTypes\n          F\n          (Iterator.Item Self)\n        by infer_instance\n        with Output := Bool})] :\n    (Self -> F -> RustM Bool)\n  take (Self) :\n    (Self -> usize -> RustM (core_models.iter.adapters.take.Take Self))\n  flat_map (Self)\n    (U : Type)\n    (F : Type)\n    [trait_constr_flat_map_associated_type_i1 : Iterator.AssociatedTypes U]\n    [trait_constr_flat_map_i1 : Iterator U ]\n    [trait_constr_flat_map_associated_type_i2 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      (Iterator.Item Self)]\n    [trait_constr_flat_map_i2 : core_models.ops.function.FnOnce\n      F\n      (Iterator.Item Self)\n      (associatedTypes := {\n        show\n          core_models.ops.function.FnOnce.AssociatedTypes\n          F\n          (Iterator.Item Self)\n        by infer_instance\n        with Output := U})] :\n    (Self -> F -> RustM (core_models.iter.adapters.flat_map.FlatMap Self U F))\n  flatten (Self)\n    [trait_constr_flatten_associated_type_i1 : Iterator.AssociatedTypes\n      (Iterator.Item Self)]\n    [trait_constr_flatten_i1 : Iterator (Iterator.Item Self) ] :\n    (Self -> 
RustM (core_models.iter.adapters.flatten.Flatten Self))\n  zip (Self)\n    (I2 : Type)\n    [trait_constr_zip_associated_type_i1 : Iterator.AssociatedTypes I2]\n    [trait_constr_zip_i1 : Iterator I2 ] :\n    (Self -> I2 -> RustM (core_models.iter.adapters.zip.Zip Self I2))\n\nattribute [instance_reducible, instance]\n  IteratorMethods.trait_constr_IteratorMethods_i0\n\nend core_models.iter.traits.iterator\n\n\nnamespace core_models.iter.adapters.flatten\n\n@[spec]\ndef Impl.new\n    (I : Type)\n    [trait_constr_new_associated_type_i0 :\n      core_models.iter.traits.iterator.Iterator.AssociatedTypes\n      I]\n    [trait_constr_new_i0 : core_models.iter.traits.iterator.Iterator I ]\n    [trait_constr_new_associated_type_i1 :\n      core_models.iter.traits.iterator.Iterator.AssociatedTypes\n      (core_models.iter.traits.iterator.Iterator.Item I)]\n    [trait_constr_new_i1 : core_models.iter.traits.iterator.Iterator\n      (core_models.iter.traits.iterator.Iterator.Item I)\n      ]\n    (it : I) :\n    RustM (Flatten I) := do\n  (pure (Flatten.mk (it := it) (current := core_models.option.Option.None)))\n\n@[instance] opaque Impl_1.AssociatedTypes\n  (I : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I ]\n  [trait_constr_Impl_1_associated_type_i1 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    (core_models.iter.traits.iterator.Iterator.Item I)]\n  [trait_constr_Impl_1_i1 : core_models.iter.traits.iterator.Iterator\n    (core_models.iter.traits.iterator.Iterator.Item I)\n    ] :\n  core_models.iter.traits.iterator.Iterator.AssociatedTypes (Flatten I) :=\n  by constructor <;> exact Inhabited.default\n\n@[instance] opaque Impl_1\n  (I : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I]\n  [trait_constr_Impl_1_i0 : 
core_models.iter.traits.iterator.Iterator I ]\n  [trait_constr_Impl_1_associated_type_i1 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    (core_models.iter.traits.iterator.Iterator.Item I)]\n  [trait_constr_Impl_1_i1 : core_models.iter.traits.iterator.Iterator\n    (core_models.iter.traits.iterator.Iterator.Item I)\n    ] :\n  core_models.iter.traits.iterator.Iterator (Flatten I) :=\n  by constructor <;> exact Inhabited.default\n\nend core_models.iter.adapters.flatten\n\n\nnamespace core_models.iter.adapters.zip\n\n@[spec]\ndef Impl.new\n    (I1 : Type)\n    (I2 : Type)\n    [trait_constr_new_associated_type_i0 :\n      core_models.iter.traits.iterator.Iterator.AssociatedTypes\n      I1]\n    [trait_constr_new_i0 : core_models.iter.traits.iterator.Iterator I1 ]\n    [trait_constr_new_associated_type_i1 :\n      core_models.iter.traits.iterator.Iterator.AssociatedTypes\n      I2]\n    [trait_constr_new_i1 : core_models.iter.traits.iterator.Iterator I2 ]\n    (it1 : I1)\n    (it2 : I2) :\n    RustM (Zip I1 I2) := do\n  (pure (Zip.mk (it1 := it1) (it2 := it2)))\n\nend core_models.iter.adapters.zip\n\n\nnamespace core_models.iter.traits.iterator\n\n@[reducible] instance Impl.AssociatedTypes\n  (I : Type)\n  [trait_constr_Impl_associated_type_i0 : Iterator.AssociatedTypes I]\n  [trait_constr_Impl_i0 : Iterator I ] :\n  IteratorMethods.AssociatedTypes I\n  where\n\ninstance Impl\n  (I : Type)\n  [trait_constr_Impl_associated_type_i0 : Iterator.AssociatedTypes I]\n  [trait_constr_Impl_i0 : Iterator I ] :\n  IteratorMethods I\n  where\n  fold :=\n    fun\n      (B : Type)\n      (F : Type)\n      [trait_constr_fold_associated_type_i1 :\n        core_models.ops.function.FnOnce.AssociatedTypes\n        F\n        (rust_primitives.hax.Tuple2 B (Iterator.Item I))]\n      [trait_constr_fold_i1 : core_models.ops.function.FnOnce\n        F\n        (rust_primitives.hax.Tuple2 B (Iterator.Item I))\n        (associatedTypes := {\n          show\n            
core_models.ops.function.FnOnce.AssociatedTypes\n            F\n            (rust_primitives.hax.Tuple2 B (Iterator.Item I))\n          by infer_instance\n          with Output := B})] (self : I) (init : B) (f : F) => do\n    (pure init)\n  enumerate := fun (self : I) => do\n    (core_models.iter.adapters.enumerate.Impl.new I self)\n  step_by := fun (self : I) (step : usize) => do\n    (core_models.iter.adapters.step_by.Impl.new I self step)\n  map :=\n    fun\n      (O : Type)\n      (F : Type)\n      [trait_constr_map_associated_type_i1 :\n        core_models.ops.function.FnOnce.AssociatedTypes\n        F\n        (Iterator.Item I)]\n      [trait_constr_map_i1 : core_models.ops.function.FnOnce\n        F\n        (Iterator.Item I)\n        (associatedTypes := {\n          show\n            core_models.ops.function.FnOnce.AssociatedTypes\n            F\n            (Iterator.Item I)\n          by infer_instance\n          with Output := O})] (self : I) (f : F) => do\n    (core_models.iter.adapters.map.Impl.new I F self f)\n  all :=\n    fun\n      (F : Type)\n      [trait_constr_all_associated_type_i1 :\n        core_models.ops.function.FnOnce.AssociatedTypes\n        F\n        (Iterator.Item I)]\n      [trait_constr_all_i1 : core_models.ops.function.FnOnce\n        F\n        (Iterator.Item I)\n        (associatedTypes := {\n          show\n            core_models.ops.function.FnOnce.AssociatedTypes\n            F\n            (Iterator.Item I)\n          by infer_instance\n          with Output := Bool})] (self : I) (f : F) => do\n    (pure true)\n  take := fun (self : I) (n : usize) => do\n    (core_models.iter.adapters.take.Impl.new I self n)\n  flat_map :=\n    fun\n      (U : Type)\n      (F : Type)\n      [trait_constr_flat_map_associated_type_i1 : Iterator.AssociatedTypes U]\n      [trait_constr_flat_map_i1 : Iterator U ]\n      [trait_constr_flat_map_associated_type_i2 :\n        core_models.ops.function.FnOnce.AssociatedTypes\n        F\n        
(Iterator.Item I)]\n      [trait_constr_flat_map_i2 : core_models.ops.function.FnOnce\n        F\n        (Iterator.Item I)\n        (associatedTypes := {\n          show\n            core_models.ops.function.FnOnce.AssociatedTypes\n            F\n            (Iterator.Item I)\n          by infer_instance\n          with Output := U})] (self : I) (f : F) => do\n    (core_models.iter.adapters.flat_map.Impl.new I U F self f)\n  flatten :=\n    fun\n      [trait_constr_flatten_associated_type_i1 : Iterator.AssociatedTypes\n        (Iterator.Item I)]\n      [trait_constr_flatten_i1 : Iterator (Iterator.Item I) ] (self : I) => do\n    (core_models.iter.adapters.flatten.Impl.new I self)\n  zip :=\n    fun\n      (I2 : Type)\n      [trait_constr_zip_associated_type_i1 : Iterator.AssociatedTypes I2]\n      [trait_constr_zip_i1 : Iterator I2 ] (self : I) (it2 : I2) => do\n    (core_models.iter.adapters.zip.Impl.new I I2 self it2)\n\nend core_models.iter.traits.iterator\n\n\nnamespace core_models.iter.adapters.zip\n\n@[instance] opaque Impl_1.AssociatedTypes\n  (I1 : Type)\n  (I2 : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I1]\n  [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I1 ]\n  [trait_constr_Impl_1_associated_type_i1 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I2]\n  [trait_constr_Impl_1_i1 : core_models.iter.traits.iterator.Iterator I2 ] :\n  core_models.iter.traits.iterator.Iterator.AssociatedTypes (Zip I1 I2) :=\n  by constructor <;> exact Inhabited.default\n\n@[instance] opaque Impl_1\n  (I1 : Type)\n  (I2 : Type)\n  [trait_constr_Impl_1_associated_type_i0 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I1]\n  [trait_constr_Impl_1_i0 : core_models.iter.traits.iterator.Iterator I1 ]\n  [trait_constr_Impl_1_associated_type_i1 :\n    core_models.iter.traits.iterator.Iterator.AssociatedTypes\n    I2]\n  
[trait_constr_Impl_1_i1 : core_models.iter.traits.iterator.Iterator I2 ] :\n  core_models.iter.traits.iterator.Iterator (Zip I1 I2) :=\n  by constructor <;> exact Inhabited.default\n\nend core_models.iter.adapters.zip\n\n\nnamespace core_models.ops.function\n\nclass Fn.AssociatedTypes (Self : Type) (Args : Type) where\n  [trait_constr_Fn_i0 : FnOnce.AssociatedTypes Self Args]\n\nattribute [instance_reducible, instance] Fn.AssociatedTypes.trait_constr_Fn_i0\n\nclass Fn (Self : Type) (Args : Type)\n  [associatedTypes : outParam (Fn.AssociatedTypes (Self : Type) (Args : Type))]\n  where\n  [trait_constr_Fn_i0 : FnOnce Self Args]\n  call (Self) (Args) : (Self -> Args -> RustM (FnOnce.Output Self Args))\n\nattribute [instance_reducible, instance] Fn.trait_constr_Fn_i0\n\n@[reducible] instance Impl_2.AssociatedTypes (Arg : Type) (Out : Type) :\n  FnOnce.AssociatedTypes (Arg -> RustM Out) Arg\n  where\n  Output := Out\n\ninstance Impl_2 (Arg : Type) (Out : Type) : FnOnce (Arg -> RustM Out) Arg where\n  call_once := fun (self : (Arg -> RustM Out)) (arg : Arg) => do (self arg)\n\n@[reducible] instance Impl.AssociatedTypes\n  (Arg1 : Type)\n  (Arg2 : Type)\n  (Out : Type) :\n  FnOnce.AssociatedTypes\n  (Arg1 -> Arg2 -> RustM Out)\n  (rust_primitives.hax.Tuple2 Arg1 Arg2)\n  where\n  Output := Out\n\ninstance Impl (Arg1 : Type) (Arg2 : Type) (Out : Type) :\n  FnOnce (Arg1 -> Arg2 -> RustM Out) (rust_primitives.hax.Tuple2 Arg1 Arg2)\n  where\n  call_once :=\n    fun\n      (self : (Arg1 -> Arg2 -> RustM Out))\n      (arg : (rust_primitives.hax.Tuple2 Arg1 Arg2)) => do\n    (self\n      (rust_primitives.hax.Tuple2._0 arg)\n      (rust_primitives.hax.Tuple2._1 arg))\n\n@[reducible] instance Impl_1.AssociatedTypes\n  (Arg1 : Type)\n  (Arg2 : Type)\n  (Arg3 : Type)\n  (Out : Type) :\n  FnOnce.AssociatedTypes\n  (Arg1 -> Arg2 -> Arg3 -> RustM Out)\n  (rust_primitives.hax.Tuple3 Arg1 Arg2 Arg3)\n  where\n  Output := Out\n\ninstance Impl_1 (Arg1 : Type) (Arg2 : Type) (Arg3 : Type) 
(Out : Type) :\n  FnOnce\n  (Arg1 -> Arg2 -> Arg3 -> RustM Out)\n  (rust_primitives.hax.Tuple3 Arg1 Arg2 Arg3)\n  where\n  call_once :=\n    fun\n      (self : (Arg1 -> Arg2 -> Arg3 -> RustM Out))\n      (arg : (rust_primitives.hax.Tuple3 Arg1 Arg2 Arg3)) => do\n    (self\n      (rust_primitives.hax.Tuple3._0 arg)\n      (rust_primitives.hax.Tuple3._1 arg)\n      (rust_primitives.hax.Tuple3._2 arg))\n\nend core_models.ops.function\n\n\nnamespace core_models.ops.deref\n\n@[reducible] instance Impl.AssociatedTypes (T : Type) :\n  Deref.AssociatedTypes T\n  where\n  Target := T\n\ninstance Impl (T : Type) : Deref T where\n  deref := fun (self : T) => do (pure self)\n\nend core_models.ops.deref\n\n\nnamespace core_models.option\n\n@[spec]\ndef Impl.is_some_and\n    (T : Type)\n    (F : Type)\n    [trait_constr_is_some_and_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      T]\n    [trait_constr_is_some_and_i0 : core_models.ops.function.FnOnce\n      F\n      T\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := Bool})]\n    (self : (Option T))\n    (f : F) :\n    RustM Bool := do\n  match self with\n    | (Option.None ) => do (pure false)\n    | (Option.Some  x) => do (core_models.ops.function.FnOnce.call_once F T f x)\n\n@[spec]\ndef Impl.is_none_or\n    (T : Type)\n    (F : Type)\n    [trait_constr_is_none_or_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      T]\n    [trait_constr_is_none_or_i0 : core_models.ops.function.FnOnce\n      F\n      T\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := Bool})]\n    (self : (Option T))\n    (f : F) :\n    RustM Bool := do\n  match self with\n    | (Option.None ) => do (pure true)\n    | (Option.Some  x) => do (core_models.ops.function.FnOnce.call_once F T f 
x)\n\n@[spec]\ndef Impl.unwrap_or_else\n    (T : Type)\n    (F : Type)\n    [trait_constr_unwrap_or_else_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      rust_primitives.hax.Tuple0]\n    [trait_constr_unwrap_or_else_i0 : core_models.ops.function.FnOnce\n      F\n      rust_primitives.hax.Tuple0\n      (associatedTypes := {\n        show\n          core_models.ops.function.FnOnce.AssociatedTypes\n          F\n          rust_primitives.hax.Tuple0\n        by infer_instance\n        with Output := T})]\n    (self : (Option T))\n    (f : F) :\n    RustM T := do\n  match self with\n    | (Option.Some  x) => do (pure x)\n    | (Option.None ) => do\n      (core_models.ops.function.FnOnce.call_once\n        F\n        rust_primitives.hax.Tuple0 f rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef Impl.map\n    (T : Type)\n    (U : Type)\n    (F : Type)\n    [trait_constr_map_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      T]\n    [trait_constr_map_i0 : core_models.ops.function.FnOnce\n      F\n      T\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := U})]\n    (self : (Option T))\n    (f : F) :\n    RustM (Option U) := do\n  match self with\n    | (Option.Some  x) => do\n      (pure (Option.Some\n        (← (core_models.ops.function.FnOnce.call_once F T f x))))\n    | (Option.None ) => do (pure Option.None)\n\n@[spec]\ndef Impl.map_or\n    (T : Type)\n    (U : Type)\n    (F : Type)\n    [trait_constr_map_or_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      T]\n    [trait_constr_map_or_i0 : core_models.ops.function.FnOnce\n      F\n      T\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := U})]\n    (self : (Option T))\n    (default : U)\n    (f : F) :\n    RustM 
U := do\n  match self with\n    | (Option.Some  t) => do (core_models.ops.function.FnOnce.call_once F T f t)\n    | (Option.None ) => do (pure default)\n\n@[spec]\ndef Impl.map_or_else\n    (T : Type)\n    (U : Type)\n    (D : Type)\n    (F : Type)\n    [trait_constr_map_or_else_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      T]\n    [trait_constr_map_or_else_i0 : core_models.ops.function.FnOnce\n      F\n      T\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := U})]\n    [trait_constr_map_or_else_associated_type_i1 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      D\n      rust_primitives.hax.Tuple0]\n    [trait_constr_map_or_else_i1 : core_models.ops.function.FnOnce\n      D\n      rust_primitives.hax.Tuple0\n      (associatedTypes := {\n        show\n          core_models.ops.function.FnOnce.AssociatedTypes\n          D\n          rust_primitives.hax.Tuple0\n        by infer_instance\n        with Output := U})]\n    (self : (Option T))\n    (default : D)\n    (f : F) :\n    RustM U := do\n  match self with\n    | (Option.Some  t) => do (core_models.ops.function.FnOnce.call_once F T f t)\n    | (Option.None ) => do\n      (core_models.ops.function.FnOnce.call_once\n        D\n        rust_primitives.hax.Tuple0 default rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef Impl.map_or_default\n    (T : Type)\n    (U : Type)\n    (F : Type)\n    [trait_constr_map_or_default_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      T]\n    [trait_constr_map_or_default_i0 : core_models.ops.function.FnOnce\n      F\n      T\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := U})]\n    [trait_constr_map_or_default_associated_type_i1 :\n      core_models.default.Default.AssociatedTypes\n      U]\n    
[trait_constr_map_or_default_i1 : core_models.default.Default U ]\n    (self : (Option T))\n    (f : F) :\n    RustM U := do\n  match self with\n    | (Option.Some  t) => do (core_models.ops.function.FnOnce.call_once F T f t)\n    | (Option.None ) => do\n      (core_models.default.Default.default U rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef Impl.ok_or_else\n    (T : Type)\n    (E : Type)\n    (F : Type)\n    [trait_constr_ok_or_else_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      rust_primitives.hax.Tuple0]\n    [trait_constr_ok_or_else_i0 : core_models.ops.function.FnOnce\n      F\n      rust_primitives.hax.Tuple0\n      (associatedTypes := {\n        show\n          core_models.ops.function.FnOnce.AssociatedTypes\n          F\n          rust_primitives.hax.Tuple0\n        by infer_instance\n        with Output := E})]\n    (self : (Option T))\n    (err : F) :\n    RustM (core_models.result.Result T E) := do\n  match self with\n    | (Option.Some  v) => do (pure (core_models.result.Result.Ok v))\n    | (Option.None ) => do\n      (pure (core_models.result.Result.Err\n        (← (core_models.ops.function.FnOnce.call_once\n          F\n          rust_primitives.hax.Tuple0 err rust_primitives.hax.Tuple0.mk))))\n\n@[spec]\ndef Impl.and_then\n    (T : Type)\n    (U : Type)\n    (F : Type)\n    [trait_constr_and_then_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      T]\n    [trait_constr_and_then_i0 : core_models.ops.function.FnOnce\n      F\n      T\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := (Option U)})]\n    (self : (Option T))\n    (f : F) :\n    RustM (Option U) := do\n  match self with\n    | (Option.Some  x) => do (core_models.ops.function.FnOnce.call_once F T f x)\n    | (Option.None ) => do (pure Option.None)\n\nend core_models.option\n\n\nnamespace 
core_models.result\n\n@[spec]\ndef Impl.map\n    (T : Type)\n    (E : Type)\n    (U : Type)\n    (F : Type)\n    [trait_constr_map_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      T]\n    [trait_constr_map_i0 : core_models.ops.function.FnOnce\n      F\n      T\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := U})]\n    (self : (Result T E))\n    (op : F) :\n    RustM (Result U E) := do\n  match self with\n    | (Result.Ok  t) => do\n      (pure (Result.Ok\n        (← (core_models.ops.function.FnOnce.call_once F T op t))))\n    | (Result.Err  e) => do (pure (Result.Err e))\n\n@[spec]\ndef Impl.map_or\n    (T : Type)\n    (E : Type)\n    (U : Type)\n    (F : Type)\n    [trait_constr_map_or_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      T]\n    [trait_constr_map_or_i0 : core_models.ops.function.FnOnce\n      F\n      T\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := U})]\n    (self : (Result T E))\n    (default : U)\n    (f : F) :\n    RustM U := do\n  match self with\n    | (Result.Ok  t) => do (core_models.ops.function.FnOnce.call_once F T f t)\n    | (Result.Err  _e) => do (pure default)\n\n@[spec]\ndef Impl.map_or_else\n    (T : Type)\n    (E : Type)\n    (U : Type)\n    (D : Type)\n    (F : Type)\n    [trait_constr_map_or_else_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      T]\n    [trait_constr_map_or_else_i0 : core_models.ops.function.FnOnce\n      F\n      T\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := U})]\n    [trait_constr_map_or_else_associated_type_i1 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      D\n      E]\n    
[trait_constr_map_or_else_i1 : core_models.ops.function.FnOnce\n      D\n      E\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes D E\n        by infer_instance\n        with Output := U})]\n    (self : (Result T E))\n    (default : D)\n    (f : F) :\n    RustM U := do\n  match self with\n    | (Result.Ok  t) => do (core_models.ops.function.FnOnce.call_once F T f t)\n    | (Result.Err  e) => do\n      (core_models.ops.function.FnOnce.call_once D E default e)\n\n@[spec]\ndef Impl.map_err\n    (T : Type)\n    (E : Type)\n    (F : Type)\n    (O : Type)\n    [trait_constr_map_err_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      O\n      E]\n    [trait_constr_map_err_i0 : core_models.ops.function.FnOnce\n      O\n      E\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes O E\n        by infer_instance\n        with Output := F})]\n    (self : (Result T E))\n    (op : O) :\n    RustM (Result T F) := do\n  match self with\n    | (Result.Ok  t) => do (pure (Result.Ok t))\n    | (Result.Err  e) => do\n      (pure (Result.Err\n        (← (core_models.ops.function.FnOnce.call_once O E op e))))\n\n@[spec]\ndef Impl.and_then\n    (T : Type)\n    (E : Type)\n    (U : Type)\n    (F : Type)\n    [trait_constr_and_then_associated_type_i0 :\n      core_models.ops.function.FnOnce.AssociatedTypes\n      F\n      T]\n    [trait_constr_and_then_i0 : core_models.ops.function.FnOnce\n      F\n      T\n      (associatedTypes := {\n        show core_models.ops.function.FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := (Result U E)})]\n    (self : (Result T E))\n    (op : F) :\n    RustM (Result U E) := do\n  match self with\n    | (Result.Ok  t) => do (core_models.ops.function.FnOnce.call_once F T op t)\n    | (Result.Err  e) => do (pure (Result.Err e))\n\nend core_models.result\n\n\nnamespace core_models.slice.iter\n\n@[reducible] instance 
Impl_2.AssociatedTypes (T : Type) :\n  core_models.iter.traits.iterator.Iterator.AssociatedTypes (Iter T)\n  where\n  Item := T\n\ninstance Impl_2 (T : Type) :\n  core_models.iter.traits.iterator.Iterator (Iter T)\n  where\n  next := fun (self : (Iter T)) => do\n    let ⟨self, hax_temp_output⟩ ←\n      if\n      (← ((← (rust_primitives.sequence.seq_len T (Iter._0 self)))\n        ==? (0 : usize))) then do\n        (pure (rust_primitives.hax.Tuple2.mk\n          self\n          core_models.option.Option.None))\n      else do\n        let res : T ← (rust_primitives.sequence.seq_first T (Iter._0 self));\n        let self : (Iter T) :=\n          {self\n          with _0 := (← (rust_primitives.sequence.seq_slice T\n            (Iter._0 self)\n            (1 : usize)\n            (← (rust_primitives.sequence.seq_len T (Iter._0 self)))))};\n        (pure (rust_primitives.hax.Tuple2.mk\n          self\n          (core_models.option.Option.Some res)));\n    (pure (rust_primitives.hax.Tuple2.mk self hax_temp_output))\n\n@[reducible] instance Impl_3.AssociatedTypes (T : Type) :\n  core_models.iter.traits.iterator.Iterator.AssociatedTypes (Chunks T)\n  where\n  Item := (RustSlice T)\n\ninstance Impl_3 (T : Type) :\n  core_models.iter.traits.iterator.Iterator (Chunks T)\n  where\n  next := fun (self : (Chunks T)) => do\n    let ⟨self, hax_temp_output⟩ ←\n      if\n      (← ((← (rust_primitives.slice.slice_length T (Chunks.elements self)))\n        ==? (0 : usize))) then do\n        (pure (rust_primitives.hax.Tuple2.mk\n          self\n          core_models.option.Option.None))\n      else do\n        if\n        (← ((← (rust_primitives.slice.slice_length T (Chunks.elements self)))\n          <? 
(Chunks.cs self))) then do\n          let res : (RustSlice T) := (Chunks.elements self);\n          let self : (Chunks T) :=\n            {self\n            with elements := (← (rust_primitives.slice.slice_slice T\n              (Chunks.elements self)\n              (0 : usize)\n              (0 : usize)))};\n          (pure (rust_primitives.hax.Tuple2.mk\n            self\n            (core_models.option.Option.Some res)))\n        else do\n          let ⟨res, new_elements⟩ ←\n            (rust_primitives.slice.slice_split_at T\n              (Chunks.elements self)\n              (Chunks.cs self));\n          let self : (Chunks T) := {self with elements := new_elements};\n          (pure (rust_primitives.hax.Tuple2.mk\n            self\n            (core_models.option.Option.Some res)));\n    (pure (rust_primitives.hax.Tuple2.mk self hax_temp_output))\n\n@[reducible] instance Impl_4.AssociatedTypes (T : Type) :\n  core_models.iter.traits.iterator.Iterator.AssociatedTypes (ChunksExact T)\n  where\n  Item := (RustSlice T)\n\ninstance Impl_4 (T : Type) :\n  core_models.iter.traits.iterator.Iterator (ChunksExact T)\n  where\n  next := fun (self : (ChunksExact T)) => do\n    let ⟨self, hax_temp_output⟩ ←\n      if\n      (← ((← (rust_primitives.slice.slice_length T (ChunksExact.elements self)))\n        <? 
(ChunksExact.cs self))) then do\n        (pure (rust_primitives.hax.Tuple2.mk\n          self\n          core_models.option.Option.None))\n      else do\n        let ⟨res, new_elements⟩ ←\n          (rust_primitives.slice.slice_split_at T\n            (ChunksExact.elements self)\n            (ChunksExact.cs self));\n        let self : (ChunksExact T) := {self with elements := new_elements};\n        (pure (rust_primitives.hax.Tuple2.mk\n          self\n          (core_models.option.Option.Some res)));\n    (pure (rust_primitives.hax.Tuple2.mk self hax_temp_output))\n\nend core_models.slice.iter\n\n\nnamespace core_models.slice\n\n@[spec]\ndef Impl.get\n    (T : Type)\n    (I : Type)\n    [trait_constr_get_associated_type_i0 : SliceIndex.AssociatedTypes\n      I\n      (RustSlice T)]\n    [trait_constr_get_i0 : SliceIndex I (RustSlice T) ]\n    (s : (RustSlice T))\n    (index : I) :\n    RustM (core_models.option.Option (SliceIndex.Output I (RustSlice T))) := do\n  (SliceIndex.get I (RustSlice T) index s)\n\nend core_models.slice\n\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/epilogue/alloc.lean",
    "content": "import Hax.core_models.core_models\n\nopen rust_primitives.hax\n\n/-\n\n# Vectors\n\nRust vectors are represented as Lean Arrays (variable size)\n\n-/\nsection RustVectors\n\nopen rust_primitives.sequence\n\ndef alloc.alloc.Global : Type := Unit\n\nabbrev alloc.vec.Vec (α: Type) (_Allocator:Type) : Type := Seq α\n\n@[spec]\ndef alloc.vec.Impl.new (α: Type) (_:Tuple0) : RustM (alloc.vec.Vec α alloc.alloc.Global) :=\n  pure ⟨(List.nil).toArray, by grind⟩\n\n@[spec]\ndef alloc.vec.Impl_1.len (α: Type) (_Allocator: Type) (x: alloc.vec.Vec α alloc.alloc.Global) : RustM usize :=\n  pure (.ofNat x.val.size)\n\n@[spec]\ndef alloc.vec.Impl_2.extend_from_slice α (_Allocator: Type)\n    (x: alloc.vec.Vec α alloc.alloc.Global) (y: Seq α) :\n    RustM (alloc.vec.Vec α alloc.alloc.Global) :=\n  if h : x.val.size + y.val.size < USize64.size then\n    pure ⟨x.val.append y.val, by simp [h]⟩\n  else\n    .fail .maximumSizeExceeded\n\n@[spec]\ndef alloc.slice.Impl.to_vec α (a: rust_primitives.sequence.Seq α) :\n    RustM (alloc.vec.Vec α alloc.alloc.Global) :=\n  pure a\n\nend RustVectors\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/epilogue/convert.lean",
    "content": "\nimport Hax.core_models.core_models\n\nset_option mvcgen.warning false\nopen rust_primitives.hax\nopen Std.Do\n\nnamespace core_models.convert\n\n@[reducible] instance {α : Type} {n : usize} : TryInto.AssociatedTypes (RustSlice α) (RustArray α n) where\n  Error := core_models.array.TryFromSliceError\n\ninstance {α : Type} {n : usize} : TryInto (RustSlice α) (RustArray α n) where\n  try_into a :=\n   pure (\n     if h: a.val.size = n.toNat then\n       core_models.result.Result.Ok (.ofVec (a.val.toVector.cast h))\n     else\n       .Err core_models.array.TryFromSliceError.mk\n     )\n\n@[spec]\ntheorem TryInto.try_into.spec {α : Type} {n: usize} (a: RustSlice α) :\n  (h: a.val.size = n.toNat) →\n  ⦃ ⌜ True ⌝ ⦄\n  (TryInto.try_into (RustSlice α) (RustArray α n) a )\n  ⦃ ⇓ r => ⌜ r = .Ok (.ofVec (a.val.toVector.cast h)) ⌝ ⦄ := by\n  intro h\n  mvcgen [TryInto.try_into]\n  grind\n\nend core_models.convert\n\nopen Lean in\nset_option hygiene false in\nmacro \"declare_Hax_convert_from_instances\" : command => do\n  let mut cmds := #[]\n  let tys := [\n    (\"UInt8\", 8, false),\n    (\"UInt16\", 16, false),\n    (\"UInt32\", 32, false),\n    (\"UInt64\", 64, false),\n    (\"Int8\", 8, true),\n    (\"Int16\", 16, true),\n    (\"Int32\", 32, true),\n    (\"Int64\", 64, true)\n  ]\n  for (ty1, width1, signed1) in tys do\n    for (ty2, width2, signed2) in tys do\n\n      if ty1 == ty2 || signed1 != signed2 || width1 < width2 then continue\n\n      let ty1Ident := mkIdent ty1.toName\n      let ty2Ident := mkIdent ty2.toName\n      let toTy1 := mkIdent (\"to\" ++ ty1).toName\n\n      cmds := cmds.push $ ← `(\n        @[reducible]\n        instance : core_models.convert.From.AssociatedTypes $ty1Ident $ty2Ident where\n        instance : core_models.convert.From $ty1Ident $ty2Ident where\n          _from := fun x => pure x.$toTy1\n      )\n  return ⟨mkNullNode cmds⟩\n\ndeclare_Hax_convert_from_instances\n\nattribute [specset bv, hax_bv_decide]\n  
core_models.convert.From._from\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/epilogue/float.lean",
    "content": "import Hax.core_models.epilogue.num\n\nmacro \"declare_Hax_float_ops\" typeName:ident : command =>\n  `(\n    namespace $typeName\n\n    instance : core_models.ops.arith.Add.AssociatedTypes $typeName $typeName where\n      Output := $typeName\n\n    instance : core_models.ops.arith.Sub.AssociatedTypes $typeName $typeName where\n      Output := $typeName\n\n    instance : core_models.ops.arith.Mul.AssociatedTypes $typeName $typeName where\n      Output := $typeName\n\n    instance : core_models.ops.arith.Div.AssociatedTypes $typeName $typeName where\n      Output := $typeName\n\n    instance : core_models.ops.arith.Add $typeName $typeName where\n      add := fun x y => pure (x + y)\n\n    instance : core_models.ops.arith.Sub $typeName $typeName where\n      sub := fun x y => pure (x - y)\n\n    instance : core_models.ops.arith.Mul $typeName $typeName where\n      mul := fun x y => pure (x * y)\n\n    instance : core_models.ops.arith.Div $typeName $typeName where\n      div := fun x y => pure (x / y)\n\n    end $typeName\n  )\n\ndeclare_Hax_float_ops f32\ndeclare_Hax_float_ops f64\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/epilogue/folds.lean",
    "content": "import Hax.core_models.core_models\nimport Hax.Tactic.SpecSet\nopen Std.Do\n\nset_option mvcgen.warning false\nset_option linter.unusedVariables false\n\n/-\n\n# Folds\n\nHax represents for-loops as folds over a range\n\n-/\nsection Fold\n\nopen core_models.ops.control_flow\nopen rust_primitives.hax\n\nclass rust_primitives.hax.folds {int_type: Type} where\n  /-- Encoding of Rust for-loops without early returns -/\n  fold_range {α : Type}\n    (s e : int_type)\n    (inv : α -> int_type -> RustM Prop)\n    (init: α)\n    (body : α -> int_type -> RustM α)\n    (pureInv:\n        {i : α -> int_type -> Prop // ∀ a b, ⦃⌜ True ⌝⦄ inv a b ⦃⇓ r => ⌜ r = (i a b) ⌝⦄} := by\n      set_option hax_mvcgen.specset \"bv\" in hax_construct_pure <;> bv_decide) :\n    RustM α\n  /-- Encoding of Rust for-loops with early returns -/\n  fold_range_return  {α_acc α_ret : Type}\n    (s e: int_type)\n    (inv : α_acc -> int_type -> RustM Prop)\n    (init: α_acc)\n    (body : α_acc -> int_type ->\n      RustM (ControlFlow (ControlFlow α_ret (Tuple2 Tuple0 α_acc)) α_acc ))\n    (pureInv:\n        {i : α_acc -> int_type -> Prop // ∀ a b, ⦃⌜ True ⌝⦄ inv a b ⦃⇓ r => ⌜ r = (i a b) ⌝⦄} := by\n      set_option hax_mvcgen.specset \"bv\" in hax_construct_pure <;> bv_decide) :\n    RustM (ControlFlow α_ret α_acc)\n\nattribute [spec] rust_primitives.hax.folds.fold_range\nattribute [spec] rust_primitives.hax.folds.fold_range_return\n\nopen Lean in\nset_option hygiene false in\nmacro \"declare_fold_specs\" s:(&\"signed\" <|> &\"unsigned\") typeName:ident width:term : command => do\n  let tyDot (n : Name) := mkIdent (typeName.getId ++ n)\n  let tySimp (n : Name) : TSyntax _ := .mk\n    (Syntax.node .none ``Lean.Parser.Tactic.simpLemma #[mkNullNode, mkNullNode, tyDot n])\n  let tyRw (n : Name) : TSyntax `Lean.Parser.Tactic.rwRule := .mk\n    (Syntax.node .none ``Lean.Parser.Tactic.rwRule #[mkNullNode, tyDot n])\n  `(\n    /-- Implementation of Rust for-loops without early returns -/\n    
def $(tyDot `fold_range) {α : Type}\n        (s e : $typeName)\n        (inv : α -> $typeName -> RustM Prop)\n        (init: α)\n        (body : α -> $typeName -> RustM α)\n        (pureInv: {i : α -> $typeName -> Prop // ∀ a b, ⦃⌜ True ⌝⦄ inv a b ⦃⇓ r => ⌜ r = (i a b) ⌝⦄})\n        : RustM α := do\n        if s < e\n        then fold_range (s + 1) e inv (← body init s) body pureInv\n        else pure init\n    termination_by (e - s)\n    decreasing_by\n      simp only [$(tySimp `sizeOf), Nat.add_lt_add_iff_right]\n      exact $(tyDot `sub_succ_lt_self) _ _ (by assumption)\n\n    /-- Implementation of Rust for-loops with early returns -/\n    def $(tyDot `fold_range_return) {α_acc α_ret : Type}\n        (s e: $typeName)\n        (inv : α_acc -> $typeName -> RustM Prop)\n        (init: α_acc)\n        (body : α_acc -> $typeName ->\n          RustM (ControlFlow (ControlFlow α_ret (Tuple2 Tuple0 α_acc)) α_acc ))\n        (pureInv: {i : α_acc -> $typeName -> Prop // ∀ a b, ⦃⌜ True ⌝⦄ inv a b ⦃⇓ r => ⌜ r = (i a b) ⌝⦄}) := do\n      if s < e\n      then\n        match (← body init s) with\n        -- Rust: `return`\n        | .Break (.Break res ) => pure (ControlFlow.Break res)\n        -- Rust: `break`\n        | .Break (.Continue ⟨ ⟨ ⟩, res⟩) => pure (ControlFlow.Continue res)\n        -- Rust: `continue`\n        | .Continue res => fold_range_return (s + 1) e inv res body pureInv\n      else\n        pure (ControlFlow.Continue init)\n    termination_by (e - s)\n    decreasing_by\n      simp only [$(tySimp `sizeOf), Nat.add_lt_add_iff_right]\n      exact $(tyDot `sub_succ_lt_self) _ _ (by assumption)\n\n    @[spec]\n    instance : @rust_primitives.hax.folds $typeName where\n      fold_range := $(tyDot `fold_range)\n      fold_range_return := $(tyDot `fold_range_return)\n\n    /-- Specification of Rust for-loops without early returns (for bv_decide) -/\n    @[specset bv]\n    theorem $(mkIdent (s!\"rust_primitives.hax.folds.fold_range_spec_bv_{typeName.getId}\").toName) 
{α}\n      (s e : $typeName)\n      (inv : α -> $typeName -> RustM Prop)\n      (pureInv)\n      (init: α)\n      (body : α -> $typeName -> RustM α) :\n      s ≤ e →\n      pureInv.val init s →\n      (∀ (acc : α) (i : $typeName),\n        s ≤ i →\n        i < e →\n        pureInv.val acc i →\n        ⦃ ⌜ True ⌝ ⦄\n        (body acc i)\n        ⦃ ⇓ res => ⌜ pureInv.val res (i+1) ⌝ ⦄) →\n      ⦃ ⌜ True ⌝ ⦄\n      ($(tyDot `fold_range) s e inv init body pureInv)\n      ⦃ ⇓ r => ⌜ pureInv.val r e ⌝ ⦄\n    := by\n      intro h_le h_inv_s h_body\n      unfold $(tyDot `fold_range)\n      mvcgen\n      · mstart\n        mspec h_body _ _ ($(tyDot `le_refl) s) (by assumption) h_inv_s\n        mspec $(mkIdent (s!\"rust_primitives.hax.folds.fold_range_spec_bv_{typeName.getId}\").toName)\n          <;> grind\n      · grind\n    termination_by (e - s)\n    decreasing_by\n      simp only [$(tySimp `sizeOf), Nat.add_lt_add_iff_right]\n      exact $(tyDot `sub_succ_lt_self) _ _ (by assumption)\n\n    /-- Specification of Rust for-loops without early returns (for grind) -/\n    @[specset int]\n    theorem $(mkIdent (s!\"rust_primitives.hax.folds.fold_range_spec_int_{typeName.getId}\").toName) {α}\n        (s e : $typeName)\n        (inv : α -> $typeName -> RustM Prop)\n        (pureInv)\n        (init: α)\n        (body : α -> $typeName -> RustM α) :\n        s.toNat ≤ e.toNat →\n        pureInv.val init s →\n        (∀ (acc : α) (i : $typeName),\n          s.toNat ≤ i.toNat →\n          i.toNat < e.toNat →\n          pureInv.val acc i →\n          ⦃ ⌜ True ⌝ ⦄\n          (body acc i)\n          ⦃ ⇓ res => ⌜ pureInv.val res (i+1) ⌝ ⦄) →\n        ⦃ ⌜ True ⌝ ⦄\n        ($(tyDot `fold_range) s e inv init body pureInv)\n        ⦃ ⇓ r => ⌜ pureInv.val r e ⌝ ⦄ := by\n      apply $(mkIdent (s!\"rust_primitives.hax.folds.fold_range_spec_bv_{typeName.getId}\").toName)\n\n  )\n\ndeclare_fold_specs unsigned UInt8 8\ndeclare_fold_specs unsigned UInt16 16\ndeclare_fold_specs unsigned UInt32 
32\ndeclare_fold_specs unsigned UInt64 64\ndeclare_fold_specs unsigned USize64 64\n\n\nend Fold\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/epilogue/function.lean",
    "content": "\nimport Hax.core_models.core_models\n\nset_option mvcgen.warning false\nopen rust_primitives.hax\nopen Std.Do\nnamespace core_models.ops.function\n\ninstance {α β} : FnOnce.AssociatedTypes (α → RustM β) α where\n  Output := β\n\ninstance {α β} : FnOnce.AssociatedTypes (α → RustM β) (Tuple1 α) where\n  Output := β\n\ninstance {α β γ} : FnOnce.AssociatedTypes (α → β → RustM γ) (Tuple2 α β) where\n  Output := γ\n\ninstance {α β} : FnOnce (α → RustM β) α where\n  call_once f x := f x\n\ninstance {α β} : FnOnce (α → RustM β) (Tuple1 α) where\n  call_once f x := f x._0\n\ninstance {α β γ : Type} : FnOnce (α → β → RustM γ) (Tuple2 α β) where\n  call_once f x := f x._0 x._1\n\n\ninstance {α β} [FnOnce.AssociatedTypes (α → RustM β) α] : Fn.AssociatedTypes (α → RustM β) α where\n\ninstance {α β} [FnOnce.AssociatedTypes (α → RustM β) α] [FnOnce (α → RustM β) α] :\n    Fn (α → RustM β) α where\n  call f x := FnOnce.call_once _ _ f x\n\ninstance {α β} [FnOnce.AssociatedTypes (α → RustM β) (Tuple1 α)] :\n  Fn.AssociatedTypes (α → RustM β) (Tuple1 α) where\n\ninstance {α β} [FnOnce.AssociatedTypes (α → RustM β) (Tuple1 α)] [FnOnce (α → RustM β) (Tuple1 α)] :\n    Fn (α → RustM β) (Tuple1 α) where\n  call f x := FnOnce.call_once _ _ f x\n\ninstance {α β γ} [FnOnce.AssociatedTypes (α → β → RustM γ) (Tuple2 α β)] :\n  Fn.AssociatedTypes (α → β → RustM γ) (Tuple2 α β) where\n\ninstance {α β γ} [FnOnce.AssociatedTypes (α → β → RustM γ) (Tuple2 α β)] [FnOnce (α → β → RustM γ) (Tuple2 α β)] :\n    Fn (α → β → RustM γ) (Tuple2 α β) where\n  call f x := FnOnce.call_once _ _ f x\n\nend core_models.ops.function\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/epilogue/num.lean",
    "content": "import Hax.core_models.core_models\n\nattribute [specset bv, hax_bv_decide]\n  core_models.convert.From._from\n\nnamespace core_models.num.Impl_8\n\n@[spec]\ndef rotate_left (x : u32) (n : u32) : RustM u32 :=\n  pure (UInt32.ofBitVec (BitVec.rotateLeft x.toBitVec n.toNat))\n\n@[spec]\ndef from_le_bytes (x : RustArray u8 4) : RustM u32 :=\n  pure (x.toVec[0].toUInt32\n  + (x.toVec[1].toUInt32 <<< 8)\n  + (x.toVec[2].toUInt32 <<< 16)\n  + (x.toVec[3].toUInt32 <<< 24))\n\n@[spec]\ndef to_le_bytes (x : u32) : RustM (RustArray u8 4) :=\n  pure (.ofVec #v[\n    (x % 256).toUInt8,\n    (x >>> 8 % 256).toUInt8,\n    (x >>> 16 % 256).toUInt8,\n    (x >>> 24 % 256).toUInt8,\n  ])\n\nend core_models.num.Impl_8\n\n\nattribute [spec] core_models.num.Impl_8.wrapping_add\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/epilogue/ops.lean",
    "content": "import Hax.core_models.core_models\n\nattribute [spec] core_models.ops.deref.Impl core_models.ops.deref.Deref.deref\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/epilogue/range.lean",
    "content": "import Hax.core_models.core_models\n\nopen core_models.ops.range\nopen Std.Do\n\nset_option mvcgen.warning false\n\nopen rust_primitives.sequence\n\nattribute [local grind! .] USize64.toNat_lt_size\n\ninstance Range.instGetElemResultArrayUSize64 {α: Type}:\n  GetElemResult\n    (Seq α)\n    (Range usize)\n    (Seq α) where\n  getElemResult xs i := match i with\n  | ⟨s, e⟩ =>\n    let size := xs.val.size;\n    if s ≤ e && e.toNat ≤ size then\n      pure ⟨xs.val.extract s.toNat e.toNat, by grind⟩\n    else\n      RustM.fail Error.arrayOutOfBounds\n\ninstance Range.instGetElemResultRustArrayUSize64 {α : Type} {n : usize} :\n  GetElemResult\n    (RustArray α n)\n    (Range usize)\n    (Seq α) where\n  getElemResult xs i := match i with\n  | ⟨s, e⟩ =>\n    if s ≤ e && e.toNat ≤ n.toNat then\n      pure ⟨(xs.toVec.extract s.toNat e.toNat).toArray, by grind⟩\n    else\n      RustM.fail Error.arrayOutOfBounds\n\n@[spec]\ntheorem Range.getElemArrayUSize64_spec\n  (α : Type) (a: Seq α) (s e: usize) :\n  s.toNat ≤ e.toNat →\n  e.toNat ≤ a.val.size →\n  ⦃ ⌜ True ⌝ ⦄\n  ( a[(Range.mk s e)]_? )\n  ⦃ ⇓ r => ⌜ r = ⟨Array.extract a.val s.toNat e.toNat, by grind⟩ ⌝ ⦄\n:= by\n  intros\n  mvcgen [Range.instGetElemResultArrayUSize64, getElemResult]\n  grind [USize64.le_iff_toNat_le]\n\n@[spec]\ntheorem Range.getElemVectorUSize64_spec\n  (α : Type) (n: usize) (a: RustArray α n) (s e: usize) :\n  s.toNat ≤ e.toNat →\n  e.toNat ≤ a.toVec.size →\n  ⦃ ⌜ True ⌝ ⦄\n  ( a[(Range.mk s e)]_? )\n  ⦃ ⇓ r => ⌜ r = ⟨(Vector.extract a.toVec s.toNat e.toNat).toArray, by grind⟩ ⌝ ⦄\n:= by\n  intros\n  mvcgen [Range.instGetElemResultRustArrayUSize64, getElemResult]\n  grind [USize64.le_iff_toNat_le]\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/epilogue/result.lean",
    "content": "import Hax.core_models.core_models\n\nset_option mvcgen.warning false\nopen rust_primitives.hax\nopen Std.Do\n\nnamespace core_models.result\n\ndef Impl.unwrap\n  (T : Type) (E : Type) (self : (Result T E))\n  : RustM T\n  := do\n  match self with\n    | (Result.Ok t) => (pure t)\n    | (Result.Err _)\n      => (core_models.panicking.internal.panic T rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ntheorem Impl.unwrap.spec {α β} (x: Result α β) v :\n  x = Result.Ok v →\n  ⦃ ⌜ True ⌝ ⦄\n  (Impl.unwrap α β x)\n  ⦃ ⇓ r => ⌜ r = v ⌝ ⦄\n  := by\n  intros\n  mvcgen [Impl.unwrap] <;> try grind\n\nend core_models.result\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/epilogue/slice.lean",
    "content": "import Hax.core_models.core_models\n\nopen Std.Do\nset_option mvcgen.warning false\n\nnamespace core_models.slice\n\n@[spec]\ntheorem Impl.len.spec (α : Type) (s : RustSlice α) :\n    ⦃ ⌜ True ⌝ ⦄ Impl.len α s ⦃⇓ r => ⌜ r.toNat = s.val.size ⌝ ⦄ := by\n  mvcgen; rw[USize64.toNat_ofNat_of_lt' s.size_lt_usizeSize]\n\nend core_models.slice\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/epilogue/string.lean",
    "content": "\nabbrev string_indirection : Type := String\nabbrev alloc.string.String : Type := string_indirection\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/epilogue.lean",
    "content": "import Hax.core_models.epilogue.alloc\nimport Hax.core_models.epilogue.convert\nimport Hax.core_models.epilogue.float\nimport Hax.core_models.epilogue.folds\nimport Hax.core_models.epilogue.function\nimport Hax.core_models.epilogue.num\nimport Hax.core_models.epilogue.ops\nimport Hax.core_models.epilogue.range\nimport Hax.core_models.epilogue.result\nimport Hax.core_models.epilogue.slice\nimport Hax.core_models.epilogue.string\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/prologue/clone.lean",
    "content": "/-\nHax Lean Backend - Cryspen\n\nCore-model for Clone represented as a no-op\n-/\n\nimport Hax.rust_primitives\n\nnamespace core.clone\n\nclass Clone (Self : Type) where\n\ndef Clone.clone {Self: Type} : Self -> RustM Self :=\n  fun x => pure x\n\nend core.clone\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/prologue/marker.lean",
    "content": "import Hax.core_models.prologue.clone\n\nclass core.marker.Copy.AssociatedTypes (Self : Type) where\n\nclass core.marker.Copy\n  (Self : Type)\n  [associatedTypes : outParam (core.marker.Copy.AssociatedTypes (Self :\n      Type))]\n  where\n  [trait_constr : core.clone.Clone Self]\n\nattribute [instance_reducible, instance] core.marker.Copy.trait_constr\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/prologue/num.lean",
    "content": "import Hax.rust_primitives\n\ndef core.num.Impl_11.MAX : usize := -1\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models/prologue.lean",
    "content": "import Hax.core_models.prologue.clone\nimport Hax.core_models.prologue.marker\nimport Hax.core_models.prologue.num\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/core_models.lean",
    "content": "import Hax.core_models.prologue\nimport Hax.core_models.core_models\nimport Hax.core_models.epilogue\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/BVDecide.lean",
    "content": "import Hax.rust_primitives.RustM\n\nopen Std.Do\n\n/-\n\n# BV_Decide Lemmas\n\nIn the following, we define an encoding of the entire `RustM` monad so that we can run `bv_decide`\non equalities between `RustM` values.\n\n-/\n\n/-- We encode `RustM` values into the following structure to be able to run `bv_decide`: -/\nstructure BVRustM (α : Type) where\n  ok : Bool\n  val : α\n  err : BitVec 3\n\n/-- Encodes `RustM` values into `BVRustM` to be able to run `bv_decide`. -/\ndef RustM.toBVRustM {α : Type} [Inhabited α] : RustM α → BVRustM α\n| .ok v                      => ⟨ true, v, 0 ⟩\n| .fail .assertionFailure    => ⟨ false, default, 0 ⟩\n| .fail .integerOverflow     => ⟨ false, default, 1 ⟩\n| .fail .divisionByZero      => ⟨ false, default, 2 ⟩\n| .fail .arrayOutOfBounds    => ⟨ false, default, 3 ⟩\n| .fail .maximumSizeExceeded => ⟨ false, default, 4 ⟩\n| .fail .panic               => ⟨ false, default, 5 ⟩\n| .fail .undef               => ⟨ false, default, 6 ⟩\n| .div                       => ⟨ false, default, 7 ⟩\n\nattribute [hax_bv_decide] Coe.coe\n\n@[hax_bv_decide] theorem RustM.toBVRustM_pure {α : Type} [Inhabited α] {v : α} :\n    (pure v : RustM α).toBVRustM = ⟨ true, v, 0 ⟩ := rfl\n@[hax_bv_decide] theorem RustM.toBVRustM_ok {α : Type} [Inhabited α] {v : α} :\n    (RustM.ok v).toBVRustM = ⟨ true, v, 0 ⟩ := rfl\n@[hax_bv_decide] theorem RustM.toBVRustM_assertionFailure {α : Type} [Inhabited α] :\n    (RustM.fail .assertionFailure : RustM α).toBVRustM = ⟨ false, default, 0 ⟩ := rfl\n@[hax_bv_decide] theorem RustM.toBVRustM_integerOverflow {α : Type} [Inhabited α] :\n    (RustM.fail .integerOverflow : RustM α).toBVRustM = ⟨ false, default, 1 ⟩ := rfl\n@[hax_bv_decide] theorem RustM.toBVRustM_divisionByZero {α : Type} [Inhabited α] :\n    (RustM.fail .divisionByZero : RustM α).toBVRustM = ⟨ false, default, 2 ⟩ := rfl\n@[hax_bv_decide] theorem RustM.toBVRustM_arrayOutOfBounds {α : Type} [Inhabited α] :\n    (RustM.fail .arrayOutOfBounds : RustM 
α).toBVRustM = ⟨ false, default, 3 ⟩ := rfl\n@[hax_bv_decide] theorem RustM.toBVRustM_maximumSizeExceeded {α : Type} [Inhabited α] :\n    (RustM.fail .maximumSizeExceeded: RustM α).toBVRustM = ⟨ false, default, 4 ⟩ := rfl\n@[hax_bv_decide] theorem RustM.toBVRustM_panic {α : Type} [Inhabited α] :\n    (RustM.fail .panic : RustM α).toBVRustM = ⟨ false, default, 5 ⟩ := rfl\n@[hax_bv_decide] theorem RustM.toBVRustM_undef {α : Type} [Inhabited α] :\n    (RustM.fail .undef : RustM α).toBVRustM = ⟨ false, default, 6 ⟩ := rfl\n@[hax_bv_decide] theorem RustM.toBVRustM_div {α : Type} [Inhabited α] :\n    (RustM.div : RustM α ).toBVRustM = ⟨ false, default, 7 ⟩ := rfl\n\n@[hax_bv_decide]\ntheorem RustM.toBVRustM_ite {α : Type} [Inhabited α] {c : Prop} [Decidable c]  (x y : RustM α) :\n    (if c then x else y).toBVRustM = (if c then x.toBVRustM else y.toBVRustM) := by grind\n\n@[hax_bv_decide]\ntheorem RustM.beq_iff_toBVRustM_eq {α : Type} [Inhabited α] [DecidableEq α] (x y : RustM α) :\n    decide (x = y) =\n      (x.toBVRustM.ok == y.toBVRustM.ok &&\n       x.toBVRustM.val == y.toBVRustM.val &&\n       x.toBVRustM.err == y.toBVRustM.err) := by\n  by_cases h : x = y\n  · simp [h]\n  · revert h\n    cases x using RustM.toBVRustM.match_1 <;>\n    cases y using RustM.toBVRustM.match_1 <;>\n    grind [toBVRustM]\n\n@[hax_bv_decide]\ntheorem RustM.toBVRustM_bind {α β : Type} [Inhabited α] [Inhabited β] (x : RustM α) (f : α → RustM β) :\n  (x >>= f).toBVRustM =\n    if x.toBVRustM.ok\n    then (f x.toBVRustM.val).toBVRustM\n    else {x.toBVRustM with val := default} := by\n  cases x using RustM.toBVRustM.match_1 <;> rfl\n\n@[hax_bv_decide]\ntheorem RustM.Triple_iff_BitVec {α : Type} [Inhabited α]\n    (a : Prop) [Decidable a] (b : α → Prop) (x : RustM α) [Decidable (b x.toBVRustM.val)] :\n    ⦃ ⌜ a ⌝ ⦄ x ⦃ ⇓ r => ⌜ b r ⌝ ⦄ ↔\n      (!decide a || (x.toBVRustM.ok && decide (b x.toBVRustM.val))) := by\n  cases x using RustM.toBVRustM.match_1 <;>\n    by_cases a\n      <;> simp only 
[Triple, PredTrans.apply, wp, SPred.entails_nil, SPred.down_pure,\n        Decidable.imp_iff_not_or, toBVRustM, BitVec.ofNat_eq_ofNat, Bool.false_and, Bool.or_false,\n        Bool.not_eq_eq_eq_not, Bool.not_true, decide_eq_false_iff_not, or_iff_left_iff_imp,\n        Bool.true_and, Bool.or_eq_true, Bool.not_eq_eq_eq_not, Bool.not_true,\n        decide_eq_false_iff_not, decide_eq_true_eq]\n      <;> try rfl\n  all_goals exact fun x => False.elim x\n\n/-- This lemma is used to make some variants of `>>>?` accessible for `bv_decide` -/\n@[hax_bv_decide]\ntheorem Int32.to_Int64_toNatClampNeg : (Int32.toNatClampNeg 1).toInt64 = 1 := rfl\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/Cast.lean",
    "content": "import Hax.rust_primitives.ops\nimport Hax.Tactic.Init\n\n/-\n\n# Casts\n\n-/\nsection Cast\n\n/-- Rust-supported casts on base types -/\nclass Cast (α β: Type) where\n  cast : α → RustM β\n\nattribute [spec, hax_bv_decide] Cast.cast\n\n-- Macro to generate Cast instances for all integer type pairs.\nopen Lean in\nset_option hygiene false in\nmacro \"declare_Hax_cast_instances\" : command => do\n  let mut cmds := #[]\n  let tys : List Name := [`UInt8,`UInt16,`UInt32,`UInt64,`USize64,`Int8,`Int16,`Int32,`Int64,`ISize]\n  for srcName in tys do\n    for dstName in tys do\n      let srcIdent := mkIdent srcName\n      let dstIdent := mkIdent dstName\n      let result ←\n        if dstName == srcName then\n          `(x)\n        else\n          `($(mkIdent (srcName ++ dstName.appendBefore \"to\")) x)\n      cmds := cmds.push $ ← `(\n        @[spec] instance : Cast $srcIdent $dstIdent where cast x := pure $result\n      )\n  return ⟨mkNullNode cmds⟩\n\ndeclare_Hax_cast_instances\n\n@[spec]\ninstance : Cast String String where\n  cast x := pure x\n\n@[simp, spec, hax_bv_decide]\ndef rust_primitives.hax.cast_op {α β} [c: Cast α β] (x:α) : (RustM β) := c.cast x\n\nend Cast\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/GetElemResult.lean",
    "content": "import Hax.rust_primitives.RustM\nimport Hax.rust_primitives.ops\nimport Hax.rust_primitives.sequence\nimport Hax.rust_primitives.hax.array\nimport Hax.Tactic.SpecSet\n\nopen Error\nopen Std.Do\n\nset_option mvcgen.warning false\n\n/-\n\n# Polymorphic index access\n\nHax introduces polymorphic index accesses, for any integer type (returning a\nsingle value) and for ranges (returning an array of values). A typeclass-based\nnotation `a[i]_?` is introduced to support panicking lookups\n\n-/\n\n/--\nThe classes `GetElemResult` implement lookup notation `xs[i]_?`.\n-/\nclass GetElemResult (coll : Type) (idx : Type) (elem : outParam (Type)) where\n  /--\n  The syntax `arr[i]_?` gets the `i`'th element of the collection `arr`. It\n  can panic if the index is out of bounds.\n  -/\n  getElemResult (xs : coll) (i : idx) : RustM elem\n\nexport GetElemResult (getElemResult)\n\n@[inherit_doc getElemResult]\nsyntax:max term noWs \"[\" withoutPosition(term) \"]\" noWs \"_?\": term\nmacro_rules | `($x[$i]_?) 
=> `(getElemResult $x $i)\n\n-- Have lean use the notation when printing\n@[app_unexpander getElemResult] meta def unexpandGetElemResult : Lean.PrettyPrinter.Unexpander\n  | `($_ $array $index) => `($array[$index]_?)\n  | _ => throw ()\n\nopen rust_primitives.sequence\n\ninstance usize.instGetElemResultSeq {α} : GetElemResult (Seq α) usize α where\n  getElemResult xs i :=\n    if h: i.toNat < xs.val.size then pure (xs.val[i])\n    else .fail arrayOutOfBounds\n\ninstance usize.instGetElemResultVector {α n} : GetElemResult (RustArray α n) usize α where\n  getElemResult xs i :=\n    if h: i.toNat < n.toNat then pure (xs.toVec[i.toNat])\n    else .fail arrayOutOfBounds\n\ninstance Nat.instGetElemResultSeq {α} : GetElemResult (Seq α) Nat α where\n  getElemResult xs i :=\n    if h: i < xs.val.size then pure (xs.val[i])\n    else .fail arrayOutOfBounds\n\ninstance Nat.instGetElemResultVector {α n} : GetElemResult (RustArray α n) Nat α where\n  getElemResult xs i :=\n    if h: i < n.toNat then pure (xs.toVec[i])\n    else .fail arrayOutOfBounds\n\n@[spec]\ntheorem Nat.getElemSeqResult_spec\n  (α : Type) (a: Seq α) (i: Nat) (h: i < a.val.size) :\n  ⦃ ⌜ True ⌝ ⦄\n  ( a[i]_? )\n  ⦃ ⇓ r => ⌜ r = a.val[i] ⌝ ⦄ :=\n  by mvcgen [RustM.ofOption, Nat.instGetElemResultSeq, getElemResult]\n\n@[spec]\ntheorem Nat.getElemVectorResult_spec\n  (α : Type) (n : usize) (a : RustArray α n) (i: Nat) (h : i < n.toNat) :\n  ⦃ ⌜ True ⌝ ⦄\n  ( a[i]_? )\n  ⦃ ⇓ r => ⌜ r = a.toVec[i] ⌝ ⦄ :=\n  by mvcgen [Nat.instGetElemResultVector, getElemResult]\n\n@[spec]\ntheorem usize.getElemSeqResult_spec\n  (α : Type) (a: Seq α) (i: usize) (h: i.toNat < a.val.size) :\n  ⦃ ⌜ True ⌝ ⦄\n  ( a[i]_? )\n  ⦃ ⇓ r => ⌜ r = a.val[i.toNat] ⌝ ⦄ :=\n  by mvcgen [usize.instGetElemResultSeq, getElemResult]\n\n@[spec]\ntheorem usize.getElemVectorResult_spec\n  (α : Type) (n : usize) (a : RustArray α n) (i : usize) (h : i.toNat < n.toNat) :\n  ⦃ ⌜ True ⌝ ⦄\n  ( a[i]_? 
)\n  ⦃ ⇓ r => ⌜ r = a.toVec[i.toNat] ⌝ ⦄ :=\n  by mvcgen [usize.instGetElemResultVector, getElemResult]\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/RustM.lean",
    "content": "import Hax.Tactic.Init\nimport Hax.Tactic.SpecSet\nimport Hax.MissingLean.Init.While\nimport Std.Tactic.Do\n\nopen Std.Do\nopen Std.Tactic\n\n/-\n# Monadic encoding\n\nThe encoding is based on the `RustM` monad: all rust computations are wrapped\nin the monad, representing the fact that they are not total.\n\nIt borrows some definitions from the Aeneas project\n(https://github.com/AeneasVerif/aeneas/)\n\n-/\n\n/--\n  (Aeneas) Error cases\n-/\ninductive Error where\n   | assertionFailure: Error\n   | integerOverflow: Error\n   | divisionByZero: Error\n   | arrayOutOfBounds: Error\n   | maximumSizeExceeded: Error\n   | panic: Error\n   | undef: Error\nderiving Repr, BEq, DecidableEq\nopen Error\n\n\n/--\n  RustM monad (corresponding to Aeneas's `Result` monad), representing\n  possible results of rust computations.\n\n  Defined as `ExceptT Error Option`, i.e. `Option (Except Error α)`.\n  The `Option` layer models divergence and the `Except Error` layer models\n  Rust panics. 
The `ExceptT` transformer ensures that once a program has\n  panicked, it cannot diverge any more (and vice versa).\n-/\ndef RustM (α : Type) := ExceptT Error Option α\n\nnamespace RustM\n\n-- These `Except` instances are missing in Lean's library.\n-- We use them to derive the corresponding `RustM` instances below.\nderiving instance BEq, DecidableEq for Except\n\ninstance instBEq {α : Type} [BEq α] : BEq (RustM α) :=\n  inferInstanceAs (BEq (Option (Except Error α)))\ninstance instDecidableEq {α : Type} [DecidableEq α] : DecidableEq (RustM α) :=\n  inferInstanceAs (DecidableEq (Option (Except Error α)))\ninstance instInhabited {α : Type} : Inhabited (RustM α) :=\n  inferInstanceAs (Inhabited (Option (Except Error α)))\ninstance instMonad : Monad RustM :=\n  inferInstanceAs (Monad (ExceptT Error Option))\ninstance instLawfulMonad : LawfulMonad RustM :=\n  inferInstanceAs (LawfulMonad (ExceptT Error Option))\n\n@[reducible, match_pattern] def ok {α : Type} (v : α) : RustM α := some (.ok v)\n@[reducible, match_pattern] def fail {α : Type} (e : Error) : RustM α := some (.error e)\n@[reducible, match_pattern] def div {α : Type} : RustM α := none\n\ninstance {α : Type} [Repr α] : Repr (RustM α) where\n  reprPrec x prec := match x with\n    | .ok v   => Repr.addAppParen (f!\"RustM.ok {reprArg v}\") prec\n    | .fail e => Repr.addAppParen (f!\"RustM.fail {reprArg e}\") prec\n    | .div    => \"RustM.div\"\n\ndef ofOption {α : Type} (x : Option α) (e : Error) : RustM α :=\n  match x with\n  | .some v => pure v\n  | .none => .fail e\n\n@[reducible]\ndef isOk {α : Type} (x : RustM α) : Bool :=\n  match x with\n  | .ok _ => true\n  | _ => false\n\n@[reducible, specset bv, hax_bv_decide]\ndef of_isOk {α : Type} (x : RustM α) (h : RustM.isOk x) : α :=\n  match x with\n  | .ok v => v\n\n@[simp, spec]\ndef ok_of_isOk {α : Type} (v : α) (h : isOk (ok v)) : (ok v).of_isOk h = v := by rfl\n\ninstance instWP : WP RustM (.except Error (.except PUnit .pure)) :=\n  inferInstanceAs (WP 
(ExceptT Error Option) _)\ninstance instWPMonad : WPMonad RustM (.except Error (.except PUnit .pure)) :=\n  inferInstanceAs (WPMonad (ExceptT Error Option) _)\n\nsection Order\n\nopen Lean.Order\n\n/- These instances are required to use `partial_fixpoint` in the `RustM` monad. -/\n\ninstance {α : Type} : PartialOrder (RustM α) := inferInstanceAs (PartialOrder (ExceptT Error Option α))\ninstance {α : Type} : CCPO (RustM α) := inferInstanceAs (CCPO (ExceptT Error Option α))\ninstance : MonoBind RustM := inferInstanceAs (MonoBind (ExceptT Error Option))\n\nopen Lean Order in\n/-- `Loop.MonoLoopCombinator` is used to implement while loops in `RustM`: -/\ninstance {β : Type} (f : Unit → β → RustM (ForInStep β)) : Loop.MonoLoopCombinator f := {\n  mono := by\n    unfold Loop.loopCombinator\n    repeat monotonicity\n}\n\nend Order\n\nend RustM\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/Spec.lean",
    "content": "import Std.Do\nimport Hax.rust_primitives.RustM\n\nopen Std.Do\n\n/-\n\n# Specs\n\n-/\n\nstructure Spec {α}\n    (requires : RustM Prop)\n    (ensures : α → RustM Prop)\n    (f : RustM α) where\n  pureRequires : {p : Prop // ⦃ ⌜ True ⌝ ⦄ requires ⦃ ⇓r => ⌜ r = p ⌝ ⦄}\n  pureEnsures : {p : α → Prop // pureRequires.val → ∀ a, ⦃ ⌜ True ⌝ ⦄ ensures a ⦃ ⇓r => ⌜ r = p a ⌝ ⦄}\n  contract : ⦃ ⌜ pureRequires.val ⌝ ⦄ f ⦃ ⇓r => ⌜ pureEnsures.val r ⌝ ⦄\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/USize64.lean",
    "content": "import Hax.MissingLean\nimport Lean.Meta.Tactic.Simp.BuiltinSimprocs.UInt\n\n/-!\n# USize64\n\nWe define a type `USize64` to represent Rust's `usize` type. It is simply a copy of `UInt64`.\nThis file aims to collect all definitions, lemmas, and type class instances about `UInt64` from\nLean's standard library and to state them for `USize64`.\n\nThe regular `USize` type does not work for us because of https://github.com/cryspen/hax/issues/1702.\n-/\n\n/-- A copy of `UInt64`, which we use to represent Rust's `usize` type. -/\nstructure USize64 where ofBitVec :: toBitVec : BitVec 64\n\n@[reducible] def USize64.size : Nat := UInt64.size\ndef USize64.ofNat (n : @& Nat) : USize64 := ⟨BitVec.ofNat 64 n⟩\ndef USize64.toNat (n : USize64) : Nat := n.toBitVec.toNat\ndef USize64.toFin (x : USize64) : Fin UInt64.size := x.toBitVec.toFin\n\ndef USize64.ofNatLT (n : @& Nat) (h : LT.lt n USize64.size) : USize64 where\n  toBitVec := BitVec.ofNatLT n h\n\ndef USize64.decEq (a b : USize64) : Decidable (Eq a b) :=\n  match a, b with\n  | ⟨n⟩, ⟨m⟩ =>\n    dite (Eq n m)\n      (fun h => isTrue (h ▸ rfl))\n      (fun h => isFalse (fun h' => USize64.noConfusion h' (fun h' => absurd h' h)))\n\nabbrev Nat.toUSize64 := USize64.ofNat\n\nnamespace USize64\n\ninstance : DecidableEq USize64 := USize64.decEq\n\ninstance : Inhabited USize64 where\n  default := USize64.ofNatLT 0 (of_decide_eq_true rfl)\n\ninstance {n} : OfNat USize64 n := ⟨⟨OfNat.ofNat n⟩⟩\n\nend USize64\n\n@[inline] def USize64.ofFin (a : Fin USize64.size) : USize64 := ⟨⟨a⟩⟩\n\ndef USize64.ofInt (x : Int) : USize64 := ofNat (x % 2 ^ 64).toNat\n\n@[simp] theorem USize64.le_size : 2 ^ 32 ≤ USize64.size := by simp [USize64.size, UInt64.size]\n@[simp] theorem USize64.size_le : USize64.size ≤ 2 ^ 64 := by simp [USize64.size, UInt64.size]\n\nprotected def USize64.add (a b : USize64) : USize64 := ⟨a.toBitVec + b.toBitVec⟩\nprotected def USize64.sub (a b : USize64) : USize64 := ⟨a.toBitVec - b.toBitVec⟩\nprotected def 
USize64.mul (a b : USize64) : USize64 := ⟨a.toBitVec * b.toBitVec⟩\nprotected def USize64.div (a b : USize64) : USize64 := ⟨a.toBitVec / b.toBitVec⟩\nprotected def USize64.pow (x : USize64) (n : Nat) : USize64 := ⟨x.toBitVec ^ n⟩\nprotected def USize64.mod (a b : USize64) : USize64 := ⟨a.toBitVec % b.toBitVec⟩\n\nprotected def USize64.land (a b : USize64) : USize64 := ⟨a.toBitVec &&& b.toBitVec⟩\nprotected def USize64.lor (a b : USize64) : USize64 := ⟨a.toBitVec ||| b.toBitVec⟩\nprotected def USize64.xor (a b : USize64) : USize64 := ⟨a.toBitVec ^^^ b.toBitVec⟩\nprotected def USize64.shiftLeft (a b : USize64) : USize64 := ⟨a.toBitVec <<< (USize64.mod b 64).toBitVec⟩\nprotected def USize64.shiftRight (a b : USize64) : USize64 := ⟨a.toBitVec >>> (USize64.mod b 64).toBitVec⟩\nprotected def USize64.lt (a b : USize64) : Prop := a.toBitVec < b.toBitVec\nprotected def USize64.le (a b : USize64) : Prop := a.toBitVec ≤ b.toBitVec\n\ninstance : Add USize64       := ⟨USize64.add⟩\ninstance : Sub USize64       := ⟨USize64.sub⟩\ninstance : Mul USize64       := ⟨USize64.mul⟩\ninstance : Pow USize64 Nat   := ⟨USize64.pow⟩\ninstance : Mod USize64       := ⟨USize64.mod⟩\n\ninstance : HMod USize64 Nat USize64 := ⟨fun x n => ⟨x.toBitVec % n⟩⟩\n\ninstance : Div USize64       := ⟨USize64.div⟩\ninstance : LT USize64        := ⟨USize64.lt⟩\ninstance : LE USize64        := ⟨USize64.le⟩\n\nprotected def USize64.complement (a : USize64) : USize64 := ⟨~~~a.toBitVec⟩\nprotected def USize64.neg (a : USize64) : USize64 := ⟨-a.toBitVec⟩\n\ninstance : Complement USize64 := ⟨USize64.complement⟩\ninstance : Neg USize64 := ⟨USize64.neg⟩\ninstance : AndOp USize64     := ⟨USize64.land⟩\ninstance : OrOp USize64      := ⟨USize64.lor⟩\ninstance : XorOp USize64       := ⟨USize64.xor⟩\ninstance : ShiftLeft USize64  := ⟨USize64.shiftLeft⟩\ninstance : ShiftRight USize64 := ⟨USize64.shiftRight⟩\n\ndef USize64.ofNat32 (n : @& Nat) (h : n < 4294967296) : USize64 :=\n  USize64.ofNatLT n (Nat.lt_of_lt_of_le h 
USize64.le_size)\ndef UInt8.toUSize64 (a : UInt8) : USize64 :=\n  USize64.ofNat32 a.toBitVec.toNat (Nat.lt_trans a.toBitVec.isLt (by decide))\ndef USize64.toUInt8 (a : USize64) : UInt8 := a.toNat.toUInt8\ndef UInt16.toUSize64 (a : UInt16) : USize64 :=\n  USize64.ofNat32 a.toBitVec.toNat (Nat.lt_trans a.toBitVec.isLt (by decide))\ndef USize64.toUInt16 (a : USize64) : UInt16 := a.toNat.toUInt16\ndef UInt32.toUSize64 (a : UInt32) : USize64 := USize64.ofNat32 a.toBitVec.toNat a.toBitVec.isLt\ndef USize64.toUInt32 (a : USize64) : UInt32 := a.toNat.toUInt32\ndef UInt64.toUSize64 (a : UInt64) : USize64 := a.toNat.toUSize64\ndef USize64.toUInt64 (a : USize64) : UInt64 := a.toNat.toUInt64\ndef USize64.toUSize (a : USize64) : USize := a.toNat.toUSize\n\ndef USize64.toInt8 (a : USize64) : Int8 := a.toNat.toInt8\ndef USize64.toInt16 (a : USize64) : Int16 := a.toNat.toInt16\ndef USize64.toInt32 (a : USize64) : Int32 := a.toNat.toInt32\ndef USize64.toInt64 (a : USize64) : Int64 := a.toNat.toInt64\ndef USize64.toISize (a : USize64) : ISize := a.toNat.toISize\n\ndef Int8.toUSize64 (a : Int8) : USize64 := USize64.ofInt a.toInt\ndef Int16.toUSize64 (a : Int16) : USize64 := USize64.ofInt a.toInt\ndef Int32.toUSize64 (a : Int32) : USize64 := USize64.ofInt a.toInt\ndef Int64.toUSize64 (a : Int64) : USize64 := USize64.ofInt a.toInt\ndef ISize.toUSize64 (a : ISize) : USize64 := USize64.ofInt a.toInt\n\ndef Bool.toUSize64 (b : Bool) : USize64 := if b then 1 else 0\ndef USize64.decLt (a b : USize64) : Decidable (a < b) :=\n  inferInstanceAs (Decidable (a.toBitVec < b.toBitVec))\n\ndef USize64.decLe (a b : USize64) : Decidable (a ≤ b) :=\n  inferInstanceAs (Decidable (a.toBitVec ≤ b.toBitVec))\n\nattribute [instance_reducible, instance] USize64.decLt USize64.decLe\n\ninstance : Max USize64 := maxOfLe\ninstance : Min USize64 := minOfLe\n\ninstance {α} : GetElem (Array α) USize64 α fun xs i => i.toNat < xs.size where\n  getElem xs i h := xs[i.toNat]\n\nopen Std Lean in\nset_option 
autoImplicit true in\ndeclare_uint_theorems USize64 64\n\ntheorem USize64.uaddOverflow_iff (x y : USize64) :\n    BitVec.uaddOverflow x.toBitVec y.toBitVec ↔ x.toNat + y.toNat ≥ 2 ^ 64 :=\n  by simp [BitVec.uaddOverflow]\n\ntheorem USize64.umulOverflow_iff (x y : USize64) :\n    BitVec.umulOverflow x.toBitVec y.toBitVec ↔ x.toNat * y.toNat ≥ 2 ^ 64 :=\n  by simp [BitVec.umulOverflow]\n\nattribute [grind =] USize64.toNat_toBitVec\nattribute [grind =] USize64.toNat_ofNat_of_lt\nattribute [grind =] USize64.toNat_ofNat_of_lt'\ngrind_pattern USize64.toBitVec_ofNat => USize64.toBitVec (OfNat.ofNat n)\n\nadditional_uint_decls USize64 64\n\n@[simp] theorem USize64.toNat_lt (n : USize64) : n.toNat < 2 ^ 64 := n.toFin.isLt\n\ntheorem USize64.le_self_add {a b : USize64} (h : a.toNat + b.toNat < 2 ^ 64) :\n    a ≤ a + b := by\n  rw [le_iff_toNat_le, USize64.toNat_add_of_lt h]\n  exact Nat.le_add_right a.toNat b.toNat\n\ntheorem USize64.add_le_of_le {a b c : USize64} (habc : a + b ≤ c) (hab : a.toNat + b.toNat < 2 ^ 64):\n    a ≤ c := by\n  rw [USize64.le_iff_toNat_le, USize64.toNat_add_of_lt hab] at *\n  omega\n\n/-!\n## Init.Data.UInt.Lemmas\n-/\n\nprotected theorem USize64.add_assoc (a b c : USize64) : a + b + c = a + (b + c) :=\n  USize64.toBitVec_inj.1 (BitVec.add_assoc _ _ _)\n\nprotected theorem USize64.add_comm (a b : USize64) : a + b = b + a := USize64.toBitVec_inj.1 (BitVec.add_comm _ _)\n\n@[simp] protected theorem USize64.add_zero (a : USize64) : a + 0 = a := USize64.toBitVec_inj.1 (BitVec.add_zero _)\n\nprotected theorem USize64.add_left_neg (a : USize64) : -a + a = 0 := USize64.toBitVec_inj.1 (BitVec.add_left_neg _)\n\nprotected theorem USize64.mul_assoc (a b c : USize64) : a * b * c = a * (b * c) := USize64.toBitVec_inj.1 (BitVec.mul_assoc _ _ _)\n\n@[simp] theorem USize64.mul_one (a : USize64) : a * 1 = a := USize64.toBitVec_inj.1 (BitVec.mul_one _)\n\n@[simp] theorem USize64.one_mul (a : USize64) : 1 * a = a := USize64.toBitVec_inj.1 (BitVec.one_mul 
_)\n\nprotected theorem USize64.mul_comm (a b : USize64) : a * b = b * a := USize64.toBitVec_inj.1 (BitVec.mul_comm _ _)\n\n@[simp] theorem USize64.mul_zero {a : USize64} : a * 0 = 0 := USize64.toBitVec_inj.1 BitVec.mul_zero\n\n@[simp] theorem USize64.zero_mul {a : USize64} : 0 * a = 0 := USize64.toBitVec_inj.1 BitVec.zero_mul\n\nprotected theorem USize64.sub_eq_add_neg (a b : USize64) : a - b = a + (-b) := USize64.toBitVec_inj.1 (BitVec.sub_eq_add_neg _ _)\n\n@[simp] protected theorem USize64.pow_zero (x : USize64) : x ^ 0 = 1 := (rfl)\n\nprotected theorem USize64.pow_succ (x : USize64) (n : Nat) : x ^ (n + 1) = x ^ n * x := (rfl)\n\ntheorem USize64.ofNat_eq_iff_mod_eq_toNat (a : Nat) (b : USize64) : USize64.ofNat a = b ↔ a % 2 ^ 64 = b.toNat := by\n  simp [← USize64.toNat_inj]\n\n@[simp] theorem USize64.ofNat_add (a b : Nat) : USize64.ofNat (a + b) = USize64.ofNat a + USize64.ofNat b := by\n  simp [USize64.ofNat_eq_iff_mod_eq_toNat]\n\ntheorem USize64.ofNat_mod_size (x : Nat) : ofNat (x % 2 ^ 64) = ofNat x := by\n  simp [ofNat, BitVec.ofNat, Fin.ofNat]\n\n@[simp] theorem USize64.ofNat_mul (a b : Nat) : USize64.ofNat (a * b) = USize64.ofNat a * USize64.ofNat b := by\n  simp [USize64.ofNat_eq_iff_mod_eq_toNat]\n\n@[simp] theorem USize64.ofInt_mul (x y : Int) : ofInt (x * y) = ofInt x * ofInt y := by\n  dsimp only [USize64.ofInt]\n  rw [Int.mul_emod]\n  have h₁ : 0 ≤ x % 2 ^ 64 := Int.emod_nonneg _ (by decide)\n  have h₂ : 0 ≤ y % 2 ^ 64 := Int.emod_nonneg _ (by decide)\n  have h₃ : 0 ≤ (x % 2 ^ 64) * (y % 2 ^ 64) := Int.mul_nonneg h₁ h₂\n  rw [Int.toNat_emod h₃ (by decide), Int.toNat_mul h₁ h₂]\n  have : (2 ^ 64 : Int).toNat = 2 ^ 64 := (rfl)\n  rw [this, USize64.ofNat_mod_size, USize64.ofNat_mul]\n\n@[simp] theorem USize64.ofInt_neg_one : ofInt (-1) = -1 := (rfl)\n\ntheorem USize64.toBitVec_one : toBitVec 1 = 1#64 := (rfl)\n\ntheorem USize64.neg_eq_neg_one_mul (a : USize64) : -a = -1 * a := by\n  apply USize64.toBitVec_inj.1\n  rw [USize64.toBitVec_neg, 
USize64.toBitVec_mul, USize64.toBitVec_neg, USize64.toBitVec_one, BitVec.neg_eq_neg_one_mul]\n\n@[simp] protected theorem USize64.ofInt_neg (x : Int) : ofInt (-x) = -ofInt x := by\n  rw [Int.neg_eq_neg_one_mul, ofInt_mul, ofInt_neg_one, ← USize64.neg_eq_neg_one_mul]\n\nprotected theorem USize64.mul_add {a b c : USize64} : a * (b + c) = a * b + a * c :=\n    USize64.toBitVec_inj.1 BitVec.mul_add\n\nprotected theorem USize64.add_mul {a b c : USize64} : (a + b) * c = a * c + b * c := by\n  rw [USize64.mul_comm, USize64.mul_add, USize64.mul_comm a c, USize64.mul_comm c b]\n\nprotected theorem USize64.neg_mul (a b : USize64) : -a * b = -(a * b) := USize64.toBitVec_inj.1 (BitVec.neg_mul _ _)\n\n@[simp] protected theorem USize64.add_sub_cancel (a b : USize64) : a + b - b = a := USize64.toBitVec_inj.1 (BitVec.add_sub_cancel _ _)\n\ntheorem USize64.ofNat_sub {a b : Nat} (hab : b ≤ a) : USize64.ofNat (a - b) = USize64.ofNat a - USize64.ofNat b := by\n  rw [(Nat.sub_add_cancel hab ▸ USize64.ofNat_add (a - b) b :), USize64.add_sub_cancel]\n\n@[simp] protected theorem USize64.sub_add_cancel (a b : USize64) : a - b + b = a :=\n  USize64.toBitVec_inj.1 (BitVec.sub_add_cancel _ _)\n\ntheorem USize64.le_ofNat_iff {n : USize64} {m : Nat} (h : m < size) : n ≤ ofNat m ↔ n.toNat ≤ m := by\n  rw [le_iff_toNat_le, toNat_ofNat_of_lt' h]\n\n/-!\n## Grind's ToInt\n\nFor grind to use integer arithmetic on `USize64`, we need the following instances, inspired by\nthe modules `Init.GrindInstances.ToInt` and `Init.GrindInstances.Ring.UInt`.\n-/\n\nnamespace Lean.Grind\n\ninstance : ToInt USize64 (.uint 64) where\n  toInt x := (x.toNat : Int)\n  toInt_inj x y w := USize64.toNat_inj.mp (Int.ofNat_inj.mp w)\n  toInt_mem x := by simpa using Int.lt_toNat.mp (USize64.toNat_lt x)\n\n@[simp] theorem toInt_usize64 (x : USize64) : ToInt.toInt x = (x.toNat : Int) := rfl\n\ninstance : ToInt.Zero USize64 (.uint 64) where\n  toInt_zero := by simp\n\ninstance : ToInt.OfNat USize64 (.uint 64) where\n  
toInt_ofNat x := by simp; rfl\n\ninstance : ToInt.Add USize64 (.uint 64) where\n  toInt_add x y := by simp\n\ninstance : ToInt.Mul USize64 (.uint 64) where\n  toInt_mul x y := by simp\n\ninstance : ToInt.Mod USize64 (.uint 64) where\n  toInt_mod x y := by simp\n\ninstance : ToInt.Div USize64 (.uint 64) where\n  toInt_div x y := by simp\n\ninstance : ToInt.LE USize64 (.uint 64) where\n  le_iff x y := by simpa using USize64.le_iff_toBitVec_le\n\ninstance : ToInt.LT USize64 (.uint 64) where\n  lt_iff x y := by simpa using USize64.lt_iff_toBitVec_lt\n\n\n@[expose]\ndef USize64.natCast : NatCast USize64 where\n  natCast x := USize64.ofNat x\n\n@[expose]\ndef USize64.intCast : IntCast USize64 where\n  intCast x := USize64.ofInt x\n\nattribute [local instance_reducible, local instance] USize64.natCast USize64.intCast\n\ntheorem USize64.intCast_ofNat (x : Nat) : (OfNat.ofNat (α := Int) x : USize64) = OfNat.ofNat x := by\n    change USize64.ofInt (OfNat.ofNat x) = OfNat.ofNat x\n    rw [USize64.ofInt]\n    rw [Int.toNat_emod (Int.zero_le_ofNat x) (by decide)]\n    erw [Int.toNat_natCast]\n    rw [Int.toNat_pow_of_nonneg (by decide)]\n    simp +instances only [USize64.ofNat, BitVec.ofNat, Fin.Internal.ofNat_eq_ofNat, Fin.ofNat, Int.reduceToNat, Nat.dvd_refl,\n      Nat.mod_mod_of_dvd]\n    try rfl\n\ntheorem USize64.intCast_neg (x : Int) : ((-x : Int) : USize64) = - (x : USize64) :=\n  USize64.ofInt_neg _\n\ninstance : CommRing USize64 where\n  nsmul := ⟨(· * ·)⟩\n  zsmul := ⟨(· * ·)⟩\n  add_assoc := USize64.add_assoc\n  add_comm := USize64.add_comm\n  add_zero := USize64.add_zero\n  neg_add_cancel := USize64.add_left_neg\n  mul_assoc := USize64.mul_assoc\n  mul_comm := USize64.mul_comm\n  mul_one := USize64.mul_one\n  one_mul := USize64.one_mul\n  left_distrib _ _ _ := USize64.mul_add\n  right_distrib _ _ _ := USize64.add_mul\n  zero_mul _ := USize64.zero_mul\n  mul_zero _ := USize64.mul_zero\n  sub_eq_add_neg := USize64.sub_eq_add_neg\n  pow_zero := USize64.pow_zero\n  
pow_succ := USize64.pow_succ\n  ofNat_succ x := USize64.ofNat_add x 1\n  intCast_neg := USize64.ofInt_neg\n  intCast_ofNat := USize64.intCast_ofNat\n  neg_zsmul i a := by\n    change (-i : Int) * a = - (i * a)\n    simp [USize64.intCast_neg, USize64.neg_mul]\n  zsmul_natCast_eq_nsmul n a := congrArg (· * a) (USize64.intCast_ofNat _)\n\ninstance : IsCharP USize64 18446744073709551616 := IsCharP.mk' _ _\n  (ofNat_eq_zero_iff := fun x => by\n    have : OfNat.ofNat x = USize64.ofNat x := rfl\n    simp [this, USize64.ofNat_eq_iff_mod_eq_toNat]\n    )\n\ninstance : ToInt.Pow USize64 (.uint 64) := ToInt.pow_of_semiring (by simp)\n\n\nend Lean.Grind\n\n\n/-!\n## Simp-Procs\n\nGrind and simp use some simplification procedures for UInts. They are defined in\n`Lean.Meta.Tactic.Simp.BuiltinSimprocs.UInt` and replicated here.\n-/\n\nnamespace USize64\nopen Lean Meta Simp\n\ninstance : ToExpr USize64 where\n  toTypeExpr := mkConst ``USize64\n  toExpr a :=\n    let r := mkRawNatLit a.toNat\n    mkApp3 (.const ``OfNat.ofNat [0]) (mkConst ``USize64) r\n      (.app (.const ``USize64.instOfNat []) r)\n\ndef fromExpr (e : Expr) : SimpM (Option USize64) := do\n  let some (n, _) ← getOfNatValue? e `USize64 | return none\n  return USize64.ofNat n\n\n@[inline] def reduceBin (declName : Name) (arity : Nat) (op : USize64 → USize64 → USize64) (e : Expr) : SimpM DStep := do\n  unless e.isAppOfArity declName arity do return .continue\n  let some n ← (fromExpr e.appFn!.appArg!) | return .continue\n  let some m ← (fromExpr e.appArg!) | return .continue\n  return .done <| toExpr (op n m)\n\n@[inline] def reduceBinPred (declName : Name) (arity : Nat) (op : USize64 → USize64 → Bool) (e : Expr) : SimpM Step := do\n  unless e.isAppOfArity declName arity do return .continue\n  let some n ← (fromExpr e.appFn!.appArg!) | return .continue\n  let some m ← (fromExpr e.appArg!) 
| return .continue\n  evalPropStep e (op n m)\n\n@[inline] def reduceBoolPred (declName : Name) (arity : Nat) (op : USize64 → USize64 → Bool) (e : Expr) : SimpM DStep := do\n  unless e.isAppOfArity declName arity do return .continue\n  let some n ← (fromExpr e.appFn!.appArg!) | return .continue\n  let some m ← (fromExpr e.appArg!) | return .continue\n  return .done <| toExpr (op n m)\n\ndsimproc [simp, seval] reduceAdd ((_ + _ : USize64)) := reduceBin ``HAdd.hAdd 6 (· + ·)\ndsimproc [simp, seval] reduceMul ((_ * _ : USize64)) := reduceBin ``HMul.hMul 6 (· * ·)\ndsimproc [simp, seval] reduceSub ((_ - _ : USize64)) := reduceBin ``HSub.hSub 6 (· - ·)\ndsimproc [simp, seval] reduceDiv ((_ / _ : USize64)) := reduceBin ``HDiv.hDiv 6 (· / ·)\ndsimproc [simp, seval] reduceMod ((_ % _ : USize64)) := reduceBin ``HMod.hMod 6 (· % ·)\n\nsimproc [simp, seval] reduceLT  (( _ : USize64) < _)  := reduceBinPred ``LT.lt 4 (. < .)\nsimproc [simp, seval] reduceLE  (( _ : USize64) ≤ _)  := reduceBinPred ``LE.le 4 (. ≤ .)\nsimproc [simp, seval] reduceGT  (( _ : USize64) > _)  := reduceBinPred ``GT.gt 4 (. > .)\nsimproc [simp, seval] reduceGE  (( _ : USize64) ≥ _)  := reduceBinPred ``GE.ge 4 (. ≥ .)\nsimproc [simp, seval] reduceEq  (( _ : USize64) = _)  := reduceBinPred ``Eq 3 (. = .)\nsimproc [simp, seval] reduceNe  (( _ : USize64) ≠ _)  := reduceBinPred ``Ne 3 (. ≠ .)\ndsimproc [simp, seval] reduceBEq  (( _ : USize64) == _)  := reduceBoolPred ``BEq.beq 4 (. == .)\ndsimproc [simp, seval] reduceBNe  (( _ : USize64) != _)  := reduceBoolPred ``bne 4 (. != .)\n\ndsimproc [simp, seval] reduceOfNatLT (USize64.ofNatLT _ _) := fun e => do\n  unless e.isAppOfArity `USize64.ofNatLT 2 do return .continue\n  let some value ← Nat.fromExpr? e.appFn!.appArg! 
| return .continue\n  let value := USize64.ofNat value\n  return .done <| toExpr value\n\ndsimproc [simp, seval] reduceOfNat (USize64.ofNat _) := fun e => do\n  unless e.isAppOfArity `USize64.ofNat 1 do return .continue\n  let some value ← Nat.fromExpr? e.appArg! | return .continue\n  let value := USize64.ofNat value\n  return .done <| toExpr value\n\ndsimproc [simp, seval] reduceToNat (USize64.toNat _) := fun e => do\n  unless e.isAppOfArity `USize64.toNat 1 do return .continue\n  let some v ← (fromExpr e.appArg!) | return .continue\n  let n := USize64.toNat v\n  return .done <| toExpr n\n\n/-- Return `.done` for UInt values. We don't want to unfold in the symbolic evaluator. -/\ndsimproc [seval] isValue ((OfNat.ofNat _ : USize64)) := fun e => do\n  unless (e.isAppOfArity ``OfNat.ofNat 3) do return .continue\n  return .done e\n\nend USize64\n\n/-\n## Lemmas from `Init.SizeOfLemmas`:\n-/\n\n@[simp] protected theorem USize64.sizeOf (a : USize64) : sizeOf a = a.toNat + 3 := by\n  cases a; simp +arith [USize64.toNat, BitVec.toNat, -BitVec.val_toFin]\n\n/-\n## Lemmas from `MissingLean`:\n-/\n\ntheorem USize64.ofNat_eq_of_toNat_eq {a : Nat} {b : USize64} (h : b.toNat = a) : ofNat a = b := by\n  subst_vars; exact USize64.ofNat_toNat\n\ntheorem USize64.sub_add_eq {a b c : USize64} : a - (b + c) = a - b - c := by grind\n\ntheorem USize64.sub_succ_lt_self (a b : USize64) (h : a < b) :\n    (b - (a + 1)).toNat < (b - a).toNat := by\n  rw [sub_add_eq]\n  rw [USize64.toNat_sub_of_le]\n  try simp only [USize.toNat_one]\n  apply Nat.sub_one_lt_of_lt\n  · change (0 : USize64).toNat < (b - a).toNat\n    rw [← lt_iff_toNat_lt]\n    grind\n  · grind\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/arithmetic.lean",
    "content": "import Hax.rust_primitives.RustM\nimport Hax.rust_primitives.ops\n\nopen Lean in\nset_option hygiene false in\nmacro \"declare_arith_ops\" s:(&\"signed\" <|> &\"unsigned\") typeName:ident suffix:ident width:term : command => do\n  let signed ← match s.raw[0].getKind with\n  | `signed => pure true\n  | `unsigned => pure false\n  | _ => throw .unsupportedSyntax\n  let ident (kind: String) := mkIdent (kind ++ \"_\" ++ suffix.getId.toString).toName\n\n  let mut cmds ← Syntax.getArgs <$> `(\n    namespace rust_primitives.arithmetic\n\n    @[spec]\n    def $(ident \"wrapping_add\") (x : $typeName) (y : $typeName) : RustM $typeName :=\n      pure (x + y)\n\n    @[spec]\n    def $(ident \"wrapping_sub\") (x : $typeName) (y : $typeName) : RustM $typeName :=\n      pure (x - y)\n\n    @[spec]\n    def $(ident \"wrapping_mul\") (x : $typeName) (y : $typeName) : RustM $typeName :=\n      pure (x * y)\n  )\n\n  if signed then\n    cmds := cmds.push $ ← `(\n      def $(ident \"pow\") (x : $typeName) (y : u32) : RustM $typeName :=\n        if x.toInt ^ y.toNat ≥ 2 ^ ($width - 1) || x.toInt ^ y.toNat < - 2 ^ ($width - 1)\n        then .fail .integerOverflow\n        else pure (x ^ y.toNat)\n    )\n  else\n    cmds := cmds.push $ ← `(\n      def $(ident \"pow\") (x : $typeName) (y : u32) : RustM $typeName :=\n        if x.toNat ^ y.toNat ≥ 2 ^ $width\n        then .fail .integerOverflow\n        else pure (x ^ y.toNat)\n    )\n\n  cmds := cmds.push $ ← `(\n    end rust_primitives.arithmetic\n  )\n  return ⟨mkNullNode cmds⟩\n\ndeclare_arith_ops unsigned UInt8 u8 8\ndeclare_arith_ops unsigned UInt16 u16 16\ndeclare_arith_ops unsigned UInt32 u32 32\ndeclare_arith_ops unsigned UInt64 u64 64\ndeclare_arith_ops unsigned u128 u128 128\ndeclare_arith_ops unsigned USize64 usize 64\n\ndeclare_arith_ops signed Int8 i8 8\ndeclare_arith_ops signed Int16 i16 16\ndeclare_arith_ops signed Int32 i32 32\ndeclare_arith_ops signed Int64 i64 64\ndeclare_arith_ops signed i128 i128 
128\ndeclare_arith_ops signed ISize isize 64\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/boxed.lean",
    "content": "\nabbrev alloc.boxed.Box (T _Allocator : Type) := T\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/hax/array.lean",
    "content": "import Hax.rust_primitives.RustM\nimport Hax.rust_primitives.ops\nimport Hax.rust_primitives.sequence\n\nopen Std.Do\nset_option mvcgen.warning false\n\nattribute [local grind! .] USize64.toNat_lt_size\n\n/-\n\n# Arrays\n\nRust arrays, are represented as Lean `Vector` (Lean Arrays of known size)\n\n-/\nsection RustArray\n\nstructure RustArray (α : Type) (n : usize) where\n  ofVec :: toVec : Vector α n.toNat\n\n@[spec]\ndef rust_primitives.hax.monomorphized_update_at.update_at_usize {α n}\n  (a : RustArray α n) (i : usize) (v : α) : RustM (RustArray α n) :=\n  if h: i.toNat < a.toVec.size then\n    pure (.ofVec (Vector.set a.toVec i.toNat v))\n  else\n    .fail (.arrayOutOfBounds)\n\n@[spec]\ndef rust_primitives.hax.update_at {α n} (m : RustArray α n) (i : usize) (v : α) : RustM (RustArray α n) :=\n  if i.toNat < n.toNat then\n    pure (.ofVec (Vector.setIfInBounds m.toVec i.toNat v))\n  else\n    .fail (.arrayOutOfBounds)\n\n@[spec]\ndef rust_primitives.hax.repeat\n  {α int_type : Type}\n  {n : usize} [ToNat int_type]\n  (v:α) (size:int_type) : RustM (RustArray α n)\n  :=\n  if (n.toNat = ToNat.toNat size) then\n    pure (.ofVec (Vector.replicate n.toNat v))\n  else\n    .fail Error.arrayOutOfBounds\n\n@[spec]\ndef rust_primitives.unsize {α n} (a: RustArray α n) : RustM (rust_primitives.sequence.Seq α) :=\n  pure ⟨a.toVec.toArray, by grind⟩\n\nend RustArray\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/hax/int.lean",
    "content": "import Hax.rust_primitives.RustM\nimport Hax.rust_primitives.USize64\nimport Hax.rust_primitives.ops\n\nopen Std.Do\nset_option mvcgen.warning false\n\nnamespace rust_primitives.hax.int\n\nopen Lean.Grind in\nabbrev from_machine {α} {range} [ToInt α range] (x : α) : RustM Int :=\n  pure (ToInt.toInt x)\n\nattribute [grind]\n  Lean.Grind.ToInt.toInt\n  Lean.Grind.instToIntUInt8UintOfNatNat\n  Lean.Grind.instToIntUInt16UintOfNatNat\n  Lean.Grind.instToIntUInt32UintOfNatNat\n  Lean.Grind.instToIntUInt64UintOfNatNat\n  Lean.Grind.instToIntUSize64UintOfNatNat\n  Lean.Grind.instToIntInt8SintOfNatNat\n  Lean.Grind.instToIntInt16SintOfNatNat\n  Lean.Grind.instToIntInt32SintOfNatNat\n  Lean.Grind.instToIntInt64SintOfNatNat\n  Lean.Grind.instToIntISizeSintNumBits\n\n@[spec] def add (x y : Int) : RustM Int := pure (x + y)\n@[spec] def sub (x y : Int) : RustM Int := pure (x - y)\n@[spec] def mul (x y : Int) : RustM Int := pure (x * y)\n@[spec] def div (x y : Int) : RustM Int :=\n  if y == 0 then\n    .fail .divisionByZero\n  else\n    pure (x / y)\n@[spec] def neg (x : Int) : RustM Int := pure (-x)\n@[spec] def gt (x y : Int) : RustM Bool := pure (x > y)\n@[spec] def lt (x y : Int) : RustM Bool := pure (x < y)\n@[spec] def ge (x y : Int) : RustM Bool := pure (x ≥ y)\n@[spec] def le (x y : Int) : RustM Bool := pure (x ≤ y)\n@[spec] def eq (x y : Int) : RustM Bool := pure (x == y)\n\nend rust_primitives.hax.int\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/hax/logical_op.lean",
    "content": "import Hax.Tactic.Init\nimport Hax.rust_primitives.RustM\n\n/-\n  Logic predicates introduced by Hax (in pre/post conditions)\n-/\nnamespace rust_primitives.hax.logical_op\n\n/-- Boolean conjunction. Cannot panic (always returns .ok ) -/\n@[simp, spec, hax_bv_decide]\ndef and (a b: Bool) : RustM Bool := pure (a && b)\n\n/-- Boolean disjunction. Cannot panic (always returns .ok )-/\n@[simp, spec, hax_bv_decide]\ndef or (a b: Bool) : RustM Bool := pure (a || b)\n\n/-- Boolean exclusive disjunction. Cannot panic (always returns .ok )-/\n@[simp, spec, hax_bv_decide]\ndef xor (a b: Bool) : RustM Bool := pure (a ^^ b)\n\n/-- Boolean negation. Cannot panic (always returns .ok )-/\n@[simp, spec, hax_bv_decide]\ndef not (a :Bool) : RustM Bool := pure (!a)\n\n@[inherit_doc] infixl:35 \" &&? \" => and\n@[inherit_doc] infixl:30 \" ||? \" => or\n@[inherit_doc] infixl:30 \" ^^? \" => xor\n@[inherit_doc] notation:max \"!?\" b:40 => not b\n\nend rust_primitives.hax.logical_op\n\nnamespace rust_primitives.hax\n\n@[spec] def logical_op_or (x y : Bool) : RustM Bool := pure (x || y)\n@[spec] def logical_op_and (x y : Bool) : RustM Bool := pure (x && y)\n\nend rust_primitives.hax\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/hax/machine_int.lean",
    "content": "\nimport Hax.Tactic.SpecSet\n\nattribute [specset bv] bne\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/hax/never.lean",
    "content": "namespace rust_primitives.hax\n\n  abbrev Never : Type := Empty\n  abbrev never_to_any.{u} {α : Sort u} : Never → α := Empty.elim\n\nend rust_primitives.hax\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/hax/tuple.lean",
    "content": "\n/-\n\n# Tuples\n\n-/\n\nnamespace rust_primitives.hax\n\nstructure Tuple0 where\nderiving Repr, BEq, DecidableEq\n\nstructure Tuple1 (α0: Type) where\n  _0 : α0\nderiving Repr, BEq, DecidableEq\n\nstructure Tuple2 (α0 α1: Type) where\n  _0 : α0\n  _1 : α1\nderiving Repr, BEq, DecidableEq\n\nstructure Tuple3 (α0 α1 α2: Type) where\n  _0 : α0\n  _1 : α1\n  _2 : α2\nderiving Repr, BEq, DecidableEq\n\nstructure Tuple4 (α0 α1 α2 α3 : Type) where\n  _0 : α0\n  _1 : α1\n  _2 : α2\n  _3 : α3\nderiving Repr, BEq, DecidableEq\n\nstructure Tuple5 (α0 α1 α2 α3 α4 : Type) where\n  _0 : α0\n  _1 : α1\n  _2 : α2\n  _3 : α3\n  _4 : α4\nderiving Repr, BEq, DecidableEq\n\nstructure Tuple6 (α0 α1 α2 α3 α4 α5 : Type) where\n  _0 : α0\n  _1 : α1\n  _2 : α2\n  _3 : α3\n  _4 : α4\n  _5 : α5\nderiving Repr, BEq, DecidableEq\n\nstructure Tuple7 (α0 α1 α2 α3 α4 α5 α6 : Type) where\n  _0 : α0\n  _1 : α1\n  _2 : α2\n  _3 : α3\n  _4 : α4\n  _5 : α5\n  _6 : α6\nderiving Repr, BEq, DecidableEq\n\nstructure Tuple8 (α0 α1 α2 α3 α4 α5 α6 α7 : Type) where\n  _0 : α0\n  _1 : α1\n  _2 : α2\n  _3 : α3\n  _4 : α4\n  _5 : α5\n  _6 : α6\n  _7 : α7\nderiving Repr, BEq, DecidableEq\n\nstructure Tuple9 (α0 α1 α2 α3 α4 α5 α6 α7 α8 : Type) where\n  _0 : α0\n  _1 : α1\n  _2 : α2\n  _3 : α3\n  _4 : α4\n  _5 : α5\n  _6 : α6\n  _7 : α7\n  _8 : α8\nderiving Repr, BEq, DecidableEq\n\nstructure Tuple10 (α0 α1 α2 α3 α4 α5 α6 α7 α8 α9: Type) where\n  _0 : α0\n  _1 : α1\n  _2 : α2\n  _3 : α3\n  _4 : α4\n  _5 : α5\n  _6 : α6\n  _7 : α7\n  _8 : α8\n  _9 : α9\nderiving Repr, BEq, DecidableEq\n\nend rust_primitives.hax\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/hax/while_loop.lean",
    "content": "import Hax.rust_primitives.RustM\nimport Hax.rust_primitives.hax_lib\nimport Hax.MissingLean.Std.Do.Triple.SpecLemmas\nimport Hax.Tactic.HaxConstructPure\n\nopen Std.Do\n\n/-\n\n# Loops\n\n-/\nopen Lean\n\n/-- `while_loop` is used to represent while-loops in `RustM` programs. The function provides\n  extra arguments to store a termination measure and an invariant, which can be used to verify the\n  program. The arguments `pureInv` and `pureTermination` are usually not provided explicitly and\n  derived by the default tactic given below. -/\ndef rust_primitives.hax.while_loop {β : Type}\n    (inv: β → RustM Prop)\n    (cond: β → RustM Bool)\n    (termination : β -> RustM hax_lib.int.Int)\n    (init : β)\n    (body : β -> RustM β)\n    (pureInv:\n        {i : β -> Prop // ∀ b, ⦃⌜ True ⌝⦄ inv b ⦃⇓ r => ⌜ r = (i b) ⌝⦄} := by\n      set_option hax_mvcgen.specset \"int\" in hax_construct_pure <;> grind)\n    (_pureTermination :\n        {t : β -> Nat // ∀ b, ⦃⌜ True ⌝⦄ termination b ⦃⇓ r => ⌜ r = Int.ofNat (t b) ⌝⦄} := by\n      set_option hax_mvcgen.specset \"int\" in hax_construct_pure <;> grind)\n    (pureCond :\n        {c : β -> Bool // ∀ b, ⦃⌜ pureInv.val b ⌝⦄ cond b ⦃⇓ r => ⌜ r = c b ⌝⦄} := by\n      set_option hax_mvcgen.specset \"int\" in hax_construct_pure <;> grind) : RustM β :=\n  Loop.MonoLoopCombinator.while_loop Loop.mk pureCond.val init body\n\n@[spec]\ntheorem rust_primitives.hax.while_loop.spec {β : Type}\n    (inv: β → RustM Prop)\n    (cond: β → RustM Bool)\n    (termination: β → RustM hax_lib.int.Int)\n    (init : β)\n    (body : β -> RustM β)\n    (pureInv: {i : β -> Prop // ∀ b, ⦃⌜ True ⌝⦄ inv b ⦃⇓ r => ⌜ r = (i b) ⌝⦄})\n    (pureTermination :\n      {t : β -> Nat // ∀ b, ⦃⌜ True ⌝⦄ termination b ⦃⇓ r => ⌜ r = Int.ofNat (t b) ⌝⦄})\n    (pureCond : {c : β -> Bool // ∀ b, ⦃⌜ pureInv.val b ⌝⦄ cond b ⦃⇓ r => ⌜ r = c b ⌝⦄})\n    (step :\n      ∀ (b : β), pureCond.val b →\n        ⦃⌜ pureInv.val b ⌝⦄\n          body b\n        ⦃⇓ b' => 
spred(⌜ pureTermination.val b' < pureTermination.val b ⌝ ∧ ⌜ pureInv.val b' ⌝)⦄ ) :\n    ⦃⌜ pureInv.val init ⌝⦄\n      while_loop inv cond termination init body pureInv pureTermination pureCond\n    ⦃⇓ r => ⌜ pureInv.val r ∧ ¬ pureCond.val r ⌝⦄ :=\n  Spec.MonoLoopCombinator.while_loop init Loop.mk pureCond.val body pureInv pureTermination step\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/hax.lean",
    "content": "import Hax.rust_primitives.hax.array\nimport Hax.rust_primitives.hax.int\nimport Hax.rust_primitives.hax.logical_op\nimport Hax.rust_primitives.hax.machine_int\nimport Hax.rust_primitives.hax.never\nimport Hax.rust_primitives.hax.tuple\nimport Hax.rust_primitives.hax.while_loop\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/hax_lib.lean",
    "content": "import Hax.rust_primitives.hax.tuple\nimport Hax.rust_primitives.RustM\nimport Hax.Tactic.HaxConstructPure\n\nopen rust_primitives.hax\nopen Std.Do\n\nnamespace hax_lib\n\nabbrev prop.Prop := Prop\n\n@[spec] def assert (b:Bool) : RustM Tuple0 :=\n  if b then pure ⟨ ⟩\n  else .fail (Error.assertionFailure)\n\n@[spec] def assume : Prop -> RustM Tuple0 := fun _ => pure ⟨ ⟩\n\n@[spec] def prop.constructors.from_bool (b : Bool) : RustM Prop := pure (b = true)\n\n@[spec] def prop.Impl.from_bool (b : Bool) : RustM Prop := pure (b = true)\n\n@[spec] def prop.constructors.implies (a b : Prop) : RustM Prop := pure (a → b)\n@[spec] def prop.constructors.not     (a : Prop)   : RustM Prop := pure (¬ a)\n@[spec] def prop.constructors.and     (a b : Prop) : RustM Prop := pure (a ∧ b)\n@[spec] def prop.constructors.or      (a b : Prop) : RustM Prop := pure (a ∨ b)\n@[spec] def prop.constructors.eq      (a b : Prop) : RustM Prop := pure (a = b)\n@[spec] def prop.constructors.ne      (a b : Prop) : RustM Prop := pure (a ≠ b)\n\n@[spec]\ndef prop.constructors.forall {α : Type}\n    (p : α → RustM Prop)\n    (pureP : {p' : α -> Prop // ∀ a, ⦃⌜ True ⌝⦄ p a ⦃⇓ r => ⌜ r = (p' a) ⌝⦄} := by\n      set_option hax_mvcgen.specset \"int\" in hax_construct_pure <;> grind) : RustM Prop :=\n  pure (∀ a : α, pureP.val a)\n\n@[spec]\ndef prop.constructors.exists {α : Type}\n    (p : α → RustM Prop)\n    (pureP : {p' : α -> Prop // ∀ a, ⦃⌜ True ⌝⦄ p a ⦃⇓ r => ⌜ r = (p' a) ⌝⦄} := by\n      set_option hax_mvcgen.specset \"int\" in hax_construct_pure <;> grind) : RustM Prop :=\n  pure (∃ a : α, pureP.val a)\n\nend hax_lib\n\nabbrev hax_lib.int.Int : Type := _root_.Int\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/mem.lean",
    "content": "import Hax.rust_primitives.RustM\nimport Hax.rust_primitives.hax\n\ndef rust_primitives.mem.replace (α : Type) (dst : α) (src : α) :\n  RustM (rust_primitives.hax.Tuple2 α α) := pure ⟨src, dst⟩\n\ndef rust_primitives.mem.copy (α : Type) (a : α) : RustM α := pure a\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/ops.lean",
    "content": "import Hax.Tactic.Init\nimport Hax.rust_primitives.USize64\nimport Hax.Tactic.SpecSet\nimport Hax.MissingLean\nimport Hax.rust_primitives.RustM\nopen Std.Do\nopen Std.Tactic\n\nopen Std.Do\nset_option mvcgen.warning false\n\n/-\n  Integer types are represented as the corresponding type in Lean\n-/\nabbrev u8 := UInt8\nabbrev u16 := UInt16\nabbrev u32 := UInt32\nabbrev u64 := UInt64\nabbrev u128 := UInt128\nabbrev usize := USize64\nabbrev i8 := Int8\nabbrev i16 := Int16\nabbrev i32 := Int32\nabbrev i64 := Int64\nabbrev i128 := Int128\nabbrev isize := ISize\n\nabbrev f32 := Float32\nabbrev f64 := Float\n\n/-- Class of objects that can be transformed into Nat -/\nclass ToNat (α: Type) where\n  toNat : α -> Nat\n\nattribute [grind] ToNat.toNat\n\n@[simp, grind]\ninstance : ToNat usize where\n  toNat x := x.toNat\n@[simp, grind]\ninstance : ToNat u128 where\n  toNat x := x.toNat\n@[simp, grind]\ninstance : ToNat u64 where\n  toNat x := x.toNat\n@[simp, grind]\ninstance : ToNat u32 where\n  toNat x := x.toNat\n@[simp, grind]\ninstance : ToNat u16 where\n  toNat x := x.toNat\n@[simp, grind]\ninstance : ToNat u8 where\n  toNat x := x.toNat\n\ninfixl:58 \" ^^^? \" => fun a b => pure (HXor.hXor a b)\ninfixl:60 \" &&&? \" => fun a b => pure (HAnd.hAnd a b)\ninfixl:60 \" |||? 
\" => fun a b => pure (HOr.hOr a b)\nprefix:75 \"~?\"     => fun a => pure (~~~a)\n\n/-\n\n## Boolean comparisons\n\nBoolean comparisons that are prettyfied for the integer and boolean types.\n\n-/\n\nnamespace rust_primitives.cmp\n\ndef eq {α : Type} [BEq α] (a b : α) : RustM Bool := pure (a == b)\ndef ne {α : Type} [BEq α] (a b : α) : RustM Bool := pure (a != b)\ndef lt {α : Type} [LT α] [DecidableLT α] (a b : α) : RustM Bool :=\n  pure (decide (a < b))\ndef le {α : Type} [LE α] [DecidableLE α] (a b : α) : RustM Bool :=\n  pure (decide (a <= b))\ndef gt {α : Type} [LT α] [DecidableLT α] (a b : α) : RustM Bool :=\n  pure (decide (a > b))\ndef ge {α : Type} [LE α] [DecidableLE α] (a b : α) : RustM Bool :=\n  pure (decide (a >= b))\n\ninfixl:80 \" ==? \"  => rust_primitives.cmp.eq\ninfixl:80 \" !=? \"  => rust_primitives.cmp.ne\ninfixl:80 \" <? \"   => rust_primitives.cmp.lt\ninfixl:80 \" <=? \"  => rust_primitives.cmp.le\ninfixl:80 \" >? \"   => rust_primitives.cmp.gt\ninfixl:80 \" >=? \"  => rust_primitives.cmp.ge\n\nattribute [spec 100, specset bv, hax_bv_decide]\nrust_primitives.cmp.eq\nrust_primitives.cmp.ne\nrust_primitives.cmp.lt\nrust_primitives.cmp.le\nrust_primitives.cmp.gt\nrust_primitives.cmp.ge\n\nopen Lean in\nset_option hygiene false in\nmacro \"declare_comparison_specs\" s:(&\"signed\" <|> &\"unsigned\") typeName:ident width:term : command => do\n\n  let signed ← match s.raw[0].getKind with\n  | `signed => pure true\n  | `unsigned => pure false\n  | _ => throw .unsupportedSyntax\n\n  if signed then\n    return ← `(\n      namespace $typeName\n\n      @[specset int]\n      def eq_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ eq x y ⦃ ⇓ r => ⌜ r = (x.toInt == y.toInt) ⌝ ⦄ := by\n        mvcgen [eq]; rw [← @Bool.coe_iff_coe]; simp [x.toInt_inj]\n\n      @[specset int]\n      def ne_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ ne x y ⦃ ⇓ r => ⌜ r = (x.toInt != y.toInt) ⌝ ⦄ := by\n        mvcgen [ne]; rw [← @Bool.coe_iff_coe]; simp [x.toInt_inj]\n\n      @[specset 
int]\n      def lt_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ lt x y ⦃ ⇓ r => ⌜ r = decide (x.toInt < y.toInt) ⌝ ⦄ := by\n        mvcgen [lt]; simp [x.lt_iff_toInt_lt]\n\n      @[specset int]\n      def le_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ le x y ⦃ ⇓ r => ⌜ r = decide (x.toInt ≤ y.toInt) ⌝ ⦄ := by\n        mvcgen [le]; simp [x.le_iff_toInt_le]\n\n      @[specset int]\n      def gt_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ gt x y ⦃ ⇓ r => ⌜ r = decide (x.toInt > y.toInt ) ⌝ ⦄ := by\n        mvcgen [gt]; simp [y.lt_iff_toInt_lt]\n\n      @[specset int]\n      def ge_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ ge x y ⦃ ⇓ r => ⌜ r = decide (x.toInt ≥ y.toInt) ⌝ ⦄ := by\n        mvcgen [ge]; simp [y.le_iff_toInt_le]\n\n      end $typeName\n    )\n  else return ← `(\n      namespace $typeName\n\n      @[specset int]\n      def eq_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ eq x y ⦃ ⇓ r => ⌜ r = (x.toNat == y.toNat) ⌝ ⦄ := by\n        mvcgen [eq]; rw [← @Bool.coe_iff_coe]; simp [x.toNat_inj]\n\n      @[specset int]\n      def ne_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ ne x y ⦃ ⇓ r => ⌜ r = (x.toNat != y.toNat) ⌝ ⦄ := by\n        mvcgen [ne]; rw [← @Bool.coe_iff_coe]; simp [x.toNat_inj]\n\n      @[specset int]\n      def lt_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ lt x y ⦃ ⇓ r => ⌜ r = decide (x.toNat < y.toNat) ⌝ ⦄ := by\n        mvcgen [lt]\n\n      @[specset int]\n      def le_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ le x y ⦃ ⇓ r => ⌜ r = decide (x.toNat ≤ y.toNat) ⌝ ⦄ := by\n        mvcgen [le]\n\n      @[specset int]\n      def gt_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ gt x y ⦃ ⇓ r => ⌜ r = decide (x.toNat > y.toNat ) ⌝ ⦄ := by\n        mvcgen [gt]\n\n      @[specset int]\n      def ge_spec (x y : $typeName) : ⦃ ⌜ True ⌝ ⦄ ge x y ⦃ ⇓ r => ⌜ r = decide (x.toNat ≥ y.toNat) ⌝ ⦄ := by\n        mvcgen [ge]\n\n      end $typeName\n  )\n\ndeclare_comparison_specs signed Int8 8\ndeclare_comparison_specs signed Int16 16\ndeclare_comparison_specs signed Int32 32\ndeclare_comparison_specs signed Int64 
64\ndeclare_comparison_specs signed ISize System.Platform.numBits\ndeclare_comparison_specs unsigned UInt8 8\ndeclare_comparison_specs unsigned UInt16 16\ndeclare_comparison_specs unsigned UInt32 32\ndeclare_comparison_specs unsigned UInt64 64\ndeclare_comparison_specs unsigned USize64 64\n\nend rust_primitives.cmp\n\nset_option linter.unusedVariables false in\n\n/-\n\n## Arithmetic operations\n\nThe Rust arithmetic operations have their own notations, using a `?`. They\nreturn a `RustM`, that is `.fail` when arithmetic overflows occur.\n\n-/\n\nclass rust_primitives.ops.arith.Add (α : Type) where\n  add : α → α → RustM α\nclass rust_primitives.ops.arith.Sub (α : Type) where\n  sub : α → α → RustM α\nclass rust_primitives.ops.arith.Mul (α : Type) where\n  mul : α → α → RustM α\nclass rust_primitives.ops.arith.Rem (α : Type) where\n  rem : α → α → RustM α\nclass rust_primitives.ops.arith.Div (α : Type) where\n  div : α → α → RustM α\nclass rust_primitives.ops.arith.Neg (α : Type) where\n  neg : α → RustM α\nclass rust_primitives.ops.bit.Shr (α : Type) (β : Type) where\n  shr : α → β → RustM α\nclass rust_primitives.ops.bit.Shl (α : Type) (β : Type) where\n  shl : α → β → RustM α\n\ninfixl:65 \" +? \"   => rust_primitives.ops.arith.Add.add\ninfixl:65 \" -? \"   => rust_primitives.ops.arith.Sub.sub\ninfixl:70 \" *? \"   => rust_primitives.ops.arith.Mul.mul\ninfixl:75 \" >>>? \" => rust_primitives.ops.bit.Shr.shr\ninfixl:75 \" <<<? \" => rust_primitives.ops.bit.Shl.shl\ninfixl:70 \" %? \"   => rust_primitives.ops.arith.Rem.rem\ninfixl:70 \" /? 
\"   => rust_primitives.ops.arith.Div.div\nprefix:75 \"-?\"   => rust_primitives.ops.arith.Neg.neg\n\nattribute [specset bv, hax_bv_decide]\n  rust_primitives.ops.arith.Add.add\n  rust_primitives.ops.arith.Sub.sub\n  rust_primitives.ops.arith.Mul.mul\n  rust_primitives.ops.bit.Shr.shr\n  rust_primitives.ops.bit.Shl.shl\n  rust_primitives.ops.arith.Rem.rem\n  rust_primitives.ops.arith.Div.div\n  rust_primitives.ops.arith.Neg.neg\n\nopen Lean in\nmacro \"declare_Hax_int_ops\" s:(&\"signed\" <|> &\"unsigned\") typeName:ident width:term : command => do\n\n  let signed ← match s.raw[0].getKind with\n  | `signed => pure true\n  | `unsigned => pure false\n  | _ => throw .unsupportedSyntax\n\n  let mut cmds ← Syntax.getArgs <$> `(\n\n    /-- Addition on Rust integers. Panics on overflow. -/\n    instance : rust_primitives.ops.arith.Add $typeName where\n      add x y :=\n        if ($(mkIdent (if signed then `BitVec.saddOverflow else `BitVec.uaddOverflow)) x.toBitVec y.toBitVec) then\n          .fail .integerOverflow\n        else pure (x + y)\n\n    /-- Subtraction on Rust integers. Panics on overflow. -/\n    instance : rust_primitives.ops.arith.Sub $typeName where\n      sub x y :=\n        if ($(mkIdent (if signed then `BitVec.ssubOverflow else `BitVec.usubOverflow)) x.toBitVec y.toBitVec) then\n          .fail .integerOverflow\n        else pure (x - y)\n\n    /-- Multiplication on Rust integers. Panics on overflow. -/\n    instance : rust_primitives.ops.arith.Mul $typeName where\n      mul x y :=\n        if ($(mkIdent (if signed then `BitVec.smulOverflow else `BitVec.umulOverflow)) x.toBitVec y.toBitVec) then\n          .fail .integerOverflow\n        else pure (x * y)\n  )\n  if signed then\n    cmds := cmds.append $ ← Syntax.getArgs <$> `(\n      /-- Division of signed Rust integers. Panics on overflow (when x is IntMin and `y = -1`)\n        and when dividing by zero. 
-/\n      instance : rust_primitives.ops.arith.Div $typeName where\n        div x y :=\n          if x = $(mkIdent (typeName.getId ++ `minValue)) && y = -1 then .fail .integerOverflow\n          else if y = 0 then .fail .divisionByZero\n          else pure (x / y)\n\n      /-- Remainder of signed Rust integers. Panics on overflow (when x is IntMin and `y = -1`)\n        and when the modulus is zero. -/\n      instance : rust_primitives.ops.arith.Rem $typeName where\n        rem x y :=\n          if x = $(mkIdent (typeName.getId ++ `minValue)) && y = -1 then .fail .integerOverflow\n          else if y = 0 then .fail .divisionByZero\n          else pure (x % y)\n\n      /-- Negation on signed integers. Panics on overflow (when `x` is `minValue`). -/\n      instance : rust_primitives.ops.arith.Neg $typeName where\n        neg x :=\n          if x = $(mkIdent (typeName.getId ++ `minValue))\n          then .fail .integerOverflow\n          else pure (- x)\n    )\n  else -- unsigned\n    cmds := cmds.append $ ← Syntax.getArgs <$> `(\n      /-- Division on unsigned Rust integers. Panics when dividing by zero.  -/\n      instance : rust_primitives.ops.arith.Div $typeName where\n        div x y :=\n          if y = 0 then .fail .divisionByZero\n          else pure (x / y)\n\n      /-- Remainder on unsigned Rust integers. Panics when the modulus is zero. 
-/\n      instance : rust_primitives.ops.arith.Rem $typeName where\n        rem x y :=\n          if y = 0 then .fail .divisionByZero\n          else pure (x % y)\n    )\n  return ⟨mkNullNode cmds⟩\n\ndeclare_Hax_int_ops unsigned UInt8 8\ndeclare_Hax_int_ops unsigned UInt16 16\ndeclare_Hax_int_ops unsigned UInt32 32\ndeclare_Hax_int_ops unsigned UInt64 64\ndeclare_Hax_int_ops unsigned UInt128 128\ndeclare_Hax_int_ops unsigned USize64 64\ndeclare_Hax_int_ops signed Int8 8\ndeclare_Hax_int_ops signed Int16 16\ndeclare_Hax_int_ops signed Int32 32\ndeclare_Hax_int_ops signed Int64 64\ndeclare_Hax_int_ops signed Int128 128\ndeclare_Hax_int_ops signed ISize System.Platform.numBits\n\n\n\nopen Lean in\nset_option hygiene false in\nmacro \"declare_Hax_shift_ops\" : command => do\n  let mut cmds := #[]\n  let tys := [\n    (\"UInt8\", ← `(term| 8)),\n    (\"UInt16\", ← `(term| 16)),\n    (\"UInt32\", ← `(term| 32)),\n    (\"UInt64\", ← `(term| 64)),\n    (\"UInt128\", ← `(term| 128)),\n    (\"USize64\", ← `(term| 64)),\n    (\"Int8\", ← `(term| 8)),\n    (\"Int16\", ← `(term| 16)),\n    (\"Int32\", ← `(term| 32)),\n    (\"Int64\", ← `(term| 64)),\n    (\"Int128\", ← `(term| 128)),\n    (\"ISize\", ← `(term| OfNat.ofNat System.Platform.numBits))\n  ]\n  for (ty1, width1) in tys do\n    for (ty2, _width2) in tys do\n\n      let ty1Ident := mkIdent ty1.toName\n      let ty2Ident := mkIdent ty2.toName\n      let toTy1 := mkIdent (\"to\" ++ ty1).toName\n      let ty2Signed := ty2.startsWith \"I\"\n      let ty2ToNat := mkIdent (if ty2Signed then `toNatClampNeg else `toNat)\n      let yConverted ← if ty1 == ty2 then `(y) else `(y.$ty2ToNat.$toTy1)\n\n      cmds := cmds.push $ ← `(\n        /-- Shift right for Rust integers. Panics when shifting by a negative number or\n          by the bitsize or more. 
-/\n        instance : rust_primitives.ops.bit.Shr $ty1Ident $ty2Ident where\n          shr x y :=\n            if 0 ≤ y && y < $width1\n            then pure (x >>> $yConverted)\n            else .fail .integerOverflow\n\n        /-- Left shifting on signed integers. Panics when shifting by a negative number,\n          or when shifting by more than the size. -/\n        instance : rust_primitives.ops.bit.Shl $ty1Ident $ty2Ident where\n          shl x y :=\n            if 0 ≤ y && y < $width1\n            then pure (x <<< $yConverted)\n            else\n              .fail .integerOverflow\n      )\n  return ⟨mkNullNode cmds⟩\n\ndeclare_Hax_shift_ops\n\n\n/-\n## Specifications for integer operations\n-/\n\nopen Lean in\nset_option hygiene false in\nmacro \"declare_Hax_int_ops_spec\" s:(&\"signed\" <|> &\"unsigned\") typeName:ident width:term : command => do\n\n  let signed ← match s.raw[0].getKind with\n  | `signed => pure true\n  | `unsigned => pure false\n  | _ => throw .unsupportedSyntax\n\n  let toX := if signed then mkIdent `toInt else mkIdent `toNat\n  let minValue := mkIdent (typeName.getId ++ `minValue)\n  let grind : TSyntax `tactic ←\n    if signed then `(tactic| grind)\n    else `(tactic| grind [toNat_add_of_lt, toNat_sub_of_le', toNat_mul_of_lt])\n\n  let mut cmds ← Syntax.getArgs <$> `(\n    namespace $typeName\n\n      /-- Specification for rust addition -/\n      @[specset int]\n      theorem haxAdd_spec {x y : $typeName}\n          (h : ¬ $(mkIdent (typeName.getId ++ `addOverflow)) x y) :\n          ⦃ ⌜ True ⌝ ⦄ (x +? y) ⦃ ⇓ r => ⌜ r.$toX = x.$toX + y.$toX ⌝ ⦄ := by\n        mvcgen [rust_primitives.ops.arith.Add.add]; $grind\n\n      /-- Specification for rust subtraction -/\n      @[specset int]\n      theorem haxSub_spec {x y : $typeName}\n          (h : ¬ $(mkIdent (typeName.getId ++ `subOverflow)) x y) :\n          ⦃ ⌜ True ⌝ ⦄ (x -? 
y) ⦃ ⇓ r => ⌜ r.$toX = x.$toX - y.$toX ⌝ ⦄ := by\n        mvcgen [rust_primitives.ops.arith.Sub.sub]; $grind\n\n      /-- Specification for rust multiplication -/\n      @[specset int]\n      theorem haxMul_spec {x y : $typeName}\n          (h : ¬ $(mkIdent (typeName.getId ++ `mulOverflow)) x y) :\n          ⦃ ⌜ True ⌝ ⦄ (x *? y) ⦃ ⇓ r => ⌜ r.$toX = x.$toX * y.$toX ⌝ ⦄ := by\n        mvcgen [rust_primitives.ops.arith.Mul.mul]; $grind\n  )\n  if signed then\n    cmds := cmds.append $ ← Syntax.getArgs <$> `(\n      /-- Specification for rust negation for signed integers-/\n      @[specset int]\n      theorem haxNeg_spec {x : $typeName} (hx : x ≠ $minValue) :\n          ⦃ ⌜ True ⌝ ⦄ (-? x) ⦃ ⇓ r => ⌜ r.toInt = - x.toInt ⌝ ⦄ := by\n        mvcgen [rust_primitives.ops.arith.Neg.neg]\n        rw [toInt_neg_of_ne_intMin hx]\n\n      /-- Specification for rust division for signed integers-/\n      @[specset int]\n      theorem haxDiv_spec {x y : $typeName}\n          (hx : x ≠ $minValue ∨ y ≠ -1) (hy : ¬ y = 0) :\n          ⦃ ⌜ True ⌝ ⦄ (x /? y) ⦃ ⇓ r => ⌜ r.toInt = x.toInt.tdiv y.toInt ⌝ ⦄ := by\n        have : ¬ (x = $minValue && y = -1) := by grind\n        mvcgen [rust_primitives.ops.arith.Div.div]\n        cases hx with\n        | inl hx => apply toInt_div_of_ne_left x y hx\n        | inr hx => apply toInt_div_of_ne_right x y hx\n\n      /-- Specification for rust remainder for signed integers -/\n      @[specset int]\n      theorem haxRem_spec (x y : $typeName)\n          (hx : x ≠ $minValue ∨ y ≠ -1) (hy : ¬ y = 0) :\n          ⦃ ⌜ True ⌝ ⦄ (x %? 
y) ⦃ ⇓ r => ⌜ r.toInt = x.toInt.tmod y.toInt ⌝ ⦄ :=  by\n        have : ¬ (x = $minValue && y = -1) := by grind\n        mvcgen [rust_primitives.ops.arith.Rem.rem]\n        apply toInt_mod\n    )\n  else -- unsigned\n    cmds := cmds.append $ ← Syntax.getArgs <$> `(\n      /-- Specification for rust division for unsigned integers -/\n      @[specset int]\n      theorem haxDiv_spec (x y : $typeName) (h : ¬ y = 0) :\n          ⦃ ⌜ True ⌝ ⦄ (x /? y) ⦃ ⇓ r => ⌜ r.toNat = x.toNat / y.toNat ⌝ ⦄ := by\n        mvcgen [rust_primitives.ops.arith.Div.div]\n\n      /-- Specification for rust remainder for unsigned integers -/\n      @[specset int]\n      theorem haxRem_spec (x y : $typeName) (h : ¬ y = 0) :\n          ⦃ ⌜ True ⌝ ⦄ (x %? y) ⦃ ⇓ r => ⌜ r.toNat = x.toNat % y.toNat ⌝ ⦄ := by\n        mvcgen [rust_primitives.ops.arith.Rem.rem]\n    )\n  cmds := cmds.push $ ← `(\n    end $typeName\n  )\n  return ⟨mkNullNode cmds⟩\n\ndeclare_Hax_int_ops_spec unsigned UInt8 8\ndeclare_Hax_int_ops_spec unsigned UInt16 16\ndeclare_Hax_int_ops_spec unsigned UInt32 32\ndeclare_Hax_int_ops_spec unsigned UInt64 64\ndeclare_Hax_int_ops_spec unsigned UInt128 128\ndeclare_Hax_int_ops_spec unsigned USize64 64\ndeclare_Hax_int_ops_spec signed Int8 8\ndeclare_Hax_int_ops_spec signed Int16 16\ndeclare_Hax_int_ops_spec signed Int32 32\ndeclare_Hax_int_ops_spec signed Int64 64\ndeclare_Hax_int_ops_spec signed Int128 128\ndeclare_Hax_int_ops_spec signed ISize System.Platform.numBits\n\nopen Lean in\nmacro \"declare_Hax_shift_ops_spec\" : command => do\n  let mut cmds := #[]\n  let tys := [\n    (\"UInt8\", ← `(term| 8)),\n    (\"UInt16\", ← `(term| 16)),\n    (\"UInt32\", ← `(term| 32)),\n    (\"UInt64\", ← `(term| 64)),\n    -- (\"UInt128\", ← `(term| 128)),\n    (\"Int8\", ← `(term| 8)),\n    (\"Int16\", ← `(term| 16)),\n    (\"Int32\", ← `(term| 32)),\n    (\"Int64\", ← `(term| 64)),\n    -- (\"Int128\", ← `(term| 128)),\n  ]\n  for (ty1, width1) in tys do\n    for (ty2, _width2) in tys 
do\n\n      let ty1Ident := mkIdent ty1.toName\n      let ty2Ident := mkIdent ty2.toName\n      let toTy1 := mkIdent (\"to\" ++ ty1).toName\n      let ty2Signed := ty2.startsWith \"I\"\n      let ty2ToNat := mkIdent (if ty2Signed then `toNatClampNeg else `toNat)\n      let yConverted ← if ty1 == ty2 then `(y) else `(y.$ty2ToNat.$toTy1)\n      let haxShiftRight_spec := mkIdent (\"haxShiftRight_\" ++ ty2 ++ \"_spec\").toName\n      let haxShiftLeft_spec := mkIdent (\"haxShiftLeft_\" ++ ty2 ++ \"_spec\").toName\n\n      cmds := cmds.push $ ← `(\n        namespace $ty1Ident\n          /-- Bitvec-based specification for rust remainder on unsigned integers -/\n          @[spec]\n          theorem $haxShiftRight_spec (x : $ty1Ident) (y : $ty2Ident) :\n            0 ≤ y →\n            y.$ty2ToNat < $width1 →\n            ⦃ ⌜ True ⌝ ⦄ (x >>>? y) ⦃ ⇓ r => ⌜ r = x >>> $yConverted ⌝ ⦄\n            := by intros; mvcgen [rust_primitives.ops.bit.Shr.shr]; grind\n\n          /-- Bitvec-based specification for rust remainder on unsigned integers -/\n          @[spec]\n          theorem $haxShiftLeft_spec (x : $ty1Ident) (y : $ty2Ident) :\n            0 ≤ y →\n            y.$ty2ToNat < $width1 →\n            ⦃ ⌜ True ⌝ ⦄ (x <<<? y) ⦃ ⇓ r => ⌜ r = x <<< $yConverted ⌝ ⦄\n            := by intros; mvcgen [rust_primitives.ops.bit.Shl.shl]; grind\n        end $ty1Ident\n      )\n  return ⟨mkNullNode cmds⟩\n\ndeclare_Hax_shift_ops_spec\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/sequence.lean",
    "content": "import Hax.rust_primitives.RustM\nimport Hax.rust_primitives.ops\n\nstructure rust_primitives.sequence.Seq α where\n  val : Array α\n  size_lt_usizeSize : val.size < USize64.size\n\nattribute [grind .] rust_primitives.sequence.Seq.size_lt_usizeSize\nattribute [local grind! .] USize64.toNat_lt_size\n\n@[grind =, simp]\ntheorem rust_primitives.sequence.Seq.toNat_ofNat_size {α} (m : rust_primitives.sequence.Seq α) :\n    (USize64.ofNat m.val.size).toNat = m.val.size :=\n  USize64.toNat_ofNat_of_lt' m.size_lt_usizeSize\n\ndef rust_primitives.sequence.seq_len (α : Type) (s : rust_primitives.sequence.Seq α) :\n  RustM usize := pure (USize64.ofNat s.val.size)\n\ndef rust_primitives.sequence.seq_first (α : Type) (s : rust_primitives.sequence.Seq α) : RustM α :=\n  if h : s.val.size == 0 then\n    .fail .arrayOutOfBounds\n  else\n    pure (s.val[0]'(by grind))\n\ndef rust_primitives.sequence.seq_slice\n  (α : Type) (seq : rust_primitives.sequence.Seq α) (s e : usize) : RustM (rust_primitives.sequence.Seq α) :=\n  if s ≤ e && e ≤ .ofNat seq.val.size then\n    pure ⟨seq.val[s.toNat:e.toNat].toArray, by grind⟩\n  else\n    .fail .arrayOutOfBounds\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives/slice.lean",
    "content": "import Hax.rust_primitives.RustM\nimport Hax.rust_primitives.hax\nimport Hax.rust_primitives.sequence\n\n\n\nabbrev RustVector := rust_primitives.sequence.Seq\nabbrev RustSlice := rust_primitives.sequence.Seq\n\nattribute [local grind! .] rust_primitives.sequence.Seq.size_lt_usizeSize\nattribute [local grind! .] USize64.toNat_lt_size\n\n@[spec]\ndef rust_primitives.slice.array_as_slice (α : Type) (n : usize) :\n    RustArray α n → RustM (RustSlice α) :=\n  fun x => pure ⟨Vector.toArray x.toVec, by grind⟩\n\n@[spec]\ndef rust_primitives.slice.array_map (α : Type) (β : Type) (n : usize) (_ : Type)\n    (a : RustArray α n) (f : α -> RustM β) : RustM (RustArray β n) := do\n  pure (.ofVec (← a.toVec.mapM (f ·) ))\n\n@[spec]\ndef rust_primitives.slice.array_from_fn (α : Type) (n : usize) (_ : Type)\n    (f : usize -> RustM α) : RustM (RustArray α n) := do\n  pure (.ofVec (← (Vector.range n.toNat).mapM fun i => f (USize64.ofNat i)))\n\n@[spec]\ndef rust_primitives.slice.slice_length (α : Type) (s : RustSlice α) : RustM usize :=\n  pure (USize64.ofNat s.val.size)\n\n@[spec]\ndef rust_primitives.sequence.seq_from_slice (α : Type) (s : RustSlice α) :\n    RustM (rust_primitives.sequence.Seq α) :=\n  pure s\n\n@[spec]\ndef rust_primitives.slice.slice_split_at (α : Type) (s : RustSlice α) (mid : usize) :\n    RustM (rust_primitives.hax.Tuple2 (RustSlice α) (RustSlice α)) :=\n  if mid <= .ofNat s.val.size then\n    pure ⟨⟨s.val.take mid.toNat, by grind⟩, ⟨s.val.drop mid.toNat, by grind⟩⟩\n  else\n    .fail .arrayOutOfBounds\n\ndef rust_primitives.slice.slice_slice\n  (α : Type) (seq : RustSlice α) (s e : usize) : RustM (RustSlice α) :=\n  if s ≤ e && e ≤ .ofNat seq.val.size then\n    pure ⟨seq.val[s.toNat:e.toNat].toArray, by grind⟩\n  else\n    .fail .arrayOutOfBounds\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax/rust_primitives.lean",
    "content": "import Hax.rust_primitives.arithmetic\nimport Hax.rust_primitives.boxed\nimport Hax.rust_primitives.BVDecide\nimport Hax.rust_primitives.Cast\nimport Hax.rust_primitives.hax\nimport Hax.rust_primitives.hax_lib\nimport Hax.rust_primitives.GetElemResult\nimport Hax.rust_primitives.mem\nimport Hax.rust_primitives.ops\nimport Hax.rust_primitives.RustM\nimport Hax.rust_primitives.sequence\nimport Hax.rust_primitives.Spec\nimport Hax.rust_primitives.slice\nimport Hax.rust_primitives.USize64\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/Hax.lean",
    "content": "/- Copyright 2025 Cryspen\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\nhttp://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License. -/\n\n-- This module serves as the root of the `Hax` library.\n-- Import modules here that should be built as part of the library.\n\n/- Additions to the Lean library -/\nimport Hax.MissingLean\n\n/- Implementation of Rust primitives in Lean -/\nimport Hax.rust_primitives\n\n/- Core Models, extracted from our model written in Rust -/\nimport Hax.core_models\n\n/- Tactics -/\nimport Hax.Tactic\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/README.md",
    "content": "# Hax Lean library\n\nThis folder contains the Lean library necessary to use hax-extracted rust code\nin Lean. It is organized as follows:\n\n- `Hax.lean` : root of the library, importing all modules below\n- `Hax/MissingLean/` : additions to the Lean standard library\n- `Hax/rust_primitives/` : implementation of Rust primitives in Lean (integer types, arrays, errors, etc)\n- `Hax/core_models/` : core models, extracted from our model written in Rust\n- `Hax/Tactic/` : tactics\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/lake-manifest.json",
    "content": "{\n  \"version\": \"1.1.0\",\n  \"packagesDir\": \".lake/packages\",\n  \"packages\": [\n    {\n      \"url\": \"https://github.com/leanprover-community/quote4\",\n      \"type\": \"git\",\n      \"subDir\": null,\n      \"scope\": \"\",\n      \"rev\": \"23324752757bf28124a518ec284044c8db79fee5\",\n      \"name\": \"Qq\",\n      \"manifestFile\": \"lake-manifest.json\",\n      \"inputRev\": \"v4.29.0-rc1\",\n      \"inherited\": false,\n      \"configFile\": \"lakefile.toml\"\n    }\n  ],\n  \"name\": \"Hax\",\n  \"lakeDir\": \".lake\"\n}"
  },
  {
    "path": "hax-lib/proof-libs/lean/lakefile.toml",
    "content": "name = \"Hax\"\nversion = \"0.1.0\"\ndefaultTargets = [\"Hax\"]\n\n[leanOptions]\nautoImplicit = false\nrelaxedAutoImplicit = false\nweak.linter.mathlibStandardSet = true\nmaxSynthPendingDepth = 3\n\n[[lean_lib]]\nname = \"Hax\"\n\n[[require]]\nname = \"Qq\"\ngit = \"https://github.com/leanprover-community/quote4\"\nrev = \"v4.29.0-rc1\"\n"
  },
  {
    "path": "hax-lib/proof-libs/lean/lean-toolchain",
    "content": "leanprover/lean4:v4.29.0-rc1\n"
  },
  {
    "path": "hax-lib/proofs/fstar/extraction/Hax_lib.Abstraction.fst",
    "content": "module Hax_lib.Abstraction\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\n/// Marks a type as abstract: its values can be lowered to concrete\n/// values. This might panic.\nclass t_Concretization (v_Self: Type0) (v_T: Type0) = {\n  f_concretize_pre:v_Self -> Type0;\n  f_concretize_post:v_Self -> v_T -> Type0;\n  f_concretize:x0: v_Self\n    -> Prims.Pure v_T (f_concretize_pre x0) (fun result -> f_concretize_post x0 result)\n}\n\n/// Marks a type as abstractable: its values can be mapped to an\n/// idealized version of the type. For instance, machine integers,\n/// which have bounds, can be mapped to mathematical integers.\n/// Each type can have only one abstraction.\nclass t_Abstraction (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_AbstractType:Type0;\n  f_lift_pre:v_Self -> Type0;\n  f_lift_post:v_Self -> f_AbstractType -> Type0;\n  f_lift:x0: v_Self\n    -> Prims.Pure f_AbstractType (f_lift_pre x0) (fun result -> f_lift_post x0 result)\n}\n"
  },
  {
    "path": "hax-lib/proofs/fstar/extraction/Hax_lib.Bundle.fst",
    "content": "module Hax_lib.Bundle\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Hax_lib.Abstraction in\n  let open Num_bigint in\n  let open Num_bigint.Bigint in\n  let open Num_bigint.Bigint.Addition in\n  let open Num_bigint.Bigint.Convert in\n  let open Num_bigint.Bigint.Division in\n  let open Num_bigint.Bigint.Multiplication in\n  let open Num_bigint.Bigint.Subtraction in\n  let open Num_traits.Cast in\n  let open Num_traits.Ops.Euclid in\n  ()\n\n/// This function exists only when compiled with `hax`, and is not\n/// meant to be used directly. It is called by `assert!` only in\n/// appropriate situations.\nlet v_assert (e_formula: bool) : Prims.unit = ()\n\n/// This function exists only when compiled with `hax`, and is not meant to be\n/// used directly. It is called by `assert_prop!` only in appropriate\n/// situations.\nlet assert_prop (e_formula: Hax_lib.Prop.t_Prop) : Prims.unit = ()\n\n/// This function exists only when compiled with `hax`, and is not\n/// meant to be used directly. It is called by `assume!` only in\n/// appropriate situations.\nlet v_assume (e_formula: Hax_lib.Prop.t_Prop) : Prims.unit = ()\n\n/// Dummy function that carries a string to be printed as such in the output language\nlet v_inline (_: string) : Prims.unit = ()\n\n/// Similar to `inline`, but allows for any type. Do not use directly.\nlet inline_unsafe (#v_T: Type0) (_: string) : v_T =\n  Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic \"internal error: entered unreachable code\"\n\n      <:\n      Rust_primitives.Hax.t_Never)\n\n/// Sink for any value into unit. This is used internally by hax to capture\n/// value of any type. 
Specifically, this is useful for the `decreases` clauses\n/// for the F* backend.\nlet any_to_unit (#v_T: Type0) (_: v_T) : Prims.unit =\n  Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic \"internal error: entered unreachable code\"\n\n      <:\n      Rust_primitives.Hax.t_Never)\n\n/// A dummy function that holds a loop invariant.\nlet e_internal_loop_invariant\n      (#v_T: Type0)\n      (#v_R: Type0)\n      (#v_P: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Core_models.Convert.t_Into v_R Hax_lib.Prop.t_Prop)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Ops.Function.t_FnOnce v_P v_T)\n      (#_: unit{i1.Core_models.Ops.Function.f_Output == v_R})\n      (_: v_P)\n    : Prims.unit = ()\n\n/// A dummy function that holds a while loop invariant.\nlet e_internal_while_loop_invariant (_: Hax_lib.Prop.t_Prop) : Prims.unit = ()\n\n(* item error backend: The mutation of this &mut is not allowed here.\n\nThis is discussed in issue https://github.com/hacspec/hax/issues/420.\nPlease upvote or comment this issue if you see this error message.\nNote: the error was labeled with context `DirectAndMut`.\n\nLast available AST for this item:\n\n/// A type that implements `Refinement` should be a newtype for a\n/// type `T`. The field holding the value of type `T` should be\n/// private, and `Refinement` should be the only interface to the\n/// type.\n/// Please never implement this trait yourself, use the\n/// `refinement_type` macro instead.\n#[no_std()]\n#[feature(register_tool)]\n#[register_tool(_hax)]\ntrait t_Refinement<Self_> {\n    /// The base type\n    #[no_std()]\n    #[feature(register_tool)]\n    #[register_tool(_hax)]\n    type f_InnerType: TodoPrintRustBoundsTyp;\n    /// Smart constructor capturing an invariant. 
Its extraction will\n    /// yield a proof obligation.\n    #[no_std()]\n    #[feature(register_tool)]\n    #[register_tool(_hax)]\n    fn f_new(_: proj_asso_type!()) -> Self;\n    /// Destructor for the refined type\n    #[no_std()]\n    #[feature(register_tool)]\n    #[register_tool(_hax)]\n    fn f_get(_: Self) -> proj_asso_type!();\n    /// Gets a mutable reference to a refinement\n    #[no_std()]\n    #[feature(register_tool)]\n    #[register_tool(_hax)]\n    fn f_get_mut<Anonymous: 'unk>(_: Self) -> tuple2<Self, &mut proj_asso_type!()>;\n    /// Tests wether a value satisfies the refinement\n    #[no_std()]\n    #[feature(register_tool)]\n    #[register_tool(_hax)]\n    fn f_invariant(_: proj_asso_type!()) -> hax_lib::prop::t_Prop;\n}\n\n\nLast AST:\n/** print_rust: pitem: not implemented  (item: { Concrete_ident.T.def_id =\n  { Explicit_def_id.T.is_constructor = false;\n    def_id =\n    { Types.index = (0, 0, None); is_local = true; kind = Types.Trait;\n      krate = \"hax_lib\";\n      parent =\n      (Some { Types.contents =\n              { Types.id = 0;\n                value =\n                { Types.index = (0, 0, None); is_local = true;\n                  kind = Types.Mod; krate = \"hax_lib\"; parent = None;\n                  path = [] }\n                }\n              });\n      path =\n      [{ Types.data = (Types.TypeNs \"Refinement\"); disambiguator = 0 }] }\n    };\n  moved =\n  (Some { Concrete_ident.Fresh_module.id = 2;\n          hints =\n          [{ Explicit_def_id.T.is_constructor = false;\n             def_id =\n             { Types.index = (0, 0, None); is_local = true; kind = Types.Use;\n               krate = \"hax_lib\";\n               parent =\n               (Some { Types.contents =\n                       { Types.id = 0;\n                         value =\n                         { Types.index = (0, 0, None); is_local = true;\n                           kind = Types.Mod; krate = \"hax_lib\";\n                           parent 
= None; path = [] }\n                         }\n                       });\n               path = [{ Types.data = Types.Use; disambiguator = 0 }] }\n             };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.ExternCrate; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"core\"); disambiguator = 0 }] }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Use; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path = [{ Types.data = Types.Use; disambiguator = 1 }] }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Use; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = 
true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path = [{ Types.data = Types.Use; disambiguator = 2 }] }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Use; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path = [{ Types.data = Types.Use; disambiguator = 3 }] }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Use; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path = [{ Types.data = Types.Use; disambiguator = 4 }] }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = (Types.Macro Types.Bang); krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n              
            value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.MacroNs \"proxy_macro_if_not_hax\");\n                   disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = (Types.Macro Types.Bang); krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.MacroNs \"debug_assert\");\n                   disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = (Types.Macro Types.Bang); krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.MacroNs \"assert\"); disambiguator = 0 }\n  
                ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true; kind = Types.Fn;\n                krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.ValueNs \"assert\"); disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = (Types.Macro Types.Bang); krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.MacroNs \"assert_prop\");\n                   disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true; kind = Types.Fn;\n                krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { 
Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.ValueNs \"assert_prop\");\n                   disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true; kind = Types.Fn;\n                krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.ValueNs \"assume\"); disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = (Types.Macro Types.Bang); krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.MacroNs \"assume\"); disambiguator = 0 }\n                  ]\n                }\n              };\n            { 
Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true; kind = Types.Fn;\n                krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.ValueNs \"inline\"); disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true; kind = Types.Fn;\n                krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.ValueNs \"inline_unsafe\");\n                   disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true; kind = Types.Fn;\n                krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; 
krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.ValueNs \"any_to_unit\");\n                   disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true; kind = Types.Fn;\n                krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.ValueNs \"_internal_loop_invariant\");\n                   disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true; kind = Types.Fn;\n                krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data =\n                   (Types.ValueNs \"_internal_while_loop_invariant\");\n                   disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = 
false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true; kind = Types.Fn;\n                krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.ValueNs \"_internal_loop_decreases\");\n                   disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Trait; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"Refinement\");\n                   disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Trait; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = 
Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"RefineAs\"); disambiguator = 0\n                   }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Mod; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent = None; path = [] }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 }] }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Use; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = 
[] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Use; disambiguator = 0 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Use; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n             
     { Types.data = Types.Use; disambiguator = 1 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Use; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Use; disambiguator = 2 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Use; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, 
None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Use; disambiguator = 3 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Use; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = 
\"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Use; disambiguator = 4 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Use; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n               
 [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Use; disambiguator = 5 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Struct; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = (Types.TypeNs \"Int\"); disambiguator = 0 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n              
          { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 8 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, 
None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 9 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n         
                     ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 10 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 11 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = 
true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 12 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n 
                                   { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 13 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n     
                       path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 14 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 15 }]\n                
}\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 0 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                    
        kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 1 }]\n                            }\n                          }\n                
        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 1 };\n                  { Types.data = (Types.ValueNs \"new\"); disambiguator = 0 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                          
        }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 1 }]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 1 };\n                  { Types.data = (Types.ValueNs \"get\"); disambiguator = 0 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        
path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 2 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); 
disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 3 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 4 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n         
                 value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 5 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                 
       is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 6 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                
                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 7 }]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 7 };\n                  { Types.data = (Types.ValueNs \"pow2\"); disambiguator = 0 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            
krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 7 }]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = 
(Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 7 };\n                  { Types.data = (Types.ValueNs \"_unsafe_from_str\");\n                    disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n    
                                            });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 7 }]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 7 };\n                  { Types.data = (Types.ValueNs \"rem_euclid\");\n                    disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Use; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.AssocFn; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true;\n                                        kind = Types.Impl {of_trait = false};\n                                        
krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent =\n                                                    (Some { Types.contents =\n                                                            { Types.id = 0;\n                                                              value =\n                                                              { Types.index =\n                                                                (0, 0, None);\n                                                                is_local =\n                                                                true;\n                                                                kind =\n                                                                Types.Mod;\n                                                                krate =\n                                                                \"hax_lib\";\n                                                                parent = None;\n                                                                path = [] }\n                                                              }\n                                                            });\n                                                    path =\n                                                    [{ Types.data =\n                                                       (Types.TypeNs \"int\");\n 
                                                      disambiguator = 0 }\n                                                      ]\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 };\n                                          { Types.data = Types.Impl;\n                                            disambiguator = 7 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 7 };\n                              { Types.data =\n                                (Types.ValueNs \"_unsafe_from_str\");\n                                disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 7 };\n                  { Types.data = (Types.ValueNs \"_unsafe_from_str\");\n                    disambiguator = 0 };\n                  { Types.data = Types.Use; disambiguator = 0 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Use; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { 
Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.AssocFn; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true;\n                                        kind = Types.Impl {of_trait = false};\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent =\n                                                    (Some { Types.contents =\n                                                            { Types.id = 0;\n                                                              value =\n                                                              { Types.index =\n                                                                (0, 0, None);\n                                                                is_local =\n                                                                true;\n                                                                kind =\n                                                                Types.Mod;\n              
                                                  krate =\n                                                                \"hax_lib\";\n                                                                parent = None;\n                                                                path = [] }\n                                                              }\n                                                            });\n                                                    path =\n                                                    [{ Types.data =\n                                                       (Types.TypeNs \"int\");\n                                                       disambiguator = 0 }\n                                                      ]\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 };\n                                          { Types.data = Types.Impl;\n                                            disambiguator = 7 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 7 };\n                              { Types.data = (Types.ValueNs \"rem_euclid\");\n                                disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); 
disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 7 };\n                  { Types.data = (Types.ValueNs \"rem_euclid\");\n                    disambiguator = 0 };\n                  { Types.data = Types.Use; disambiguator = 0 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Trait; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = (Types.TypeNs \"ToInt\"); disambiguator = 0 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind 
= (Types.Macro Types.Bang); krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = (Types.MacroNs \"implement_abstraction\");\n                    disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                
            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 16 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n       
                             });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 17 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; 
disambiguator = 18 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 19 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { 
Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 20 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n          
                              krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 21 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n         
               });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 22 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 23 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                
parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 24 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value 
=\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 25 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs 
\"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 26 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 27 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n            
  def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 28 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                          
  parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 29 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                    
                  }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 30 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  
{ Types.data = Types.Impl; disambiguator = 31 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 32 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n            
              { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 33 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = 
Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 34 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                
          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 35 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 36 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = 
\"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 37 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                
                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 38 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ 
Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 39 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = (Types.Macro Types.Bang); krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = (Types.MacroNs \"implement_concretize\");\n                    disambiguator = 0 }\n                  ]\n             
   }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 40 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                
            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 41 }\n                              ]\n                            }\n        
                  }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 41 };\n                  { Types.data = (Types.ValueNs \"to_u8\"); disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 42 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { 
Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n          
                          });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 43 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 43 };\n                  { Types.data = (Types.ValueNs \"to_u16\"); disambiguator = 0\n                    }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n     
                     }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 44 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                            
                    });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 45 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 45 };\n                  { Types.data = (Types.ValueNs \"to_u32\"); disambiguator = 0\n                    }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n    
                                    path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 46 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n  
                                                  kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 47 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 47 };\n                  { Types.data = (Types.ValueNs \"to_u64\"); disambiguator = 0\n                    }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                       
     (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 48 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                       
 (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 49 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 49 };\n                  { Types.data = (Types.ValueNs \"to_u128\"); disambiguator = 0\n                    }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = 
\"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 50 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                   
 { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 51 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 51 };\n        
          { Types.data = (Types.ValueNs \"to_usize\");\n                    disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 52 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id 
= 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                    
          { Types.data = Types.Impl; disambiguator = 53 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 53 };\n                  { Types.data = (Types.ValueNs \"to_i8\"); disambiguator = 0 }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 54 }]\n        
        }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n             
                             ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 55 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 55 };\n                  { Types.data = (Types.ValueNs \"to_i16\"); disambiguator = 0\n                    }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs 
\"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 56 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n    
                                                }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 57 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 57 };\n                  { Types.data = (Types.ValueNs \"to_i32\"); disambiguator = 0\n                    }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                         
               is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 58 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n       
                                             (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 59 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 59 };\n                  { Types.data = (Types.ValueNs \"to_i64\"); disambiguator = 0\n                    }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = 
true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 60 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n               
                         krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 61 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 61 };\n                  { Types.data = (Types.ValueNs \"to_i128\"); disambiguator = 0\n                    }\n                  ]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              
def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.Impl {of_trait = true}; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Mod; krate = \"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\"; parent = None;\n                                        path = [] }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n                [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 62 }]\n                }\n              };\n            { Explicit_def_id.T.is_constructor = false;\n              def_id =\n              { Types.index = (0, 0, None); is_local = true;\n                kind = Types.AssocFn; krate = \"hax_lib\";\n                parent =\n                (Some { Types.contents =\n                        { Types.id = 0;\n                          value =\n                          { Types.index = (0, 0, None); is_local = true;\n                            kind = Types.Impl {of_trait = false};\n                            krate = 
\"hax_lib\";\n                            parent =\n                            (Some { Types.contents =\n                                    { Types.id = 0;\n                                      value =\n                                      { Types.index = (0, 0, None);\n                                        is_local = true; kind = Types.Mod;\n                                        krate = \"hax_lib\";\n                                        parent =\n                                        (Some { Types.contents =\n                                                { Types.id = 0;\n                                                  value =\n                                                  { Types.index =\n                                                    (0, 0, None);\n                                                    is_local = true;\n                                                    kind = Types.Mod;\n                                                    krate = \"hax_lib\";\n                                                    parent = None; path = []\n                                                    }\n                                                  }\n                                                });\n                                        path =\n                                        [{ Types.data = (Types.TypeNs \"int\");\n                                           disambiguator = 0 }\n                                          ]\n                                        }\n                                      }\n                                    });\n                            path =\n                            [{ Types.data = (Types.TypeNs \"int\");\n                               disambiguator = 0 };\n                              { Types.data = Types.Impl; disambiguator = 63 }\n                              ]\n                            }\n                          }\n                        });\n                path =\n             
   [{ Types.data = (Types.TypeNs \"int\"); disambiguator = 0 };\n                  { Types.data = Types.Impl; disambiguator = 63 };\n                  { Types.data = (Types.ValueNs \"to_isize\");\n                    disambiguator = 0 }\n                  ]\n                }\n              }\n            ];\n          label = \"bundle\" });\n  suffix = None }) */\nconst _: () = ();\n *)\n\n/// A utilitary trait that provides a `into_checked` method on traits\n/// that have a refined counter part. This trait is parametrized by a\n/// type `Target`: a base type can be refined in multiple ways.\n/// Please never implement this trait yourself, use the\n/// `refinement_type` macro instead.\nclass t_RefineAs (v_Self: Type0) (v_RefinedType: Type0) = {\n  f_into_checked_pre:v_Self -> Type0;\n  f_into_checked_post:v_Self -> v_RefinedType -> Type0;\n  f_into_checked:x0: v_Self\n    -> Prims.Pure v_RefinedType\n        (f_into_checked_pre x0)\n        (fun result -> f_into_checked_post x0 result)\n}\n\n/// Mathematical integers for writting specifications. 
Mathematical\n/// integers are unbounded and arithmetic operation on them never over\n/// or underflow.\ntype t_Int = | Int : Hax_lib.Int.Bigint.t_BigInt -> t_Int\n\n/// A dummy function that holds a loop variant.\nlet e_internal_loop_decreases (_: t_Int) : Prims.unit = ()\n\nlet impl_9: Core_models.Clone.t_Clone t_Int =\n  { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_8': Core_models.Marker.t_Copy t_Int\n\nunfold\nlet impl_8 = impl_8'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_11': Core_models.Marker.t_StructuralPartialEq t_Int\n\nunfold\nlet impl_11 = impl_11'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_12': Core_models.Cmp.t_PartialEq t_Int t_Int\n\nunfold\nlet impl_12 = impl_12'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_10': Core_models.Cmp.t_Eq t_Int\n\nunfold\nlet impl_10 = impl_10'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_14': Core_models.Cmp.t_PartialOrd t_Int t_Int\n\nunfold\nlet impl_14 = impl_14'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_13': Core_models.Cmp.t_Ord t_Int\n\nunfold\nlet impl_13 = impl_13'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_15': Core_models.Fmt.t_Debug t_Int\n\nunfold\nlet impl_15 = impl_15'\n\nlet impl_1__new\n      (#iimpl_637761304_: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Core_models.Convert.t_Into iimpl_637761304_ Num_bigint.Bigint.t_BigInt)\n      (x: iimpl_637761304_)\n    : t_Int =\n  Int\n  (Hax_lib.Int.Bigint.impl_BigInt__new (Core_models.Convert.f_into #iimpl_637761304_\n          #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          x\n        <:\n        Num_bigint.Bigint.t_BigInt))\n  <:\n  t_Int\n\nlet impl_1__get (self: t_Int) : Num_bigint.Bigint.t_BigInt =\n  Hax_lib.Int.Bigint.impl_BigInt__get self._0\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nlet impl: Core_models.Fmt.t_Display t_Int =\n  {\n    f_fmt_pre = (fun (self: t_Int) (f: Core_models.Fmt.t_Formatter) -> true);\n    f_fmt_post\n    =\n    (fun\n        (self: t_Int)\n        (f: Core_models.Fmt.t_Formatter)\n        (out1:\n          (Core_models.Fmt.t_Formatter &\n            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error))\n        ->\n        true);\n    f_fmt\n    =\n    fun (self: t_Int) (f: Core_models.Fmt.t_Formatter) ->\n      let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n        let list =\n          [\n            Core_models.Fmt.Rt.impl__new_display #Num_bigint.Bigint.t_BigInt\n              (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n          ]\n        in\n        FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n        Rust_primitives.Hax.array_of_list 1 list\n      in\n      let tmp0, out:(Core_models.Fmt.t_Formatter &\n        Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) =\n        Core_models.Fmt.impl_11__write_fmt f\n          (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 1)\n              (mk_usize 1)\n              (let list = [\"\"] in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                Rust_primitives.Hax.array_of_list 1 list)\n              args\n            <:\n            Core_models.Fmt.t_Arguments)\n      in\n      let f:Core_models.Fmt.t_Formatter = tmp0 in\n      let hax_temp_output:Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error = out in\n      f, hax_temp_output\n      <:\n      (Core_models.Fmt.t_Formatter & Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_2: Core_models.Ops.Arith.t_Add t_Int t_Int =\n  {\n    f_Output = t_Int;\n    f_add_pre = (fun (self: t_Int) (other: t_Int) -> true);\n    f_add_post = (fun (self: t_Int) (other: t_Int) (out: t_Int) -> true);\n   
 f_add\n    =\n    fun (self: t_Int) (other: t_Int) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Ops.Arith.f_add #Num_bigint.Bigint.t_BigInt\n            #Num_bigint.Bigint.t_BigInt\n            #FStar.Tactics.Typeclasses.solve\n            (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n            (impl_1__get other <: Num_bigint.Bigint.t_BigInt)\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_3: Core_models.Ops.Arith.t_Neg t_Int =\n  {\n    f_Output = t_Int;\n    f_neg_pre = (fun (self: t_Int) -> true);\n    f_neg_post = (fun (self: t_Int) (out: t_Int) -> true);\n    f_neg\n    =\n    fun (self: t_Int) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Ops.Arith.f_neg #Num_bigint.Bigint.t_BigInt\n            #FStar.Tactics.Typeclasses.solve\n            (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_4: Core_models.Ops.Arith.t_Sub t_Int t_Int =\n  {\n    f_Output = t_Int;\n    f_sub_pre = (fun (self: t_Int) (other: t_Int) -> true);\n    f_sub_post = (fun (self: t_Int) (other: t_Int) (out: t_Int) -> true);\n    f_sub\n    =\n    fun (self: t_Int) (other: t_Int) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Ops.Arith.f_sub #Num_bigint.Bigint.t_BigInt\n            #Num_bigint.Bigint.t_BigInt\n            #FStar.Tactics.Typeclasses.solve\n            (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n            (impl_1__get other <: Num_bigint.Bigint.t_BigInt)\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_5: Core_models.Ops.Arith.t_Mul t_Int t_Int =\n  {\n    f_Output = t_Int;\n    f_mul_pre = (fun (self: t_Int) (other: t_Int) -> true);\n    f_mul_post = (fun (self: t_Int) (other: t_Int) (out: t_Int) -> true);\n    f_mul\n    =\n    fun (self: t_Int) 
(other: t_Int) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Ops.Arith.f_mul #Num_bigint.Bigint.t_BigInt\n            #Num_bigint.Bigint.t_BigInt\n            #FStar.Tactics.Typeclasses.solve\n            (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n            (impl_1__get other <: Num_bigint.Bigint.t_BigInt)\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_6: Core_models.Ops.Arith.t_Div t_Int t_Int =\n  {\n    f_Output = t_Int;\n    f_div_pre = (fun (self: t_Int) (other: t_Int) -> true);\n    f_div_post = (fun (self: t_Int) (other: t_Int) (out: t_Int) -> true);\n    f_div\n    =\n    fun (self: t_Int) (other: t_Int) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Ops.Arith.f_div #Num_bigint.Bigint.t_BigInt\n            #Num_bigint.Bigint.t_BigInt\n            #FStar.Tactics.Typeclasses.solve\n            (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n            (impl_1__get other <: Num_bigint.Bigint.t_BigInt)\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n/// Raises `2` at the power `self`\nlet impl_7__pow2 (self: t_Int) : t_Int =\n  let exponent:u32 =\n    Core_models.Option.impl__expect #u32\n      (Num_traits.Cast.f_to_u32 #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n        <:\n        Core_models.Option.t_Option u32)\n      \"Exponent doesn't fit in a u32\"\n  in\n  impl_1__new #Num_bigint.Bigint.t_BigInt\n    (Num_bigint.Bigint.impl_BigInt__pow (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #u8\n            #FStar.Tactics.Typeclasses.solve\n            (mk_u8 2)\n          <:\n          Num_bigint.Bigint.t_BigInt)\n        exponent\n      <:\n      Num_bigint.Bigint.t_BigInt)\n\n/// Constructs a `Int` out of a string literal. 
This function\n/// assumes its argument consists only of decimal digits, with\n/// optionally a minus sign prefix.\nlet impl_7__e_unsafe_from_str (s: string) : t_Int =\n  impl_1__new #Num_bigint.Bigint.t_BigInt\n    (Core_models.Result.impl__unwrap #Num_bigint.Bigint.t_BigInt\n        #Num_bigint.t_ParseBigIntError\n        (Core_models.Str.Traits.f_from_str #Num_bigint.Bigint.t_BigInt\n            #FStar.Tactics.Typeclasses.solve\n            s\n          <:\n          Core_models.Result.t_Result Num_bigint.Bigint.t_BigInt Num_bigint.t_ParseBigIntError)\n      <:\n      Num_bigint.Bigint.t_BigInt)\n\nlet impl_7__rem_euclid (self v: t_Int) : t_Int =\n  impl_1__new #Num_bigint.Bigint.t_BigInt\n    (Num_traits.Ops.Euclid.f_rem_euclid #Num_bigint.Bigint.t_BigInt\n        #FStar.Tactics.Typeclasses.solve\n        (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n        (impl_1__get v <: Num_bigint.Bigint.t_BigInt)\n      <:\n      Num_bigint.Bigint.t_BigInt)\n\nclass t_ToInt (v_Self: Type0) = {\n  f_to_int_pre:v_Self -> Type0;\n  f_to_int_post:v_Self -> t_Int -> Type0;\n  f_to_int:x0: v_Self -> Prims.Pure t_Int (f_to_int_pre x0) (fun result -> f_to_int_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_16: Hax_lib.Abstraction.t_Abstraction u8 =\n  {\n    f_AbstractType = t_Int;\n    f_lift_pre = (fun (self: u8) -> true);\n    f_lift_post = (fun (self: u8) (out: t_Int) -> true);\n    f_lift\n    =\n    fun (self: u8) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #u8\n            #FStar.Tactics.Typeclasses.solve\n            self\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_17: t_ToInt u8 =\n  {\n    f_to_int_pre = (fun (self: u8) -> true);\n    f_to_int_post = (fun (self: u8) (out: t_Int) -> true);\n    f_to_int\n    =\n    fun (self: u8) -> Hax_lib.Abstraction.f_lift #u8 
#FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_18: Hax_lib.Abstraction.t_Abstraction u16 =\n  {\n    f_AbstractType = t_Int;\n    f_lift_pre = (fun (self: u16) -> true);\n    f_lift_post = (fun (self: u16) (out: t_Int) -> true);\n    f_lift\n    =\n    fun (self: u16) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #u16\n            #FStar.Tactics.Typeclasses.solve\n            self\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_19: t_ToInt u16 =\n  {\n    f_to_int_pre = (fun (self: u16) -> true);\n    f_to_int_post = (fun (self: u16) (out: t_Int) -> true);\n    f_to_int\n    =\n    fun (self: u16) -> Hax_lib.Abstraction.f_lift #u16 #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_20: Hax_lib.Abstraction.t_Abstraction u32 =\n  {\n    f_AbstractType = t_Int;\n    f_lift_pre = (fun (self: u32) -> true);\n    f_lift_post = (fun (self: u32) (out: t_Int) -> true);\n    f_lift\n    =\n    fun (self: u32) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #u32\n            #FStar.Tactics.Typeclasses.solve\n            self\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_21: t_ToInt u32 =\n  {\n    f_to_int_pre = (fun (self: u32) -> true);\n    f_to_int_post = (fun (self: u32) (out: t_Int) -> true);\n    f_to_int\n    =\n    fun (self: u32) -> Hax_lib.Abstraction.f_lift #u32 #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_22: Hax_lib.Abstraction.t_Abstraction u64 =\n  {\n    f_AbstractType = t_Int;\n    f_lift_pre = (fun (self: u64) -> true);\n    f_lift_post = (fun (self: u64) (out: t_Int) -> true);\n    f_lift\n    =\n    fun (self: u64) 
->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #u64\n            #FStar.Tactics.Typeclasses.solve\n            self\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_23: t_ToInt u64 =\n  {\n    f_to_int_pre = (fun (self: u64) -> true);\n    f_to_int_post = (fun (self: u64) (out: t_Int) -> true);\n    f_to_int\n    =\n    fun (self: u64) -> Hax_lib.Abstraction.f_lift #u64 #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_24: Hax_lib.Abstraction.t_Abstraction u128 =\n  {\n    f_AbstractType = t_Int;\n    f_lift_pre = (fun (self: u128) -> true);\n    f_lift_post = (fun (self: u128) (out: t_Int) -> true);\n    f_lift\n    =\n    fun (self: u128) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #u128\n            #FStar.Tactics.Typeclasses.solve\n            self\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_25: t_ToInt u128 =\n  {\n    f_to_int_pre = (fun (self: u128) -> true);\n    f_to_int_post = (fun (self: u128) (out: t_Int) -> true);\n    f_to_int\n    =\n    fun (self: u128) -> Hax_lib.Abstraction.f_lift #u128 #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_26: Hax_lib.Abstraction.t_Abstraction usize =\n  {\n    f_AbstractType = t_Int;\n    f_lift_pre = (fun (self: usize) -> true);\n    f_lift_post = (fun (self: usize) (out: t_Int) -> true);\n    f_lift\n    =\n    fun (self: usize) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #usize\n            #FStar.Tactics.Typeclasses.solve\n            self\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nlet impl_27: t_ToInt usize =\n  {\n    f_to_int_pre = (fun (self: usize) -> true);\n    f_to_int_post = (fun (self: usize) (out: t_Int) -> true);\n    f_to_int\n    =\n    fun (self: usize) -> Hax_lib.Abstraction.f_lift #usize #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_28: Hax_lib.Abstraction.t_Abstraction i8 =\n  {\n    f_AbstractType = t_Int;\n    f_lift_pre = (fun (self: i8) -> true);\n    f_lift_post = (fun (self: i8) (out: t_Int) -> true);\n    f_lift\n    =\n    fun (self: i8) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #i8\n            #FStar.Tactics.Typeclasses.solve\n            self\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_29: t_ToInt i8 =\n  {\n    f_to_int_pre = (fun (self: i8) -> true);\n    f_to_int_post = (fun (self: i8) (out: t_Int) -> true);\n    f_to_int\n    =\n    fun (self: i8) -> Hax_lib.Abstraction.f_lift #i8 #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_30: Hax_lib.Abstraction.t_Abstraction i16 =\n  {\n    f_AbstractType = t_Int;\n    f_lift_pre = (fun (self: i16) -> true);\n    f_lift_post = (fun (self: i16) (out: t_Int) -> true);\n    f_lift\n    =\n    fun (self: i16) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #i16\n            #FStar.Tactics.Typeclasses.solve\n            self\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_31: t_ToInt i16 =\n  {\n    f_to_int_pre = (fun (self: i16) -> true);\n    f_to_int_post = (fun (self: i16) (out: t_Int) -> true);\n    f_to_int\n    =\n    fun (self: i16) -> Hax_lib.Abstraction.f_lift #i16 #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nlet impl_32: Hax_lib.Abstraction.t_Abstraction i32 =\n  {\n    f_AbstractType = t_Int;\n    f_lift_pre = (fun (self: i32) -> true);\n    f_lift_post = (fun (self: i32) (out: t_Int) -> true);\n    f_lift\n    =\n    fun (self: i32) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #i32\n            #FStar.Tactics.Typeclasses.solve\n            self\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_33: t_ToInt i32 =\n  {\n    f_to_int_pre = (fun (self: i32) -> true);\n    f_to_int_post = (fun (self: i32) (out: t_Int) -> true);\n    f_to_int\n    =\n    fun (self: i32) -> Hax_lib.Abstraction.f_lift #i32 #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_34: Hax_lib.Abstraction.t_Abstraction i64 =\n  {\n    f_AbstractType = t_Int;\n    f_lift_pre = (fun (self: i64) -> true);\n    f_lift_post = (fun (self: i64) (out: t_Int) -> true);\n    f_lift\n    =\n    fun (self: i64) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #i64\n            #FStar.Tactics.Typeclasses.solve\n            self\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_35: t_ToInt i64 =\n  {\n    f_to_int_pre = (fun (self: i64) -> true);\n    f_to_int_post = (fun (self: i64) (out: t_Int) -> true);\n    f_to_int\n    =\n    fun (self: i64) -> Hax_lib.Abstraction.f_lift #i64 #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_36: Hax_lib.Abstraction.t_Abstraction i128 =\n  {\n    f_AbstractType = t_Int;\n    f_lift_pre = (fun (self: i128) -> true);\n    f_lift_post = (fun (self: i128) (out: t_Int) -> true);\n    f_lift\n    =\n    fun (self: i128) ->\n      impl_1__new 
#Num_bigint.Bigint.t_BigInt\n        (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #i128\n            #FStar.Tactics.Typeclasses.solve\n            self\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_37: t_ToInt i128 =\n  {\n    f_to_int_pre = (fun (self: i128) -> true);\n    f_to_int_post = (fun (self: i128) (out: t_Int) -> true);\n    f_to_int\n    =\n    fun (self: i128) -> Hax_lib.Abstraction.f_lift #i128 #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_38: Hax_lib.Abstraction.t_Abstraction isize =\n  {\n    f_AbstractType = t_Int;\n    f_lift_pre = (fun (self: isize) -> true);\n    f_lift_post = (fun (self: isize) (out: t_Int) -> true);\n    f_lift\n    =\n    fun (self: isize) ->\n      impl_1__new #Num_bigint.Bigint.t_BigInt\n        (Core_models.Convert.f_from #Num_bigint.Bigint.t_BigInt\n            #isize\n            #FStar.Tactics.Typeclasses.solve\n            self\n          <:\n          Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_39: t_ToInt isize =\n  {\n    f_to_int_pre = (fun (self: isize) -> true);\n    f_to_int_post = (fun (self: isize) (out: t_Int) -> true);\n    f_to_int\n    =\n    fun (self: isize) -> Hax_lib.Abstraction.f_lift #isize #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_40: Hax_lib.Abstraction.t_Concretization t_Int u8 =\n  {\n    f_concretize_pre = (fun (self: t_Int) -> true);\n    f_concretize_post = (fun (self: t_Int) (out: u8) -> true);\n    f_concretize\n    =\n    fun (self: t_Int) ->\n      let concretized:Core_models.Option.t_Option u8 =\n        Num_traits.Cast.f_to_u8 #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n      in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit 
= v_assert (Core_models.Option.impl__is_some #u8 concretized <: bool) in\n          ()\n      in\n      Core_models.Option.impl__unwrap #u8 concretized\n  }\n\nlet impl_41__to_u8 (self: t_Int) : u8 =\n  Hax_lib.Abstraction.f_concretize #t_Int #u8 #FStar.Tactics.Typeclasses.solve self\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_42: Hax_lib.Abstraction.t_Concretization t_Int u16 =\n  {\n    f_concretize_pre = (fun (self: t_Int) -> true);\n    f_concretize_post = (fun (self: t_Int) (out: u16) -> true);\n    f_concretize\n    =\n    fun (self: t_Int) ->\n      let concretized:Core_models.Option.t_Option u16 =\n        Num_traits.Cast.f_to_u16 #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n      in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #u16 concretized <: bool) in\n          ()\n      in\n      Core_models.Option.impl__unwrap #u16 concretized\n  }\n\nlet impl_43__to_u16 (self: t_Int) : u16 =\n  Hax_lib.Abstraction.f_concretize #t_Int #u16 #FStar.Tactics.Typeclasses.solve self\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_44: Hax_lib.Abstraction.t_Concretization t_Int u32 =\n  {\n    f_concretize_pre = (fun (self: t_Int) -> true);\n    f_concretize_post = (fun (self: t_Int) (out: u32) -> true);\n    f_concretize\n    =\n    fun (self: t_Int) ->\n      let concretized:Core_models.Option.t_Option u32 =\n        Num_traits.Cast.f_to_u32 #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n      in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #u32 concretized <: bool) in\n          ()\n      in\n      Core_models.Option.impl__unwrap #u32 concretized\n  }\n\nlet impl_45__to_u32 (self: t_Int) : u32 =\n  
Hax_lib.Abstraction.f_concretize #t_Int #u32 #FStar.Tactics.Typeclasses.solve self\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_46: Hax_lib.Abstraction.t_Concretization t_Int u64 =\n  {\n    f_concretize_pre = (fun (self: t_Int) -> true);\n    f_concretize_post = (fun (self: t_Int) (out: u64) -> true);\n    f_concretize\n    =\n    fun (self: t_Int) ->\n      let concretized:Core_models.Option.t_Option u64 =\n        Num_traits.Cast.f_to_u64 #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n      in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #u64 concretized <: bool) in\n          ()\n      in\n      Core_models.Option.impl__unwrap #u64 concretized\n  }\n\nlet impl_47__to_u64 (self: t_Int) : u64 =\n  Hax_lib.Abstraction.f_concretize #t_Int #u64 #FStar.Tactics.Typeclasses.solve self\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_48: Hax_lib.Abstraction.t_Concretization t_Int u128 =\n  {\n    f_concretize_pre = (fun (self: t_Int) -> true);\n    f_concretize_post = (fun (self: t_Int) (out: u128) -> true);\n    f_concretize\n    =\n    fun (self: t_Int) ->\n      let concretized:Core_models.Option.t_Option u128 =\n        Num_traits.Cast.f_to_u128 #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n      in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit =\n            v_assert (Core_models.Option.impl__is_some #u128 concretized <: bool)\n          in\n          ()\n      in\n      Core_models.Option.impl__unwrap #u128 concretized\n  }\n\nlet impl_49__to_u128 (self: t_Int) : u128 =\n  Hax_lib.Abstraction.f_concretize #t_Int #u128 #FStar.Tactics.Typeclasses.solve self\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_50: Hax_lib.Abstraction.t_Concretization t_Int 
usize =\n  {\n    f_concretize_pre = (fun (self: t_Int) -> true);\n    f_concretize_post = (fun (self: t_Int) (out: usize) -> true);\n    f_concretize\n    =\n    fun (self: t_Int) ->\n      let concretized:Core_models.Option.t_Option usize =\n        Num_traits.Cast.f_to_usize #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n      in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit =\n            v_assert (Core_models.Option.impl__is_some #usize concretized <: bool)\n          in\n          ()\n      in\n      Core_models.Option.impl__unwrap #usize concretized\n  }\n\nlet impl_51__to_usize (self: t_Int) : usize =\n  Hax_lib.Abstraction.f_concretize #t_Int #usize #FStar.Tactics.Typeclasses.solve self\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_52: Hax_lib.Abstraction.t_Concretization t_Int i8 =\n  {\n    f_concretize_pre = (fun (self: t_Int) -> true);\n    f_concretize_post = (fun (self: t_Int) (out: i8) -> true);\n    f_concretize\n    =\n    fun (self: t_Int) ->\n      let concretized:Core_models.Option.t_Option i8 =\n        Num_traits.Cast.f_to_i8 #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n      in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #i8 concretized <: bool) in\n          ()\n      in\n      Core_models.Option.impl__unwrap #i8 concretized\n  }\n\nlet impl_53__to_i8 (self: t_Int) : i8 =\n  Hax_lib.Abstraction.f_concretize #t_Int #i8 #FStar.Tactics.Typeclasses.solve self\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_54: Hax_lib.Abstraction.t_Concretization t_Int i16 =\n  {\n    f_concretize_pre = (fun (self: t_Int) -> true);\n    f_concretize_post = (fun (self: t_Int) (out: i16) -> true);\n    f_concretize\n    =\n    fun (self: t_Int) ->\n      
let concretized:Core_models.Option.t_Option i16 =\n        Num_traits.Cast.f_to_i16 #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n      in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #i16 concretized <: bool) in\n          ()\n      in\n      Core_models.Option.impl__unwrap #i16 concretized\n  }\n\nlet impl_55__to_i16 (self: t_Int) : i16 =\n  Hax_lib.Abstraction.f_concretize #t_Int #i16 #FStar.Tactics.Typeclasses.solve self\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_56: Hax_lib.Abstraction.t_Concretization t_Int i32 =\n  {\n    f_concretize_pre = (fun (self: t_Int) -> true);\n    f_concretize_post = (fun (self: t_Int) (out: i32) -> true);\n    f_concretize\n    =\n    fun (self: t_Int) ->\n      let concretized:Core_models.Option.t_Option i32 =\n        Num_traits.Cast.f_to_i32 #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n      in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #i32 concretized <: bool) in\n          ()\n      in\n      Core_models.Option.impl__unwrap #i32 concretized\n  }\n\nlet impl_57__to_i32 (self: t_Int) : i32 =\n  Hax_lib.Abstraction.f_concretize #t_Int #i32 #FStar.Tactics.Typeclasses.solve self\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_58: Hax_lib.Abstraction.t_Concretization t_Int i64 =\n  {\n    f_concretize_pre = (fun (self: t_Int) -> true);\n    f_concretize_post = (fun (self: t_Int) (out: i64) -> true);\n    f_concretize\n    =\n    fun (self: t_Int) ->\n      let concretized:Core_models.Option.t_Option i64 =\n        Num_traits.Cast.f_to_i64 #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n   
   in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit = v_assert (Core_models.Option.impl__is_some #i64 concretized <: bool) in\n          ()\n      in\n      Core_models.Option.impl__unwrap #i64 concretized\n  }\n\nlet impl_59__to_i64 (self: t_Int) : i64 =\n  Hax_lib.Abstraction.f_concretize #t_Int #i64 #FStar.Tactics.Typeclasses.solve self\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_60: Hax_lib.Abstraction.t_Concretization t_Int i128 =\n  {\n    f_concretize_pre = (fun (self: t_Int) -> true);\n    f_concretize_post = (fun (self: t_Int) (out: i128) -> true);\n    f_concretize\n    =\n    fun (self: t_Int) ->\n      let concretized:Core_models.Option.t_Option i128 =\n        Num_traits.Cast.f_to_i128 #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n      in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit =\n            v_assert (Core_models.Option.impl__is_some #i128 concretized <: bool)\n          in\n          ()\n      in\n      Core_models.Option.impl__unwrap #i128 concretized\n  }\n\nlet impl_61__to_i128 (self: t_Int) : i128 =\n  Hax_lib.Abstraction.f_concretize #t_Int #i128 #FStar.Tactics.Typeclasses.solve self\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_62: Hax_lib.Abstraction.t_Concretization t_Int isize =\n  {\n    f_concretize_pre = (fun (self: t_Int) -> true);\n    f_concretize_post = (fun (self: t_Int) (out: isize) -> true);\n    f_concretize\n    =\n    fun (self: t_Int) ->\n      let concretized:Core_models.Option.t_Option isize =\n        Num_traits.Cast.f_to_isize #Num_bigint.Bigint.t_BigInt\n          #FStar.Tactics.Typeclasses.solve\n          (impl_1__get self <: Num_bigint.Bigint.t_BigInt)\n      in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit =\n            v_assert (Core_models.Option.impl__is_some #isize concretized <: bool)\n 
         in\n          ()\n      in\n      Core_models.Option.impl__unwrap #isize concretized\n  }\n\nlet impl_63__to_isize (self: t_Int) : isize =\n  Hax_lib.Abstraction.f_concretize #t_Int #isize #FStar.Tactics.Typeclasses.solve self\n"
  },
  {
    "path": "hax-lib/proofs/fstar/extraction/Hax_lib.Int.Bigint.fst",
    "content": "module Hax_lib.Int.Bigint\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Num_bigint.Bigint in\n  ()\n\n/// Maximal number of bytes stored in our copiable `BigInt`s.\nlet v_BYTES: usize = mk_usize 1024\n\ntype t_BigInt = {\n  f_sign:Num_bigint.Bigint.t_Sign;\n  f_data:t_Array u8 (mk_usize 1024)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_5': Core_models.Fmt.t_Debug t_BigInt\n\nunfold\nlet impl_5 = impl_5'\n\nlet impl_7: Core_models.Clone.t_Clone t_BigInt =\n  { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_6': Core_models.Marker.t_Copy t_BigInt\n\nunfold\nlet impl_6 = impl_6'\n\n/// Construct a [`BigInt`] from a [`num_bigint::BigInt`]. This\n/// operation panics when the provided [`num_bigint::BigInt`]\n/// has more than [`BYTES`] bytes.\nlet impl_BigInt__new (i: Num_bigint.Bigint.t_BigInt) : t_BigInt =\n  let sign, bytes:(Num_bigint.Bigint.t_Sign & Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) =\n    Num_bigint.Bigint.impl_BigInt__to_bytes_be i\n  in\n  let _:Prims.unit =\n    if (Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global bytes <: usize) >. 
v_BYTES\n    then\n      Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic_fmt (Core_models.Fmt.Rt.impl_1__new_const\n                (mk_usize 1)\n                (let list =\n                    [\"`copiable_bigint::BigInt::new`: too big, please consider increasing `BYTES`\"]\n                  in\n                  FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                  Rust_primitives.Hax.array_of_list 1 list)\n              <:\n              Core_models.Fmt.t_Arguments)\n          <:\n          Rust_primitives.Hax.t_Never)\n  in\n  let data:t_Array u8 (mk_usize 1024) = Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 1024) in\n  let data:t_Array u8 (mk_usize 1024) =\n    Rust_primitives.Hax.Monomorphized_update_at.update_at_range_from data\n      ({\n          Core_models.Ops.Range.f_start\n          =\n          v_BYTES -! (Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global bytes <: usize) <: usize\n        }\n        <:\n        Core_models.Ops.Range.t_RangeFrom usize)\n      (Core_models.Slice.impl__copy_from_slice #u8\n          (data.[ {\n                Core_models.Ops.Range.f_start\n                =\n                v_BYTES -! 
(Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global bytes <: usize) <: usize\n              }\n              <:\n              Core_models.Ops.Range.t_RangeFrom usize ]\n            <:\n            t_Slice u8)\n          (bytes.[ Core_models.Ops.Range.RangeFull <: Core_models.Ops.Range.t_RangeFull ]\n            <:\n            t_Slice u8)\n        <:\n        t_Slice u8)\n  in\n  { f_sign = sign; f_data = data } <: t_BigInt\n\n/// Constructs a [`num_bigint::BigInt`] out of a [`BigInt`].\nlet impl_BigInt__get (self: t_BigInt) : Num_bigint.Bigint.t_BigInt =\n  Num_bigint.Bigint.impl_BigInt__from_bytes_be self.f_sign (self.f_data <: t_Slice u8)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1: Core_models.Cmp.t_PartialEq t_BigInt t_BigInt =\n  {\n    f_eq_pre = (fun (self: t_BigInt) (other: t_BigInt) -> true);\n    f_eq_post = (fun (self: t_BigInt) (other: t_BigInt) (out: bool) -> true);\n    f_eq\n    =\n    fun (self: t_BigInt) (other: t_BigInt) ->\n      (impl_BigInt__get self <: Num_bigint.Bigint.t_BigInt) =.\n      (impl_BigInt__get other <: Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_2: Core_models.Cmp.t_Eq t_BigInt = { _super_i0 = FStar.Tactics.Typeclasses.solve }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_4: Core_models.Cmp.t_PartialOrd t_BigInt t_BigInt =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    f_partial_cmp_pre = (fun (self: t_BigInt) (other: t_BigInt) -> true);\n    f_partial_cmp_post\n    =\n    (fun\n        (self: t_BigInt)\n        (other: t_BigInt)\n        (out: Core_models.Option.t_Option Core_models.Cmp.t_Ordering)\n        ->\n        true);\n    f_partial_cmp\n    =\n    fun (self: t_BigInt) (other: t_BigInt) ->\n      Core_models.Cmp.f_partial_cmp #Num_bigint.Bigint.t_BigInt\n        #Num_bigint.Bigint.t_BigInt\n        #FStar.Tactics.Typeclasses.solve\n        (impl_BigInt__get self <: Num_bigint.Bigint.t_BigInt)\n        (impl_BigInt__get other <: 
Num_bigint.Bigint.t_BigInt)\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_3: Core_models.Cmp.t_Ord t_BigInt =\n  {\n    _super_i0 = FStar.Tactics.Typeclasses.solve;\n    _super_i1 = FStar.Tactics.Typeclasses.solve;\n    f_cmp_pre = (fun (self: t_BigInt) (other: t_BigInt) -> true);\n    f_cmp_post = (fun (self: t_BigInt) (other: t_BigInt) (out: Core_models.Cmp.t_Ordering) -> true);\n    f_cmp\n    =\n    fun (self: t_BigInt) (other: t_BigInt) ->\n      Core_models.Cmp.f_cmp #Num_bigint.Bigint.t_BigInt\n        #FStar.Tactics.Typeclasses.solve\n        (impl_BigInt__get self <: Num_bigint.Bigint.t_BigInt)\n        (impl_BigInt__get other <: Num_bigint.Bigint.t_BigInt)\n  }\n"
  },
  {
    "path": "hax-lib/proofs/fstar/extraction/Hax_lib.Int.fst",
    "content": "module Hax_lib.Int\n\nopen Rust_primitives\n\nunfold type t_Int = int\n\nunfold let impl_Int__to_u8 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\nunfold let impl_Int__to_u16 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\nunfold let impl_Int__to_u32 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\nunfold let impl_Int__to_u64 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\nunfold let impl_Int__to_u128 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\nunfold let impl_Int__to_usize (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\n\nunfold let impl_Int__to_i8 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\nunfold let impl_Int__to_i16 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\nunfold let impl_Int__to_i32 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\nunfold let impl_Int__to_i64 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\nunfold let impl_Int__to_i128 (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\nunfold let impl_Int__to_isize (#t:inttype) (n:range_t t) : int_t t = mk_int #t n\n\nunfold let impl_Int__pow2 (n: nat) = pow2 n\nunfold let impl_Int__rem_euclid = (%)\n"
  },
  {
    "path": "hax-lib/proofs/fstar/extraction/Hax_lib.Prop.Bundle.fst",
    "content": "module Hax_lib.Prop.Bundle\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Hax_lib.Abstraction in\n  ()\n\n/// Represent a logical proposition, that may be not computable.\ntype t_Prop = | Prop : bool -> t_Prop\n\nlet impl_7: Core_models.Clone.t_Clone t_Prop =\n  { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_8': Core_models.Marker.t_Copy t_Prop\n\nunfold\nlet impl_8 = impl_8'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_9': Core_models.Fmt.t_Debug t_Prop\n\nunfold\nlet impl_9 = impl_9'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1: Hax_lib.Abstraction.t_Abstraction bool =\n  {\n    f_AbstractType = t_Prop;\n    f_lift_pre = (fun (self: bool) -> true);\n    f_lift_post = (fun (self: bool) (out: t_Prop) -> true);\n    f_lift = fun (self: bool) -> Prop self <: t_Prop\n  }\n\nclass t_ToProp (v_Self: Type0) = {\n  f_to_prop_pre:v_Self -> Type0;\n  f_to_prop_post:v_Self -> t_Prop -> Type0;\n  f_to_prop:x0: v_Self\n    -> Prims.Pure t_Prop (f_to_prop_pre x0) (fun result -> f_to_prop_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_2: t_ToProp bool =\n  {\n    f_to_prop_pre = (fun (self: bool) -> true);\n    f_to_prop_post = (fun (self: bool) (out: t_Prop) -> true);\n    f_to_prop\n    =\n    fun (self: bool) -> Hax_lib.Abstraction.f_lift #bool #FStar.Tactics.Typeclasses.solve self\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_3: Core_models.Convert.t_From t_Prop bool =\n  {\n    f_from_pre = (fun (value: bool) -> true);\n    f_from_post = (fun (value: bool) (out: t_Prop) -> true);\n    f_from = fun (value: bool) -> Prop value <: t_Prop\n  }\n\n[@@ 
FStar.Tactics.Typeclasses.tcinstance]\nlet impl_4\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into v_T t_Prop)\n    : Core_models.Ops.Bit.t_BitAnd t_Prop v_T =\n  {\n    f_Output = t_Prop;\n    f_bitand_pre = (fun (self: t_Prop) (rhs: v_T) -> true);\n    f_bitand_post = (fun (self: t_Prop) (rhs: v_T) (out: t_Prop) -> true);\n    f_bitand\n    =\n    fun (self: t_Prop) (rhs: v_T) ->\n      Prop\n      (Core_models.Ops.Bit.f_bitand self._0\n          (Core_models.Convert.f_into #v_T #t_Prop #FStar.Tactics.Typeclasses.solve rhs <: t_Prop)\n            ._0)\n      <:\n      t_Prop\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_5\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into v_T t_Prop)\n    : Core_models.Ops.Bit.t_BitOr t_Prop v_T =\n  {\n    f_Output = t_Prop;\n    f_bitor_pre = (fun (self: t_Prop) (rhs: v_T) -> true);\n    f_bitor_post = (fun (self: t_Prop) (rhs: v_T) (out: t_Prop) -> true);\n    f_bitor\n    =\n    fun (self: t_Prop) (rhs: v_T) ->\n      Prop\n      (Core_models.Ops.Bit.f_bitor self._0\n          (Core_models.Convert.f_into #v_T #t_Prop #FStar.Tactics.Typeclasses.solve rhs <: t_Prop)\n            ._0)\n      <:\n      t_Prop\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_6: Core_models.Ops.Bit.t_Not t_Prop =\n  {\n    f_Output = t_Prop;\n    f_not_pre = (fun (self: t_Prop) -> true);\n    f_not_post = (fun (self: t_Prop) (out: t_Prop) -> true);\n    f_not = fun (self: t_Prop) -> Prop ~.self._0 <: t_Prop\n  }\n\nlet from_bool (b: bool) : t_Prop = Prop b <: t_Prop\n\n/// Lifts a boolean to a logical proposition.\nlet impl__from_bool (b: bool) : t_Prop = from_bool b\n\nlet v_and (lhs other: t_Prop) : t_Prop = Prop (lhs._0 && other._0) <: t_Prop\n\n/// Conjuction of two propositions.\nlet impl__and\n      (#iimpl_37134320_: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          
Core_models.Convert.t_Into iimpl_37134320_ t_Prop)\n      (self: t_Prop)\n      (other: iimpl_37134320_)\n    : t_Prop =\n  v_and self\n    (Core_models.Convert.f_into #iimpl_37134320_ #t_Prop #FStar.Tactics.Typeclasses.solve other\n      <:\n      t_Prop)\n\nlet or (lhs other: t_Prop) : t_Prop = Prop (lhs._0 || other._0) <: t_Prop\n\n/// Disjunction of two propositions.\nlet impl__or\n      (#iimpl_37134320_: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Core_models.Convert.t_Into iimpl_37134320_ t_Prop)\n      (self: t_Prop)\n      (other: iimpl_37134320_)\n    : t_Prop =\n  or self\n    (Core_models.Convert.f_into #iimpl_37134320_ #t_Prop #FStar.Tactics.Typeclasses.solve other\n      <:\n      t_Prop)\n\nlet not (lhs: t_Prop) : t_Prop = Prop ~.lhs._0 <: t_Prop\n\n/// Negation of a proposition.\nlet impl__not (self: t_Prop) : t_Prop = not self\n\n/// Logical equality between two value of *any* type\nlet eq (#v_T: Type0) (e_lhs e_rhs: v_T) : t_Prop = Prop true <: t_Prop\n\n/// Equality between two propositions.\nlet impl__eq\n      (#iimpl_37134320_: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Core_models.Convert.t_Into iimpl_37134320_ t_Prop)\n      (self: t_Prop)\n      (other: iimpl_37134320_)\n    : t_Prop =\n  eq #t_Prop\n    self\n    (Core_models.Convert.f_into #iimpl_37134320_ #t_Prop #FStar.Tactics.Typeclasses.solve other\n      <:\n      t_Prop)\n\nlet ne (#v_T: Type0) (e_lhs e_rhs: v_T) : t_Prop = Prop true <: t_Prop\n\n/// Equality between two propositions.\nlet impl__ne\n      (#iimpl_37134320_: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Core_models.Convert.t_Into iimpl_37134320_ t_Prop)\n      (self: t_Prop)\n      (other: iimpl_37134320_)\n    : t_Prop =\n  ne #t_Prop\n    self\n    (Core_models.Convert.f_into #iimpl_37134320_ #t_Prop #FStar.Tactics.Typeclasses.solve other\n      <:\n      t_Prop)\n\nlet implies__from__constructors 
(lhs other: t_Prop) : t_Prop = Prop (~.lhs._0 || other._0) <: t_Prop\n\n/// Logical implication.\nlet impl__implies\n      (#iimpl_37134320_: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Core_models.Convert.t_Into iimpl_37134320_ t_Prop)\n      (self: t_Prop)\n      (other: iimpl_37134320_)\n    : t_Prop =\n  implies__from__constructors self\n    (Core_models.Convert.f_into #iimpl_37134320_ #t_Prop #FStar.Tactics.Typeclasses.solve other\n      <:\n      t_Prop)\n\n/// The logical implication `a ==> b`.\nlet implies\n      (#iimpl_979615818_ #iimpl_648681637_: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Core_models.Convert.t_Into iimpl_979615818_ t_Prop)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Convert.t_Into iimpl_648681637_ t_Prop)\n      (lhs: iimpl_979615818_)\n      (rhs: iimpl_648681637_)\n    : t_Prop =\n  implies__from__constructors (Core_models.Convert.f_into #iimpl_979615818_\n        #t_Prop\n        #FStar.Tactics.Typeclasses.solve\n        lhs\n      <:\n      t_Prop)\n    (Core_models.Convert.f_into #iimpl_648681637_ #t_Prop #FStar.Tactics.Typeclasses.solve rhs\n      <:\n      t_Prop)\n\nlet v_forall__from__constructors\n      (#v_A #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_Fn v_F v_A)\n      (e_pred: v_F)\n    : t_Prop = Prop true <: t_Prop\n\n/// The universal quantifier. 
This should be used only for Hax code: in\n/// Rust, this is always true.\n/// # Example:\n/// The Rust expression `forall(|x: T| phi(x))` corresponds to `∀ (x: T), phi(x)`.\nlet v_forall\n      (#v_T #v_U #iimpl_367644862_: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into v_U t_Prop)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_Fn iimpl_367644862_ v_T)\n      (f: iimpl_367644862_)\n    : t_Prop =\n  v_forall__from__constructors #v_T\n    (fun x ->\n        let x:v_T = x in\n        Core_models.Convert.f_into #v_U\n          #t_Prop\n          #FStar.Tactics.Typeclasses.solve\n          (Core_models.Ops.Function.f_call #iimpl_367644862_\n              #v_T\n              #FStar.Tactics.Typeclasses.solve\n              f\n              (x <: v_T)\n            <:\n            v_U)\n        <:\n        t_Prop)\n\nlet v_exists__from__constructors\n      (#v_A #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_Fn v_F v_A)\n      (e_pred: v_F)\n    : t_Prop = Prop true <: t_Prop\n\n/// The existential quantifier. 
This should be used only for Hax code: in\n/// Rust, this is always true.\n/// # Example:\n/// The Rust expression `exists(|x: T| phi(x))` corresponds to `∃ (x: T), phi(x)`.\nlet v_exists\n      (#v_T #v_U #iimpl_367644862_: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_Into v_U t_Prop)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_Fn iimpl_367644862_ v_T)\n      (f: iimpl_367644862_)\n    : t_Prop =\n  v_exists__from__constructors #v_T\n    (fun x ->\n        let x:v_T = x in\n        Core_models.Convert.f_into #v_U\n          #t_Prop\n          #FStar.Tactics.Typeclasses.solve\n          (Core_models.Ops.Function.f_call #iimpl_367644862_\n              #v_T\n              #FStar.Tactics.Typeclasses.solve\n              f\n              (x <: v_T)\n            <:\n            v_U)\n        <:\n        t_Prop)\n"
  },
  {
    "path": "hax-lib/proofs/fstar/extraction/Hax_lib.Prop.Constructors.fst",
    "content": "module Hax_lib.Prop.Constructors\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Hax_lib.Prop.Bundle {from_bool as from_bool}\n\ninclude Hax_lib.Prop.Bundle {v_and as v_and}\n\ninclude Hax_lib.Prop.Bundle {or as or}\n\ninclude Hax_lib.Prop.Bundle {not as not}\n\ninclude Hax_lib.Prop.Bundle {eq as eq}\n\ninclude Hax_lib.Prop.Bundle {ne as ne}\n\ninclude Hax_lib.Prop.Bundle {implies__from__constructors as implies}\n\ninclude Hax_lib.Prop.Bundle {v_forall__from__constructors as v_forall}\n\ninclude Hax_lib.Prop.Bundle {v_exists__from__constructors as v_exists}\n"
  },
  {
    "path": "hax-lib/proofs/fstar/extraction/Hax_lib.Prop.fst",
    "content": "module Hax_lib.Prop\n\nunfold type t_Prop = Type0\n"
  },
  {
    "path": "hax-lib/proofs/fstar/extraction/Hax_lib.fst",
    "content": "module Hax_lib\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Tactics\n\nval v_assert (p: bool) : Pure unit (requires p) (ensures (fun x -> p))\nlet v_assert (v__formula: bool) = ()\n\nval assert_prop (p: Type0) : Pure unit (requires p) (ensures (fun x -> p))\nlet assert_prop (v__formula: Type0) = ()\n\nval v_assume (p: Type0) : Pure unit (requires True) (ensures (fun x -> p))\nlet v_assume (v__formula: Type0) = assume v__formula\n"
  },
  {
    "path": "hax-lib/proofs/fstar/extraction/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# It is tempting to factor this out into multiple Makefiles but that\n# makes it less portable, so resist temptation, or move to a more\n# sophisticated build system.\n#\n# We expect:\n#  1. `fstar.exe` to be in PATH (alternatively, you can also set\n#     $FSTAR_HOME to be set to your F* repo/install directory)\n#\n#  2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH.\n#\n#  3. the extracted Cargo crate to have \"hax-lib\" as a dependency:\n#     `hax-lib = { version = \"0.1.0-pre.1\", git = \"https://github.com/hacspec/hax\"}`\n#\n# Optionally, you can set `HACL_HOME`.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nHACL_HOME     ?= $(HOME)/.hax/hacl_home\nFSTAR_BIN     ?= $(shell command -v fstar.exe 1>&2 2> /dev/null && echo \"fstar.exe\" || echo \"$(FSTAR_HOME)/bin/fstar.exe\")\n\nCACHE_DIR     ?= .cache\nHINT_DIR      ?= .hints\n\nSHELL ?= /usr/bin/env bash\n\nEXECUTABLES = cargo cargo-hax jq\nK := $(foreach bin,$(EXECUTABLES),\\\n        $(if $(shell command -v $(bin) 2> /dev/null),,$(error 
\"No $(bin) in PATH\")))\n\n.PHONY: all verify clean\n\nall:\n\trm -f .depend && $(MAKE) .depend\n\t$(MAKE) verify\n\n# Default hax invocation\nHAX_CLI = \"cargo hax into fstar\"\n\n# If $HACL_HOME doesn't exist, clone it\n${HACL_HOME}:\n\tmkdir -p \"${HACL_HOME}\"\n\tgit clone --depth 1 https://github.com/hacl-star/hacl-star.git \"${HACL_HOME}\"\n\n# If no any F* file is detected, we run hax\nifeq \"$(wildcard *.fst *fsti)\" \"\"\n$(shell $(SHELL) -c $(HAX_CLI))\nendif\n\n# By default, we process all the files in the current directory\nROOTS = $(wildcard *.fst *fsti)\n\n# Regenerate F* files via hax when Rust sources change\n$(ROOTS): $(shell find ../../../src -type f -name '*.rs')\n\t$(shell $(SHELL) -c $(HAX_CLI))\n\n# The following is a bash script that discovers F* libraries\ndefine FINDLIBS\n    # Prints a path if and only if it exists. Takes one argument: the\n    # path.\n    function print_if_exists() {\n        if [ -d \"$$1\" ]; then\n            echo \"$$1\"\n        fi\n    }\n    # Asks Cargo all the dependencies for the current crate or workspace,\n    # and extract all \"root\" directories for each. Takes zero argument.\n    function dependencies() {\n        cargo metadata --format-version 1 |\n            jq -r '.packages | .[] | .manifest_path | split(\"/\") | .[:-1] | join(\"/\")'\n    }\n    # Find hax libraries *around* a given path. Takes one argument: the\n    # path.\n    function find_hax_libraries_at_path() {\n        path=\"$$1\"\n        # if there is a `proofs/fstar/extraction` subfolder, then that's a\n        # F* library\n        print_if_exists \"$$path/proofs/fstar/extraction\"\n        # Maybe the `proof-libs` folder of hax is around?\n        MAYBE_PROOF_LIBS=$$(realpath -q \"$$path/../proof-libs/fstar\")\n        if [ $$? 
-eq 0 ]; then\n            print_if_exists \"$$MAYBE_PROOF_LIBS/core\"\n            print_if_exists \"$$MAYBE_PROOF_LIBS/rust_primitives\"\n        fi\n    }\n    { while IFS= read path; do\n          find_hax_libraries_at_path \"$$path\"\n      done < <(dependencies)\n    } | sort -u\nendef\nexport FINDLIBS\n\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(shell bash -c \"$$FINDLIBS\")\n\nFSTAR_FLAGS = --cmi \\\n  --warn_error -331 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR = $(FSTAR_BIN) $(FSTAR_FLAGS)\n\n.depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS)\n\t$(info $(ROOTS))\n\t$(FSTAR) --cmi --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR):\n\tmkdir -p $@\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR)\n\t$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints\n\nverify: $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\n\n# Targets for interactive mode\n\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n\n# Clean targets\n\nclean:\n\trm -rf $(CACHE_DIR)/*\n\trm *.fst\n"
  },
  {
    "path": "hax-lib/src/abstraction.rs",
    "content": "/// Marks a type as abstractable: its values can be mapped to an\n/// idealized version of the type. For instance, machine integers,\n/// which have bounds, can be mapped to mathematical integers.\n///\n/// Each type can have only one abstraction.\npub trait Abstraction {\n    /// What is the ideal type values should be mapped to?\n    type AbstractType;\n    /// Maps a concrete value to its abstract counterpart\n    fn lift(self) -> Self::AbstractType;\n}\n\n/// Marks a type as abstract: its values can be lowered to concrete\n/// values. This might panic.\npub trait Concretization<T> {\n    /// Maps an abstract value and lowers it to its concrete counterpart.\n    fn concretize(self) -> T;\n}\n"
  },
  {
    "path": "hax-lib/src/dummy.rs",
    "content": "mod abstraction;\npub use abstraction::Concretization;\n\npub mod prop;\npub use prop::*;\n\npub use int::*;\n\n#[cfg(feature = \"macros\")]\npub use crate::proc_macros::*;\n\n#[macro_export]\nmacro_rules! debug_assert {\n    ($($arg:tt)*) => {\n        ::core::debug_assert!($($arg)*);\n    };\n}\n\n#[macro_export]\nmacro_rules! assert {\n    ($($arg:tt)*) => {\n        ::core::assert!($($arg)*);\n    };\n}\n\n#[macro_export]\nmacro_rules! assert_prop {\n    ($($arg:tt)*) => {{}};\n}\n\n#[macro_export]\nmacro_rules! assume {\n    ($formula:expr) => {\n        ()\n    };\n}\n\n#[doc(hidden)]\npub fn inline(_: &str) {}\n\n#[doc(hidden)]\npub fn inline_unsafe<T>(_: &str) -> T {\n    unreachable!()\n}\n\n#[doc(hidden)]\npub const fn _internal_loop_invariant<T, R: Into<Prop>, P: FnOnce(T) -> R>(_: &P) {}\n\n#[doc(hidden)]\npub const fn _internal_while_loop_invariant(_: Prop) {}\n\n#[doc(hidden)]\npub const fn _internal_loop_decreases(_: int::Int) {}\n\npub trait Refinement {\n    type InnerType;\n    fn new(x: Self::InnerType) -> Self;\n    fn get(self) -> Self::InnerType;\n    fn get_mut(&mut self) -> &mut Self::InnerType;\n    fn invariant(value: Self::InnerType) -> crate::Prop;\n}\n\npub trait RefineAs<RefinedType> {\n    fn into_checked(self) -> RefinedType;\n}\n\npub mod int {\n    use core::ops::*;\n\n    #[macro_export]\n    macro_rules! 
int {\n        ($lit:expr) => {\n            Int($lit)\n        };\n    }\n\n    #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]\n    pub struct Int(pub u8);\n\n    impl Int {\n        pub fn new(x: impl Into<u8>) -> Self {\n            Int(x.into())\n        }\n        pub fn get(self) -> u8 {\n            self.0\n        }\n    }\n\n    impl Add for Int {\n        type Output = Self;\n\n        fn add(self, other: Self) -> Self::Output {\n            Int(self.0 + other.0)\n        }\n    }\n\n    impl Sub for Int {\n        type Output = Self;\n\n        fn sub(self, other: Self) -> Self::Output {\n            Int(self.0 - other.0)\n        }\n    }\n\n    impl Mul for Int {\n        type Output = Self;\n\n        fn mul(self, other: Self) -> Self::Output {\n            Int(self.0 * other.0)\n        }\n    }\n\n    impl Div for Int {\n        type Output = Self;\n\n        fn div(self, other: Self) -> Self::Output {\n            Int(self.0 / other.0)\n        }\n    }\n\n    impl Int {\n        pub fn pow2(self) -> Self {\n            self\n        }\n        pub fn _unsafe_from_str(_s: &str) -> Self {\n            Int(0)\n        }\n        pub fn rem_euclid(&self, v: Self) -> Self {\n            Self::new(self.0.rem_euclid(v.0))\n        }\n    }\n\n    pub trait ToInt {\n        fn to_int(self) -> Int;\n    }\n\n    pub trait Abstraction {\n        type AbstractType;\n        fn lift(self) -> Self::AbstractType;\n    }\n\n    pub trait Concretization<T> {\n        fn concretize(self) -> T;\n    }\n\n    macro_rules! 
implement_abstraction {\n        ($ty:ident) => {\n            impl Abstraction for $ty {\n                type AbstractType = Int;\n                fn lift(self) -> Self::AbstractType {\n                    Int(0)\n                }\n            }\n            impl ToInt for $ty {\n                fn to_int(self) -> Int {\n                    self.lift()\n                }\n            }\n        };\n        ($($ty:ident)*) => {\n            $(implement_abstraction!($ty);)*\n        };\n    }\n\n    implement_abstraction!(u8 u16 u32 u64 u128 usize);\n    implement_abstraction!(i8 i16 i32 i64 i128 isize);\n\n    macro_rules! implement_concretize {\n        ($ty:ident $method:ident) => {\n            impl Concretization<$ty> for Int {\n                fn concretize(self) -> $ty {\n                    self.0 as $ty\n                }\n            }\n            impl Int {\n                pub fn $method(self) -> $ty {\n                    self.concretize()\n                }\n            }\n        };\n        ($ty:ident $method:ident, $($tt:tt)*) => {\n            implement_concretize!($ty $method);\n            implement_concretize!($($tt)*);\n        };\n        () => {};\n    }\n\n    implement_concretize!(\n        u8    to_u8,\n        u16   to_u16,\n        u32   to_u32,\n        u64   to_u64,\n        u128  to_u128,\n        usize to_usize,\n        i8    to_i8,\n        i16   to_i16,\n        i32   to_i32,\n        i64   to_i64,\n        i128  to_i128,\n        isize to_isize,\n    );\n}\n"
  },
  {
    "path": "hax-lib/src/implementation.rs",
    "content": "mod abstraction;\npub use abstraction::*;\n\npub mod int;\npub use int::*;\n\npub mod prop;\npub use prop::*;\n\n#[cfg(feature = \"macros\")]\npub use crate::proc_macros::*;\n\n#[doc(hidden)]\n#[cfg(hax)]\n#[macro_export]\nmacro_rules! proxy_macro_if_not_hax {\n    ($macro:path, no, $($arg:tt)*) => {\n        ()\n    };\n    ($macro:path, $f:expr, $cond:expr$(, $($arg:tt)*)?) => {\n        $f($cond)\n    };\n}\n\n#[cfg(not(debug_assertions))]\n#[doc(hidden)]\n#[cfg(not(hax))]\n#[macro_export]\nmacro_rules! proxy_macro_if_not_hax {\n    ($macro:path, $f:expr, $($arg:tt)*) => {};\n}\n\n#[cfg(debug_assertions)]\n#[doc(hidden)]\n#[cfg(not(hax))]\n#[macro_export]\nmacro_rules! proxy_macro_if_not_hax {\n    ($macro:path, $f:expr, $($arg:tt)*) => {\n        $macro!($($arg)*)\n    };\n}\n\n#[macro_export]\n/// Proxy to `std::debug_assert!`. Compiled with `hax`, this\n/// disappears.\nmacro_rules! debug_assert {\n    ($($arg:tt)*) => {\n        $crate::proxy_macro_if_not_hax!(::core::debug_assert, no, $($arg)*)\n    };\n}\n\n#[macro_export]\n/// Proxy to `std::assert!`. Compiled with `hax`, this is transformed\n/// into a `assert` in the backend.\nmacro_rules! assert {\n    ($($arg:tt)*) => {\n        $crate::proxy_macro_if_not_hax!(::core::assert, $crate::assert, $($arg)*)\n    };\n}\n\n#[doc(hidden)]\n#[cfg(hax)]\n/// This function exists only when compiled with `hax`, and is not\n/// meant to be used directly. It is called by `assert!` only in\n/// appropriate situations.\npub fn assert(_formula: bool) {}\n\n#[macro_export]\n/// Assert a logical proposition [`Prop`]: this exists only in the backends of\n/// hax. In Rust, this macro expands to an empty block `{ }`.\nmacro_rules! 
assert_prop {\n    ($($arg:tt)*) => {\n        {\n            #[cfg(hax)]\n            {\n                $crate::assert_prop(::hax_lib::Prop::from($($arg)*));\n            }\n        }\n    };\n}\n\n#[doc(hidden)]\n#[cfg(hax)]\n/// This function exists only when compiled with `hax`, and is not meant to be\n/// used directly. It is called by `assert_prop!` only in appropriate\n/// situations.\npub fn assert_prop(_formula: Prop) {}\n\n#[doc(hidden)]\n#[cfg(hax)]\n/// This function exists only when compiled with `hax`, and is not\n/// meant to be used directly. It is called by `assume!` only in\n/// appropriate situations.\npub fn assume(_formula: Prop) {}\n\n#[cfg(hax)]\n#[macro_export]\nmacro_rules! assume {\n    ($formula:expr) => {\n        $crate::assume(::hax_lib::Prop::from($formula))\n    };\n}\n\n/// Assume a proposition holds. In Rust, this is expanded to the\n/// expression `()`. While extracted with Hax, this gets expanded to a\n/// call to an `assume` function.\n///\n/// # Example:\n///\n/// ```rust\n/// fn sum(x: u32, y: u32) -> u32 {\n///   hax_lib::assume!(x < 4242 && y < 424242);\n///   x + y\n/// }\n/// ```\n#[cfg(not(hax))]\n#[macro_export]\nmacro_rules! assume {\n    ($formula:expr) => {\n        ()\n    };\n}\n\n/// Dummy function that carries a string to be printed as such in the output language\n#[doc(hidden)]\npub fn inline(_: &str) {}\n\n/// Similar to `inline`, but allows for any type. Do not use directly.\n#[doc(hidden)]\npub fn inline_unsafe<T>(_: &str) -> T {\n    unreachable!()\n}\n\n/// Sink for any value into unit. This is used internally by hax to capture\n/// value of any type. 
Specifically, this is useful for the `decreases` clauses\n/// for the F* backend.\n#[doc(hidden)]\npub fn any_to_unit<T>(_: T) -> () {\n    unreachable!()\n}\n\n/// A dummy function that holds a loop invariant.\n#[doc(hidden)]\npub fn _internal_loop_invariant<T, R: Into<Prop>, P: FnOnce(T) -> R>(_: P) {}\n\n/// A dummy function that holds a while loop invariant.\n#[doc(hidden)]\npub const fn _internal_while_loop_invariant(_: Prop) {}\n\n/// A dummy function that holds a loop variant.\n#[doc(hidden)]\npub fn _internal_loop_decreases(_: Int) {}\n\n/// A type that implements `Refinement` should be a newtype for a\n/// type `T`. The field holding the value of type `T` should be\n/// private, and `Refinement` should be the only interface to the\n/// type.\n///\n/// Please never implement this trait yourself, use the\n/// `refinement_type` macro instead.\npub trait Refinement {\n    /// The base type\n    type InnerType;\n    /// Smart constructor capturing an invariant. Its extraction will\n    /// yield a proof obligation.\n    fn new(x: Self::InnerType) -> Self;\n    /// Destructor for the refined type\n    fn get(self) -> Self::InnerType;\n    /// Gets a mutable reference to a refinement\n    fn get_mut(&mut self) -> &mut Self::InnerType;\n    /// Tests wether a value satisfies the refinement\n    fn invariant(value: Self::InnerType) -> Prop;\n}\n\n/// A utilitary trait that provides a `into_checked` method on traits\n/// that have a refined counter part. This trait is parametrized by a\n/// type `Target`: a base type can be refined in multiple ways.\n///\n/// Please never implement this trait yourself, use the\n/// `refinement_type` macro instead.\npub trait RefineAs<RefinedType> {\n    /// Smart constructor for `RefinedType`, checking the invariant\n    /// `RefinedType::invariant`. 
The check is done statically via\n    /// extraction to hax: extracted code will yield static proof\n    /// obligations.\n    ///\n    /// In addition, in debug mode, the invariant is checked at\n    /// run-time, unless this behavior was disabled when defining the\n    /// refinement type `RefinedType` with the `refinement_type` macro\n    /// and its `no_debug_runtime_check` option.\n    fn into_checked(self) -> RefinedType;\n}\n"
  },
  {
    "path": "hax-lib/src/int/bigint.rs",
    "content": "//! This module provides an approximation of `BigInt` which is\n//! copiable, via an big array of `u8` of an fixed arbitrary size\n//! `BYTES`.\n//! Its interface provides bridges to `num_bigint::BigInt`.\n\n/// Maximal number of bytes stored in our copiable `BigInt`s.\nconst BYTES: usize = 1024;\n#[derive(Debug, Copy, Clone)]\npub(super) struct BigInt {\n    sign: num_bigint::Sign,\n    data: [u8; BYTES],\n}\nimpl BigInt {\n    /// Construct a [`BigInt`] from a [`num_bigint::BigInt`]. This\n    /// operation panics when the provided [`num_bigint::BigInt`]\n    /// has more than [`BYTES`] bytes.\n    pub(super) fn new(i: &num_bigint::BigInt) -> Self {\n        let (sign, bytes) = i.to_bytes_be();\n        if bytes.len() > BYTES {\n            panic!(\"`copiable_bigint::BigInt::new`: too big, please consider increasing `BYTES`\");\n        }\n        let mut data = [0; BYTES];\n        data[BYTES - bytes.len()..].copy_from_slice(&bytes[..]);\n        BigInt { sign, data }\n    }\n\n    /// Constructs a [`num_bigint::BigInt`] out of a [`BigInt`].\n    pub(super) fn get(self) -> num_bigint::BigInt {\n        num_bigint::BigInt::from_bytes_be(self.sign, &self.data)\n    }\n}\n\nimpl core::cmp::PartialEq for BigInt {\n    fn eq(&self, other: &Self) -> bool {\n        self.get() == other.get()\n    }\n}\nimpl core::cmp::Eq for BigInt {}\nimpl core::cmp::Ord for BigInt {\n    fn cmp(&self, other: &Self) -> core::cmp::Ordering {\n        self.get().cmp(&other.get())\n    }\n}\nimpl core::cmp::PartialOrd for BigInt {\n    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {\n        self.get().partial_cmp(&other.get())\n    }\n}\n"
  },
  {
    "path": "hax-lib/src/int/mod.rs",
    "content": "use core::fmt;\nuse core::ops::*;\nuse num_traits::cast::ToPrimitive;\n\nmod bigint;\nuse bigint::*;\n\nuse super::abstraction::*;\n\n#[cfg(feature = \"macros\")]\npub use hax_lib_macros::int;\n\n/// Mathematical integers for writting specifications. Mathematical\n/// integers are unbounded and arithmetic operation on them never over\n/// or underflow.\n#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]\npub struct Int(BigInt);\n\nimpl fmt::Display for Int {\n    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n        write!(f, \"{}\", self.get())\n    }\n}\n\nimpl Int {\n    fn new(x: impl Into<num_bigint::BigInt>) -> Self {\n        Int(BigInt::new(&x.into()))\n    }\n    fn get(self) -> num_bigint::BigInt {\n        self.0.get()\n    }\n}\n\nimpl Add for Int {\n    type Output = Self;\n\n    fn add(self, other: Self) -> Self::Output {\n        Self::new(self.get() + other.get())\n    }\n}\n\nimpl Neg for Int {\n    type Output = Self;\n\n    fn neg(self) -> Self::Output {\n        Self::new(-self.get())\n    }\n}\n\nimpl Sub for Int {\n    type Output = Self;\n\n    fn sub(self, other: Self) -> Self::Output {\n        Self::new(self.get() - other.get())\n    }\n}\n\nimpl Mul for Int {\n    type Output = Self;\n\n    fn mul(self, other: Self) -> Self::Output {\n        Self::new(self.get() * other.get())\n    }\n}\n\nimpl Div for Int {\n    type Output = Self;\n\n    fn div(self, other: Self) -> Self::Output {\n        Self::new(self.get() / other.get())\n    }\n}\n\nimpl Int {\n    /// Raises `2` at the power `self`\n    pub fn pow2(self) -> Self {\n        let exponent = self.get().to_u32().expect(\"Exponent doesn't fit in a u32\");\n        Self::new(num_bigint::BigInt::from(2u8).pow(exponent))\n    }\n\n    /// Constructs a `Int` out of a string literal. 
This function\n    /// assumes its argument consists only of decimal digits, with\n    /// optionally a minus sign prefix.\n    pub fn _unsafe_from_str(s: &str) -> Self {\n        use core::str::FromStr;\n        Self::new(num_bigint::BigInt::from_str(s).unwrap())\n    }\n\n    pub fn rem_euclid(&self, v: Self) -> Self {\n        use num_traits::Euclid;\n        Self::new(self.get().rem_euclid(&v.get()))\n    }\n}\n\n#[cfg(feature = \"macros\")]\npub trait ToInt {\n    fn to_int(self) -> Int;\n}\n\n/// Instead of defining one overloaded instance, which relies\n/// explicitely on `num_bigint`:\n///\n/// ```ignore\n/// impl<T: Into<num_bigint::BigInt>> Abstraction for T {\n///     type AbstractType = Int;\n///     fn lift(self) -> Self::AbstractType {\n///         Int::new(self.into())\n///     }\n/// }\n/// ```\n///\n/// We define an instance per machine type: we don't want the\n/// interface of this module to rely specifically on\n/// `num_bigint`. This module should be a very thin layer.\nmacro_rules! implement_abstraction {\n    ($ty:ident) => {\n        impl Abstraction for $ty {\n            type AbstractType = Int;\n            fn lift(self) -> Self::AbstractType {\n                Int::new(num_bigint::BigInt::from(self))\n            }\n        }\n        impl ToInt for $ty {\n            fn to_int(self) -> Int {\n                self.lift()\n            }\n        }\n    };\n    ($($ty:ident)*) => {\n        $(implement_abstraction!($ty);)*\n    };\n}\n\nimplement_abstraction!(u8 u16 u32 u64 u128 usize);\nimplement_abstraction!(i8 i16 i32 i64 i128 isize);\n\nmacro_rules! 
implement_concretize {\n    ($ty:ident $method:ident) => {\n        impl Concretization<$ty> for Int {\n            fn concretize(self) -> $ty {\n                let concretized = self.get().$method();\n                debug_assert!(concretized.is_some());\n                concretized.unwrap().into()\n            }\n        }\n        impl Int {\n            pub fn $method(self) -> $ty {\n                self.concretize()\n            }\n        }\n    };\n    ($ty:ident $method:ident, $($tt:tt)*) => {\n        implement_concretize!($ty $method);\n        implement_concretize!($($tt)*);\n    };\n    () => {};\n}\n\nimplement_concretize!(\n    u8    to_u8,\n    u16   to_u16,\n    u32   to_u32,\n    u64   to_u64,\n    u128  to_u128,\n    usize to_usize,\n    i8    to_i8,\n    i16   to_i16,\n    i32   to_i32,\n    i64   to_i64,\n    i128  to_i128,\n    isize to_isize,\n);\n"
  },
  {
    "path": "hax-lib/src/lib.rs",
    "content": "//! Hax-specific helpers for Rust programs. Those helpers are usually\n//! no-ops when compiled normally but meaningful when compiled under\n//! hax.\n//!\n//! # Example:\n//!\n//! ```rust\n//! use hax_lib::*;\n//! fn sum(x: Vec<u32>, y: Vec<u32>) -> Vec<u32> {\n//!   hax_lib::assume!(x.len() == y.len());\n//!   hax_lib::assert!(x.len() >= 0);\n//!   hax_lib::assert_prop!(forall(|i: usize| implies(i < x.len(), x[i] < 4242)));\n//!   hax_lib::debug_assert!(exists(|i: usize| implies(i < x.len(), x[i] > 123)));\n//!   x.into_iter().zip(y.into_iter()).map(|(x, y)| x + y).collect()\n//! }\n//! ```\n\n#![no_std]\n\n#[cfg(feature = \"macros\")]\nmod proc_macros;\n\n// hax engine relies on `hax-lib` names: to avoid cluttering names with\n// an additional `implementation` in all paths, we `include!` instead\n// of doing conditional `mod` and `pub use`.\n\n#[cfg(not(hax))]\ncore::include!(\"dummy.rs\");\n#[cfg(hax)]\ncore::include!(\"implementation.rs\");\n"
  },
  {
    "path": "hax-lib/src/proc_macros.rs",
    "content": "//! This module re-exports macros from `hax-lib-macros` since a\n//! proc-macro crate cannot export anything but procedural macros.\n\npub use hax_lib_macros::{\n    attributes, decreases, ensures, exclude, impl_fn_decoration, include, lemma, loop_decreases,\n    loop_invariant, opaque, opaque_type, refinement_type, requires, trait_fn_decoration,\n    transparent,\n};\n\npub use hax_lib_macros::{\n    process_init, process_read, process_write, protocol_messages, pv_constructor, pv_handwritten,\n};\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/proc_macros_generated.rs\"));\n"
  },
  {
    "path": "hax-lib/src/prop.rs",
    "content": "use crate::abstraction::*;\nuse core::ops::*;\n\n/// Represent a logical proposition, that may be not computable.\n#[derive(Clone, Copy, Debug)]\npub struct Prop(bool);\n\n/// This module provides monomorphic constructors for `Prop`.\n/// Hax rewrite more elaborated versions (see `forall` or `AndBit` below) to those monomorphic constructors.\npub mod constructors {\n    use super::Prop;\n    pub const fn from_bool(b: bool) -> Prop {\n        Prop(b)\n    }\n    pub fn and(lhs: Prop, other: Prop) -> Prop {\n        Prop(lhs.0 && other.0)\n    }\n    pub fn or(lhs: Prop, other: Prop) -> Prop {\n        Prop(lhs.0 || other.0)\n    }\n    pub fn not(lhs: Prop) -> Prop {\n        Prop(!lhs.0)\n    }\n\n    /// Logical equality between two value of *any* type\n    pub fn eq<T>(_lhs: T, _rhs: T) -> Prop {\n        Prop(true)\n    }\n\n    pub fn ne<T>(_lhs: T, _rhs: T) -> Prop {\n        Prop(true)\n    }\n\n    pub fn implies(lhs: Prop, other: Prop) -> Prop {\n        Prop(lhs.0 || !other.0)\n    }\n\n    pub fn forall<A, F: Fn(A) -> Prop>(_pred: F) -> Prop {\n        Prop(true)\n    }\n\n    pub fn exists<A, F: Fn(A) -> Prop>(_pred: F) -> Prop {\n        Prop(true)\n    }\n}\n\nimpl Prop {\n    /// Lifts a boolean to a logical proposition.\n    pub const fn from_bool(b: bool) -> Self {\n        constructors::from_bool(b)\n    }\n    /// Conjuction of two propositions.\n    pub fn and(self, other: impl Into<Self>) -> Self {\n        constructors::and(self, other.into())\n    }\n    /// Disjunction of two propositions.\n    pub fn or(self, other: impl Into<Self>) -> Self {\n        constructors::or(self, other.into())\n    }\n    /// Negation of a proposition.\n    pub fn not(self) -> Self {\n        constructors::not(self)\n    }\n    /// Equality between two propositions.\n    pub fn eq(self, other: impl Into<Self>) -> Self {\n        constructors::eq(self, other.into())\n    }\n    /// Equality between two propositions.\n    pub fn ne(self, other: impl 
Into<Self>) -> Self {\n        constructors::ne(self, other.into())\n    }\n    /// Logical implication.\n    pub fn implies(self, other: impl Into<Self>) -> Self {\n        constructors::implies(self, other.into())\n    }\n}\n\nimpl Abstraction for bool {\n    type AbstractType = Prop;\n    fn lift(self) -> Self::AbstractType {\n        Prop(self)\n    }\n}\n\npub trait ToProp {\n    fn to_prop(self) -> Prop;\n}\nimpl ToProp for bool {\n    fn to_prop(self) -> Prop {\n        self.lift()\n    }\n}\n\nimpl From<bool> for Prop {\n    fn from(value: bool) -> Self {\n        Prop(value)\n    }\n}\n\nimpl<T: Into<Prop>> BitAnd<T> for Prop {\n    type Output = Prop;\n    fn bitand(self, rhs: T) -> Self::Output {\n        Prop(self.0 & rhs.into().0)\n    }\n}\n\nimpl<T: Into<Prop>> BitOr<T> for Prop {\n    type Output = Prop;\n    fn bitor(self, rhs: T) -> Self::Output {\n        Prop(self.0 | rhs.into().0)\n    }\n}\n\nimpl Not for Prop {\n    type Output = Prop;\n    fn not(self) -> Self::Output {\n        Prop(!self.0)\n    }\n}\n\n/// The universal quantifier. This should be used only for Hax code: in\n/// Rust, this is always true.\n///\n/// # Example:\n///\n/// The Rust expression `forall(|x: T| phi(x))` corresponds to `∀ (x: T), phi(x)`.\npub fn forall<T, U: Into<Prop>>(f: impl Fn(T) -> U) -> Prop {\n    constructors::forall(|x| f(x).into())\n}\n\n/// The existential quantifier. This should be used only for Hax code: in\n/// Rust, this is always true.\n///\n/// # Example:\n///\n/// The Rust expression `exists(|x: T| phi(x))` corresponds to `∃ (x: T), phi(x)`.\npub fn exists<T, U: Into<Prop>>(f: impl Fn(T) -> U) -> Prop {\n    constructors::exists(|x| f(x).into())\n}\n\n/// The logical implication `a ==> b`.\npub fn implies(lhs: impl Into<Prop>, rhs: impl Into<Prop>) -> Prop {\n    constructors::implies(lhs.into(), rhs.into())\n}\n\npub use constructors::eq;\n"
  },
  {
    "path": "hax-lib-protocol/Cargo.toml",
    "content": "[package]\nname = \"hax-lib-protocol\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\n\n[dependencies]\nlibcrux = \"0.0.2-pre.2\"\n\n[package.metadata.release]\nrelease = false\n"
  },
  {
    "path": "hax-lib-protocol/README.md",
    "content": "# Hax Protocol Library\nThis crate provides tools for protocol developers to write protcol\nspecifications for hax.\n\n## Protocol Traits\nTo hax, a protocol is a collection of communicating state\nmachines. This module provides traits that describe parts of a state\nmachine's behaviour, specifically it provides traits for creating an\ninitial state, and for state transition behaviour when reading or\nwriting a message.\n\n## Cryptographic Abstractions\nBeside message passing and state transitions, a protocol of course\nincludes operations on the sent and received messages. For\ncryptographic protocols, these will be of a fairly restricted set of\ncryptoraphic primitive operations, which are provided in these\ncryptographic abstractions. This allows protocol authors to specify\nprotocol party internal operations in a way that is easily accessible\nto hax.\n"
  },
  {
    "path": "hax-lib-protocol/src/crypto.rs",
    "content": "//! This module defines a cryptographic abstraction layer for use in\n//! hax protocol specifications.\n\nuse crate::ProtocolError;\n\n/// An abstract Diffie-Hellman scalar.\n#[derive(Clone)]\npub struct DHScalar(Vec<u8>);\n\nimpl DHScalar {\n    /// Wrap bytes into a Diffie-Hellman scalar. Does *not* perform\n    /// input validation.\n    pub fn from_bytes(bytes: &[u8]) -> Self {\n        DHScalar(bytes.to_vec())\n    }\n}\n\n/// An abstract Diffie-Hellman group element.\npub struct DHElement(Vec<u8>);\n\nimpl DHElement {\n    /// Wrap bytes into a Diffie-Hellman group element. Does *not* perform\n    /// input validation.\n    pub fn from_bytes(bytes: &[u8]) -> Self {\n        DHElement(bytes.to_vec())\n    }\n}\n\n/// Choice of Diffie-Hellman groups.\npub enum DHGroup {\n    X25519,\n    X448,\n    P256,\n    P384,\n    P521,\n}\n\nimpl From<DHGroup> for libcrux::ecdh::Algorithm {\n    /// Converter to `libcrux` type.\n    fn from(value: DHGroup) -> Self {\n        match value {\n            DHGroup::X25519 => libcrux::ecdh::Algorithm::X25519,\n            DHGroup::X448 => libcrux::ecdh::Algorithm::X448,\n            DHGroup::P256 => libcrux::ecdh::Algorithm::P256,\n            DHGroup::P384 => libcrux::ecdh::Algorithm::P384,\n            DHGroup::P521 => libcrux::ecdh::Algorithm::P521,\n        }\n    }\n}\n\n/// Scalar multiplication of `scalar` and `element`.\npub fn dh_scalar_multiply(group: DHGroup, scalar: DHScalar, element: DHElement) -> Vec<u8> {\n    libcrux::ecdh::derive(group.into(), element.0, scalar.0).unwrap()\n}\n\n/// Scalar multiplication of a fixed generator and `scalar`.\npub fn dh_scalar_multiply_base(group: DHGroup, scalar: DHScalar) -> Vec<u8> {\n    libcrux::ecdh::secret_to_public(group.into(), scalar.0).unwrap()\n}\n\n/// An abstract AEAD key.\npub struct AEADKey(libcrux::aead::Key);\n\n/// Choice of AEAD algorithms.\npub enum AEADAlgorithm {\n    Aes128Gcm,\n    Aes256Gcm,\n    Chacha20Poly1305,\n}\n\nimpl 
From<AEADAlgorithm> for libcrux::aead::Algorithm {\n    /// Converter to `libcrux` type.\n    fn from(value: AEADAlgorithm) -> Self {\n        match value {\n            AEADAlgorithm::Aes128Gcm => libcrux::aead::Algorithm::Aes128Gcm,\n            AEADAlgorithm::Aes256Gcm => libcrux::aead::Algorithm::Aes256Gcm,\n            AEADAlgorithm::Chacha20Poly1305 => libcrux::aead::Algorithm::Chacha20Poly1305,\n        }\n    }\n}\n\nimpl AEADKey {\n    /// Attempt deserialization of `bytes` into an AEAD key for\n    /// `algorithm`. Panics on failure.\n    pub fn from_bytes(algorithm: AEADAlgorithm, bytes: &[u8]) -> Self {\n        AEADKey(libcrux::aead::Key::from_bytes(algorithm.into(), bytes.to_vec()).unwrap())\n    }\n}\n\n/// An abstract AEAD initialization vector.\npub struct AEADIV(libcrux::aead::Iv);\n\nimpl AEADIV {\n    /// Attempt construction of an AEAD IV from `bytes`. Panics if\n    /// number of `bytes` is insufficient.\n    pub fn from_bytes(bytes: &[u8]) -> Self {\n        AEADIV(libcrux::aead::Iv::new(bytes).unwrap())\n    }\n}\n\n/// An abstract AEAD authentication tag.\npub struct AEADTag(libcrux::aead::Tag);\nimpl AEADTag {\n    /// Attempt deserialization of an AEAD tag from `bytes`. Panics if\n    /// number of `bytes` is insufficient.\n    pub fn from_bytes(bytes: &[u8]) -> Self {\n        let bytes: [u8; 16] = bytes.try_into().unwrap();\n        AEADTag(libcrux::aead::Tag::from(bytes))\n    }\n}\n\n/// Abstract AEAD encryption using `algorithm`. Returns a pair of byte\n/// vectors `(ciphertext, tag)`.\npub fn aead_encrypt(key: AEADKey, iv: AEADIV, aad: &[u8], plain: &[u8]) -> (Vec<u8>, Vec<u8>) {\n    let (tag, cip) = libcrux::aead::encrypt_detached(&key.0, plain, iv.0, aad).unwrap();\n    (cip, tag.as_ref().to_vec())\n}\n\n/// Abstract AEAD decryption using `algorithm`. 
On success returns the\n/// decrypted plaintext, otherwise a `CryptoError`.\npub fn aead_decrypt(\n    key: AEADKey,\n    iv: AEADIV,\n    aad: &[u8],\n    cip: &[u8],\n    tag: AEADTag,\n) -> Result<Vec<u8>, ProtocolError> {\n    libcrux::aead::decrypt_detached(&key.0, cip, iv.0, aad, &tag.0)\n        .map_err(|_| ProtocolError::CryptoError)\n}\n\n/// Choice of hashing algorithms.\npub enum HashAlgorithm {\n    Sha1,\n    Sha224,\n    Sha256,\n    Sha384,\n    Sha512,\n    Blake2s,\n    Blake2b,\n    Sha3_224,\n    Sha3_256,\n    Sha3_384,\n    Sha3_512,\n}\n\nimpl From<HashAlgorithm> for libcrux::digest::Algorithm {\n    /// Converter to `libcrux` type.\n    fn from(value: HashAlgorithm) -> Self {\n        match value {\n            HashAlgorithm::Sha1 => libcrux::digest::Algorithm::Sha1,\n            HashAlgorithm::Sha224 => libcrux::digest::Algorithm::Sha224,\n            HashAlgorithm::Sha256 => libcrux::digest::Algorithm::Sha256,\n            HashAlgorithm::Sha384 => libcrux::digest::Algorithm::Sha384,\n            HashAlgorithm::Sha512 => libcrux::digest::Algorithm::Sha512,\n            HashAlgorithm::Blake2s => libcrux::digest::Algorithm::Blake2s,\n            HashAlgorithm::Blake2b => libcrux::digest::Algorithm::Blake2b,\n            HashAlgorithm::Sha3_224 => libcrux::digest::Algorithm::Sha3_224,\n            HashAlgorithm::Sha3_256 => libcrux::digest::Algorithm::Sha3_256,\n            HashAlgorithm::Sha3_384 => libcrux::digest::Algorithm::Sha3_384,\n            HashAlgorithm::Sha3_512 => libcrux::digest::Algorithm::Sha3_512,\n        }\n    }\n}\n\n/// Abstract hashing using `algorithm`.\npub fn hash(algorithm: HashAlgorithm, input: &[u8]) -> Vec<u8> {\n    libcrux::digest::hash(algorithm.into(), input)\n}\n\n/// Choice of algorithms for instantiation of HMAC.\npub enum HMACAlgorithm {\n    Sha1,\n    Sha256,\n    Sha384,\n    Sha512,\n}\n\nimpl From<HMACAlgorithm> for libcrux::hmac::Algorithm {\n    /// Converter to `libcrux` type.\n    fn from(value: 
HMACAlgorithm) -> Self {\n        match value {\n            HMACAlgorithm::Sha1 => libcrux::hmac::Algorithm::Sha1,\n            HMACAlgorithm::Sha256 => libcrux::hmac::Algorithm::Sha256,\n            HMACAlgorithm::Sha384 => libcrux::hmac::Algorithm::Sha384,\n            HMACAlgorithm::Sha512 => libcrux::hmac::Algorithm::Sha512,\n        }\n    }\n}\n\n/// Abstract HMAC using `algorithm` as the hash function.\npub fn hmac(algorithm: HMACAlgorithm, key: &[u8], input: &[u8]) -> Vec<u8> {\n    libcrux::hmac::hmac(algorithm.into(), key, input, None)\n}\n"
  },
  {
    "path": "hax-lib-protocol/src/lib.rs",
    "content": "//! This crate provides tools for protocol authors to write protocol\n//! specifications for hax.\n//!\n//! It contains a collection traits describing state machine behaviour, as\n//! well as a library of abstract primitive cryptographic operations for\n//! use in protocol specifications.\n\npub mod crypto;\npub mod state_machine;\n\n/// A protocol error type.\n#[derive(Debug)]\npub enum ProtocolError {\n    /// An error in the crypto abstraction layer\n    CryptoError,\n    /// On receiving an unexpected message, i.e. one that does not allow a state\n    /// transition from the current state.\n    InvalidMessage,\n    /// On receiving invalid initialization data.\n    InvalidPrologue,\n}\n\npub type ProtocolResult<T> = Result<T, ProtocolError>;\n"
  },
  {
    "path": "hax-lib-protocol/src/state_machine.rs",
    "content": "//! This module provides types and traits for implementing a protocol state\n//! machine.\n//!\n//! A protocol party is conceived of as having a set of possible states, one of\n//! which is the initial state. Transitioning to a different state is possible\n//! either through receiving and processing a message or through writing a\n//! message.\n\nuse crate::ProtocolResult;\n\n/// A trait for protocol initial states.\npub trait InitialState {\n    /// Initializes the state given initialization data in `prologue`.\n    ///\n    /// Errors on invalid initialization data.\n    fn init(prologue: Option<Vec<u8>>) -> ProtocolResult<Self>\n    where\n        Self: Sized;\n}\n\n/// A state where a message must be written before transitioning to the next state.\n///\n/// `WriteState` can only be implemented once by every state type, implying that\n/// in any protocol party state, if a message is to be written, that message and\n/// the state the party is in after writing the message are uniquely determined.\npub trait WriteState {\n    /// The uniquely determined state that is transitioned to after writing the message.\n    type NextState;\n    /// The type of the message that is being written.\n    type Message;\n    /// Produce the message to be written when transitioning to the next state.\n    fn write(self) -> ProtocolResult<(Self::NextState, Self::Message)>;\n}\n\n/// A state where a message must be read before transitioning to the next state.\n///\n/// A state type may implement `ReadState` multiple times, for different\n/// instances of `NextState`, allowing the following state to depend on the\n/// message that was received.\npub trait ReadState<NextState> {\n    /// The type of message to be read.\n    type Message;\n\n    /// Generate the next state based on the current state and the received\n    /// message.\n    fn read(self, msg: Self::Message) -> ProtocolResult<NextState>;\n}\n"
  },
  {
    "path": "hax-lib-protocol-macros/Cargo.toml",
    "content": "[package]\nname = \"hax-lib-protocol-macros\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\n\n[lib]\nproc-macro = true\n\n[dependencies]\nproc-macro-error2 = { version = \"2.0\" }\nproc-macro2.workspace = true\nquote.workspace = true\nsyn = { version = \"2.0\", features = [\n    \"full\",\n    \"visit-mut\",\n    \"extra-traits\",\n    \"parsing\",\n] }\n\n[package.metadata.release]\nrelease = false\n"
  },
  {
    "path": "hax-lib-protocol-macros/src/lib.rs",
    "content": "use quote::quote;\nuse syn::{parse, parse_macro_input};\n\n/// This macro takes an `fn` as the basis of an `InitialState` implementation\n/// for the state type that is returned by the `fn` (on success).\n///\n/// The `fn` is expected to build the state type specified as a `Path` attribute\n/// argument from a `Vec<u8>`, i.e. the signature should be compatible with\n/// `TryFrom<Vec<u8>>` for the state type given as argument to the macro.\n///\n/// Example:\n/// ```ignore\n/// pub struct A0 {\n///   data: u8,\n/// }\n///\n/// #[hax_lib_protocol_macros::init(A0)]\n/// fn init_a(prologue: Vec<u8>) -> ::hax_lib_protocol::ProtocolResult<A0> {\n///     if prologue.len() < 1 {\n///        return Err(::hax_lib_protocol::ProtocolError::InvalidPrologue);\n///     }\n///     Ok(A0 { data: prologue[0] })\n/// }\n///\n/// // The following is generated by the macro:\n/// #[hax_lib::exclude]\n/// impl TryFrom<Vec<u8>> for A0 {\n///     type Error = ::hax_lib_protocol::ProtocolError;\n///     fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {\n///         init_a(value)\n///     }\n/// }\n/// #[hax_lib::exclude]\n/// impl InitialState for A0 {\n///     fn init(prologue: Option<Vec<u8>>) -> ::hax_lib_protocol::ProtocolResult<Self> {\n///         if let Some(prologue) = prologue {\n///             prologue.try_into()\n///         } else {\n///             Err(::hax_lib_protocol::ProtocolError::InvalidPrologue)\n///         }\n///     }\n/// }\n/// ```\n#[proc_macro_attribute]\npub fn init(\n    attr: proc_macro::TokenStream,\n    item: proc_macro::TokenStream,\n) -> proc_macro::TokenStream {\n    let mut output = quote!(#[hax_lib::process_init]);\n    output.extend(proc_macro2::TokenStream::from(item.clone()));\n\n    let input: syn::ItemFn = parse_macro_input!(item);\n    let return_type: syn::Path = parse_macro_input!(attr);\n    let name = input.sig.ident;\n\n    let expanded = quote!(\n        #[hax_lib::exclude]\n        impl TryFrom<Vec<u8>> for 
#return_type {\n            type Error = ::hax_lib_protocol::ProtocolError;\n\n            fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {\n                #name(value)\n            }\n        }\n\n        #[hax_lib::exclude]\n        impl InitialState for #return_type {\n            fn init(prologue: Option<Vec<u8>>) -> ::hax_lib_protocol::ProtocolResult<Self> {\n                if let Some(prologue) = prologue {\n                    prologue.try_into()\n                } else {\n                    Err(::hax_lib_protocol::ProtocolError::InvalidPrologue)\n                }\n            }\n        }\n    );\n    output.extend(expanded);\n\n    output.into()\n}\n\n/// This macro takes an `fn` as the basis of an `InitialState` implementation\n/// for the state type that is returned by the `fn` (on success).\n///\n/// The `fn` is expected to build the state type specified as a `Path` attribute\n/// argument without additional input.\n/// Example:\n/// ```ignore\n/// pub struct B0 {}\n///\n/// #[hax_lib_protocol_macros::init_empty(B0)]\n/// fn init_b() -> ::hax_lib_protocol::ProtocolResult<B0> {\n///    Ok(B0 {})\n/// }\n///\n/// // The following is generated by the macro:\n/// #[hax_lib::exclude]\n/// impl InitialState for B0 {\n///     fn init(prologue: Option<Vec<u8>>) -> ::hax_lib_protocol::ProtocolResult<Self> {\n///         if let Some(_) = prologue {\n///             Err(::hax_lib_protocol::ProtocolError::InvalidPrologue)\n///         } else {\n///             init_b()\n///         }\n///     }\n/// }\n/// ```\n#[proc_macro_error2::proc_macro_error]\n#[proc_macro_attribute]\npub fn init_empty(\n    attr: proc_macro::TokenStream,\n    item: proc_macro::TokenStream,\n) -> proc_macro::TokenStream {\n    let mut output = quote!(#[hax_lib::process_init]);\n    output.extend(proc_macro2::TokenStream::from(item.clone()));\n\n    let input: syn::ItemFn = parse_macro_input!(item);\n    let return_type: syn::Path = parse_macro_input!(attr);\n    let name = 
input.sig.ident;\n\n    let expanded = quote!(\n        #[hax_lib::exclude]\n        impl InitialState for #return_type {\n            fn init(prologue: Option<Vec<u8>>) -> ::hax_lib_protocol::ProtocolResult<Self> {\n                if let Some(_) = prologue {\n                    Err(::hax_lib_protocol::ProtocolError::InvalidPrologue)\n                } else {\n                    #name()\n                }\n            }\n        }\n    );\n    output.extend(expanded);\n\n    return output.into();\n}\n\n/// A structure to parse transition tuples from `read` and `write` macros.\nstruct Transition {\n    /// `Path` to the current state type of the transition.\n    pub current_state: syn::Path,\n    /// `Path` to the destination state type of the transition.\n    pub next_state: syn::Path,\n    /// `Path` to the message type this transition is based on.\n    pub message_type: syn::Path,\n}\n\nimpl syn::parse::Parse for Transition {\n    fn parse(input: parse::ParseStream) -> syn::Result<Self> {\n        use syn::spanned::Spanned;\n        let punctuated =\n            syn::punctuated::Punctuated::<syn::Path, syn::Token![,]>::parse_terminated(input)?;\n        if punctuated.len() != 3 {\n            Err(syn::Error::new(\n                punctuated.span(),\n                \"Insufficient number of arguments\",\n            ))\n        } else {\n            let mut args = punctuated.into_iter();\n            Ok(Self {\n                current_state: args.next().unwrap(),\n                next_state: args.next().unwrap(),\n                message_type: args.next().unwrap(),\n            })\n        }\n    }\n}\n\n/// Macro deriving a `WriteState` implementation for the origin state type,\n/// generating a message of `message_type` and a new state, as indicated by the\n/// transition tuple.\n///\n/// Example:\n/// ```ignore\n/// #[hax_lib_protocol_macros::write(A0, A1, Message)]\n/// fn write_ping(state: A0) -> ::hax_lib_protocol::ProtocolResult<(A1, Message)> {\n///    
Ok((A1 {}, Message::Ping(state.data)))\n/// }\n///\n/// // The following is generated by the macro:\n/// #[hax_lib::exclude]\n/// impl TryFrom<A0> for (A1, Message) {\n///    type Error = ::hax_lib_protocol::ProtocolError;\n///\n///    fn try_from(value: A0) -> Result<Self, Self::Error> {\n///       write_ping(value)\n///    }\n/// }\n///\n/// #[hax_lib::exclude]\n/// impl WriteState for A0 {\n///    type NextState = A1;\n///    type Message = Message;\n///\n///    fn write(self) -> ::hax_lib_protocol::ProtocolResult<(Self::NextState, Message)> {\n///        self.try_into()\n///    }\n/// }\n/// ```\n#[proc_macro_attribute]\npub fn write(\n    attr: proc_macro::TokenStream,\n    item: proc_macro::TokenStream,\n) -> proc_macro::TokenStream {\n    let mut output = quote!(#[hax_lib::process_write]);\n    output.extend(proc_macro2::TokenStream::from(item.clone()));\n\n    let input: syn::ItemFn = parse_macro_input!(item);\n    let Transition {\n        current_state,\n        next_state,\n        message_type,\n    } = parse_macro_input!(attr);\n\n    let name = input.sig.ident;\n\n    let expanded = quote!(\n        #[hax_lib::exclude]\n        impl TryFrom<#current_state> for (#next_state, #message_type) {\n            type Error = ::hax_lib_protocol::ProtocolError;\n\n            fn try_from(value: #current_state) -> Result<Self, Self::Error> {\n                #name(value)\n            }\n        }\n\n        #[hax_lib::exclude]\n        impl WriteState for #current_state {\n            type NextState = #next_state;\n            type Message = #message_type;\n\n            fn write(self) -> ::hax_lib_protocol::ProtocolResult<(Self::NextState, Self::Message)> {\n                self.try_into()\n            }\n        }\n    );\n    output.extend(expanded);\n\n    output.into()\n}\n\n/// Macro deriving a `ReadState` implementation for the destination state type,\n/// consuming a message of `message_type` and the current state, as indicated by\n/// the transition 
tuple.\n///\n/// Example:\n/// ```ignore\n/// #[hax_lib_protocol_macros::read(A1, A2, Message)]\n/// fn read_pong(_state: A1, msg: Message) -> ::hax_lib_protocol::ProtocolResult<A2> {\n///     match msg {\n///         Message::Ping(_) => Err(::hax_lib_protocol::ProtocolError::InvalidMessage),\n///         Message::Pong(received) => Ok(A2 { received }),\n///     }\n/// }\n/// // The following is generated by the macro:\n/// #[hax_lib::exclude]\n/// impl TryFrom<(A1, Message)> for A2 {\n///     type Error = ::hax_lib_protocol::ProtocolError;\n///     fn try_from((state, msg): (A1, Message)) -> Result<Self, Self::Error> {\n///         read_pong(state, msg)\n///     }\n/// }\n/// #[hax_lib::exclude]\n/// impl ReadState<A2> for A1 {\n///     type Message = Message;\n///     fn read(self, msg: Message) -> ::hax_lib_protocol::ProtocolResult<A2> {\n///         A2::try_from((self, msg))\n///     }\n/// }\n/// ```\n#[proc_macro_attribute]\npub fn read(\n    attr: proc_macro::TokenStream,\n    item: proc_macro::TokenStream,\n) -> proc_macro::TokenStream {\n    let mut output = quote!(#[hax_lib::process_read]);\n    output.extend(proc_macro2::TokenStream::from(item.clone()));\n\n    let input: syn::ItemFn = parse_macro_input!(item);\n    let Transition {\n        current_state,\n        next_state,\n        message_type,\n    } = parse_macro_input!(attr);\n\n    let name = input.sig.ident;\n\n    let expanded = quote!(\n        #[hax_lib::exclude]\n        impl TryFrom<(#current_state, #message_type)> for #next_state {\n            type Error = ::hax_lib_protocol::ProtocolError;\n\n            fn try_from((state, msg): (#current_state, #message_type)) -> Result<Self, Self::Error> {\n                #name(state, msg)\n            }\n        }\n\n        #[hax_lib::exclude]\n        impl ReadState<#next_state> for #current_state {\n            type Message = #message_type;\n            fn read(self, msg: Self::Message) -> ::hax_lib_protocol::ProtocolResult<#next_state> {\n       
         #next_state::try_from((self, msg))\n            }\n        }\n    );\n    output.extend(expanded);\n\n    output.into()\n}\n"
  },
  {
    "path": "hax-types/Cargo.toml",
    "content": "[package]\nname = \"hax-types\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\ndescription = \"Helper crate defining the types used to communicate between the custom rustc driver, the CLI and the engine of hax.\"\n\n[dependencies]\nclap = { workspace = true, features = [\"env\"] }\nhax-frontend-exporter.workspace = true\nhax-frontend-exporter-options.workspace = true\nitertools.workspace = true\npath-clean = \"1.0.1\"\nschemars.workspace = true\nserde.workspace = true\ncolored.workspace = true\nserde_json.workspace = true\nannotate-snippets.workspace = true\nhax-adt-into.workspace = true\ntracing.workspace = true\nserde-brief ={ version = \"0.1\", features = [\"std\", \"alloc\"]}\nzstd = \"0.13.1\"\nmiette = \"7.2.0\"\n\n[features]\nrustc = [\"hax-frontend-exporter/rustc\"]\n"
  },
  {
    "path": "hax-types/README.md",
    "content": "# `hax-types`\nThis crate contains the type definitions that are used to communicate between:\n - the command line (the `cargo-hax` binary);\n - the custom rustc driver;\n - the hax engine (the `hax-engine` binary).\n \nThose three components send and receive messages in JSON or CBOR on\nstdin and stdout.\n"
  },
  {
    "path": "hax-types/build.rs",
    "content": "macro_rules! set_empty_env_var_with {\n    ($var:literal, $f: expr) => {{\n        println!(\"cargo:rerun-if-env-changed={}\", $var);\n        match option_env!($var) {\n            Some(value) => value.to_string(),\n            None => {\n                let value = $f;\n                println!(\"cargo:rustc-env={}={}\", $var, value);\n                value\n            }\n        }\n    }};\n}\n\nconst UNKNOWN: &str = \"unknown\";\n\nfn git_command(args: &[&str]) -> String {\n    std::process::Command::new(\"git\")\n        .args(args)\n        .output()\n        .map(|output| String::from_utf8(output.stdout).unwrap().trim().to_string())\n        .ok()\n        .filter(|s| !s.is_empty())\n        .unwrap_or(UNKNOWN.to_string())\n}\n\nfn main() {\n    let commit_hash =\n        set_empty_env_var_with!(\"HAX_GIT_COMMIT_HASH\", git_command(&[\"rev-parse\", \"HEAD\"]));\n\n    set_empty_env_var_with!(\"HAX_VERSION\", {\n        if commit_hash == UNKNOWN {\n            env!(\"CARGO_PKG_VERSION\").into()\n        } else {\n            git_command(&[\"tag\", \"--contains\", &commit_hash])\n                .lines()\n                .next()\n                .and_then(|tag| tag.split_once(\"hax-v\"))\n                .map(|(_, version)| version.trim().to_string())\n                .unwrap_or_else(|| format!(\"untagged-git-rev-{}\", &commit_hash[0..10]))\n        }\n    });\n}\n"
  },
  {
    "path": "hax-types/src/cli_options/extension.rs",
    "content": "/// This module defines a way to extend externally the CLI of hax, via\n/// the `Extension` trait. This trait defines one associated type per\n/// extension point.\nuse crate::prelude::*;\n\nuse clap::{Parser, Subcommand};\n\nmacro_rules! trait_alias {\n    ($name:ident = $($base:tt)+) => {\n        pub trait $name: $($base)+ { }\n        impl<T: $($base)+> $name for T { }\n    };\n}\n\ntrait_alias!(\n    ExtensionPoint =\n        std::fmt::Debug\n        + for<'a> serde::Deserialize<'a>\n        + serde::Serialize\n        + JsonSchema\n        + Clone\n);\n\ntrait_alias!(SubcommandExtensionPoint = ExtensionPoint + clap::Subcommand);\ntrait_alias!(ArgsExtensionPoint = ExtensionPoint + clap::Args);\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Parser, Debug, Clone)]\npub struct EmptyArgsExtension {}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Subcommand, Debug, Clone)]\npub enum EmptySubcommandExtension {}\n\npub trait Extension: 'static {\n    type Options: ArgsExtensionPoint;\n    type Command: SubcommandExtensionPoint;\n    type BackendOptions: ArgsExtensionPoint;\n    type FStarOptions: ArgsExtensionPoint;\n}\n\nimpl Extension for () {\n    type Options = EmptyArgsExtension;\n    type Command = EmptySubcommandExtension;\n    type BackendOptions = EmptyArgsExtension;\n    type FStarOptions = EmptyArgsExtension;\n}\n"
  },
  {
    "path": "hax-types/src/cli_options/mod.rs",
    "content": "use crate::prelude::*;\n\nuse clap::{Parser, Subcommand, ValueEnum};\nuse std::fmt;\n\npub use hax_frontend_exporter_options::*;\npub mod extension;\nuse extension::Extension;\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Debug, Clone)]\npub enum DebugEngineMode {\n    File(PathOrDash),\n    Interactive,\n}\n\nimpl std::convert::From<&str> for DebugEngineMode {\n    fn from(s: &str) -> Self {\n        match s {\n            \"i\" | \"interactively\" => DebugEngineMode::Interactive,\n            s => DebugEngineMode::File(s.strip_prefix(\"file:\").unwrap_or(s).into()),\n        }\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Debug, Clone, Default)]\npub struct ForceCargoBuild {\n    pub data: u64,\n}\n\nimpl std::convert::From<&str> for ForceCargoBuild {\n    fn from(s: &str) -> Self {\n        use std::time::{SystemTime, UNIX_EPOCH};\n        if s == \"false\" {\n            let data = SystemTime::now()\n                .duration_since(UNIX_EPOCH)\n                .map(|r| r.as_millis())\n                .unwrap_or(0);\n            ForceCargoBuild { data: data as u64 }\n        } else {\n            ForceCargoBuild::default()\n        }\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(Debug, Clone, JsonSchema)]\npub enum PathOrDash {\n    Dash,\n    Path(PathBuf),\n}\n\nimpl std::convert::From<&str> for PathOrDash {\n    fn from(s: &str) -> Self {\n        match s {\n            \"-\" => PathOrDash::Dash,\n            _ => PathOrDash::Path(PathBuf::from(s)),\n        }\n    }\n}\n\nimpl PathOrDash {\n    pub fn open_or_stdout(&self) -> Box<dyn std::io::Write> {\n        use std::io::BufWriter;\n        match self {\n            PathOrDash::Dash => Box::new(BufWriter::new(std::io::stdout())),\n            PathOrDash::Path(path) => {\n                Box::new(BufWriter::new(std::fs::File::create(&path).unwrap()))\n            }\n        }\n    }\n    pub fn map_path<F: FnOnce(&Path) -> PathBuf>(&self, f: F) -> Self {\n     
   match self {\n            PathOrDash::Path(path) => PathOrDash::Path(f(path)),\n            PathOrDash::Dash => PathOrDash::Dash,\n        }\n    }\n}\n\nfn absolute_path(path: impl AsRef<std::path::Path>) -> std::io::Result<std::path::PathBuf> {\n    use path_clean::PathClean;\n    let path = path.as_ref();\n\n    let absolute_path = if path.is_absolute() {\n        path.to_path_buf()\n    } else {\n        std::env::current_dir()?.join(path)\n    }\n    .clean();\n\n    Ok(absolute_path)\n}\n\npub trait NormalizePaths {\n    fn normalize_paths(&mut self);\n}\n\nimpl NormalizePaths for PathBuf {\n    fn normalize_paths(&mut self) {\n        *self = absolute_path(&self).unwrap();\n    }\n}\nimpl NormalizePaths for PathOrDash {\n    fn normalize_paths(&mut self) {\n        match self {\n            PathOrDash::Path(p) => p.normalize_paths(),\n            PathOrDash::Dash => (),\n        }\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Parser, Debug, Clone)]\npub struct ProVerifOptions {\n    /// Items for which hax should extract a default-valued process\n    /// macro with a corresponding type signature. This flag expects a\n    /// space-separated list of inclusion clauses. An inclusion clause\n    /// is a Rust path prefixed with `+`, `+!` or `-`. `-` means\n    /// implementation only, `+!` means interface only and `+` means\n    /// implementation and interface. 
Rust path chunks can be either a\n    /// concrete string, or a glob (just like bash globs, but with\n    /// Rust paths).\n    #[arg(\n        long,\n        value_parser = parse_inclusion_clause,\n        value_delimiter = ' ',\n        allow_hyphen_values(true)\n    )]\n    pub assume_items: Vec<InclusionClause>,\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Parser, Debug, Clone)]\npub struct FStarOptions<E: Extension> {\n    /// Set the Z3 per-query resource limit\n    #[arg(long, default_value = \"15\")]\n    pub z3rlimit: u32,\n    /// Number of unrolling of recursive functions to try\n    #[arg(long, default_value = \"0\")]\n    pub fuel: u32,\n    /// Number of unrolling of inductive datatypes to try\n    #[arg(long, default_value = \"1\")]\n    pub ifuel: u32,\n    /// Modules for which Hax should extract interfaces (`*.fsti`\n    /// files) in supplement to implementations (`*.fst` files). By\n    /// default we extract no interface, only implementations. If a\n    /// item is signature only (see the `+:` prefix of the\n    /// `--include_namespaces` flag of the `into` subcommand), then\n    /// its namespace is extracted with an interface. This flag\n    /// expects a space-separated list of inclusion clauses. An\n    /// inclusion clause is a Rust path prefixed with `+`, `+!` or\n    /// `-`. `-` means implementation only, `+!` means interface only\n    /// and `+` means implementation and interface. 
Rust path chunks\n    /// can be either a concrete string, or a glob (just like bash\n    /// globs, but with Rust paths).\n    #[arg(\n        long,\n        value_parser = parse_inclusion_clause,\n        value_delimiter = ' ',\n        allow_hyphen_values(true)\n    )]\n    pub interfaces: Vec<InclusionClause>,\n\n    #[arg(long, default_value = \"100\", env = \"HAX_FSTAR_LINE_WIDTH\")]\n    pub line_width: u16,\n\n    #[group(flatten)]\n    pub cli_extension: E::FStarOptions,\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Subcommand, Debug, Clone)]\npub enum Backend<E: Extension> {\n    /// Use the F* backend\n    Fstar(FStarOptions<E>),\n    /// Use the Lean backend (warning: experimental)\n    Lean,\n    /// Use the Coq backend\n    Coq,\n    /// Use the SSProve backend\n    Ssprove,\n    /// Use the EasyCrypt backend (warning: work in progress!)\n    Easycrypt,\n    /// Use the ProVerif backend (warning: work in progress!)\n    ProVerif(ProVerifOptions),\n    /// Use the Rust backend (warning: work in progress!)\n    #[clap(hide = true)]\n    Rust,\n    /// Extract `DefId`s of the crate as a Rust module tree.\n    /// This is a command that regenerates code for the rust engine.\n    #[clap(hide = true)]\n    GenerateRustEngineNames,\n    /// A debugger for the Rust engine\n    Debugger {\n        #[arg(long, short)]\n        interactive: bool,\n    },\n}\n\nimpl fmt::Display for Backend<()> {\n    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n        BackendName::from(self).fmt(f)\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Debug, Clone)]\npub enum DepsKind {\n    Transitive,\n    Shallow,\n    None,\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Debug, Clone)]\npub enum InclusionKind {\n    /// `+query` include the items selected by `query`\n    Included(DepsKind),\n    SignatureOnly,\n    Excluded,\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Debug, Clone)]\npub struct InclusionClause {\n    
pub kind: InclusionKind,\n    pub namespace: Namespace,\n}\n\nconst PREFIX_INCLUDED_TRANSITIVE: &str = \"+\";\nconst PREFIX_INCLUDED_SHALLOW: &str = \"+~\";\nconst PREFIX_INCLUDED_NONE: &str = \"+!\";\nconst PREFIX_SIGNATURE_ONLY: &str = \"+:\";\nconst PREFIX_EXCLUDED: &str = \"-\";\n\nimpl ToString for InclusionClause {\n    fn to_string(&self) -> String {\n        let kind = match self.kind {\n            InclusionKind::Included(DepsKind::Transitive) => PREFIX_INCLUDED_TRANSITIVE,\n            InclusionKind::Included(DepsKind::Shallow) => PREFIX_INCLUDED_SHALLOW,\n            InclusionKind::Included(DepsKind::None) => PREFIX_INCLUDED_NONE,\n            InclusionKind::SignatureOnly => PREFIX_SIGNATURE_ONLY,\n            InclusionKind::Excluded => PREFIX_EXCLUDED,\n        };\n        format!(\"{kind}{}\", self.namespace.to_string())\n    }\n}\n\npub fn parse_inclusion_clause(\n    s: &str,\n) -> Result<InclusionClause, Box<dyn std::error::Error + Send + Sync + 'static>> {\n    let s = s.trim();\n    if s.is_empty() {\n        Err(\"Expected `-` or `+`, got an empty string\")?\n    }\n    let (prefix, namespace) = {\n        let f = |&c: &char| matches!(c, '+' | '-' | '~' | '!' | ':');\n        (\n            s.chars().take_while(f).into_iter().collect::<String>(),\n            s.chars().skip_while(f).into_iter().collect::<String>(),\n        )\n    };\n    let kind = match &prefix[..] 
{\n        PREFIX_INCLUDED_TRANSITIVE => InclusionKind::Included(DepsKind::Transitive),\n        PREFIX_INCLUDED_SHALLOW => InclusionKind::Included(DepsKind::Shallow),\n        PREFIX_INCLUDED_NONE => InclusionKind::Included(DepsKind::None),\n        PREFIX_SIGNATURE_ONLY => InclusionKind::SignatureOnly,\n        PREFIX_EXCLUDED => InclusionKind::Excluded,\n        prefix => Err(format!(\n            \"Expected `+`, `+~`, `+!`, `+:` or `-`, got an `{prefix}`\"\n        ))?,\n    };\n    Ok(InclusionClause {\n        kind,\n        namespace: namespace.to_string().into(),\n    })\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Parser, Debug, Clone)]\npub struct TranslationOptions {\n    /// Controls which Rust item should be extracted or not.\n    ///\n    /// This is a space-separated list of patterns prefixed with a\n    /// modifier, read from the left to the right.\n    ///\n    /// A pattern is a Rust path (say `mycrate::mymod::myfn`) where\n    /// globs are allowed: `*` matches any name\n    /// (e.g. `mycrate::mymod::myfn` is matched by\n    /// `mycrate::*::myfn`), while `**` matches any subpath, empty\n    /// included (e.g. `mycrate::mymod::myfn` is matched by\n    /// `**::myfn`).\n\n    /// By default, hax includes all items. Then, the patterns\n    /// prefixed by modifiers are processed from left to right,\n    /// excluding or including items. Each pattern selects a number of\n    /// item. The modifiers are:\n\n    /// {n}{n} - `+`: includes the selected items with their\n    /// dependencies, transitively (e.g. 
if function `f` calls `g`\n    /// which in turn calls `h`, then `+k::f` includes `f`, `g` and\n    /// `h`)\n\n    /// {n} - `+~`: includes the selected items with their direct\n    /// dependencies only (following the previous example, `+~k::f`\n    /// would select `f` and `g`, but not `h`)\n\n    /// {n} - `+!`: includes the selected items, without their\n    /// dependencies (`+!k::f` would only select `f`)\n\n    /// {n} - `+:`: only includes the type of the selected items (no\n    /// dependencies). This includes full struct and enums, but only\n    /// the type signature of functions and trait impls (except when\n    /// they contain associated types), dropping their bodies.\n    #[arg(\n        value_parser = parse_inclusion_clause,\n        value_delimiter = ' ',\n    )]\n    #[arg(short, allow_hyphen_values(true))]\n    pub include_namespaces: Vec<InclusionClause>,\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Parser, Debug, Clone)]\npub struct BackendOptions<E: Extension> {\n    #[command(subcommand)]\n    pub backend: Backend<E>,\n\n    /// Don't write anything on disk. Output everything as JSON to stdout\n    /// instead.\n    #[arg(long = \"dry-run\")]\n    pub dry_run: bool,\n\n    /// Verbose mode for the Hax engine. 
Set `-vv` for maximal verbosity.\n    #[arg(short, long, action = clap::ArgAction::Count)]\n    pub verbose: u8,\n\n    /// Prints statistics about how many items have been translated\n    /// successfully by the engine.\n    #[arg(long)]\n    pub stats: bool,\n\n    /// Enables profiling for the engine: for each phase of the\n    /// engine, time and memory usage are recorded and reported.\n    #[arg(long)]\n    pub profile: bool,\n\n    /// Prune Rust items that are not under the provided top-level module name.\n    /// This will effectively remove all items that don't match `*::<prune_haxmetadata>::**`.\n    /// This pruning occurs directly on the `haxmeta` file, in the frontend.\n    /// This is independent from any engine options.\n    #[arg(long)]\n    #[clap(hide = true)]\n    pub prune_haxmeta: Option<String>,\n\n    /// Enable engine debugging: dumps the AST at each phase.\n    ///\n    /// The value of `<DEBUG_ENGINE>` can be either:\n\n    /// {n}{n} - `interactive` (or `i`): enables debugging of the engine,\n    /// and visualize interactively in a webapp how a crate was\n    /// transformed by each phase, both in Rust-like syntax and\n    /// browsing directly the internal AST. By default, the webapp is\n    /// hosted on `http://localhost:8000`, the port can be overridden by\n    /// setting the `HAX_DEBUGGER_PORT` environment variable.\n\n    /// {n} - `<FILE>` or `file:<FILE>`: outputs the different AST as JSON\n    /// to `<FILE>`. `<FILE>` can be either [-] or a path.\n    #[arg(short, long = \"debug-engine\")]\n    pub debug_engine: Option<DebugEngineMode>,\n\n    /// Extract type aliases. This is disabled by default, since\n    /// extracted terms depends on expanded types rather than on type\n    /// aliases. Turning this option on is discouraged: Rust type\n    /// synonyms can omit generic bounds, which are usually\n    /// necessary in the hax backends, leading to typechecking\n    /// errors. 
For more details see\n    /// https://github.com/hacspec/hax/issues/708.\n    #[arg(long)]\n    pub extract_type_aliases: bool,\n\n    #[command(flatten)]\n    pub translation_options: TranslationOptions,\n\n    /// Where to put the output files resulting from the translation.\n    /// Defaults to \"<crate folder>/proofs/<backend>/extraction\".\n    #[arg(long)]\n    pub output_dir: Option<PathBuf>,\n\n    #[group(flatten)]\n    pub cli_extension: E::BackendOptions,\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Subcommand, Debug, Clone)]\npub enum Command<E: Extension> {\n    /// Translate to a backend. The translated modules will be written\n    /// under the directory `<PKG>/proofs/<BACKEND>/extraction`, where\n    /// `<PKG>` is the translated cargo package name and `<BACKEND>`\n    /// the name of the backend.\n    #[clap(name = \"into\")]\n    Backend(BackendOptions<E>),\n\n    /// Export directly as a JSON file\n    JSON {\n        /// Path to the output JSON file, \"-\" denotes stdout.\n        #[arg(\n            short,\n            long = \"output-file\",\n            default_value = \"hax_frontend_export.json\"\n        )]\n        output_file: PathOrDash,\n        /// Whether the bodies are exported as THIR, built MIR, const\n        /// MIR, or a combination. Repeat this option to extract a\n        /// combination (e.g. `-k thir -k mir-built`). Pass `--kind`\n        /// alone with no value to disable body extraction.\n        #[arg(\n            value_enum,\n            short,\n            long = \"kind\",\n            num_args = 0..=3,\n            default_values_t = [ExportBodyKind::Thir]\n        )]\n        kind: Vec<ExportBodyKind>,\n\n        /// By default, `cargo hax json` outputs a JSON where every\n        /// piece of information is inlined. This however creates very\n        /// large JSON files. 
This flag enables the use of unique IDs\n        /// and outputs a map from IDs to actual objects.\n        #[arg(long)]\n        use_ids: bool,\n\n        /// Whether to include extra informations about `DefId`s.\n        #[arg(short = 'E', long = \"include-extra\", default_value = \"false\")]\n        include_extra: bool,\n    },\n\n    /// Serialize to a `haxmeta` file, the internal binary format used by hax to\n    /// store the ASTs produced by the hax exporter.\n    #[clap(hide = true)]\n    Serialize {\n        /// Whether the bodies are exported as THIR, built MIR, const\n        /// MIR, or a combination. Repeat this option to extract a\n        /// combination (e.g. `-k thir -k mir-built`). Pass `--kind`\n        /// alone with no value to disable body extraction.\n        #[arg(\n            value_enum,\n            short,\n            long = \"kind\",\n            num_args = 0..=3,\n            default_values_t = [ExportBodyKind::Thir]\n        )]\n        kind: Vec<ExportBodyKind>,\n\n        /// When extracting to a given backend, the exporter is called with different `cfg` options.\n        /// This option allows to set the same flags as `cargo hax into` would pick.\n        #[arg(short)]\n        backend: Option<BackendName>,\n    },\n\n    #[command(flatten)]\n    CliExtension(E::Command),\n}\n\nimpl<E: Extension> Command<E> {\n    pub fn body_kinds(&self) -> Vec<ExportBodyKind> {\n        match self {\n            Command::JSON { kind, .. } => kind.clone(),\n            Command::Serialize { kind, .. } => kind.clone(),\n            Command::Backend { .. } | Command::CliExtension { .. } => vec![ExportBodyKind::Thir],\n        }\n    }\n    pub fn backend_name(&self) -> Option<BackendName> {\n        match self {\n            Command::Backend(backend_options) => Some((&backend_options.backend).into()),\n            Command::JSON { .. } => None,\n            Command::Serialize { backend, .. 
} => backend.clone(),\n            Command::CliExtension(_) => None,\n        }\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, ValueEnum, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]\npub enum ExportBodyKind {\n    Thir,\n    MirBuilt,\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Parser, Debug, Clone)]\n#[command(\n    author,\n    version = crate::HAX_VERSION,\n    long_version = concat!(\"\\nversion=\", env!(\"HAX_VERSION\"), \"\\n\", \"commit=\", env!(\"HAX_GIT_COMMIT_HASH\")),\n    name = \"hax\",\n    about,\n    long_about = None\n)]\npub struct ExtensibleOptions<E: Extension> {\n    /// Semi-colon terminated list of arguments to pass to the\n    /// `cargo build` invocation. For example, to apply this\n    /// program on a package `foo`, use `-C -p foo ;`. (make sure\n    /// to escape `;` correctly in your shell)\n    #[arg(default_values = Vec::<&str>::new(), short='C', allow_hyphen_values=true, num_args=1.., long=\"cargo-args\", value_terminator=\";\")]\n    pub cargo_flags: Vec<String>,\n\n    #[command(subcommand)]\n    pub command: Command<E>,\n\n    /// `cargo` caching is enabled by default, this flag disables it.\n    #[arg(long=\"disable-cargo-cache\", action=clap::builder::ArgAction::SetFalse)]\n    pub force_cargo_build: ForceCargoBuild,\n\n    /// Apply the command to every local package of the dependency closure. By\n    /// default, the command is only applied to the primary packages (i.e. 
the\n    /// package(s) of the current directory, or the ones selected with cargo\n    /// options like `-C -p <PKG> ;`).\n    #[arg(long = \"deps\")]\n    pub deps: bool,\n\n    /// Provide a precomputed haxmeta file explicitly.\n    /// Setting this option bypasses rustc and the exporter altogether.\n    #[arg(long)]\n    #[clap(hide = true)]\n    pub haxmeta: Option<PathBuf>,\n\n    /// By default, hax uses `$CARGO_TARGET_DIR/hax` as target folder,\n    /// to avoid recompilation when working both with `cargo hax` and\n    /// `cargo build` (or, e.g. `rust-analyzer`). This option disables\n    /// this behavior.\n    #[arg(long)]\n    pub no_custom_target_directory: bool,\n\n    /// Diagnostic format. Sets `cargo`'s `--message-format` as well,\n    /// if not present.\n    #[arg(long, default_value = \"human\")]\n    pub message_format: MessageFormat,\n\n    /// Enables experimental FullDef format for items exported from the frontend\n    /// in the haxmeta file.\n    #[arg(long, env = \"HAX_EXPERIMENTAL_FULL_DEF\")]\n    pub experimental_full_def: bool,\n\n    #[group(flatten)]\n    pub extension: E::Options,\n}\n\npub type Options = ExtensibleOptions<()>;\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, ValueEnum, Debug, Clone, Copy, Eq, PartialEq)]\npub enum MessageFormat {\n    Human,\n    Json,\n}\n\nimpl<E: Extension> NormalizePaths for Command<E> {\n    fn normalize_paths(&mut self) {\n        use Command::*;\n        match self {\n            JSON { output_file, .. 
} => output_file.normalize_paths(),\n            _ => (),\n        }\n    }\n}\n\nimpl NormalizePaths for Options {\n    fn normalize_paths(&mut self) {\n        self.command.normalize_paths()\n    }\n}\n\nimpl From<Options> for hax_frontend_exporter_options::Options {\n    fn from(_opts: Options) -> hax_frontend_exporter_options::Options {\n        hax_frontend_exporter_options::Options {\n            inline_anon_consts: true,\n            bounds_options: hax_frontend_exporter_options::BoundsOptions {\n                resolve_destruct: false,\n                prune_sized: true,\n            },\n            item_ref_use_concrete_impl: false,\n        }\n    }\n}\n\n/// The subset of `Options` the frontend is sensitive to.\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Debug, Clone)]\npub struct ExporterOptions {\n    pub deps: bool,\n    pub force_cargo_build: ForceCargoBuild,\n    /// When exporting, the driver sets `--cfg hax_backend_{backend}`, thus we need this information.\n    pub backend: Option<BackendName>,\n    pub body_kinds: Vec<ExportBodyKind>,\n    pub experimental_full_def: bool,\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, ValueEnum, Debug, Clone, Copy)]\npub enum BackendName {\n    Fstar,\n    Coq,\n    Ssprove,\n    Easycrypt,\n    ProVerif,\n    Lean,\n    Rust,\n    GenerateRustEngineNames,\n    Debugger,\n}\n\nimpl fmt::Display for BackendName {\n    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n        let name = match self {\n            BackendName::Fstar => \"fstar\",\n            BackendName::Coq => \"coq\",\n            BackendName::Ssprove => \"ssprove\",\n            BackendName::Easycrypt => \"easycrypt\",\n            BackendName::ProVerif => \"proverif\",\n            BackendName::Lean => \"lean\",\n            BackendName::Rust => \"rust\",\n            BackendName::GenerateRustEngineNames => \"generate_rust_engine_names\",\n            BackendName::Debugger => \"debugger\",\n        };\n        
write!(f, \"{name}\")\n    }\n}\n\nimpl From<&Options> for ExporterOptions {\n    fn from(options: &Options) -> Self {\n        ExporterOptions {\n            deps: options.deps,\n            force_cargo_build: options.force_cargo_build.clone(),\n            backend: options.command.backend_name(),\n            body_kinds: options.command.body_kinds(),\n            experimental_full_def: options.experimental_full_def,\n        }\n    }\n}\n\nimpl<E: Extension> From<&Backend<E>> for BackendName {\n    fn from(backend: &Backend<E>) -> Self {\n        match backend {\n            Backend::Fstar { .. } => BackendName::Fstar,\n            Backend::Coq { .. } => BackendName::Coq,\n            Backend::Ssprove { .. } => BackendName::Ssprove,\n            Backend::Easycrypt { .. } => BackendName::Easycrypt,\n            Backend::ProVerif { .. } => BackendName::ProVerif,\n            Backend::Lean { .. } => BackendName::Lean,\n            Backend::Rust { .. } => BackendName::Rust,\n            Backend::GenerateRustEngineNames { .. } => BackendName::GenerateRustEngineNames,\n            Backend::Debugger { .. } => BackendName::Debugger,\n        }\n    }\n}\n\npub const ENV_VAR_OPTIONS_FRONTEND: &str = \"DRIVER_HAX_FRONTEND_OPTS\";\npub const ENV_VAR_OPTIONS_FULL: &str = \"DRIVER_HAX_FRONTEND_FULL_OPTS\";\n"
  },
  {
    "path": "hax-types/src/diagnostics/message.rs",
    "content": "use crate::cli_options::Backend;\nuse crate::prelude::*;\n\n#[derive_group(Serializers)]\n#[derive(Debug, Clone, JsonSchema)]\n#[repr(u8)]\npub enum HaxMessage {\n    Diagnostic {\n        diagnostic: super::Diagnostics,\n        working_dir: Option<PathBuf>,\n    } = 254,\n    EngineNotFound {\n        is_opam_setup_correctly: bool,\n    } = 0,\n    ProducedFile {\n        path: PathBuf,\n        wrote: bool,\n    } = 1,\n    HaxEngineFailure {\n        exit_code: i32,\n    } = 2,\n    CargoBuildFailure = 3,\n    WarnExperimentalBackend {\n        backend: Backend<()>,\n    } = 4,\n    ProfilingData(crate::engine_api::ProfilingData) = 5,\n    Stats {\n        errors_per_item: Vec<(hax_frontend_exporter::DefId, usize)>,\n    } = 6,\n}\n\nimpl HaxMessage {\n    // https://doc.rust-lang.org/reference/items/enumerations.html#pointer-casting\n    pub fn discriminant(&self) -> u16 {\n        unsafe { *(self as *const Self as *const u16) }\n    }\n\n    pub fn code(&self) -> String {\n        match self {\n            HaxMessage::Diagnostic { diagnostic, .. } => diagnostic.kind.code(),\n            _ => format!(\"CARGOHAX{:0>4}\", self.discriminant()),\n        }\n    }\n}\n"
  },
  {
    "path": "hax-types/src/diagnostics/mod.rs",
    "content": "use crate::prelude::*;\nuse colored::Colorize;\n\npub mod message;\npub mod report;\n\n#[derive_group(Serializers)]\n#[derive(Debug, Clone, JsonSchema, Eq, PartialEq, Hash)]\npub struct Diagnostics {\n    pub kind: Kind,\n    pub span: Vec<hax_frontend_exporter::Span>,\n    pub context: String,\n    pub owner_id: Option<hax_frontend_exporter::DefId>,\n}\n\nimpl std::fmt::Display for Diagnostics {\n    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n        match &self.kind {\n            Kind::Unimplemented { issue_id:_, details } => write!(\n                f,\n                \"something is not implemented yet.\\n{}\",\n                match details {\n                    Some(details) => format!(\"{}\", details),\n                    _ => \"\".to_string(),\n                },\n            ),\n            Kind::UnsupportedMacro { id } => write!(\n                f,\n                \"The unexpanded macro {} is not supported by this backend.\\nPlease verify the argument you passed to the {} (or {}) option.\",\n                id.bold(),\n                \"--inline-macro-call\".bold(), \"-i\".bold()\n            ),\n            Kind::UnsafeBlock => write!(f, \"Unsafe blocks are not allowed.\"),\n            Kind::AssertionFailure {details} => write!(\n                f,\n                \"Fatal error: something we considered as impossible occurred! 
{}\\nDetails: {}\",\n                \"Please report this by submitting an issue on GitHub!\".bold(),\n                details\n            ),\n            Kind::UnallowedMutRef => write!(\n                f,\n                \"The mutation of this {} is not allowed here.\",\n                \"&mut\".bold()\n            ),\n            Kind::ExpectedMutRef => write!(\n                f,\n                \"At this position, Hax was expecting an expression of the shape `&mut _`.\\nHax forbids `f(x)` (where `f` expects a mutable reference as input) when `x` is not a {}{} or when it is a dereference expression.\n\n{}\",\n                \"place expression\".bold(),\n                \"[1]\".bright_black(),\n                \"[1]: https://doc.rust-lang.org/reference/expressions.html#place-expressions-and-value-expressions\"\n            ),\n            Kind::ClosureMutatesParentBindings {bindings} => write!(\n                f,\n                \"The bindings {:?} cannot be mutated here: they don't belong to the closure scope, and this is not allowed.\",\n                bindings\n            ),\n            Kind::ArbitraryLHS => write!(f, \"Assignation of an arbitrary left-hand side is not supported.\\n`lhs = e` is fine only when `lhs` is a combination of local identifiers, field accessors and index accessors.\"),\n\n            Kind::AttributeRejected {reason} => write!(f, \"Here, this attribute cannot be used: {reason}.\"),\n\n            Kind::NonTrivialAndMutFnInput => write!(f, \"The support in hax of function with one or more inputs of type `&mut _` is limited.\\nOnly trivial patterns are allowed there: `fn f(x: &mut (T, U)) ...` is allowed while `f((x, y): &mut (T, U))` is rejected.\"),\n\n            Kind::FStarParseError { fstar_snippet, details: _ } => write!(f, \"The following code snippet could not be parsed as valid F*:\\n```\\n{fstar_snippet}\\n```\"),\n\n            Kind::ExplicitRejection { reason , .. 
} => write!(f, \"Explicit rejection by a phase in the Hax engine:\\n{}\", reason),\n\n            _ => write!(f, \"{:?}\", self.kind),\n        }?;\n        write!(f, \"\\n\\n\")?;\n        if let Some(issue) = self.kind.issue_number() {\n            write!(\n                f,\n                \"This is discussed in issue https://github.com/hacspec/hax/issues/{issue}.\\nPlease upvote or comment this issue if you see this error message.\\n\"\n            )?;\n        }\n        write!(\n            f,\n            \"{}\",\n            format!(\n                \"Note: the error was labeled with context `{}`.\\n\",\n                self.context\n            )\n            .bright_black()\n        )?;\n        Ok(())\n    }\n}\n\nimpl Kind {\n    fn issue_number(&self) -> Option<u32> {\n        match self {\n            Kind::UnsafeBlock => None,\n            Kind::ExplicitRejection { issue_id, .. } | Kind::Unimplemented { issue_id, .. } => {\n                issue_id.clone()\n            }\n            Kind::AssertionFailure { .. } => None,\n            Kind::UnallowedMutRef => Some(420),\n            Kind::UnsupportedMacro { .. } => None,\n            Kind::ErrorParsingMacroInvocation { .. } => None,\n            Kind::ClosureMutatesParentBindings { .. } => Some(1060),\n            Kind::ArbitraryLHS => None,\n            Kind::UnsupportedTupleSize { .. } => None,\n            Kind::ExpectedMutRef => Some(420),\n            Kind::NonTrivialAndMutFnInput => Some(1405),\n            Kind::AttributeRejected { .. } => None,\n            Kind::FStarParseError { .. } => None,\n            Kind::OcamlEngineErrorPayload { .. 
} => None,\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(Debug, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, JsonSchema)]\n#[repr(u16)]\npub enum Kind {\n    /// Unsafe code is not supported\n    UnsafeBlock = 0,\n\n    /// A feature is not currently implemented\n    Unimplemented {\n        /// Issue on the GitHub repository\n        issue_id: Option<u32>,\n        details: Option<String>,\n    } = 1,\n\n    /// Unknown error\n    // This is useful when doing sanity checks (i.e. one can yield\n    // this error kind for cases that should never happen)\n    AssertionFailure {\n        details: String,\n    } = 2,\n\n    /// Unallowed mutable reference\n    UnallowedMutRef = 3,\n\n    /// Unsupported macro invocation\n    UnsupportedMacro {\n        id: String,\n    } = 4,\n\n    /// Error parsing a macro invocation to a macro treated specifically by a backend\n    ErrorParsingMacroInvocation {\n        macro_id: String,\n        details: String,\n    } = 5,\n\n    /// Mutation of bindings living outside a closure scope is not supported\n    ClosureMutatesParentBindings {\n        bindings: Vec<String>,\n    } = 6,\n\n    /// Assignation of an arbitrary left-hand side is not supported. 
`lhs = e` is fine only when `lhs` is a combination of local identifiers, field accessors and index accessors.\n    ArbitraryLHS = 7,\n\n    /// A phase explicitly rejected this chunk of code\n    ExplicitRejection {\n        reason: String,\n        issue_id: Option<u32>,\n    } = 8,\n\n    /// A backend doesn't support a tuple size\n    UnsupportedTupleSize {\n        tuple_size: u32,\n        reason: String,\n    } = 9,\n\n    ExpectedMutRef = 10,\n\n    /// &mut inputs should be trivial patterns\n    NonTrivialAndMutFnInput = 11,\n\n    /// A hax attribute (from `hax-lib-macros`) was rejected\n    AttributeRejected {\n        reason: String,\n    } = 12,\n\n    /// A snippet of F* code could not be parsed\n    FStarParseError {\n        fstar_snippet: String,\n        details: String,\n    } = 13,\n\n    /// Internal encoding\n    OcamlEngineErrorPayload(String) = 9999,\n}\n\nimpl Kind {\n    // https://doc.rust-lang.org/reference/items/enumerations.html#pointer-casting\n    pub fn discriminant(&self) -> u16 {\n        unsafe { *(self as *const Self as *const u16) }\n    }\n\n    pub fn code(&self) -> String {\n        format!(\"HAX{:0>4}\", self.discriminant())\n    }\n}\n"
  },
  {
    "path": "hax-types/src/diagnostics/report.rs",
    "content": "use super::Diagnostics;\nuse annotate_snippets::*;\nuse miette::SourceOffset;\nuse std::collections::{HashMap, HashSet};\nuse std::path::{Path, PathBuf};\nuse std::rc::Rc;\n\n/// A context for reporting diagnostics\n#[derive(Clone, Debug, Default)]\npub struct ReportCtx {\n    files: HashMap<PathBuf, Rc<String>>,\n    seen: HashSet<Diagnostics>,\n}\n\n/// Translates a line and column position into an absolute offset\nfn compute_offset(src: &str, line: usize, col: usize) -> usize {\n    SourceOffset::from_location(src, line, col).offset() + 1\n}\n\nimpl ReportCtx {\n    /// Read the contents of a file. The result is cached.\n    fn file_contents<'a>(&'a mut self, path: PathBuf) -> Rc<String> {\n        self.files\n            .entry(path.clone())\n            .or_insert_with(|| {\n                let s =\n                    std::fs::read_to_string(&path).expect(&format!(\"Unable to read file {path:?}\"));\n                Rc::new(s)\n            })\n            .clone()\n    }\n\n    /// Check if `diagnostic` have been seen already, and mark `diagnostic` as seen.\n    pub fn seen_already(&mut self, diagnostic: Diagnostics) -> bool {\n        !self.seen.insert(diagnostic)\n    }\n}\n\nimpl Diagnostics {\n    /// Converts a `Diagnostics` to a `annotate_snippets::Message`,\n    /// which can be accessed via `then`, a callback function.\n    pub fn with_message<R, F: for<'a> FnMut(Message<'a>) -> R>(\n        &self,\n        report_ctx: &mut ReportCtx,\n        working_dir: Option<&Path>,\n        level: Level,\n        mut then: F,\n    ) -> R {\n        let mut snippets_data = vec![];\n\n        for span in &self.span {\n            if let Some(path) = span.filename.to_path() {\n                let source = {\n                    let mut path = path.to_path_buf();\n                    if let Some(working_dir) = working_dir\n                        && path.is_relative()\n                    {\n                        path = working_dir.join(&path);\n   
                 };\n                    report_ctx.file_contents(path)\n                };\n                let start = compute_offset(&source, span.lo.line, span.lo.col);\n                let end = compute_offset(&source, span.hi.line, span.hi.col);\n                let origin = format!(\"{}\", path.display());\n                snippets_data.push((source, origin, start..end));\n            };\n        }\n\n        let title = format!(\"[{}] {self}\", self.kind.code());\n        let message =\n            level\n                .title(&title)\n                .snippets(snippets_data.iter().map(|(source, origin, range)| {\n                    Snippet::source(source)\n                        .line_start(1)\n                        .origin(&origin)\n                        .fold(true)\n                        .annotation(level.span(range.clone()))\n                }));\n\n        then(message)\n    }\n}\n"
  },
  {
    "path": "hax-types/src/driver_api.rs",
    "content": "use crate::prelude::*;\n\npub const HAX_DRIVER_STDERR_PREFIX: &str = \"::hax-driver::\";\n\n#[derive_group(Serializers)]\n#[derive(Debug, Clone)]\npub struct EmitHaxMetaMessage {\n    pub working_dir: Option<PathBuf>,\n    pub manifest_dir: Option<PathBuf>,\n    pub path: PathBuf,\n}\n#[derive_group(Serializers)]\n#[derive(Debug, Clone)]\npub enum HaxDriverMessage {\n    EmitHaxMeta(EmitHaxMetaMessage),\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Debug, Clone)]\npub enum Items<Body: hax_frontend_exporter::IsBody> {\n    FullDef(Vec<hax_frontend_exporter::FullDef<Body>>),\n    Legacy(Vec<hax_frontend_exporter::Item<Body>>),\n}\n\n#[derive_group(Serializers)]\n#[derive(Debug, Clone)]\npub struct HaxMeta<Body: hax_frontend_exporter::IsBody> {\n    pub crate_name: String,\n    pub cg_metadata: String,\n    pub externs: Vec<PathBuf>,\n    pub items: Items<Body>,\n    pub impl_infos: Vec<(\n        hax_frontend_exporter::DefId,\n        hax_frontend_exporter::ImplInfos,\n    )>,\n    pub def_ids: Vec<hax_frontend_exporter::DefId>,\n    pub comments: Vec<(hax_frontend_exporter::Span, String)>,\n    pub hax_version: String,\n}\n\nuse hax_frontend_exporter::id_table;\n\nimpl<Body: hax_frontend_exporter::IsBody> HaxMeta<Body>\nwhere\n    Body: serde::Serialize + for<'de> serde::Deserialize<'de>,\n{\n    #[tracing::instrument(level = \"trace\", skip(self, write, id_table))]\n    pub fn write(self, write: &mut impl std::io::Write, id_table: id_table::Table) {\n        let mut write = zstd::stream::write::Encoder::new(write, 0).unwrap();\n\n        id_table::WithTable::run(id_table, self, |with_table| {\n            serde_brief::to_writer(with_table, &mut write).unwrap();\n            write.finish().unwrap();\n        })\n    }\n    #[tracing::instrument(level = \"trace\", skip(reader))]\n    pub fn read(reader: impl std::io::Read) -> (Self, id_table::Table) {\n        let reader = zstd::stream::read::Decoder::new(reader).unwrap();\n        let 
reader = std::io::BufReader::new(reader);\n        let haxmeta = id_table::WithTable::<HaxMeta<Body>>::destruct(\n            serde_brief::from_reader(reader).unwrap(),\n        );\n        if haxmeta.0.hax_version != crate::HAX_VERSION {\n            let version = haxmeta.0.hax_version;\n            let expected = crate::HAX_VERSION;\n            panic!(\n                \"An invariant was broken: `*.haxmeta` was produced by hax version `{version}` while the current version of hax is `{expected}`. Please report this to https://github.com/hacspec/hax/issues.\"\n            );\n        };\n        haxmeta\n    }\n}\n\n#[macro_export]\nmacro_rules! with_kind_type {\n    ($kind:expr, <$t:ident>|| $body:expr) => {{\n        mod from {\n            pub use hax_types::cli_options::ExportBodyKind::{MirBuilt as MB, Thir as T};\n        }\n        mod to {\n            pub type T = hax_frontend_exporter::ThirBody;\n            pub type MB = hax_frontend_exporter::MirBody<hax_frontend_exporter::mir_kinds::Built>;\n        }\n        let mut kind: Vec<::hax_types::cli_options::ExportBodyKind> = $kind;\n        kind.sort();\n        kind.dedup();\n        match kind.as_slice() {\n            [from::MB] => {\n                type $t = to::MB;\n                $body\n            }\n            [from::T] => {\n                type $t = to::T;\n                $body\n            }\n            [from::T, from::MB] => {\n                type $t = (to::MB, to::T);\n                $body\n            }\n            [] => {\n                type $t = ();\n                $body\n            }\n            _ => panic!(\"Unsupported kind {:#?}\", kind),\n        }\n    }};\n}\npub use with_kind_type;\n"
  },
  {
    "path": "hax-types/src/engine_api.rs",
    "content": "use crate::cli_options::*;\nuse crate::prelude::*;\n\ntype ThirBody = hax_frontend_exporter::ThirBody;\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Debug, Clone)]\npub struct EngineOptions {\n    pub hax_version: String,\n    pub backend: BackendOptions<()>,\n    pub input: crate::driver_api::Items<ThirBody>,\n    pub impl_infos: Vec<(\n        hax_frontend_exporter::DefId,\n        hax_frontend_exporter::ImplInfos,\n    )>,\n}\n\n#[derive_group(Serializers)]\n#[allow(non_snake_case)]\n#[derive(JsonSchema, Debug, Clone)]\npub struct SourceMap {\n    pub mappings: String,\n    pub sourceRoot: String,\n    pub sources: Vec<String>,\n    pub sourcesContent: Vec<Option<String>>,\n    pub names: Vec<String>,\n    pub version: u8,\n    pub file: String,\n}\n\nimpl SourceMap {\n    pub fn inline_sources_content(&mut self) {\n        self.sourcesContent = vec![];\n        for source in &self.sources {\n            let path = if self.sourceRoot.is_empty() {\n                source.clone()\n            } else {\n                format!(\"{}/{}\", &self.sourceRoot, source)\n            };\n            let contents = Some(std::fs::read_to_string(path).unwrap());\n            self.sourcesContent.push(contents);\n        }\n    }\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Debug, Clone)]\npub struct File {\n    pub path: String,\n    pub contents: String,\n    pub sourcemap: Option<SourceMap>,\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Debug, Clone)]\npub struct Output {\n    pub diagnostics: Vec<crate::diagnostics::Diagnostics>,\n    pub files: Vec<File>,\n    pub debug_json: Vec<String>,\n}\n\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Debug, Clone)]\npub struct ProfilingData {\n    /// What context are we profiling?\n    pub context: String,\n    /// How long this took?\n    pub time_ns: u64,\n    /// How much memory this took? 
This is using OCaml's\n    /// `Gc.minor_words`, and is probably not very precise.\n    pub memory: u64,\n    /// How many things were processed? (often, this is the number of\n    /// items a phase processes)\n    pub quantity: u32,\n    /// Did the action errored? This is important since a failed\n    /// action might have exited very early, making the numbers\n    /// unusable.\n    pub errored: bool,\n}\n\npub mod protocol {\n    use super::*;\n\n    #[derive_group(Serializers)]\n    #[derive(JsonSchema, Debug, Clone)]\n    pub enum FromEngine {\n        Diagnostic(crate::diagnostics::Diagnostics),\n        File(File),\n        PrettyPrintDiagnostic(crate::diagnostics::Diagnostics),\n        PrettyPrintRust(String),\n        DebugString(String),\n        ProfilingData(ProfilingData),\n        /// Declares a list of items that will be processed by the engine\n        ItemProcessed(Vec<hax_frontend_exporter::DefId>),\n        Exit,\n        Ping,\n    }\n    #[derive_group(Serializers)]\n    #[derive(JsonSchema, Debug, Clone)]\n    pub enum ToEngine {\n        PrettyPrintedDiagnostic(String),\n        PrettyPrintedRust(Result<String, String>),\n        Pong,\n    }\n\n    impl FromEngine {\n        pub fn requires_response(&self) -> bool {\n            matches!(\n                self,\n                Self::PrettyPrintRust { .. }\n                    | Self::Ping { .. }\n                    | Self::PrettyPrintDiagnostic { .. 
}\n            )\n        }\n    }\n}\n\n// This is located here for dependency reason, but this is not related\n// to the engine (yet?).\n#[derive_group(Serializers)]\n#[derive(JsonSchema, Debug, Clone)]\npub struct WithDefIds<Body: hax_frontend_exporter::IsBody> {\n    pub def_ids: Vec<hax_frontend_exporter::DefId>,\n    pub impl_infos: Vec<(\n        hax_frontend_exporter::DefId,\n        hax_frontend_exporter::ImplInfos,\n    )>,\n    pub items: crate::driver_api::Items<Body>,\n    pub comments: Vec<(hax_frontend_exporter::Span, String)>,\n}\n"
  },
  {
    "path": "hax-types/src/lib.rs",
    "content": "#![cfg_attr(feature = \"rustc\", feature(rustc_private))]\n//! This crate contains the type definitions that are used to communicate between:\n//!  - the command line (the `cargo-hax` binary);\n//!  - the custom rustc driver;\n//!  - the hax engine (the `hax-engine` binary).\n//!  \n//! Those three component send and receive messages in JSON or CBOR on\n//! stdin and stdout.\n\npub(crate) mod prelude;\n\n/// The CLI options for `cargo-hax`. The types defines in this module\n/// are also used by the driver and the engine.\npub mod cli_options;\n\n/// Type to represent errors, mainly in `hax-engine`. The engine\n/// doesn't do any reporting itself: it only sends JSON to its stdout,\n/// and `cargo-hax` takes care of reporting everything in a rustc\n/// style.\npub mod diagnostics;\n\n/// The types used to communicate between `cargo-hax` and the custom\n/// driver.\npub mod driver_api;\n\n/// The types used to communicate between `cargo-hax` and\n/// `hax-engine`.\npub mod engine_api;\n\n/// Compile-time version of hax\npub const HAX_VERSION: &str = env!(\"HAX_VERSION\");\n"
  },
  {
    "path": "hax-types/src/prelude.rs",
    "content": "pub(crate) use hax_adt_into::derive_group;\npub use schemars::JsonSchema;\npub use std::path::{Path, PathBuf};\n"
  },
  {
    "path": "justfile",
    "content": "@_default:\n  just --list\n\n# Build Rust and OCaml parts and install binaries in PATH. To build\n# only OCaml parts or only Rust parts, set target to `rust` or\n# `ocaml`.\n@build target='rust+ocaml':\n  ./.utils/rebuild.sh {{target}}\n\nalias b := build\n\n# alias for `build rust`\n@rust:\n  just build rust\n\n# alias for `build ocaml`\n@ocaml:\n  just build ocaml\n\n# `cargo expand` a crate, but sets flags and crate attributes so that the expansion is exactly what hax receives. This is useful to debug hax macros.\n[no-cd]\nexpand *FLAGS:\n  RUSTFLAGS='-Zcrate-attr=register_tool(_hax) -Zcrate-attr=feature(register_tool) --cfg hax_compilation --cfg _hax --cfg hax --cfg hax_backend_fstar --cfg hax' \\\n    cargo \\\n    $([[ \"$(cargo --version)\" == *nightly* ]] || echo \"+nightly\") \\\n    expand {{FLAGS}}\n\n# Show debug JSON emitted by the Rust engine\n@debug-json N: (_ensure_command_in_path \"jless\" \"jless (https://jless.io/)\") (_ensure_command_in_path \"jq\" \"jq (https://jqlang.github.io/jq/)\")\n  cat /tmp/hax-ast-debug.json | jq -s '.[{{N}}]' | jless\n\n# Show the generated module `concrete_ident_generated.ml`, that contains all the Rust names the engine knows about. Those names are declared in the `./engine/names` crate.\n@list-names:\n  hax-engine-names-extract | sed '/include .val/,$d' | just _pager\n\n# Show the Rust to OCaml generated types available to the engine.\n@list-types:\n  just _ensure_command_in_path ocamlformat ocamlformat\n  cd engine && dune describe pp lib/types.ml \\\n    | sed -e '1,/open ParseError/ d' \\\n    | sed '/let rec pp_/,$d' \\\n    | ocamlformat --impl - \\\n    | just _pager\n\n# Show the OCaml module `Generated_generic_printer_base`\n@show-generated-printer-ml:\n  just _ensure_command_in_path ocamlformat ocamlformat\n  cd engine && dune describe pp lib/generated_generic_printer_base.ml \\\n    | ocamlformat --impl - \\\n    | just _pager\n\n# Regenerate names in the Rust engine. 
Writes to `rust-engine/src/names/generated.rs`.\nregenerate-names:\n  #!/usr/bin/env bash\n  OUTPUT_FILE=rust-engine/src/ast/identifiers/global_id/generated.rs\n  cargo hax -C --manifest-path engine/names/Cargo.toml \\; into --output-dir $(dirname -- $OUTPUT_FILE) generate-rust-engine-names\n  rustfmt \"$OUTPUT_FILE\"\n\n# Format all the code\nfmt:\n  cargo fmt\n  cd engine && dune fmt\n\n# Run hax tests: each test crate has a snapshot, so that we track changes in extracted code. If a snapshot changed, please review them with `just test-review`.\ntest *FLAGS:\n  cargo test --test toolchain {{FLAGS}}\n\n_test *FLAGS:\n  CARGO_TESTS_ASSUME_BUILT=1 cargo test --test toolchain {{FLAGS}}\n\n# Review snapshots\ntest-review: (_ensure_command_in_path \"cargo-insta\" \"Insta (https://insta.rs)\")\n  cargo insta review\n\n# Serve documentation\ndocs: (_ensure_command_in_path \"mkdocs\" \"mkdocs (https://www.mkdocs.org/)\")\n  mkdocs serve\n\n# Check the coherency between issues labeled `marked-unimplemented` on GitHub and issues mentionned in the engine in the `Unimplemented {issue_id: ...}` errors.\n@check-issues:\n  just _ensure_command_in_path jq \"jq (https://jqlang.github.io/jq/)\"\n  just _ensure_command_in_path gh \"GitHub CLI (https://cli.github.com/)\"\n  just _ensure_command_in_path rg \"ripgrep (https://github.com/BurntSushi/ripgrep)\"\n  just _ensure_command_in_path sd \"sd (https://github.com/chmln/sd)\"\n  diff -U0 \\\n      <(gh issue -R hacspec/hax list --label 'marked-unimplemented' --json number,closed -L 200 \\\n           | jq '.[] | select(.closed | not) | .number' | sort -u) \\\n      <(rg 'issue_id:(\\d+)' -Ior '$1' | sort -u) \\\n      | rg '^[+-]\\d' \\\n      | sd '[-](\\d+)' '#$1\\t is labeled `marked-unimplemented`, but was not found in the code' \\\n      | sd '[+](\\d+)' '#$1\\t is *not* labeled `marked-unimplemented` or is closed'\n\n# Check that the licenses of every crate and every package are compliant with `deny.toml`\ncheck-licenses:\n  
#!/usr/bin/env bash\n  just _ensure_command_in_path cargo-deny \"cargo-deny (https://embarkstudios.github.io/cargo-deny/)\"\n  just _ensure_command_in_path toml2json \"toml2json (https://github.com/woodruffw/toml2json)\"\n  echo \"> Check licenses for Rust\"\n  cargo deny check licenses\n  cd engine\n  echo \"> Check licenses for OCaml\"\n  # initialize opam if needed\n  opam env >& /dev/null || opam init --no\n  # pin package `hax-engine` if needed\n  opam list --required-by=hax-engine --column=name,license: -s >& /dev/null || opam pin . --yes\n  # Check that every pacakge matches licenses of `deny.toml`\n  if opam list --required-by=hax-engine --column=name,license: -s \\\n     | grep -Pvi $(toml2json ../deny.toml| jq '.licenses.allow | join(\"|\")'); then\n     echo \"Some licenses were non compliant to our policy (see `deny.toml`)\"\n  else\n    echo \"licenses ok\"\n  fi\n\n_ensure_command_in_path BINARY NAME:\n  #!/usr/bin/env bash\n  command -v {{BINARY}} &> /dev/null || {\n     >&2 echo -e \"\\033[0;31mSorry, the binary \\033[1m{{BINARY}}\\033[0m\\033[0;31m is required for this command.\\033[0m\"\n     >&2 echo -e \"  \\033[0;31m→ please install \\033[1m{{NAME}}\\033[0m\"\n     >&2 echo \"\"\n     exit 1\n  }\n\n_pager:\n  #!/usr/bin/env bash\n  if command -v bat &> /dev/null; then\n      bat -l ml\n  else\n      less\n  fi\n\n# Serve the book\n[private]\n@book:\n  echo \"We moved out from mdbook: please run 'just docs'\"\n  exit 1\n\n# Runs hax twice: once with the Rust import thir, once with the OCaml one.\n# Then it compares both.\ndiff-thir-importers DIR:\n  #!/usr/bin/env bash\n  # Ensures hax is built\n  just b\n\n  # Utils\n  function readJSON() { cat proofs/debugger/extraction/ast.json; }\n  BASE=\"$PWD\"\n  OUT=\"$BASE/diff-thir-importers\"\n  # Remove previous results (if any)\n  rm -rf \"$OUT\"\n\n  cd {{DIR}}\n  cargo hax json -o old-thir.json\n  cargo hax --experimental-full-def json -o thir.json\n  cargo hax --experimental-full-def into 
debugger\n  readJSON > rust-import-thir-ast.json\n  cargo hax                         into debugger\n  readJSON > ocaml-import-thir-ast.json\n\n  mkdir \"$OUT\"\n  mv thir.json old-thir.json *ast.json \"$OUT\"\n  cd \"$OUT\"\n  diff ocaml-import-thir-ast.json rust-import-thir-ast.json > diff.json\n"
  },
  {
    "path": "mkdocs.yml",
    "content": "site_name: hax\nrepo_url: https://github.com/cryspen/hax\nsite_url: https://hax.cryspen.com\nsite_author: Cryspen\nrepo_name: GitHub\ntheme:\n  name: material\n  logo: static/img/logo.png\n  favicon: static/img/favicon.png\n  custom_dir: docs/overrides\n  palette:\n    primary: white\n  features:\n    - content.code.copy\n    - content.code.select\n    - content.code.annotate\n    - content.footnote.tooltips\n    - navigation.sections\n    - navigation.path\n    - navigation.tabs\n    - header.autohide\n    - navigation.instant\n    - navigation.indexes\n  icon:\n    repo: fontawesome/brands/github\n    tag:\n      draft: material/sticker-outline\n      accepted: material/sticker-check-outline\n      proposed: material/sticker-plus-outline\n      deprecated: material/sticker-minus-outline\n      superseded: material/sticker-alert-outline\n      rejected: material/sticker-remove-outline\nextra_javascript:\n  - javascripts/hax_playground.js\n  - javascripts/ansi_up.js\n  - javascripts/fstar.js\n  - javascripts/lz-string.js\nextra_css:\n  - stylesheets/tags-colors.css\n  - stylesheets/hax_playground.css\n  - stylesheets/logo.css\nextra:\n  tags:\n    Draft: draft\n    Accepted: accepted\n    Proposed: proposed\n    Superseded: superseded\n    Deprecated: deprecated\n    Rejected: rejected\nmarkdown_extensions:\n  - attr_list\n  - tables\n  - md_in_html\n  - admonition\n  - footnotes\n  - pymdownx.blocks.html\n  - pymdownx.details\n  - pymdownx.superfences\n  - pymdownx.inlinehilite\n  - pymdownx.snippets\n  - pymdownx.keys\n  - pymdownx.caret\n  - pymdownx.mark\n  - pymdownx.tilde\n  - pymdownx.highlight:\n      anchor_linenums: true\n      line_spans: __span\n      pygments_lang_class: true\n  - pymdownx.arithmatex:\n      generic: true\n  - pymdownx.tasklist:\n      custom_checkbox: true\n  - pymdownx.emoji:\n      emoji_index: !!python/name:material.extensions.emoji.twemoji\n      emoji_generator: !!python/name:material.extensions.emoji.to_svg\n  - 
pymdownx.superfences:\n      custom_fences:\n        - name: mermaid\n          class: mermaid\n          format: !!python/name:pymdownx.superfences.fence_code_format\nplugins:\n  - glightbox\n  - search\n  - blog\n  - awesome-nav\n  - mkdocs-nav-weight\n  - tags:\n      tags_file: RFCs/index.md\n"
  },
  {
    "path": "rust-engine/Cargo.toml",
    "content": "[package]\nname = \"hax-rust-engine\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\ndescription = \"The engine of the hax toolchain.\"\n\n[dependencies]\nhax-frontend-exporter.workspace = true\nhax-rust-engine-macros.workspace = true\nhax-lib-macros-types = { workspace = true, features = [\"schemars\"] }\nserde_json = { workspace = true, features = [\"unbounded_depth\"] }\nserde = { workspace = true, features = [\"derive\"] }\nhax-types.workspace = true\nschemars.workspace = true\nitertools.workspace = true\nserde_stacker = \"0.1.12\"\npretty = \"0.12\"\nderive_generic_visitor = \"0.3.0\"\npastey = \"0.1.0\"\ncamino = \"1.1.11\"\naxum = { version = \"0.8.7\", features = [\"macros\"] }\ntokio = \"1.48.0\"\n\n[dev-dependencies]\nhax-lib.workspace = true\n"
  },
  {
    "path": "rust-engine/README.md",
    "content": "# Hax Rust Engine\n\nThis crate implements an alternative engine for Rust: the main one is implemented in OCaml and is located in `/engine`.\nThis Rust engine is designed so that it can re-use some bits of the OCaml engine.\n\nThe plan is to slowly deprecate the OCaml engine, rewrite most of its components and drop it.\n\n## Usage\nThe Rust engine supports only one backend for now: `Lean`.\n\nTo run it, use the following command:\n```bash\ncargo hax into lean\n```\n"
  },
  {
    "path": "rust-engine/macros/Cargo.toml",
    "content": "[package]\nname = \"hax-rust-engine-macros\"\ndescription = \"This crate provides helper procedural macros for the `hax-rust-engine`\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\n\n[lib]\nproc-macro = true\n\n[dependencies]\nsyn = { version = \"2.0\", features = [\"full\", \"visit-mut\"] }\nproc-macro2.workspace = true\nquote.workspace = true\nproc-macro-crate = \"3.3.0\"\n"
  },
  {
    "path": "rust-engine/macros/src/lib.rs",
    "content": "//! Helper crate providing procedural macros for the Rust engine of hax.\n//!\n//! Currently it provides the following.\n//!  - Macros for deriving groups of traits.\n//!    Most of the type from the AST have the same bounds, so that helps deduplicating a lot.\n//!    Also, the fact those derive groups are named is helpful: for instance for code generation\n//!    a simple `use derive_group_for_ast_base as derive_group_for_ast` can change what is to be\n//!    derived without any attribute manipulation.\n\nuse proc_macro::TokenStream;\nuse proc_macro2::{Ident, Span};\nuse quote::quote;\nuse syn::{Token, parse_macro_input, visit_mut::VisitMut};\nuse utils::*;\n\nmod partial_application;\nmod replace;\nmod struct_fields;\n\n/// Adds a new field with a fresh name to an existing `struct` type definition.\n/// The new field contains error handling and span information to be used with a\n/// visitor. This macro will also derive implementations of\n/// `hax_rust_engine::ast::visitors::wrappers::VisitorWithErrors` and\n/// `hax_rust_engine::ast::HasSpan` for the struct.\n#[proc_macro_attribute]\npub fn setup_error_handling_struct(_attr: TokenStream, item: TokenStream) -> TokenStream {\n    struct_fields::setup_error_handling_struct(_attr, item)\n}\n\n/// Adds a new field with a fresh name to an existing `struct` type definition.\n/// The new field contains span information to be used with a\n/// printer. 
This macro will also derive implementations of\n/// `hax_rust_engine::printer::pretty_ast::HasContextualSpan` for the struct.\n#[proc_macro_attribute]\npub fn setup_printer_struct(_attr: TokenStream, item: TokenStream) -> TokenStream {\n    struct_fields::setup_printer_struct(_attr, item)\n}\n\nmod utils {\n    use super::*;\n\n    /// Get the name of this macro crate (`hax_rust_engine_macros`)\n    pub(crate) fn crate_name() -> Ident {\n        let krate = module_path!().split(\"::\").next().unwrap();\n        Ident::new(krate, Span::call_site())\n    }\n\n    /// Prepends a `proc_macro2::TokenStream` to a `TokenStream`\n    pub(crate) fn prepend(item: TokenStream, prefix: proc_macro2::TokenStream) -> TokenStream {\n        let item: proc_macro2::TokenStream = item.into();\n        quote! {\n            #prefix\n            #item\n        }\n        .into()\n    }\n\n    /// Add a derive attribute to `item`\n    pub(crate) fn add_derive(item: TokenStream, payload: proc_macro2::TokenStream) -> TokenStream {\n        prepend(item, quote! {#[derive(#payload)]})\n    }\n\n    /// Find the name of the crate `hax-rust-engine`. 
This can be either the\n    /// keyword `crate` or the ident `hax_rust_engine`, depending on the context\n    /// in which the macros using this function are called.\n    pub(crate) fn rust_engine_krate_name() -> proc_macro2::TokenStream {\n        use proc_macro_crate::{FoundCrate, crate_name};\n        match crate_name(\"hax-rust-engine\").unwrap() {\n            FoundCrate::Itself => quote!(crate),\n            FoundCrate::Name(name) => {\n                let ident = Ident::new(&name, Span::call_site());\n                quote!( #ident )\n            }\n        }\n    }\n}\n\n/// Derive the common derives for the hax engine AST.\n/// This is an equivalent to `derive_group_for_ast_serialization` and `derive_group_for_ast_base`.\n#[proc_macro_attribute]\npub fn derive_group_for_ast(_attr: TokenStream, item: TokenStream) -> TokenStream {\n    let krate = crate_name();\n    prepend(\n        item,\n        quote! {\n            #[#krate::derive_group_for_ast_base]\n            #[#krate::derive_group_for_ast_serialization]\n        },\n    )\n}\n\n/// Derive the necessary (de)serialization related traits for nodes in the AST.\n#[proc_macro_attribute]\npub fn derive_group_for_ast_serialization(_attr: TokenStream, item: TokenStream) -> TokenStream {\n    add_derive(\n        item,\n        quote! {::serde::Deserialize, ::serde::Serialize, ::schemars::JsonSchema},\n    )\n}\n\n/// Derive the basic necessary traits for nodes in the AST.\n#[proc_macro_attribute]\npub fn derive_group_for_ast_base(_attr: TokenStream, item: TokenStream) -> TokenStream {\n    add_derive(\n        item,\n        quote! 
{Debug, Clone, Hash, Eq, PartialEq, PartialOrd, Ord, derive_generic_visitor::Drive, derive_generic_visitor::DriveMut},\n    )\n}\n\n#[proc_macro_attribute]\n/// Replaces all occurrences of an identifier within the attached item.\n///\n/// For example, `#[replace(Name => A, B, C)]` will replace `Name` by `A, B, C`\n/// in the item the proc-macro is applied on.\n///\n/// The special case `#[replace(Name => include(VisitableAstNodes))]` will\n/// expand to a list of visitable AST nodes. This is useful in practice, as this\n/// list is often repeated.\npub fn replace(attr: TokenStream, item: TokenStream) -> TokenStream {\n    replace::replace(attr, item)\n}\n\n/// An attribute procedural macro that creates a new `macro_rules!` definition\n/// by partially applying an existing macro or function with a given token stream.\n///\n/// Usage:\n/// ```rust,ignore\n/// #[partial_apply(original_macro!, my_expression,)]\n/// macro_rules! new_proxy_macro {\n///     // This content is ignored and replaced by the proc macro.\n/// }\n/// ```\n#[proc_macro_attribute]\npub fn partial_apply(attr: TokenStream, item: TokenStream) -> TokenStream {\n    partial_application::partial_apply(attr, item)\n}\n\n/// Prepend the body any associated function with the given attribute payload.\n/// ```rust,ignore\n/// #[prepend_associated_functions_with(println!(\"self is {self}\");)]\n/// impl Foo {\n///   fn f(self) {}\n/// }\n/// ```\n///\n/// Expands to:\n/// ```rust,ignore\n/// impl Foo {\n///   fn f(self) {\n///     println!(\"self is {self}\");\n///   }\n/// }\n/// ```\n#[proc_macro_attribute]\npub fn prepend_associated_functions_with(attr: TokenStream, item: TokenStream) -> TokenStream {\n    struct Visitor {\n        prefix: syn::Expr,\n    }\n    impl VisitMut for Visitor {\n        fn visit_item_impl_mut(&mut self, impl_block: &mut syn::ItemImpl) {\n            for item in &mut impl_block.items {\n                let syn::ImplItem::Fn(impl_item_fn) = item else {\n                    
continue;\n                };\n                impl_item_fn.block.stmts.insert(\n                    0,\n                    syn::Stmt::Expr(self.prefix.clone(), Some(Token![;](Span::mixed_site()))),\n                );\n            }\n        }\n    }\n    let mut item: syn::Item = parse_macro_input!(item);\n    let prefix = parse_macro_input!(attr);\n    Visitor { prefix }.visit_item_mut(&mut item);\n    quote! {#item}.into()\n}\n"
  },
  {
    "path": "rust-engine/macros/src/partial_application.rs",
    "content": "use proc_macro::TokenStream;\nuse quote::quote;\nuse syn::{ExprPath, Token, parse_macro_input};\n\nstruct PartialApplyArgs {\n    ident: ExprPath,\n    bang: Option<Token![!]>,\n    prefix: proc_macro2::TokenStream,\n}\n\nimpl syn::parse::Parse for PartialApplyArgs {\n    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {\n        let ident = input.parse()?;\n        let bang = input.parse()?;\n        input.parse::<syn::Token![,]>()?;\n        Ok(PartialApplyArgs {\n            ident,\n            bang,\n            prefix: input.parse()?,\n        })\n    }\n}\n\n/// See [`super::partial_apply`].\npub(crate) fn partial_apply(attr: TokenStream, item: TokenStream) -> TokenStream {\n    let PartialApplyArgs {\n        ident,\n        bang,\n        prefix,\n    } = parse_macro_input!(attr as PartialApplyArgs);\n    let input_macro = parse_macro_input!(item as syn::ItemMacro);\n    let macro_name = input_macro.ident;\n    let attrs = input_macro.attrs;\n    quote! {\n        #(#attrs)*\n        macro_rules! #macro_name {\n            ($($rest:tt)*) => {\n                #ident #bang(#prefix $($rest)*)\n            };\n        }\n    }\n    .into()\n}\n"
  },
  {
    "path": "rust-engine/macros/src/replace.rs",
    "content": "extern crate proc_macro;\n\nuse proc_macro::TokenStream;\nuse proc_macro2::{Group, TokenStream as TokenStream2, TokenTree};\nuse quote::quote;\nuse syn::parse::{Parse, ParseStream, Result};\nuse syn::{Ident, Token, parse_macro_input};\n\nmod kw {\n    syn::custom_keyword!(include);\n}\n\nfn replace_in_stream(\n    stream: TokenStream2,\n    target: &Ident,\n    replacement: &TokenStream2,\n) -> TokenStream2 {\n    stream\n        .into_iter()\n        .flat_map(|tt| match tt {\n            TokenTree::Ident(ident) if ident == *target => {\n                replacement.clone().into_iter().collect()\n            }\n            TokenTree::Group(group) => {\n                let new_stream = replace_in_stream(group.stream(), target, replacement);\n                let mut new_group = Group::new(group.delimiter(), new_stream);\n                new_group.set_span(group.span());\n                vec![TokenTree::Group(new_group)]\n            }\n            other => vec![other],\n        })\n        .collect()\n}\n\n// The arguments that the `replace` proc-macro can take\nstruct AttributeArgs {\n    target: Ident,\n    replacement: TokenStream2,\n}\n\nimpl Parse for AttributeArgs {\n    fn parse(input: ParseStream) -> Result<Self> {\n        let target: Ident = input.parse()?;\n        input.parse::<Token![=>]>()?;\n        let include_clause = |input: ParseStream| -> Result<Ident> {\n            input.parse::<kw::include>()?;\n            let content;\n            syn::parenthesized!(content in input);\n            content.parse()\n        }(input)\n        .ok();\n        Ok(AttributeArgs {\n            target,\n            replacement: match include_clause {\n                Some(clause) => match clause.to_string().as_str() {\n                    \"VisitableAstNodes\" => quote! 
{\n                        Expr, Pat, ExprKind, PatKind, Ty, TyKind, Metadata, Literal,\n                        LocalId, Lhs, Symbol, LoopKind, SafetyKind, Quote,\n                        SpannedTy, BindingMode, PrimitiveTy, Region, ImplExpr,\n                        IntKind, FloatKind, GenericValue, Arm, LoopState, ControlFlowKind,\n                        DynTraitGoal, Attribute, QuoteContent, BorrowKind,\n                        TraitGoal, ImplExprKind, IntSize, Signedness, Guard, AttributeKind,\n                        GuardKind, ImplItem, ImplItemKind, TraitItem, TraitItemKind,\n                        ItemQuoteOrigin, ItemQuoteOriginKind, ItemQuoteOriginPosition, GenericParamKind, ImplIdent,\n                        ProjectionPredicate, GenericParam, Generics, DocCommentKind, Param, Variant, ItemKind, Item,\n                        GenericConstraint, ErrorNode, Module,\n\n                        ResugaredExprKind, ResugaredTyKind, ResugaredPatKind,\n                        ResugaredImplItemKind, ResugaredTraitItemKind, ResugaredItemKind\n                    }.into(),\n                    _ => {\n                        return Err(syn::Error::new_spanned(\n                            clause,\n                            format!(\"This is not a recognized include pragma.\"),\n                        ));\n                    }\n                },\n                None => input.parse::<TokenStream2>()?,\n            },\n        })\n    }\n}\n\npub fn replace(attr: TokenStream, item: TokenStream) -> TokenStream {\n    let args = parse_macro_input!(attr as AttributeArgs);\n    let item_stream: TokenStream2 = item.into();\n    replace_in_stream(item_stream, &args.target, &args.replacement).into()\n}\n"
  },
  {
    "path": "rust-engine/macros/src/struct_fields.rs",
    "content": "use crate::utils::*;\nuse proc_macro::TokenStream;\nuse proc_macro2::{Group, Ident, Span};\nuse quote::{ToTokens, quote};\nuse syn::{\n    Field, FieldsUnnamed, Token, parse_macro_input, parse_quote, punctuated::Punctuated,\n    token::Paren,\n};\n\n/// Adds a new field `extra_field_name` of type `extra_field_type` to an existing `struct` type definition.\n/// `extra_field_name` is just a name hint, if a field with this name exists already, a different name will be picked.\n/// Returns the actual name or `_N` (in the case of a tuple struct).\nfn add_field_to_item_struct(\n    item: &mut syn::ItemStruct,\n    extra_field_name: &str,\n    extra_field_type: syn::Type,\n) -> proc_macro2::TokenStream {\n    // Deal with the case of unit structs.\n    if let fields @ syn::Fields::Unit = &mut item.fields {\n        let span = Group::new(proc_macro2::Delimiter::Brace, fields.to_token_stream()).delim_span();\n        *fields = syn::Fields::Unnamed(FieldsUnnamed {\n            paren_token: Paren { span },\n            unnamed: Punctuated::default(),\n        })\n    }\n    /// Computes a fresh identifier given a list of existing identifiers.\n    fn fresh_ident(base: &str, existing: &[Ident]) -> Ident {\n        let existing: std::collections::HashSet<_> =\n            existing.iter().map(|id| id.to_string()).collect();\n\n        (0..)\n            .map(|i| {\n                if i == 0 {\n                    base.to_string()\n                } else {\n                    format!(\"{}{}\", base, i)\n                }\n            })\n            .find(|name| !existing.contains(name))\n            .map(|name| Ident::new(&name, Span::call_site()))\n            .expect(\"should always find a fresh identifier\")\n    }\n    // Collect fields, disregarding their kind (are they named or not)\n    let (fields, named) = match &mut item.fields {\n        syn::Fields::Named(fields_named) => (&mut fields_named.named, true),\n        syn::Fields::Unnamed(fields_unnamed) 
=> (&mut fields_unnamed.unnamed, false),\n        syn::Fields::Unit => unreachable!(\"Unit structs were dealt with.\"),\n    };\n\n    let existing_names = fields\n        .iter()\n        .flat_map(|f| &f.ident)\n        .cloned()\n        .collect::<Vec<_>>();\n\n    let (extra_field_ident, extra_field_ident_ts) = if named {\n        let ident = fresh_ident(extra_field_name, &existing_names);\n        (Some(ident.clone()), ident.to_token_stream())\n    } else {\n        (\n            None,\n            syn::LitInt::new(&format!(\"{}\", fields.len()), Span::call_site()).to_token_stream(),\n        )\n    };\n\n    fields.push(Field {\n        attrs: vec![],\n        vis: syn::Visibility::Inherited,\n        mutability: syn::FieldMutability::None,\n        ident: extra_field_ident,\n        colon_token: named.then_some(Token![:](Span::call_site())),\n        ty: extra_field_type,\n    });\n\n    extra_field_ident_ts\n}\n\n/// This function is documented in [`crate::setup_error_handling_struct`].\npub(crate) fn setup_error_handling_struct(_attr: TokenStream, item: TokenStream) -> TokenStream {\n    let mut item: syn::ItemStruct = parse_macro_input!(item);\n    let krate = rust_engine_krate_name();\n    let extra_field_ident_ts = add_field_to_item_struct(\n        &mut item,\n        \"error_handling_state\",\n        parse_quote! {#krate::ast::visitors::wrappers::ErrorHandlingState},\n    );\n\n    let struct_name = &item.ident;\n    let generics = &item.generics;\n    quote! 
{\n        #item\n        impl #generics #krate::ast::HasSpan for #struct_name #generics {\n            fn span(&self) -> #krate::ast::span::Span {\n                self.#extra_field_ident_ts.0.clone()\n            }\n            fn span_mut(&mut self) -> &mut #krate::ast::span::Span {\n                &mut self.#extra_field_ident_ts.0\n            }\n        }\n        impl #generics #krate::ast::visitors::wrappers::VisitorWithErrors for #struct_name #generics {\n            fn error_vault(&mut self) -> &mut #krate::ast::visitors::wrappers::ErrorVault {\n                &mut self.#extra_field_ident_ts.1\n            }\n        }\n    }\n    .into()\n}\n\n/// This function is documented in [`crate::setup_printer_struct`].\npub(crate) fn setup_printer_struct(_attr: TokenStream, item: TokenStream) -> TokenStream {\n    let mut item: syn::ItemStruct = parse_macro_input!(item);\n    let krate = rust_engine_krate_name();\n    let extra_contextual_span_field_ident_ts = add_field_to_item_struct(\n        &mut item,\n        \"contextual_span\",\n        parse_quote! {Option<#krate::ast::span::Span>},\n    );\n    let extra_linked_item_graph_field_ident_ts = add_field_to_item_struct(\n        &mut item,\n        \"linked_item_graph\",\n        parse_quote! {::std::rc::Rc<#krate::attributes::LinkedItemGraph>},\n    );\n\n    let struct_name = &item.ident;\n    let generics = &item.generics;\n    quote! 
{\n        #item\n        impl #generics #krate::printer::pretty_ast::HasContextualSpan for #struct_name #generics {\n            fn span(&self) -> Option<#krate::ast::span::Span> {\n                self.#extra_contextual_span_field_ident_ts.clone()\n            }\n            fn with_span(&self, span: #krate::ast::span::Span) -> Self {\n                let mut printer = self.clone();\n                printer.#extra_contextual_span_field_ident_ts = Some(span);\n                printer\n            }\n        }\n        impl #generics #krate::printer::HasLinkedItemGraph for #struct_name #generics {\n            fn linked_item_graph(&self) -> &#krate::attributes::LinkedItemGraph {\n                &self.#extra_linked_item_graph_field_ident_ts\n            }\n            fn with_linked_item_graph(mut self, graph: ::std::rc::Rc<#krate::attributes::LinkedItemGraph>) -> Self {\n                self.#extra_linked_item_graph_field_ident_ts = graph;\n                self\n            }\n        }\n    }\n    .into()\n}\n"
  },
  {
    "path": "rust-engine/src/ast/diagnostics.rs",
    "content": "//! Diagnostic types used to represent and propagate errors (or warnings, notes,\n//! etc.) within the AST.\n//!\n//! This module is used to attach semantic or translation errors to AST nodes.\n\nuse crate::ast::*;\nuse hax_rust_engine_macros::*;\n\npub use hax_types::diagnostics::Kind as DiagnosticInfoKind;\n\n/// Error diagnostic\n#[derive_group_for_ast]\npub struct Diagnostic {\n    node: Box<Fragment>,\n    info: DiagnosticInfo,\n}\n\n/// Error description and location\n#[derive_group_for_ast]\n#[must_use]\npub struct DiagnosticInfo {\n    /// Diagnostic context\n    pub context: Context,\n    /// Location in the source code\n    pub span: Span,\n    /// Error type\n    pub kind: DiagnosticInfoKind,\n}\n\nimpl DiagnosticInfo {\n    /// Emits the diagnostic information.\n    pub fn emit(&self) {\n        crate::hax_io::write(&hax_types::engine_api::protocol::FromEngine::Diagnostic(\n            hax_types::diagnostics::Diagnostics {\n                kind: self.kind.clone(),\n                span: self.span.as_frontend_spans().to_vec(),\n                context: format!(\"{}\", self.context),\n                owner_id: None,\n            },\n        ))\n    }\n}\n\nimpl Diagnostic {\n    /// Get diagnostic information\n    pub fn info(&self) -> &DiagnosticInfo {\n        &self.info\n    }\n    /// Get diagnostic node of origin\n    pub fn node(&self) -> &Fragment {\n        &self.node\n    }\n    /// Report an error\n    pub fn new(node: impl Into<Fragment>, info: DiagnosticInfo) -> Self {\n        let node = node.into();\n        info.emit();\n        Self {\n            node: Box::new(node),\n            info,\n        }\n    }\n}\n\n/// Context of an error\n#[derive_group_for_ast]\npub enum Context {\n    /// Error during import from THIR\n    Import,\n    /// Error during the projection from idenitfiers to views\n    NameView,\n    /// Error in a printer\n    Printer(String),\n    /// Error in an engine phase\n    Phase(String),\n    /// 
Debugger\n    Debugger,\n    /// Unknown\n    Unknown,\n}\n\nimpl std::fmt::Display for Context {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        match self {\n            Context::Import => write!(f, \"Importer\"),\n            Context::NameView => write!(f, \"Name rendering\"),\n            Context::Printer(p) => write!(f, \"{p} Printer\"),\n            Context::Phase(p) => write!(f, \"Engine phase ({p})\"),\n            Context::Debugger => write!(f, \"Debugger\"),\n            Context::Unknown => write!(f, \"Unknown\"),\n        }\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/ast/fragment.rs",
    "content": "//! Enumeration types of any possible fragment of AST (`Fragment` / `FragmentRef`).\n//!\n//! Many components (diagnostics, logging, printers) want to refer to “some AST\n//! node” without knowing its concrete type. This module provides:\n//! - [`Fragment`]: an **owned** enum covering core AST node types.\n//! - [`FragmentRef`]: a **borrowed** counterpart.\n//!\n//! These are handy when implementing generic facilities such as error reporters,\n//! debugging helpers, or pretty-printers that need to branch on “what kind of\n//! node is this?” at runtime.\n//!\n//! ## Notes\n//! - Both enums are mechanically generated to stay in sync with the canonical\n//!   AST types. If you add a new core AST node, update the macro invocation at\n//!   the bottom of this file so `Fragment`/`FragmentRef` learn about it.\n//! - The [`Unknown`] variant exists as a last-resort placeholder when a value\n//!   cannot be represented by a known variant. Prefer concrete variants when\n//!   possible.\n\nuse crate::ast::*;\n\n/// The `mk!` macro takes a flat list of AST type identifiers and expands to\n/// two enums:\n/// - `Fragment` with owned variants (`Foo(Foo)`), and\n/// - `FragmentRef<'a>` with borrowed variants (`Foo(&'a Foo)`).\n///\n/// The generated enums also implement the obvious `From<T>` conversions, making\n/// it ergonomic to wrap concrete AST values as fragments.\nmacro_rules! 
mk {\n    (@visit_inner_call, Span, $self:ident, $x:expr) => {::std::ops::ControlFlow::Continue(())};\n    (@visit_inner_call, GlobalId, $self:ident, $x:expr) => {::std::ops::ControlFlow::Continue(())};\n    (@visit_inner_call, $ty:ty, $self:ident, $x:expr) => {\n        $self.visit_inner($x)\n    };\n    ($($ty:ident),*) => {\n        #[derive_group_for_ast]\n        #[derive(Copy)]\n        /// Type identifiers for fragments\n        pub enum FragmentTypeId {\n            $(\n                #[doc = concat!(\"An identifier for the type [`\", stringify!($ty), \"`].\")]\n                $ty,\n            )*\n        }\n\n        mod private {\n            pub use super::*;\n            pub trait Sealed {}\n            $(impl Sealed for $ty {})*\n        }\n\n        /// Operations on any fragment of the AST of hax.\n        pub trait AnyFragment: private::Sealed {\n            /// Get a type identifier for this fragment.\n            fn type_id() -> FragmentTypeId;\n            /// Coerce as a fragment reference.\n            fn as_fragment<'a>(&'a self, type_id: FragmentTypeId) -> Option<FragmentRef<'a>>;\n            /// Coerce as an owned fragment.\n            fn as_owned_fragment(&self, type_id: FragmentTypeId) -> Option<Fragment>;\n        }\n\n        $(\n            impl AnyFragment for $ty {\n                fn type_id() -> FragmentTypeId {\n                    FragmentTypeId::$ty\n                }\n                fn as_fragment<'a>(&'a self, type_id: FragmentTypeId) -> Option<FragmentRef<'a>> {\n                    if type_id == Self::type_id() {\n                        Some(self.into())\n                    } else {\n                        None\n                    }\n                }\n                fn as_owned_fragment(&self, type_id: FragmentTypeId) -> Option<Fragment> {\n                    if type_id == Self::type_id() {\n                        #[allow(unreachable_code)]\n                        Some(self.clone().into())\n                    
} else {\n                        None\n                    }\n                }\n            }\n        )*\n\n        /// A marker about a sub AST fragment in a bigger AST.\n        pub struct FragmentMarker {\n            addr: usize,\n            type_id: fragment::FragmentTypeId,\n        }\n\n        impl FragmentMarker {\n            /// Creates a marker out of an AST fragment.\n            pub fn new<T: AnyFragment>(value: &T) -> Self {\n                Self {\n                    addr: (value as *const T).addr(),\n                    type_id: T::type_id(),\n                }\n            }\n        }\n\n\n        impl<'a> derive_generic_visitor::Visitor for FragmentMarker {\n            type Break = Fragment;\n        }\n\n        impl visitors::AstEarlyExitVisitor for FragmentMarker {\n            $(\n                pastey::paste!{\n                    fn [<visit_ $ty:snake>](&mut self, x: &$ty) -> ::std::ops::ControlFlow<Self::Break> {\n                        if self.addr == (x as *const $ty).addr()\n                            && let Some(fragment) = x.as_owned_fragment(self.type_id)\n                        {\n                            return ::std::ops::ControlFlow::Break(fragment);\n                        }\n                        mk!(@visit_inner_call, $ty, self, x)\n                    }\n                }\n            )*\n        }\n\n        #[derive_group_for_ast]\n        #[allow(missing_docs)]\n        /// An owned fragment of AST in hax.\n        pub enum Fragment {\n            $(\n                #[doc = concat!(\"An owned [`\", stringify!($ty), \"`] node.\")]\n                $ty($ty),\n            )*\n            /// Represent an unknown node in the AST with a message.\n            Unknown(String),\n        }\n        #[derive(Copy)]\n        #[derive_group_for_ast_base]\n        #[derive(::serde::Serialize)]\n        #[allow(missing_docs)]\n        /// A borrowed fragment of AST in hax.\n        pub enum FragmentRef<'lt> {\n         
   $(\n                #[doc = concat!(\"A borrowed [`\", stringify!($ty), \"`] node.\")]\n                $ty(&'lt $ty),\n            )*\n        }\n\n        $(\n            impl From<$ty> for Fragment {\n                fn from(fragment: $ty) -> Self {\n                    Self::$ty(fragment)\n                }\n            }\n            impl<'lt> From<&'lt $ty> for FragmentRef<'lt> {\n                fn from(fragment: &'lt $ty) -> Self {\n                    Self::$ty(fragment)\n                }\n            }\n        )*\n    };\n}\n\n#[hax_rust_engine_macros::replace(AstNodes => include(VisitableAstNodes))]\nmk!(GlobalId, Span, AstNodes);\n"
  },
  {
    "path": "rust-engine/src/ast/identifiers/global_id/compact_serialization.rs",
    "content": "//! Helper module that provides serialization and deserialization of DefId to\n//! compact representations. This is solely for conciseness purposes of the\n//! generated code.\n//!\n//! Concretely, this module defines `Repr` a (JSON-compact) representation of `DefId`s without parents.\n//! It provides a bijection from the fields `krate`, `path`, and `kind` of `DefId` and `Repr`.\n//! The choice of `Repr` itself is irrelevant. Anything that produces compact JSON is good.\n\nuse crate::interning::Internable;\nuse hax_frontend_exporter::{DefKind, DefPathItem, DisambiguatedDefPathItem};\n\nuse super::{DefIdInner, ExplicitDefId};\n/// The compact reperesentation: a tuple (krate name, path, defkind, is_constructor)\n/// The path is a vector of tuples (DefPathItem, disambiguator).\ntype Repr = (String, Vec<(DefPathItem, u32)>, DefKind, bool);\n/// `BorrowedRepr` is the borrowed variant of `Repr`. Useful for serialization.\ntype BorrowedRepr<'a> = (\n    &'a String,\n    Vec<(&'a DefPathItem, &'a u32)>,\n    &'a DefKind,\n    bool,\n);\n\n/// Serialize an explicit def id into a compact represented string\npub fn serialize(edid: &ExplicitDefId) -> String {\n    let did = &edid.def_id;\n    let path = did\n        .path\n        .iter()\n        .map(\n            |DisambiguatedDefPathItem {\n                 data,\n                 disambiguator,\n             }| (data, disambiguator),\n        )\n        .collect::<Vec<_>>();\n    let data: BorrowedRepr<'_> = (&did.krate, path, &did.kind, edid.is_constructor);\n    serde_json::to_string(&data).unwrap()\n}\n\n/// Deserialize from a (string) compact representation and a parent\npub fn deserialize(s: &str, parent: Option<ExplicitDefId>) -> ExplicitDefId {\n    let (krate, path, kind, is_constructor): Repr = serde_json::from_str(s).unwrap();\n    ExplicitDefId {\n        def_id: DefIdInner {\n            parent: parent.map(|parent| parent.def_id),\n            krate,\n            path: path\n                
.into_iter()\n                .map(|(data, disambiguator)| DisambiguatedDefPathItem {\n                    data,\n                    disambiguator,\n                })\n                .collect(),\n            kind,\n        }\n        .intern(),\n        is_constructor,\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/ast/identifiers/global_id/generated.rs",
    "content": "// This file was generated by `cargo hax into generate-rust-engine-names`.\n// To regenerate it, please use `just regenerate-names`. Under the hood, `cargo\n// hax into generate-rust-engine-names` runs the Rust engine, which in turn\n// calls `rust_engine::names::export_def_ids_to_mod`.\n\nstatic TABLE_AND_INTERNED_GLOBAL_IDS: (\n    crate::interning::LazyLockNewWithValue<crate::ast::identifiers::global_id::GlobalIdInner, 660>,\n    [crate::interning::Interned<crate::ast::identifiers::global_id::GlobalIdInner>; 660],\n) = {\n    crate::interning::InterningTable::new_with_values(|| {\n        use crate::ast::identifiers::global_id::ExplicitDefId;\n        use crate::ast::identifiers::global_id::compact_serialization::deserialize;\n        fn did_0() -> ExplicitDefId {\n            deserialize(r##\"[\"rust_primitives\",[],\"Mod\",false]\"##, None)\n        }\n        fn did_1() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_2() -> ExplicitDefId {\n            deserialize(r##\"[\"alloc\",[],\"Mod\",false]\"##, None)\n        }\n        fn did_3() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"alloc\"},0]],\"Mod\",false]\"##,\n                Some(did_2()),\n            )\n        }\n        fn did_4() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"alloc\"},0],[{\"TypeNs\":\"Global\"},0]],\"Struct\",false]\"##,\n                Some(did_3()),\n            )\n        }\n        fn did_5() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"vec\"},0]],\"Mod\",false]\"##,\n                Some(did_2()),\n            )\n        }\n        fn did_6() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"alloc\",[[{\"TypeNs\":\"vec\"},0],[{\"TypeNs\":\"Vec\"},0]],\"Struct\",false]\"##,\n                Some(did_5()),\n            )\n        }\n        fn did_7() -> ExplicitDefId {\n            deserialize(r##\"[\"core\",[],\"Mod\",false]\"##, None)\n        }\n        fn did_8() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"clone\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_9() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"clone\"},0],[{\"TypeNs\":\"Clone\"},0]],\"Trait\",false]\"##,\n                Some(did_8()),\n            )\n        }\n        fn did_10() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"clone\"},0],[{\"TypeNs\":\"Clone\"},0],[{\"ValueNs\":\"clone\"},0]],\"AssocFn\",false]\"##,\n                Some(did_9()),\n            )\n        }\n        fn did_11() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"clone\"},0],[{\"TypeNs\":\"impls\"},0]],\"Mod\",false]\"##,\n                Some(did_8()),\n            )\n        }\n        fn did_12() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"clone\"},0],[{\"TypeNs\":\"impls\"},0],[\"Impl\",6]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_11()),\n            )\n        }\n        fn did_13() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"alloc\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_14() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"alloc\"},0],[{\"TypeNs\":\"Allocator\"},0]],\"Trait\",false]\"##,\n                Some(did_13()),\n            )\n        }\n        fn did_15() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"alloc\",[[{\"TypeNs\":\"alloc\"},0],[\"Impl\",1]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_3()),\n            )\n        }\n        fn did_16() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"alloc\"},0],[\"Impl\",3]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_3()),\n            )\n        }\n        fn did_17() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"vec\"},0],[\"Impl\",11]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_5()),\n            )\n        }\n        fn did_18() -> ExplicitDefId {\n            deserialize(r##\"[\"hax_lib_protocol\",[],\"Mod\",false]\"##, None)\n        }\n        fn did_19() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0]],\"Mod\",false]\"##,\n                Some(did_18()),\n            )\n        }\n        fn did_20() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"HashAlgorithm\"},0]],\"Enum\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_21() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"deref_op\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_22() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_23() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"deref\"},0]],\"Mod\",false]\"##,\n                Some(did_22()),\n            )\n        }\n        fn did_24() -> ExplicitDefId {\n            deserialize(\n       
         r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"deref\"},0],[{\"TypeNs\":\"Deref\"},0]],\"Trait\",false]\"##,\n                Some(did_23()),\n            )\n        }\n        fn did_25() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"deref\"},0],[{\"TypeNs\":\"Deref\"},0],[{\"ValueNs\":\"deref\"},0]],\"AssocFn\",false]\"##,\n                Some(did_24()),\n            )\n        }\n        fn did_26() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"vec\"},0],[\"Impl\",8]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_5()),\n            )\n        }\n        fn did_27() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"ValueNs\":\"hash\"},0]],\"Fn\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_28() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"vec\"},0],[\"Impl\",1]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_5()),\n            )\n        }\n        fn did_29() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"vec\"},0],[\"Impl\",1],[{\"ValueNs\":\"truncate\"},0]],\"AssocFn\",false]\"##,\n                Some(did_28()),\n            )\n        }\n        fn did_30() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"vec\"},0],[\"Impl\",2]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_5()),\n            )\n        }\n        fn did_31() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"vec\"},0],[\"Impl\",2],[{\"ValueNs\":\"extend_from_slice\"},0]],\"AssocFn\",false]\"##,\n                Some(did_30()),\n            )\n        }\n        fn did_32() -> ExplicitDefId {\n 
           deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"box_new\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_33() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"boxed\"},0]],\"Mod\",false]\"##,\n                Some(did_2()),\n            )\n        }\n        fn did_34() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"boxed\"},0],[{\"TypeNs\":\"Box\"},0]],\"Struct\",false]\"##,\n                Some(did_33()),\n            )\n        }\n        fn did_35() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"unsize\"},0]],\"Fn\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_36() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"slice\"},0]],\"Mod\",false]\"##,\n                Some(did_2()),\n            )\n        }\n        fn did_37() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"slice\"},0],[\"Impl\",0]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_36()),\n            )\n        }\n        fn did_38() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"slice\"},0],[\"Impl\",0],[{\"ValueNs\":\"into_vec\"},0]],\"AssocFn\",false]\"##,\n                Some(did_37()),\n            )\n        }\n        fn did_39() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"slice\"},0],[{\"TypeNs\":\"Concat\"},0]],\"Trait\",false]\"##,\n                Some(did_36()),\n            )\n        }\n        fn did_40() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"borrow\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n   
     }\n        fn did_41() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"borrow\"},0],[{\"TypeNs\":\"Borrow\"},0]],\"Trait\",false]\"##,\n                Some(did_40()),\n            )\n        }\n        fn did_42() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"borrow\"},0],[\"Impl\",2]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_40()),\n            )\n        }\n        fn did_43() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"slice\"},0],[\"Impl\",2]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_36()),\n            )\n        }\n        fn did_44() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"slice\"},0],[\"Impl\",0],[{\"ValueNs\":\"concat\"},0]],\"AssocFn\",false]\"##,\n                Some(did_37()),\n            )\n        }\n        fn did_45() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"slice\"},0],[\"Impl\",0],[{\"ValueNs\":\"to_vec\"},0]],\"AssocFn\",false]\"##,\n                Some(did_37()),\n            )\n        }\n        fn did_46() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"slice\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_47() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"slice\"},0],[\"Impl\",0]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_46()),\n            )\n        }\n        fn did_48() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"slice\"},0],[\"Impl\",0],[{\"ValueNs\":\"len\"},0]],\"AssocFn\",false]\"##,\n                Some(did_47()),\n            )\n        }\n        fn did_49() -> ExplicitDefId {\n            
deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0]],\"Mod\",false]\"##,\n                Some(did_22()),\n            )\n        }\n        fn did_50() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0],[{\"TypeNs\":\"Range\"},0]],\"Struct\",false]\"##,\n                Some(did_49()),\n            )\n        }\n        fn did_51() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0],[{\"TypeNs\":\"Range\"},0],[{\"ValueNs\":\"start\"},0]],\"Field\",false]\"##,\n                Some(did_642()),\n            )\n        }\n        fn did_52() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0],[{\"TypeNs\":\"Range\"},0],[{\"ValueNs\":\"end\"},0]],\"Field\",false]\"##,\n                Some(did_642()),\n            )\n        }\n        fn did_53() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"index\"},0]],\"Mod\",false]\"##,\n                Some(did_22()),\n            )\n        }\n        fn did_54() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"index\"},0],[{\"TypeNs\":\"Index\"},0]],\"Trait\",false]\"##,\n                Some(did_53()),\n            )\n        }\n        fn did_55() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"index\"},0],[{\"TypeNs\":\"Index\"},0],[{\"ValueNs\":\"index\"},0]],\"AssocFn\",false]\"##,\n                Some(did_54()),\n            )\n        }\n        fn did_56() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"slice\"},0],[{\"TypeNs\":\"index\"},0]],\"Mod\",false]\"##,\n                
Some(did_46()),\n            )\n        }\n        fn did_57() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"slice\"},0],[{\"TypeNs\":\"index\"},0],[{\"TypeNs\":\"SliceIndex\"},0]],\"Trait\",false]\"##,\n                Some(did_56()),\n            )\n        }\n        fn did_58() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"slice\"},0],[{\"TypeNs\":\"index\"},0],[\"Impl\",4]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_56()),\n            )\n        }\n        fn did_59() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"vec\"},0],[\"Impl\",13]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_5()),\n            )\n        }\n        fn did_60() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"num\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_61() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"num\"},0],[\"Impl\",9]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_60()),\n            )\n        }\n        fn did_62() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"num\"},0],[\"Impl\",9],[{\"ValueNs\":\"to_le_bytes\"},0]],\"AssocFn\",false]\"##,\n                Some(did_61()),\n            )\n        }\n        fn did_63() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"HMACAlgorithm\"},0]],\"Enum\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_64() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"ValueNs\":\"hmac\"},0]],\"Fn\",false]\"##,\n                Some(did_19()),\n       
     )\n        }\n        fn did_65() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"DHGroup\"},0]],\"Enum\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_66() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"DHScalar\"},0]],\"Struct\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_67() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"ValueNs\":\"dh_scalar_multiply_base\"},0]],\"Fn\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_68() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[\"Impl\",9]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_69() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"DHElement\"},0]],\"Struct\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_70() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"ValueNs\":\"dh_scalar_multiply\"},0]],\"Fn\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_71() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[\"Impl\",0]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_72() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[\"Impl\",0],[{\"ValueNs\":\"from_bytes\"},0]],\"AssocFn\",false]\"##,\n                Some(did_71()),\n            )\n        }\n        fn did_73() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[\"Impl\",1]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_74() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[\"Impl\",1],[{\"ValueNs\":\"from_bytes\"},0]],\"AssocFn\",false]\"##,\n                Some(did_73()),\n            )\n        }\n        fn did_75() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"ProtocolError\"},0]],\"Enum\",false]\"##,\n                Some(did_18()),\n            )\n        }\n        fn did_76() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"result\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_77() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"result\"},0],[{\"TypeNs\":\"Result\"},0]],\"Enum\",false]\"##,\n                Some(did_76()),\n            )\n        }\n        fn did_78() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"AEADKey\"},0]],\"Struct\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_79() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"AEADIV\"},0]],\"Struct\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_80() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[\"Impl\",6]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_81() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[\"Impl\",6],[{\"ValueNs\":\"from_bytes\"},0]],\"AssocFn\",false]\"##,\n                Some(did_80()),\n            )\n        }\n        fn did_82() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"AEADTag\"},0]],\"Struct\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_83() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"ValueNs\":\"aead_decrypt\"},0]],\"Fn\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_84() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"AEADAlgorithm\"},0]],\"Enum\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_85() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[\"Impl\",4]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_86() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[\"Impl\",4],[{\"ValueNs\":\"from_bytes\"},0]],\"AssocFn\",false]\"##,\n                Some(did_85()),\n            )\n        }\n        fn did_87() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[\"Impl\",5]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn 
did_88() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[\"Impl\",5],[{\"ValueNs\":\"from_bytes\"},0]],\"AssocFn\",false]\"##,\n                Some(did_87()),\n            )\n        }\n        fn did_89() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"Tuple2\"},0]],\"Struct\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_90() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"ValueNs\":\"aead_encrypt\"},0]],\"Fn\",false]\"##,\n                Some(did_19()),\n            )\n        }\n        fn did_91() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"vec\"},0],[{\"ValueNs\":\"from_elem\"},0]],\"Fn\",false]\"##,\n                Some(did_5()),\n            )\n        }\n        fn did_92() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ptr\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_93() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ptr\"},0],[{\"TypeNs\":\"const_ptr\"},0]],\"Mod\",false]\"##,\n                Some(did_92()),\n            )\n        }\n        fn did_94() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ptr\"},0],[{\"TypeNs\":\"const_ptr\"},0],[\"Impl\",0]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_93()),\n            )\n        }\n        fn did_95() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ptr\"},0],[{\"TypeNs\":\"const_ptr\"},0],[\"Impl\",0],[{\"ValueNs\":\"offset\"},0]],\"AssocFn\",false]\"##,\n                Some(did_94()),\n            )\n        }\n        fn 
did_96() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"cast_op\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_97() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"str\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_98() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"str\"},0],[\"Impl\",0]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_97()),\n            )\n        }\n        fn did_99() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"str\"},0],[\"Impl\",0],[{\"ValueNs\":\"as_ptr\"},0]],\"AssocFn\",false]\"##,\n                Some(did_98()),\n            )\n        }\n        fn did_100() -> ExplicitDefId {\n            deserialize(r##\"[\"hax_lib\",[],\"Mod\",false]\"##, None)\n        }\n        fn did_101() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"ValueNs\":\"any_to_unit\"},0]],\"Fn\",false]\"##,\n                Some(did_100()),\n            )\n        }\n        fn did_102() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"ValueNs\":\"inline_unsafe\"},0]],\"Fn\",false]\"##,\n                Some(did_100()),\n            )\n        }\n        fn did_103() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"ValueNs\":\"inline\"},0]],\"Fn\",false]\"##,\n                Some(did_100()),\n            )\n        }\n        fn did_104() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"int\"},0]],\"Mod\",false]\"##,\n                Some(did_100()),\n            )\n        }\n        fn did_105() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"hax_lib\",[[{\"TypeNs\":\"int\"},0],[{\"TypeNs\":\"Int\"},0]],\"Struct\",false]\"##,\n                Some(did_104()),\n            )\n        }\n        fn did_106() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"abstraction\"},0]],\"Mod\",false]\"##,\n                Some(did_100()),\n            )\n        }\n        fn did_107() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"abstraction\"},0],[{\"TypeNs\":\"Concretization\"},0]],\"Trait\",false]\"##,\n                Some(did_106()),\n            )\n        }\n        fn did_108() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"abstraction\"},0],[{\"TypeNs\":\"Concretization\"},0],[{\"ValueNs\":\"concretize\"},0]],\"AssocFn\",false]\"##,\n                Some(did_107()),\n            )\n        }\n        fn did_109() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"int\"},0],[\"Impl\",44]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_104()),\n            )\n        }\n        fn did_110() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"int\"},0],[\"Impl\",7]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_104()),\n            )\n        }\n        fn did_111() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"int\"},0],[\"Impl\",7],[{\"ValueNs\":\"_unsafe_from_str\"},0]],\"AssocFn\",false]\"##,\n                Some(did_110()),\n            )\n        }\n        fn did_112() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"int\"},0],[\"Impl\",9]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_104()),\n            )\n        }\n        fn did_113() -> ExplicitDefId {\n            deserialize(\n               
 r##\"[\"hax_lib\",[[{\"TypeNs\":\"int\"},0],[\"Impl\",7],[{\"ValueNs\":\"pow2\"},0]],\"AssocFn\",false]\"##,\n                Some(did_110()),\n            )\n        }\n        fn did_114() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"int\"},0],[{\"TypeNs\":\"ToInt\"},0]],\"Trait\",false]\"##,\n                Some(did_104()),\n            )\n        }\n        fn did_115() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"int\"},0],[{\"TypeNs\":\"ToInt\"},0],[{\"ValueNs\":\"to_int\"},0]],\"AssocFn\",false]\"##,\n                Some(did_114()),\n            )\n        }\n        fn did_116() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"int\"},0],[\"Impl\",17]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_104()),\n            )\n        }\n        fn did_117() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"abstraction\"},0],[{\"TypeNs\":\"Abstraction\"},0]],\"Trait\",false]\"##,\n                Some(did_106()),\n            )\n        }\n        fn did_118() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"abstraction\"},0],[{\"TypeNs\":\"Abstraction\"},0],[{\"ValueNs\":\"lift\"},0]],\"AssocFn\",false]\"##,\n                Some(did_117()),\n            )\n        }\n        fn did_119() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"int\"},0],[\"Impl\",16]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_104()),\n            )\n        }\n        fn did_120() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"control_flow\"},0]],\"Mod\",false]\"##,\n                Some(did_22()),\n            )\n        }\n        fn did_121() -> ExplicitDefId {\n            
deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"control_flow\"},0],[{\"TypeNs\":\"ControlFlow\"},0]],\"Enum\",false]\"##,\n                Some(did_120()),\n            )\n        }\n        fn did_122() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"control_flow\"},0],[{\"TypeNs\":\"ControlFlow\"},0],[{\"TypeNs\":\"Break\"},0],[{\"ValueNs\":\"0\"},0]],\"Field\",false]\"##,\n                Some(did_644()),\n            )\n        }\n        fn did_123() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"control_flow\"},0],[{\"TypeNs\":\"ControlFlow\"},0],[{\"TypeNs\":\"Continue\"},0],[{\"ValueNs\":\"0\"},0]],\"Field\",false]\"##,\n                Some(did_645()),\n            )\n        }\n        fn did_124() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0],[{\"TypeNs\":\"RangeTo\"},0]],\"Struct\",false]\"##,\n                Some(did_49()),\n            )\n        }\n        fn did_125() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0],[{\"TypeNs\":\"RangeTo\"},0],[{\"ValueNs\":\"end\"},0]],\"Field\",false]\"##,\n                Some(did_646()),\n            )\n        }\n        fn did_126() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0],[{\"TypeNs\":\"RangeFull\"},0]],\"Struct\",false]\"##,\n                Some(did_49()),\n            )\n        }\n        fn did_127() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0],[{\"TypeNs\":\"RangeFrom\"},0]],\"Struct\",false]\"##,\n                Some(did_49()),\n            )\n        }\n        fn did_128() -> ExplicitDefId 
{\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0],[{\"TypeNs\":\"RangeFrom\"},0],[{\"ValueNs\":\"start\"},0]],\"Field\",false]\"##,\n                Some(did_648()),\n            )\n        }\n        fn did_129() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"convert\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_130() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"convert\"},0],[{\"TypeNs\":\"Into\"},0]],\"Trait\",false]\"##,\n                Some(did_129()),\n            )\n        }\n        fn did_131() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"convert\"},0],[{\"TypeNs\":\"Into\"},0],[{\"ValueNs\":\"into\"},0]],\"AssocFn\",false]\"##,\n                Some(did_130()),\n            )\n        }\n        fn did_132() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"convert\"},0],[{\"TypeNs\":\"From\"},0]],\"Trait\",false]\"##,\n                Some(did_129()),\n            )\n        }\n        fn did_133() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"convert\"},0],[{\"TypeNs\":\"num\"},0]],\"Mod\",false]\"##,\n                Some(did_129()),\n            )\n        }\n        fn did_134() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"convert\"},0],[{\"TypeNs\":\"num\"},0],[\"Impl\",64]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_133()),\n            )\n        }\n        fn did_135() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"convert\"},0],[\"Impl\",3]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_129()),\n            )\n        }\n        fn did_136() -> ExplicitDefId 
{\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"array\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_137() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"array\"},0],[{\"TypeNs\":\"iter\"},0]],\"Mod\",false]\"##,\n                Some(did_136()),\n            )\n        }\n        fn did_138() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"array\"},0],[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"IntoIter\"},0]],\"Struct\",false]\"##,\n                Some(did_137()),\n            )\n        }\n        fn did_139() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_140() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"traits\"},0]],\"Mod\",false]\"##,\n                Some(did_139()),\n            )\n        }\n        fn did_141() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"traits\"},0],[{\"TypeNs\":\"collect\"},0]],\"Mod\",false]\"##,\n                Some(did_140()),\n            )\n        }\n        fn did_142() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"traits\"},0],[{\"TypeNs\":\"collect\"},0],[{\"TypeNs\":\"IntoIterator\"},0]],\"Trait\",false]\"##,\n                Some(did_141()),\n            )\n        }\n        fn did_143() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"traits\"},0],[{\"TypeNs\":\"collect\"},0],[{\"TypeNs\":\"IntoIterator\"},0],[{\"ValueNs\":\"into_iter\"},0]],\"AssocFn\",false]\"##,\n                Some(did_142()),\n            )\n    
    }\n        fn did_144() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"array\"},0],[{\"TypeNs\":\"iter\"},0],[\"Impl\",1]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_137()),\n            )\n        }\n        fn did_145() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"ValueNs\":\"_internal_loop_decreases\"},0]],\"Fn\",false]\"##,\n                Some(did_100()),\n            )\n        }\n        fn did_146() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"convert\"},0],[{\"TypeNs\":\"From\"},0],[{\"ValueNs\":\"from\"},0]],\"AssocFn\",false]\"##,\n                Some(did_132()),\n            )\n        }\n        fn did_147() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0]],\"Mod\",false]\"##,\n                Some(did_100()),\n            )\n        }\n        fn did_148() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"Prop\"},0]],\"Struct\",false]\"##,\n                Some(did_147()),\n            )\n        }\n        fn did_149() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[\"Impl\",3]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_147()),\n            )\n        }\n        fn did_150() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"ValueNs\":\"_internal_while_loop_invariant\"},0]],\"Fn\",false]\"##,\n                Some(did_100()),\n            )\n        }\n        fn did_151() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"function\"},0]],\"Mod\",false]\"##,\n                Some(did_22()),\n            )\n        }\n        fn did_152() -> ExplicitDefId {\n            
deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"function\"},0],[{\"TypeNs\":\"FnOnce\"},0]],\"Trait\",false]\"##,\n                Some(did_151()),\n            )\n        }\n        fn did_153() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"ValueNs\":\"_internal_loop_invariant\"},0]],\"Fn\",false]\"##,\n                Some(did_100()),\n            )\n        }\n        fn did_154() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"ValueNs\":\"assert\"},0]],\"Fn\",false]\"##,\n                Some(did_100()),\n            )\n        }\n        fn did_155() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"cmp\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_156() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"cmp\"},0],[{\"TypeNs\":\"PartialEq\"},0]],\"Trait\",false]\"##,\n                Some(did_155()),\n            )\n        }\n        fn did_157() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"cmp\"},0],[{\"TypeNs\":\"PartialEq\"},0],[{\"ValueNs\":\"eq\"},0]],\"AssocFn\",false]\"##,\n                Some(did_156()),\n            )\n        }\n        fn did_158() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0]],\"Mod\",false]\"##,\n                Some(did_22()),\n            )\n        }\n        fn did_159() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"Not\"},0]],\"Trait\",false]\"##,\n                Some(did_158()),\n            )\n        }\n        fn did_160() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"Not\"},0],[{\"ValueNs\":\"not\"},0]],\"AssocFn\",false]\"##,\n                Some(did_159()),\n            )\n        }\n        fn did_161() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"panicking\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_162() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"panicking\"},0],[{\"TypeNs\":\"AssertKind\"},0]],\"Enum\",false]\"##,\n                Some(did_161()),\n            )\n        }\n        fn did_163() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"option\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_164() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"option\"},0],[{\"TypeNs\":\"Option\"},0]],\"Enum\",false]\"##,\n                Some(did_163()),\n            )\n        }\n        fn did_165() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"fmt\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_166() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"fmt\"},0],[{\"TypeNs\":\"Arguments\"},0]],\"Struct\",false]\"##,\n                Some(did_165()),\n            )\n        }\n        fn did_167() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"fmt\"},0],[{\"TypeNs\":\"Debug\"},0]],\"Trait\",false]\"##,\n                Some(did_165()),\n            )\n        }\n        fn did_168() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"fmt\"},0],[{\"TypeNs\":\"num\"},0]],\"Mod\",false]\"##,\n                Some(did_165()),\n            
)\n        }\n        fn did_169() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"panicking\"},0],[{\"ValueNs\":\"assert_failed\"},0]],\"Fn\",false]\"##,\n                Some(did_161()),\n            )\n        }\n        fn did_170() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"Never\"},0]],\"Enum\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_171() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"never_to_any\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_172() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"panicking\"},0],[{\"ValueNs\":\"panic\"},0]],\"Fn\",false]\"##,\n                Some(did_161()),\n            )\n        }\n        fn did_173() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"result\"},0],[{\"TypeNs\":\"Result\"},0],[{\"TypeNs\":\"Err\"},0],[{\"ValueNs\":\"0\"},0]],\"Field\",false]\"##,\n                Some(did_649()),\n            )\n        }\n        fn did_174() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"result\"},0],[\"Impl\",0]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_76()),\n            )\n        }\n        fn did_175() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"result\"},0],[\"Impl\",0],[{\"ValueNs\":\"map_err\"},0]],\"AssocFn\",false]\"##,\n                Some(did_174()),\n            )\n        }\n        fn did_176() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"option\"},0],[\"Impl\",0]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                
Some(did_163()),\n            )\n        }\n        fn did_177() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"option\"},0],[\"Impl\",0],[{\"ValueNs\":\"is_some\"},0]],\"AssocFn\",false]\"##,\n                Some(did_176()),\n            )\n        }\n        fn did_178() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"option\"},0],[{\"TypeNs\":\"Option\"},0],[{\"TypeNs\":\"Some\"},0],[{\"ValueNs\":\"0\"},0]],\"Field\",false]\"##,\n                Some(did_650()),\n            )\n        }\n        fn did_179() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"boxed\"},0],[\"Impl\",0]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_33()),\n            )\n        }\n        fn did_180() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"boxed\"},0],[\"Impl\",0],[{\"ValueNs\":\"new\"},0]],\"AssocFn\",false]\"##,\n                Some(did_179()),\n            )\n        }\n        fn did_181() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"string\"},0]],\"Mod\",false]\"##,\n                Some(did_2()),\n            )\n        }\n        fn did_182() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"string\"},0],[{\"TypeNs\":\"String\"},0]],\"Struct\",false]\"##,\n                Some(did_181()),\n            )\n        }\n        fn did_183() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"convert\"},0],[{\"TypeNs\":\"Infallible\"},0]],\"Enum\",false]\"##,\n                Some(did_129()),\n            )\n        }\n        fn did_184() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"try_trait\"},0]],\"Mod\",false]\"##,\n                Some(did_22()),\n            
)\n        }\n        fn did_185() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"try_trait\"},0],[{\"TypeNs\":\"FromResidual\"},0]],\"Trait\",false]\"##,\n                Some(did_184()),\n            )\n        }\n        fn did_186() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"try_trait\"},0],[{\"TypeNs\":\"FromResidual\"},0],[{\"ValueNs\":\"from_residual\"},0]],\"AssocFn\",false]\"##,\n                Some(did_185()),\n            )\n        }\n        fn did_187() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"convert\"},0],[{\"TypeNs\":\"num\"},0],[\"Impl\",88]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_133()),\n            )\n        }\n        fn did_188() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"result\"},0],[\"Impl\",28]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_76()),\n            )\n        }\n        fn did_189() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"slice\"},0],[{\"TypeNs\":\"index\"},0],[\"Impl\",2]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_56()),\n            )\n        }\n        fn did_190() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"traits\"},0],[{\"TypeNs\":\"iterator\"},0]],\"Mod\",false]\"##,\n                Some(did_140()),\n            )\n        }\n        fn did_191() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"traits\"},0],[{\"TypeNs\":\"iterator\"},0],[{\"TypeNs\":\"Iterator\"},0]],\"Trait\",false]\"##,\n                Some(did_190()),\n            )\n        }\n        fn did_192() -> ExplicitDefId {\n            
deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"traits\"},0],[{\"TypeNs\":\"iterator\"},0],[{\"TypeNs\":\"Iterator\"},0],[{\"ValueNs\":\"next\"},0]],\"AssocFn\",false]\"##,\n                Some(did_191()),\n            )\n        }\n        fn did_193() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0]],\"Mod\",false]\"##,\n                Some(did_22()),\n            )\n        }\n        fn did_194() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Add\"},0]],\"Trait\",false]\"##,\n                Some(did_193()),\n            )\n        }\n        fn did_195() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Add\"},0],[{\"ValueNs\":\"add\"},0]],\"AssocFn\",false]\"##,\n                Some(did_194()),\n            )\n        }\n        fn did_196() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"function\"},0],[{\"TypeNs\":\"FnMut\"},0]],\"Trait\",false]\"##,\n                Some(did_151()),\n            )\n        }\n        fn did_197() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"traits\"},0],[{\"TypeNs\":\"iterator\"},0],[{\"TypeNs\":\"Iterator\"},0],[{\"ValueNs\":\"fold\"},0]],\"AssocFn\",false]\"##,\n                Some(did_191()),\n            )\n        }\n        fn did_198() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"result\"},0],[{\"TypeNs\":\"Result\"},0],[{\"TypeNs\":\"Ok\"},0],[{\"ValueNs\":\"0\"},0]],\"Field\",false]\"##,\n                Some(did_651()),\n            )\n        }\n        fn did_199() -> ExplicitDefId {\n            deserialize(\n  
              r##\"[\"core\",[[{\"TypeNs\":\"convert\"},0],[\"Impl\",4]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_129()),\n            )\n        }\n        fn did_200() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"ValueNs\":\"implies\"},0]],\"Fn\",false]\"##,\n                Some(did_147()),\n            )\n        }\n        fn did_201() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"function\"},0],[{\"TypeNs\":\"Fn\"},0]],\"Trait\",false]\"##,\n                Some(did_151()),\n            )\n        }\n        fn did_202() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"ValueNs\":\"exists\"},0]],\"Fn\",false]\"##,\n                Some(did_147()),\n            )\n        }\n        fn did_203() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"ValueNs\":\"forall\"},0]],\"Fn\",false]\"##,\n                Some(did_147()),\n            )\n        }\n        fn did_204() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"ToProp\"},0]],\"Trait\",false]\"##,\n                Some(did_147()),\n            )\n        }\n        fn did_205() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"ToProp\"},0],[{\"ValueNs\":\"to_prop\"},0]],\"AssocFn\",false]\"##,\n                Some(did_204()),\n            )\n        }\n        fn did_206() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[\"Impl\",2]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_147()),\n            )\n        }\n        fn did_207() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[\"Impl\",0]],{\"Impl\":{\"of_trait\":false}},false]\"##,\n                Some(did_147()),\n            )\n        }\n        fn did_208() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[\"Impl\",0],[{\"ValueNs\":\"implies\"},0]],\"AssocFn\",false]\"##,\n                Some(did_207()),\n            )\n        }\n        fn did_209() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[\"Impl\",0],[{\"ValueNs\":\"ne\"},0]],\"AssocFn\",false]\"##,\n                Some(did_207()),\n            )\n        }\n        fn did_210() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[\"Impl\",0],[{\"ValueNs\":\"eq\"},0]],\"AssocFn\",false]\"##,\n                Some(did_207()),\n            )\n        }\n        fn did_211() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[\"Impl\",0],[{\"ValueNs\":\"not\"},0]],\"AssocFn\",false]\"##,\n                Some(did_207()),\n            )\n        }\n        fn did_212() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[\"Impl\",0],[{\"ValueNs\":\"or\"},0]],\"AssocFn\",false]\"##,\n                Some(did_207()),\n            )\n        }\n        fn did_213() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[\"Impl\",0],[{\"ValueNs\":\"and\"},0]],\"AssocFn\",false]\"##,\n                Some(did_207()),\n            )\n        }\n        fn did_214() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[\"Impl\",0],[{\"ValueNs\":\"from_bool\"},0]],\"AssocFn\",false]\"##,\n                Some(did_207()),\n            )\n        }\n        fn did_215() -> ExplicitDefId {\n            
deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"constructors\"},0]],\"Mod\",false]\"##,\n                Some(did_147()),\n            )\n        }\n        fn did_216() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"constructors\"},0],[{\"ValueNs\":\"exists\"},0]],\"Fn\",false]\"##,\n                Some(did_215()),\n            )\n        }\n        fn did_217() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"constructors\"},0],[{\"ValueNs\":\"forall\"},0]],\"Fn\",false]\"##,\n                Some(did_215()),\n            )\n        }\n        fn did_218() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"constructors\"},0],[{\"ValueNs\":\"implies\"},0]],\"Fn\",false]\"##,\n                Some(did_215()),\n            )\n        }\n        fn did_219() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"constructors\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_215()),\n            )\n        }\n        fn did_220() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"constructors\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_215()),\n            )\n        }\n        fn did_221() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"constructors\"},0],[{\"ValueNs\":\"not\"},0]],\"Fn\",false]\"##,\n                Some(did_215()),\n            )\n        }\n        fn did_222() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"constructors\"},0],[{\"ValueNs\":\"or\"},0]],\"Fn\",false]\"##,\n                Some(did_215()),\n            )\n        }\n        fn did_223() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"constructors\"},0],[{\"ValueNs\":\"and\"},0]],\"Fn\",false]\"##,\n                Some(did_215()),\n            )\n        }\n        fn did_224() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"prop\"},0],[{\"TypeNs\":\"constructors\"},0],[{\"ValueNs\":\"from_bool\"},0]],\"Fn\",false]\"##,\n                Some(did_215()),\n            )\n        }\n        fn did_225() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"slice\"},0],[{\"TypeNs\":\"iter\"},0]],\"Mod\",false]\"##,\n                Some(did_46()),\n            )\n        }\n        fn did_226() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"slice\"},0],[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"Iter\"},0]],\"Struct\",false]\"##,\n                Some(did_225()),\n            )\n        }\n        fn did_227() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"slice\"},0],[\"Impl\",0],[{\"ValueNs\":\"iter\"},0]],\"AssocFn\",false]\"##,\n                Some(did_47()),\n            )\n        }\n        fn did_228() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"slice\"},0],[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"ChunksExact\"},0]],\"Struct\",false]\"##,\n                Some(did_225()),\n            )\n        }\n        fn did_229() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"slice\"},0],[\"Impl\",0],[{\"ValueNs\":\"chunks_exact\"},0]],\"AssocFn\",false]\"##,\n                Some(did_47()),\n            )\n      
  }\n        fn did_230() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"adapters\"},0]],\"Mod\",false]\"##,\n                Some(did_139()),\n            )\n        }\n        fn did_231() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"adapters\"},0],[{\"TypeNs\":\"enumerate\"},0]],\"Mod\",false]\"##,\n                Some(did_230()),\n            )\n        }\n        fn did_232() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"adapters\"},0],[{\"TypeNs\":\"enumerate\"},0],[{\"TypeNs\":\"Enumerate\"},0]],\"Struct\",false]\"##,\n                Some(did_231()),\n            )\n        }\n        fn did_233() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"traits\"},0],[{\"TypeNs\":\"iterator\"},0],[{\"TypeNs\":\"Iterator\"},0],[{\"ValueNs\":\"enumerate\"},0]],\"AssocFn\",false]\"##,\n                Some(did_191()),\n            )\n        }\n        fn did_234() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"adapters\"},0],[{\"TypeNs\":\"step_by\"},0]],\"Mod\",false]\"##,\n                Some(did_230()),\n            )\n        }\n        fn did_235() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"adapters\"},0],[{\"TypeNs\":\"step_by\"},0],[{\"TypeNs\":\"StepBy\"},0]],\"Struct\",false]\"##,\n                Some(did_234()),\n            )\n        }\n        fn did_236() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"traits\"},0],[{\"TypeNs\":\"iterator\"},0],[{\"TypeNs\":\"Iterator\"},0],[{\"ValueNs\":\"step_by\"},0]],\"AssocFn\",false]\"##,\n                
Some(did_191()),\n            )\n        }\n        fn did_237() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"try_trait\"},0],[{\"TypeNs\":\"Try\"},0]],\"Trait\",false]\"##,\n                Some(did_184()),\n            )\n        }\n        fn did_238() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"try_trait\"},0],[{\"TypeNs\":\"Try\"},0],[{\"ValueNs\":\"branch\"},0]],\"AssocFn\",false]\"##,\n                Some(did_237()),\n            )\n        }\n        fn did_239() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"result\"},0],[\"Impl\",27]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_76()),\n            )\n        }\n        fn did_240() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"RefineAs\"},0]],\"Trait\",false]\"##,\n                Some(did_100()),\n            )\n        }\n        fn did_241() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"RefineAs\"},0],[{\"ValueNs\":\"into_checked\"},0]],\"AssocFn\",false]\"##,\n                Some(did_240()),\n            )\n        }\n        fn did_242() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"Refinement\"},0]],\"Trait\",false]\"##,\n                Some(did_100()),\n            )\n        }\n        fn did_243() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"Refinement\"},0],[{\"ValueNs\":\"get\"},0]],\"AssocFn\",false]\"##,\n                Some(did_242()),\n            )\n        }\n        fn did_244() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"Refinement\"},0],[{\"TypeNs\":\"InnerType\"},0]],\"AssocTy\",false]\"##,\n                
Some(did_242()),\n            )\n        }\n        fn did_245() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"Refinement\"},0],[{\"ValueNs\":\"new\"},0]],\"AssocFn\",false]\"##,\n                Some(did_242()),\n            )\n        }\n        fn did_246() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib\",[[{\"TypeNs\":\"Refinement\"},0],[{\"ValueNs\":\"get_mut\"},0]],\"AssocFn\",false]\"##,\n                Some(did_242()),\n            )\n        }\n        fn did_247() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"BitAnd\"},0]],\"Trait\",false]\"##,\n                Some(did_158()),\n            )\n        }\n        fn did_248() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"BitAnd\"},0],[{\"ValueNs\":\"bitand\"},0]],\"AssocFn\",false]\"##,\n                Some(did_247()),\n            )\n        }\n        fn did_249() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"BitXor\"},0]],\"Trait\",false]\"##,\n                Some(did_158()),\n            )\n        }\n        fn did_250() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"BitXor\"},0],[{\"ValueNs\":\"bitxor\"},0]],\"AssocFn\",false]\"##,\n                Some(did_249()),\n            )\n        }\n        fn did_251() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Div\"},0]],\"Trait\",false]\"##,\n                Some(did_193()),\n            )\n        }\n        fn did_252() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Div\"},0],[{\"ValueNs\":\"div\"},0]],\"AssocFn\",false]\"##,\n                Some(did_251()),\n            )\n        }\n        fn did_253() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Mul\"},0]],\"Trait\",false]\"##,\n                Some(did_193()),\n            )\n        }\n        fn did_254() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Mul\"},0],[{\"ValueNs\":\"mul\"},0]],\"AssocFn\",false]\"##,\n                Some(did_253()),\n            )\n        }\n        fn did_255() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Sub\"},0]],\"Trait\",false]\"##,\n                Some(did_193()),\n            )\n        }\n        fn did_256() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Sub\"},0],[{\"ValueNs\":\"sub\"},0]],\"AssocFn\",false]\"##,\n                Some(did_255()),\n            )\n        }\n        fn did_257() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Neg\"},0]],\"Trait\",false]\"##,\n                Some(did_193()),\n            )\n        }\n        fn did_258() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Neg\"},0],[{\"ValueNs\":\"neg\"},0]],\"AssocFn\",false]\"##,\n                Some(did_257()),\n            )\n        }\n        fn did_259() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Rem\"},0]],\"Trait\",false]\"##,\n                Some(did_193()),\n            )\n        }\n        fn did_260() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Rem\"},0],[{\"ValueNs\":\"rem\"},0]],\"AssocFn\",false]\"##,\n                Some(did_259()),\n            )\n        }\n        fn did_261() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"Shl\"},0]],\"Trait\",false]\"##,\n                Some(did_158()),\n            )\n        }\n        fn did_262() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"Shl\"},0],[{\"ValueNs\":\"shl\"},0]],\"AssocFn\",false]\"##,\n                Some(did_261()),\n            )\n        }\n        fn did_263() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"Shr\"},0]],\"Trait\",false]\"##,\n                Some(did_158()),\n            )\n        }\n        fn did_264() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"Shr\"},0],[{\"ValueNs\":\"shr\"},0]],\"AssocFn\",false]\"##,\n                Some(did_263()),\n            )\n        }\n        fn did_265() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"BitOr\"},0]],\"Trait\",false]\"##,\n                Some(did_158()),\n            )\n        }\n        fn did_266() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"BitOr\"},0],[{\"ValueNs\":\"bitor\"},0]],\"AssocFn\",false]\"##,\n                Some(did_265()),\n            )\n        }\n        fn did_267() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"cmp\"},0],[{\"TypeNs\":\"PartialOrd\"},0]],\"Trait\",false]\"##,\n                Some(did_155()),\n            )\n        }\n        fn did_268() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"cmp\"},0],[{\"TypeNs\":\"PartialOrd\"},0],[{\"ValueNs\":\"lt\"},0]],\"AssocFn\",false]\"##,\n                Some(did_267()),\n            )\n        }\n        fn did_269() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"cmp\"},0],[{\"TypeNs\":\"PartialOrd\"},0],[{\"ValueNs\":\"gt\"},0]],\"AssocFn\",false]\"##,\n                Some(did_267()),\n            )\n        }\n        fn did_270() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"logical_op_and\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_271() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"cmp\"},0],[{\"TypeNs\":\"PartialOrd\"},0],[{\"ValueNs\":\"le\"},0]],\"AssocFn\",false]\"##,\n                Some(did_267()),\n            )\n        }\n        fn did_272() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"cmp\"},0],[{\"TypeNs\":\"PartialOrd\"},0],[{\"ValueNs\":\"ge\"},0]],\"AssocFn\",false]\"##,\n                Some(did_267()),\n            )\n        }\n        fn did_273() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"cmp\"},0],[{\"TypeNs\":\"PartialEq\"},0],[{\"ValueNs\":\"ne\"},0]],\"AssocFn\",false]\"##,\n                
Some(did_156()),\n            )\n        }\n        fn did_274() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"marker\"},0]],\"Mod\",false]\"##,\n                Some(did_7()),\n            )\n        }\n        fn did_275() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"marker\"},0],[{\"TypeNs\":\"Copy\"},0]],\"Trait\",false]\"##,\n                Some(did_274()),\n            )\n        }\n        fn did_276() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"try_trait\"},0],[{\"TypeNs\":\"Try\"},0],[{\"TypeNs\":\"Residual\"},0]],\"AssocTy\",false]\"##,\n                Some(did_237()),\n            )\n        }\n        fn did_277() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"try_trait\"},0],[{\"TypeNs\":\"Try\"},0],[{\"ValueNs\":\"from_output\"},0]],\"AssocFn\",false]\"##,\n                Some(did_237()),\n            )\n        }\n        fn did_278() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"deref\"},0],[{\"TypeNs\":\"Deref\"},0],[{\"TypeNs\":\"Target\"},0]],\"AssocTy\",false]\"##,\n                Some(did_24()),\n            )\n        }\n        fn did_279() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"deref\"},0],[{\"TypeNs\":\"DerefMut\"},0]],\"Trait\",false]\"##,\n                Some(did_23()),\n            )\n        }\n        fn did_280() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"deref\"},0],[{\"TypeNs\":\"DerefMut\"},0],[{\"ValueNs\":\"deref_mut\"},0]],\"AssocFn\",false]\"##,\n                Some(did_279()),\n            )\n        }\n        fn did_281() -> ExplicitDefId {\n            
deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_282() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_283() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_284() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_285() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_286() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_287() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_288() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_289() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0]],\"Mod\",false]\"##,\n                Some(did_1()),\n    
        )\n        }\n        fn did_290() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_range_step_by\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_291() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0]],\"Fn\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_292() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"_\"},1]],\"Const\",false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_293() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"crypto_abstractions\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_294() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"dummy\"},0]],\"Fn\",false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_295() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_296() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_297() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n  
      fn did_298() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_299() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"bit_and\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_300() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"_\"},0]],\"Const\",false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_301() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_302() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0]],\"Mod\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_303() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_304() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_range_return\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_305() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"TypeNs\":\"crypto_abstractions\"},0],[{\"ValueNs\":\"crypto_abstractions\"},0]],\"Fn\",false]\"##,\n                Some(did_293()),\n            )\n        }\n        fn did_306() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_307() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_308() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_309() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_310() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"monomorphized_update_at\"},0]],\"Mod\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_311() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"monomorphized_update_at\"},0],[{\"ValueNs\":\"update_at_usize\"},0]],\"Fn\",false]\"##,\n                Some(did_310()),\n            )\n        }\n        fn did_312() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"_\"},1],[{\"ValueNs\":\"f\"},0]],\"Fn\",false]\"##,\n                Some(did_292()),\n            )\n        }\n        fn 
did_313() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_314() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_315() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_316() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_317() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_318() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"monomorphized_update_at\"},0],[{\"ValueNs\":\"update_at_range_from\"},0]],\"Fn\",false]\"##,\n                Some(did_310()),\n            )\n        }\n        fn did_319() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_320() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"control_flow_monad\"},0]],\"Mod\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_321() -> ExplicitDefId {\n         
   deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"control_flow_monad\"},0],[{\"TypeNs\":\"moption\"},0]],\"Mod\",false]\"##,\n                Some(did_320()),\n            )\n        }\n        fn did_322() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"control_flow_monad\"},0],[{\"TypeNs\":\"moption\"},0],[{\"ValueNs\":\"run\"},0]],\"Fn\",false]\"##,\n                Some(did_321()),\n            )\n        }\n        fn did_323() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_324() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_325() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_326() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_327() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"not\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_328() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_329() -> 
ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"bit_xor\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_330() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_331() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"bit_or\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_332() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_333() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_334() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"MutRef\"},0]],\"Enum\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_335() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_336() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_337() -> ExplicitDefId {\n            deserialize(\n               
 r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_338() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_339() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_340() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"bit_or\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_341() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_342() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_343() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_344() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_345() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0]],\"Mod\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_346() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_347() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_348() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"monomorphized_update_at\"},0],[{\"ValueNs\":\"update_at_range\"},0]],\"Fn\",false]\"##,\n                Some(did_310()),\n            )\n        }\n        fn did_349() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_350() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_351() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_352() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_353() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_354() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_355() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_enumerated_slice_cf\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_356() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_357() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_358() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_359() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_360() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"bitxor\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_361() -> ExplicitDefId {\n            
deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_362() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"bit_xor\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_363() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_364() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_365() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"bit_or\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_366() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"Failure\"},0]],\"Struct\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_367() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_range\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_368() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_369() -> ExplicitDefId {\n   
         deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_370() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"update_at\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_371() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"array_of_list\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_372() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_373() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_374() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"bit_or\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_375() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"monomorphized_update_at\"},0],[{\"ValueNs\":\"update_at_range_full\"},0]],\"Fn\",false]\"##,\n                Some(did_310()),\n            )\n        }\n        fn did_376() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_enumerated_chunked_slice\"},0]],\"Fn\",false]\"##,\n                
Some(did_289()),\n            )\n        }\n        fn did_377() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_378() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_379() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_380() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_381() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_382() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_chunked_slice_cf\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_383() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_384() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_385() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_386() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_387() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_388() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_389() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"offset\"},0]],\"Fn\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_390() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"Tuple2\"},0],[{\"ValueNs\":\"0\"},0]],\"Field\",false]\"##,\n                Some(did_643()),\n            )\n        }\n        fn did_391() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_392() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_393() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_394() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_395() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_396() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_397() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_398() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"Tuple2\"},0],[{\"ValueNs\":\"1\"},0]],\"Field\",false]\"##,\n                Some(did_643()),\n            )\n        }\n        fn did_399() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_400() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_401() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_402() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_403() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_404() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"bit_xor\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_405() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_406() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_407() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_408() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_409() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_410() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"bit_and\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_411() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_412() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_413() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_enumerated_slice\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_414() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_415() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_416() -> ExplicitDefId {\n            deserialize(\n      
          r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_417() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_418() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"control_flow_monad\"},0],[{\"TypeNs\":\"mresult\"},0]],\"Mod\",false]\"##,\n                Some(did_320()),\n            )\n        }\n        fn did_419() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"control_flow_monad\"},0],[{\"TypeNs\":\"mresult\"},0],[{\"ValueNs\":\"run\"},0]],\"Fn\",false]\"##,\n                Some(did_418()),\n            )\n        }\n        fn did_420() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"repeat\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_421() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"bit_or\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_422() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_423() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_424() 
-> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_425() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_426() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_427() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"bit_and\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_428() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_429() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_430() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"refinements\"},0]],\"Fn\",false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_431() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn 
did_432() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"while_loop\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_433() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_434() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_435() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_436() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_437() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_438() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_439() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_range_cf\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_440() -> 
ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_return\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_441() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_442() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_443() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_444() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_445() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_446() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_447() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_chunked_slice\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n       
 fn did_448() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_449() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_450() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_451() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"into_machine\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_452() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"bit_and\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_453() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_454() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_455() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"bit_xor\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        
}\n        fn did_456() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"bit_xor\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_457() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_458() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_459() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_460() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_461() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_462() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_463() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_464() 
-> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"bit_and\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_465() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_cf\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_466() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_467() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_468() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_469() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_470() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"control_flow_monad\"},0],[{\"TypeNs\":\"ControlFlowMonad\"},0]],\"Trait\",false]\"##,\n                Some(did_320()),\n            )\n        }\n        fn did_471() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"control_flow_monad\"},0],[{\"TypeNs\":\"ControlFlowMonad\"},0],[{\"ValueNs\":\"lift\"},0]],\"AssocFn\",false]\"##,\n                Some(did_470()),\n            )\n        }\n        fn did_472() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_473() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_474() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"iterator_functions\"},0]],\"Fn\",false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_475() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"bit_xor\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_476() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_477() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"bit_and\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_478() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n 
           )\n        }\n        fn did_479() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_chunked_slice_return\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_480() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_481() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_482() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_483() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_484() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_485() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"bit_or\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_486() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_487() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_488() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_489() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"bit_or\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_490() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_491() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_492() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_493() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"bit_xor\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_494() -> ExplicitDefId {\n            deserialize(\n              
  r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_enumerated_chunked_slice_cf\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_495() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_496() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_497() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"from_machine\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_498() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_499() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_500() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_501() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"bit_xor\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn 
did_502() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_503() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_504() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_505() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_506() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u8\"},0],[{\"ValueNs\":\"bit_and\"},0]],\"Fn\",false]\"##,\n                Some(did_295()),\n            )\n        }\n        fn did_507() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_508() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_range_step_by_return\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_509() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                
Some(did_281()),\n            )\n        }\n        fn did_510() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_511() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_enumerated_slice_return\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_512() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"control_flow_monad\"},0],[{\"TypeNs\":\"mexception\"},0]],\"Mod\",false]\"##,\n                Some(did_320()),\n            )\n        }\n        fn did_513() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_514() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_515() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_516() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_517() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"bit_or\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_518() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_519() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"_\"},0],[{\"ValueNs\":\"arith\"},0]],\"Fn\",false]\"##,\n                Some(did_300()),\n            )\n        }\n        fn did_520() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_521() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_522() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_523() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"bit_and\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_524() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"bitand\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_525() -> ExplicitDefId {\n  
          deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"bit_or\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_526() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"bit_or\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_527() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_528() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"bit_and\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_529() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_enumerated_chunked_slice_return\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_530() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_531() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"bit_and\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_532() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_533() -> 
ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_534() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_535() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_536() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_537() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_538() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_539() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"int\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_345()),\n            )\n        }\n        fn did_540() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"sub\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_541() -> 
ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"control_flow_monad\"},0],[{\"TypeNs\":\"mexception\"},0],[{\"ValueNs\":\"run\"},0]],\"Fn\",false]\"##,\n                Some(did_512()),\n            )\n        }\n        fn did_542() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"logical_op_or\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_543() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"folds\"},0],[{\"ValueNs\":\"fold_range_step_by_cf\"},0]],\"Fn\",false]\"##,\n                Some(did_289()),\n            )\n        }\n        fn did_544() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"dropped_body\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_545() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_546() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u64\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_285()),\n            )\n        }\n        fn did_547() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_548() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"bit_xor\"},0]],\"Fn\",false]\"##,\n            
    Some(did_281()),\n            )\n        }\n        fn did_549() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_550() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"failure\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_551() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_552() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"bit_xor\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_553() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"div\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_554() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_555() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"while_loop_cf\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_556() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"question_mark_result\"},0]],\"Fn\",false]\"##,\n                
Some(did_291()),\n            )\n        }\n        fn did_557() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"bit_xor\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_558() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"bit_or\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_559() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"lt\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_560() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_561() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"shr\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_562() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"shl\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_563() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"gt\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_564() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"props\"},0]],\"Fn\",false]\"##,\n                Some(did_291()),\n        
    )\n        }\n        fn did_565() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_566() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"usize\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_319()),\n            )\n        }\n        fn did_567() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_568() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_569() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_570() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u128\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_281()),\n            )\n        }\n        fn did_571() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn did_572() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u16\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_283()),\n            )\n        }\n        fn did_573() -> ExplicitDefId {\n      
      deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_574() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i16\"},0],[{\"ValueNs\":\"bit_xor\"},0]],\"Fn\",false]\"##,\n                Some(did_315()),\n            )\n        }\n        fn did_575() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"bitor\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_576() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"le\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_577() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i8\"},0],[{\"ValueNs\":\"add\"},0]],\"Fn\",false]\"##,\n                Some(did_324()),\n            )\n        }\n        fn did_578() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"ne\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_579() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"isize\"},0],[{\"ValueNs\":\"rem\"},0]],\"Fn\",false]\"##,\n                Some(did_287()),\n            )\n        }\n        fn did_580() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"eq\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_581() -> ExplicitDefId {\n            deserialize(\n     
           r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"monomorphized_update_at\"},0],[{\"ValueNs\":\"update_at_range_to\"},0]],\"Fn\",false]\"##,\n                Some(did_310()),\n            )\n        }\n        fn did_582() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_583() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"bit_or\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_584() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i128\"},0],[{\"ValueNs\":\"mul\"},0]],\"Fn\",false]\"##,\n                Some(did_352()),\n            )\n        }\n        fn did_585() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"u32\"},0],[{\"ValueNs\":\"bit_and\"},0]],\"Fn\",false]\"##,\n                Some(did_297()),\n            )\n        }\n        fn did_586() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"ValueNs\":\"while_loop_return\"},0]],\"Fn\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_587() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i32\"},0],[{\"ValueNs\":\"ge\"},0]],\"Fn\",false]\"##,\n                Some(did_308()),\n            )\n        }\n        fn did_588() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"i64\"},0],[{\"ValueNs\":\"bit_and\"},0]],\"Fn\",false]\"##,\n                Some(did_328()),\n            )\n        }\n        fn 
did_589() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"iter\"},0],[{\"TypeNs\":\"traits\"},0],[{\"TypeNs\":\"iterator\"},0],[{\"TypeNs\":\"Iterator\"},0],[{\"TypeNs\":\"Item\"},0]],\"AssocTy\",false]\"##,\n                Some(did_191()),\n            )\n        }\n        fn did_590() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Add\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_194()),\n            )\n        }\n        fn did_591() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Sub\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_255()),\n            )\n        }\n        fn did_592() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Mul\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_253()),\n            )\n        }\n        fn did_593() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Div\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_251()),\n            )\n        }\n        fn did_594() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Rem\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_259()),\n            )\n        }\n        fn did_595() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"BitXor\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n     
           Some(did_249()),\n            )\n        }\n        fn did_596() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"BitAnd\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_247()),\n            )\n        }\n        fn did_597() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"BitOr\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_265()),\n            )\n        }\n        fn did_598() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"Shl\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_261()),\n            )\n        }\n        fn did_599() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"Shr\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_263()),\n            )\n        }\n        fn did_600() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"arith\"},0],[{\"TypeNs\":\"Neg\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_257()),\n            )\n        }\n        fn did_601() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"bit\"},0],[{\"TypeNs\":\"Not\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_159()),\n            )\n        }\n        fn did_602() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"try_trait\"},0],[{\"TypeNs\":\"Try\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_237()),\n            )\n        }\n        fn did_603() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"explicit_monadic\"},0]],\"Mod\",false]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_604() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"explicit_monadic\"},0],[{\"ValueNs\":\"pure\"},0]],\"Fn\",false]\"##,\n                Some(did_603()),\n            )\n        }\n        fn did_605() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"explicit_monadic\"},0],[{\"ValueNs\":\"lift\"},0]],\"Fn\",false]\"##,\n                Some(did_603()),\n            )\n        }\n        fn did_606() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"fmt\"},0],[{\"TypeNs\":\"num\"},0],[\"Impl\",54]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_168()),\n            )\n        }\n        fn did_607() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"marker\"},0],[{\"TypeNs\":\"Destruct\"},0]],\"Trait\",false]\"##,\n                Some(did_274()),\n            )\n        }\n        fn did_608() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"cmp\"},0],[{\"TypeNs\":\"Eq\"},0]],\"Trait\",false]\"##,\n                Some(did_155()),\n            )\n        }\n        fn did_609() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"cmp\"},0],[{\"TypeNs\":\"Ord\"},0]],\"Trait\",false]\"##,\n                Some(did_155()),\n            )\n        
}\n        fn did_610() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"marker\"},0],[{\"TypeNs\":\"StructuralPartialEq\"},0]],\"Trait\",false]\"##,\n                Some(did_274()),\n            )\n        }\n        fn did_611() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[\"Impl\",1]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_612() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"TypeNs\":\"Foo\"},0]],\"Struct\",false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_613() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[\"Impl\",0]],{\"Impl\":{\"of_trait\":true}},false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_614() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"add_with_overflow\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_615() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"arithmetic\"},0]],\"Mod\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_616() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"arithmetic\"},0],[{\"ValueNs\":\"neg\"},0]],\"Fn\",false]\"##,\n                Some(did_615()),\n            )\n        }\n        fn did_617() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"sub_with_overflow\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_618() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"mul_with_overflow\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_619() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"machine_int\"},0],[{\"ValueNs\":\"cmp\"},0]],\"Fn\",false]\"##,\n                Some(did_302()),\n            )\n        }\n        fn did_620() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"alloc\",[[{\"TypeNs\":\"vec\"},0],[\"Impl\",1],[{\"ValueNs\":\"as_slice\"},0]],\"AssocFn\",false]\"##,\n                Some(did_28()),\n            )\n        }\n        fn did_621() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"index\"},0],[{\"TypeNs\":\"Index\"},0],[{\"TypeNs\":\"Output\"},0]],\"AssocTy\",false]\"##,\n                Some(did_54()),\n            )\n        }\n        fn did_622() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"index\"},0],[{\"TypeNs\":\"IndexMut\"},0]],\"Trait\",false]\"##,\n                Some(did_53()),\n            )\n        }\n        fn did_623() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"index\"},0],[{\"TypeNs\":\"IndexMut\"},0],[{\"ValueNs\":\"index_mut\"},0]],\"AssocFn\",false]\"##,\n                Some(did_622()),\n            )\n        }\n        fn did_624() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[\"Use\",1]],\"Use\",false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_625() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"props\"},0],[\"Use\",0]],\"Use\",false]\"##,\n                Some(did_564()),\n            )\n        }\n        fn did_626() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"_\"},1],[\"Use\",0]],\"Use\",false]\"##,\n                Some(did_292()),\n            )\n        }\n        fn did_627() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"_\"},1],[\"Use\",1]],\"Use\",false]\"##,\n                Some(did_292()),\n            )\n        }\n        fn did_628() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[\"Use\",0]],\"Use\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_629() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"_\"},0],[\"Use\",0]],\"Use\",false]\"##,\n                Some(did_300()),\n            )\n        }\n        fn did_630() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"MacroNs\":\"impl_arith\"},0]],{\"Macro\":{\"bang\":true,\"attr\":false,\"derive\":false}},false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_631() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"_\"},1],[{\"ValueNs\":\"g\"},0]],\"Fn\",false]\"##,\n                Some(did_292()),\n            )\n        }\n        fn did_632() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[\"Use\",0]],\"Use\",false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_633() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"alloc\"},0]],\"ExternCrate\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_634() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[\"Use\",2]],\"Use\",false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_635() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"std\"},0]],\"ExternCrate\",false]\"##,\n                Some(did_0()),\n            )\n        }\n        fn did_636() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"_\"},0],[\"Use\",1]],\"Use\",false]\"##,\n                Some(did_300()),\n            )\n        }\n        fn did_637() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[\"Use\",4]],\"Use\",false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_638() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"crypto_abstractions\"},0],[\"Use\",0]],\"Use\",false]\"##,\n                Some(did_293()),\n            )\n        }\n        fn did_639() -> 
ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"index_mut\"},0]],\"Fn\",false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_640() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"ValueNs\":\"_\"},0],[\"Use\",2]],\"Use\",false]\"##,\n                Some(did_300()),\n            )\n        }\n        fn did_641() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[\"Use\",3]],\"Use\",false]\"##,\n                Some(did_291()),\n            )\n        }\n        fn did_642() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0],[{\"TypeNs\":\"Range\"},0]],\"Struct\",true]\"##,\n                Some(did_49()),\n            )\n        }\n        fn did_643() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"Tuple2\"},0]],\"Struct\",true]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_644() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"control_flow\"},0],[{\"TypeNs\":\"ControlFlow\"},0],[{\"TypeNs\":\"Break\"},0]],\"Variant\",true]\"##,\n                Some(did_121()),\n            )\n        }\n        fn did_645() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"control_flow\"},0],[{\"TypeNs\":\"ControlFlow\"},0],[{\"TypeNs\":\"Continue\"},0]],\"Variant\",true]\"##,\n                Some(did_121()),\n            )\n        }\n        fn did_646() -> ExplicitDefId {\n            deserialize(\n                
r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0],[{\"TypeNs\":\"RangeTo\"},0]],\"Struct\",true]\"##,\n                Some(did_49()),\n            )\n        }\n        fn did_647() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0],[{\"TypeNs\":\"RangeFull\"},0]],\"Struct\",true]\"##,\n                Some(did_49()),\n            )\n        }\n        fn did_648() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"ops\"},0],[{\"TypeNs\":\"range\"},0],[{\"TypeNs\":\"RangeFrom\"},0]],\"Struct\",true]\"##,\n                Some(did_49()),\n            )\n        }\n        fn did_649() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"result\"},0],[{\"TypeNs\":\"Result\"},0],[{\"TypeNs\":\"Err\"},0]],\"Variant\",true]\"##,\n                Some(did_77()),\n            )\n        }\n        fn did_650() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"option\"},0],[{\"TypeNs\":\"Option\"},0],[{\"TypeNs\":\"Some\"},0]],\"Variant\",true]\"##,\n                Some(did_164()),\n            )\n        }\n        fn did_651() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"result\"},0],[{\"TypeNs\":\"Result\"},0],[{\"TypeNs\":\"Ok\"},0]],\"Variant\",true]\"##,\n                Some(did_77()),\n            )\n        }\n        fn did_652() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"TypeNs\":\"hax\"},0],[{\"TypeNs\":\"Failure\"},0]],\"Struct\",true]\"##,\n                Some(did_1()),\n            )\n        }\n        fn did_653() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"rust_primitives\",[[{\"ValueNs\":\"dummy_hax_concrete_ident_wrapper\"},0],[{\"TypeNs\":\"Foo\"},0]],\"Struct\",true]\"##,\n                Some(did_291()),\n   
         )\n        }\n        fn did_654() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"HashAlgorithm\"},0],[{\"TypeNs\":\"Sha256\"},0]],\"Variant\",true]\"##,\n                Some(did_20()),\n            )\n        }\n        fn did_655() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"HMACAlgorithm\"},0],[{\"TypeNs\":\"Sha256\"},0]],\"Variant\",true]\"##,\n                Some(did_63()),\n            )\n        }\n        fn did_656() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"DHGroup\"},0],[{\"TypeNs\":\"X25519\"},0]],\"Variant\",true]\"##,\n                Some(did_65()),\n            )\n        }\n        fn did_657() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"hax_lib_protocol\",[[{\"TypeNs\":\"crypto\"},0],[{\"TypeNs\":\"AEADAlgorithm\"},0],[{\"TypeNs\":\"Chacha20Poly1305\"},0]],\"Variant\",true]\"##,\n                Some(did_84()),\n            )\n        }\n        fn did_658() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"option\"},0],[{\"TypeNs\":\"Option\"},0],[{\"TypeNs\":\"None\"},0]],\"Variant\",true]\"##,\n                Some(did_164()),\n            )\n        }\n        fn did_659() -> ExplicitDefId {\n            deserialize(\n                r##\"[\"core\",[[{\"TypeNs\":\"panicking\"},0],[{\"TypeNs\":\"AssertKind\"},0],[{\"TypeNs\":\"Eq\"},0]],\"Variant\",true]\"##,\n                Some(did_162()),\n            )\n        }\n        [\n            did_0().into_global_id_inner(),\n            did_1().into_global_id_inner(),\n            did_2().into_global_id_inner(),\n            did_3().into_global_id_inner(),\n            did_4().into_global_id_inner(),\n            did_5().into_global_id_inner(),\n           
 did_6().into_global_id_inner(),\n            did_7().into_global_id_inner(),\n            did_8().into_global_id_inner(),\n            did_9().into_global_id_inner(),\n            did_10().into_global_id_inner(),\n            did_11().into_global_id_inner(),\n            did_12().into_global_id_inner(),\n            did_13().into_global_id_inner(),\n            did_14().into_global_id_inner(),\n            did_15().into_global_id_inner(),\n            did_16().into_global_id_inner(),\n            did_17().into_global_id_inner(),\n            did_18().into_global_id_inner(),\n            did_19().into_global_id_inner(),\n            did_20().into_global_id_inner(),\n            did_21().into_global_id_inner(),\n            did_22().into_global_id_inner(),\n            did_23().into_global_id_inner(),\n            did_24().into_global_id_inner(),\n            did_25().into_global_id_inner(),\n            did_26().into_global_id_inner(),\n            did_27().into_global_id_inner(),\n            did_28().into_global_id_inner(),\n            did_29().into_global_id_inner(),\n            did_30().into_global_id_inner(),\n            did_31().into_global_id_inner(),\n            did_32().into_global_id_inner(),\n            did_33().into_global_id_inner(),\n            did_34().into_global_id_inner(),\n            did_35().into_global_id_inner(),\n            did_36().into_global_id_inner(),\n            did_37().into_global_id_inner(),\n            did_38().into_global_id_inner(),\n            did_39().into_global_id_inner(),\n            did_40().into_global_id_inner(),\n            did_41().into_global_id_inner(),\n            did_42().into_global_id_inner(),\n            did_43().into_global_id_inner(),\n            did_44().into_global_id_inner(),\n            did_45().into_global_id_inner(),\n            did_46().into_global_id_inner(),\n            did_47().into_global_id_inner(),\n            did_48().into_global_id_inner(),\n            
did_49().into_global_id_inner(),\n            did_50().into_global_id_inner(),\n            did_51().into_global_id_inner(),\n            did_52().into_global_id_inner(),\n            did_53().into_global_id_inner(),\n            did_54().into_global_id_inner(),\n            did_55().into_global_id_inner(),\n            did_56().into_global_id_inner(),\n            did_57().into_global_id_inner(),\n            did_58().into_global_id_inner(),\n            did_59().into_global_id_inner(),\n            did_60().into_global_id_inner(),\n            did_61().into_global_id_inner(),\n            did_62().into_global_id_inner(),\n            did_63().into_global_id_inner(),\n            did_64().into_global_id_inner(),\n            did_65().into_global_id_inner(),\n            did_66().into_global_id_inner(),\n            did_67().into_global_id_inner(),\n            did_68().into_global_id_inner(),\n            did_69().into_global_id_inner(),\n            did_70().into_global_id_inner(),\n            did_71().into_global_id_inner(),\n            did_72().into_global_id_inner(),\n            did_73().into_global_id_inner(),\n            did_74().into_global_id_inner(),\n            did_75().into_global_id_inner(),\n            did_76().into_global_id_inner(),\n            did_77().into_global_id_inner(),\n            did_78().into_global_id_inner(),\n            did_79().into_global_id_inner(),\n            did_80().into_global_id_inner(),\n            did_81().into_global_id_inner(),\n            did_82().into_global_id_inner(),\n            did_83().into_global_id_inner(),\n            did_84().into_global_id_inner(),\n            did_85().into_global_id_inner(),\n            did_86().into_global_id_inner(),\n            did_87().into_global_id_inner(),\n            did_88().into_global_id_inner(),\n            did_89().into_global_id_inner(),\n            did_90().into_global_id_inner(),\n            did_91().into_global_id_inner(),\n            
did_92().into_global_id_inner(),\n            did_93().into_global_id_inner(),\n            did_94().into_global_id_inner(),\n            did_95().into_global_id_inner(),\n            did_96().into_global_id_inner(),\n            did_97().into_global_id_inner(),\n            did_98().into_global_id_inner(),\n            did_99().into_global_id_inner(),\n            did_100().into_global_id_inner(),\n            did_101().into_global_id_inner(),\n            did_102().into_global_id_inner(),\n            did_103().into_global_id_inner(),\n            did_104().into_global_id_inner(),\n            did_105().into_global_id_inner(),\n            did_106().into_global_id_inner(),\n            did_107().into_global_id_inner(),\n            did_108().into_global_id_inner(),\n            did_109().into_global_id_inner(),\n            did_110().into_global_id_inner(),\n            did_111().into_global_id_inner(),\n            did_112().into_global_id_inner(),\n            did_113().into_global_id_inner(),\n            did_114().into_global_id_inner(),\n            did_115().into_global_id_inner(),\n            did_116().into_global_id_inner(),\n            did_117().into_global_id_inner(),\n            did_118().into_global_id_inner(),\n            did_119().into_global_id_inner(),\n            did_120().into_global_id_inner(),\n            did_121().into_global_id_inner(),\n            did_122().into_global_id_inner(),\n            did_123().into_global_id_inner(),\n            did_124().into_global_id_inner(),\n            did_125().into_global_id_inner(),\n            did_126().into_global_id_inner(),\n            did_127().into_global_id_inner(),\n            did_128().into_global_id_inner(),\n            did_129().into_global_id_inner(),\n            did_130().into_global_id_inner(),\n            did_131().into_global_id_inner(),\n            did_132().into_global_id_inner(),\n            did_133().into_global_id_inner(),\n            
did_134().into_global_id_inner(),\n            did_135().into_global_id_inner(),\n            did_136().into_global_id_inner(),\n            did_137().into_global_id_inner(),\n            did_138().into_global_id_inner(),\n            did_139().into_global_id_inner(),\n            did_140().into_global_id_inner(),\n            did_141().into_global_id_inner(),\n            did_142().into_global_id_inner(),\n            did_143().into_global_id_inner(),\n            did_144().into_global_id_inner(),\n            did_145().into_global_id_inner(),\n            did_146().into_global_id_inner(),\n            did_147().into_global_id_inner(),\n            did_148().into_global_id_inner(),\n            did_149().into_global_id_inner(),\n            did_150().into_global_id_inner(),\n            did_151().into_global_id_inner(),\n            did_152().into_global_id_inner(),\n            did_153().into_global_id_inner(),\n            did_154().into_global_id_inner(),\n            did_155().into_global_id_inner(),\n            did_156().into_global_id_inner(),\n            did_157().into_global_id_inner(),\n            did_158().into_global_id_inner(),\n            did_159().into_global_id_inner(),\n            did_160().into_global_id_inner(),\n            did_161().into_global_id_inner(),\n            did_162().into_global_id_inner(),\n            did_163().into_global_id_inner(),\n            did_164().into_global_id_inner(),\n            did_165().into_global_id_inner(),\n            did_166().into_global_id_inner(),\n            did_167().into_global_id_inner(),\n            did_168().into_global_id_inner(),\n            did_169().into_global_id_inner(),\n            did_170().into_global_id_inner(),\n            did_171().into_global_id_inner(),\n            did_172().into_global_id_inner(),\n            did_173().into_global_id_inner(),\n            did_174().into_global_id_inner(),\n            did_175().into_global_id_inner(),\n            
did_176().into_global_id_inner(),\n            did_177().into_global_id_inner(),\n            did_178().into_global_id_inner(),\n            did_179().into_global_id_inner(),\n            did_180().into_global_id_inner(),\n            did_181().into_global_id_inner(),\n            did_182().into_global_id_inner(),\n            did_183().into_global_id_inner(),\n            did_184().into_global_id_inner(),\n            did_185().into_global_id_inner(),\n            did_186().into_global_id_inner(),\n            did_187().into_global_id_inner(),\n            did_188().into_global_id_inner(),\n            did_189().into_global_id_inner(),\n            did_190().into_global_id_inner(),\n            did_191().into_global_id_inner(),\n            did_192().into_global_id_inner(),\n            did_193().into_global_id_inner(),\n            did_194().into_global_id_inner(),\n            did_195().into_global_id_inner(),\n            did_196().into_global_id_inner(),\n            did_197().into_global_id_inner(),\n            did_198().into_global_id_inner(),\n            did_199().into_global_id_inner(),\n            did_200().into_global_id_inner(),\n            did_201().into_global_id_inner(),\n            did_202().into_global_id_inner(),\n            did_203().into_global_id_inner(),\n            did_204().into_global_id_inner(),\n            did_205().into_global_id_inner(),\n            did_206().into_global_id_inner(),\n            did_207().into_global_id_inner(),\n            did_208().into_global_id_inner(),\n            did_209().into_global_id_inner(),\n            did_210().into_global_id_inner(),\n            did_211().into_global_id_inner(),\n            did_212().into_global_id_inner(),\n            did_213().into_global_id_inner(),\n            did_214().into_global_id_inner(),\n            did_215().into_global_id_inner(),\n            did_216().into_global_id_inner(),\n            did_217().into_global_id_inner(),\n            
did_218().into_global_id_inner(),\n            did_219().into_global_id_inner(),\n            did_220().into_global_id_inner(),\n            did_221().into_global_id_inner(),\n            did_222().into_global_id_inner(),\n            did_223().into_global_id_inner(),\n            did_224().into_global_id_inner(),\n            did_225().into_global_id_inner(),\n            did_226().into_global_id_inner(),\n            did_227().into_global_id_inner(),\n            did_228().into_global_id_inner(),\n            did_229().into_global_id_inner(),\n            did_230().into_global_id_inner(),\n            did_231().into_global_id_inner(),\n            did_232().into_global_id_inner(),\n            did_233().into_global_id_inner(),\n            did_234().into_global_id_inner(),\n            did_235().into_global_id_inner(),\n            did_236().into_global_id_inner(),\n            did_237().into_global_id_inner(),\n            did_238().into_global_id_inner(),\n            did_239().into_global_id_inner(),\n            did_240().into_global_id_inner(),\n            did_241().into_global_id_inner(),\n            did_242().into_global_id_inner(),\n            did_243().into_global_id_inner(),\n            did_244().into_global_id_inner(),\n            did_245().into_global_id_inner(),\n            did_246().into_global_id_inner(),\n            did_247().into_global_id_inner(),\n            did_248().into_global_id_inner(),\n            did_249().into_global_id_inner(),\n            did_250().into_global_id_inner(),\n            did_251().into_global_id_inner(),\n            did_252().into_global_id_inner(),\n            did_253().into_global_id_inner(),\n            did_254().into_global_id_inner(),\n            did_255().into_global_id_inner(),\n            did_256().into_global_id_inner(),\n            did_257().into_global_id_inner(),\n            did_258().into_global_id_inner(),\n            did_259().into_global_id_inner(),\n            
did_260().into_global_id_inner(),\n            did_261().into_global_id_inner(),\n            did_262().into_global_id_inner(),\n            did_263().into_global_id_inner(),\n            did_264().into_global_id_inner(),\n            did_265().into_global_id_inner(),\n            did_266().into_global_id_inner(),\n            did_267().into_global_id_inner(),\n            did_268().into_global_id_inner(),\n            did_269().into_global_id_inner(),\n            did_270().into_global_id_inner(),\n            did_271().into_global_id_inner(),\n            did_272().into_global_id_inner(),\n            did_273().into_global_id_inner(),\n            did_274().into_global_id_inner(),\n            did_275().into_global_id_inner(),\n            did_276().into_global_id_inner(),\n            did_277().into_global_id_inner(),\n            did_278().into_global_id_inner(),\n            did_279().into_global_id_inner(),\n            did_280().into_global_id_inner(),\n            did_281().into_global_id_inner(),\n            did_282().into_global_id_inner(),\n            did_283().into_global_id_inner(),\n            did_284().into_global_id_inner(),\n            did_285().into_global_id_inner(),\n            did_286().into_global_id_inner(),\n            did_287().into_global_id_inner(),\n            did_288().into_global_id_inner(),\n            did_289().into_global_id_inner(),\n            did_290().into_global_id_inner(),\n            did_291().into_global_id_inner(),\n            did_292().into_global_id_inner(),\n            did_293().into_global_id_inner(),\n            did_294().into_global_id_inner(),\n            did_295().into_global_id_inner(),\n            did_296().into_global_id_inner(),\n            did_297().into_global_id_inner(),\n            did_298().into_global_id_inner(),\n            did_299().into_global_id_inner(),\n            did_300().into_global_id_inner(),\n            did_301().into_global_id_inner(),\n            
did_302().into_global_id_inner(),\n            did_303().into_global_id_inner(),\n            did_304().into_global_id_inner(),\n            did_305().into_global_id_inner(),\n            did_306().into_global_id_inner(),\n            did_307().into_global_id_inner(),\n            did_308().into_global_id_inner(),\n            did_309().into_global_id_inner(),\n            did_310().into_global_id_inner(),\n            did_311().into_global_id_inner(),\n            did_312().into_global_id_inner(),\n            did_313().into_global_id_inner(),\n            did_314().into_global_id_inner(),\n            did_315().into_global_id_inner(),\n            did_316().into_global_id_inner(),\n            did_317().into_global_id_inner(),\n            did_318().into_global_id_inner(),\n            did_319().into_global_id_inner(),\n            did_320().into_global_id_inner(),\n            did_321().into_global_id_inner(),\n            did_322().into_global_id_inner(),\n            did_323().into_global_id_inner(),\n            did_324().into_global_id_inner(),\n            did_325().into_global_id_inner(),\n            did_326().into_global_id_inner(),\n            did_327().into_global_id_inner(),\n            did_328().into_global_id_inner(),\n            did_329().into_global_id_inner(),\n            did_330().into_global_id_inner(),\n            did_331().into_global_id_inner(),\n            did_332().into_global_id_inner(),\n            did_333().into_global_id_inner(),\n            did_334().into_global_id_inner(),\n            did_335().into_global_id_inner(),\n            did_336().into_global_id_inner(),\n            did_337().into_global_id_inner(),\n            did_338().into_global_id_inner(),\n            did_339().into_global_id_inner(),\n            did_340().into_global_id_inner(),\n            did_341().into_global_id_inner(),\n            did_342().into_global_id_inner(),\n            did_343().into_global_id_inner(),\n            
did_344().into_global_id_inner(),\n            did_345().into_global_id_inner(),\n            did_346().into_global_id_inner(),\n            did_347().into_global_id_inner(),\n            did_348().into_global_id_inner(),\n            did_349().into_global_id_inner(),\n            did_350().into_global_id_inner(),\n            did_351().into_global_id_inner(),\n            did_352().into_global_id_inner(),\n            did_353().into_global_id_inner(),\n            did_354().into_global_id_inner(),\n            did_355().into_global_id_inner(),\n            did_356().into_global_id_inner(),\n            did_357().into_global_id_inner(),\n            did_358().into_global_id_inner(),\n            did_359().into_global_id_inner(),\n            did_360().into_global_id_inner(),\n            did_361().into_global_id_inner(),\n            did_362().into_global_id_inner(),\n            did_363().into_global_id_inner(),\n            did_364().into_global_id_inner(),\n            did_365().into_global_id_inner(),\n            did_366().into_global_id_inner(),\n            did_367().into_global_id_inner(),\n            did_368().into_global_id_inner(),\n            did_369().into_global_id_inner(),\n            did_370().into_global_id_inner(),\n            did_371().into_global_id_inner(),\n            did_372().into_global_id_inner(),\n            did_373().into_global_id_inner(),\n            did_374().into_global_id_inner(),\n            did_375().into_global_id_inner(),\n            did_376().into_global_id_inner(),\n            did_377().into_global_id_inner(),\n            did_378().into_global_id_inner(),\n            did_379().into_global_id_inner(),\n            did_380().into_global_id_inner(),\n            did_381().into_global_id_inner(),\n            did_382().into_global_id_inner(),\n            did_383().into_global_id_inner(),\n            did_384().into_global_id_inner(),\n            did_385().into_global_id_inner(),\n            
did_386().into_global_id_inner(),\n            did_387().into_global_id_inner(),\n            did_388().into_global_id_inner(),\n            did_389().into_global_id_inner(),\n            did_390().into_global_id_inner(),\n            did_391().into_global_id_inner(),\n            did_392().into_global_id_inner(),\n            did_393().into_global_id_inner(),\n            did_394().into_global_id_inner(),\n            did_395().into_global_id_inner(),\n            did_396().into_global_id_inner(),\n            did_397().into_global_id_inner(),\n            did_398().into_global_id_inner(),\n            did_399().into_global_id_inner(),\n            did_400().into_global_id_inner(),\n            did_401().into_global_id_inner(),\n            did_402().into_global_id_inner(),\n            did_403().into_global_id_inner(),\n            did_404().into_global_id_inner(),\n            did_405().into_global_id_inner(),\n            did_406().into_global_id_inner(),\n            did_407().into_global_id_inner(),\n            did_408().into_global_id_inner(),\n            did_409().into_global_id_inner(),\n            did_410().into_global_id_inner(),\n            did_411().into_global_id_inner(),\n            did_412().into_global_id_inner(),\n            did_413().into_global_id_inner(),\n            did_414().into_global_id_inner(),\n            did_415().into_global_id_inner(),\n            did_416().into_global_id_inner(),\n            did_417().into_global_id_inner(),\n            did_418().into_global_id_inner(),\n            did_419().into_global_id_inner(),\n            did_420().into_global_id_inner(),\n            did_421().into_global_id_inner(),\n            did_422().into_global_id_inner(),\n            did_423().into_global_id_inner(),\n            did_424().into_global_id_inner(),\n            did_425().into_global_id_inner(),\n            did_426().into_global_id_inner(),\n            did_427().into_global_id_inner(),\n            
did_428().into_global_id_inner(),\n            did_429().into_global_id_inner(),\n            did_430().into_global_id_inner(),\n            did_431().into_global_id_inner(),\n            did_432().into_global_id_inner(),\n            did_433().into_global_id_inner(),\n            did_434().into_global_id_inner(),\n            did_435().into_global_id_inner(),\n            did_436().into_global_id_inner(),\n            did_437().into_global_id_inner(),\n            did_438().into_global_id_inner(),\n            did_439().into_global_id_inner(),\n            did_440().into_global_id_inner(),\n            did_441().into_global_id_inner(),\n            did_442().into_global_id_inner(),\n            did_443().into_global_id_inner(),\n            did_444().into_global_id_inner(),\n            did_445().into_global_id_inner(),\n            did_446().into_global_id_inner(),\n            did_447().into_global_id_inner(),\n            did_448().into_global_id_inner(),\n            did_449().into_global_id_inner(),\n            did_450().into_global_id_inner(),\n            did_451().into_global_id_inner(),\n            did_452().into_global_id_inner(),\n            did_453().into_global_id_inner(),\n            did_454().into_global_id_inner(),\n            did_455().into_global_id_inner(),\n            did_456().into_global_id_inner(),\n            did_457().into_global_id_inner(),\n            did_458().into_global_id_inner(),\n            did_459().into_global_id_inner(),\n            did_460().into_global_id_inner(),\n            did_461().into_global_id_inner(),\n            did_462().into_global_id_inner(),\n            did_463().into_global_id_inner(),\n            did_464().into_global_id_inner(),\n            did_465().into_global_id_inner(),\n            did_466().into_global_id_inner(),\n            did_467().into_global_id_inner(),\n            did_468().into_global_id_inner(),\n            did_469().into_global_id_inner(),\n            
did_470().into_global_id_inner(),\n            did_471().into_global_id_inner(),\n            did_472().into_global_id_inner(),\n            did_473().into_global_id_inner(),\n            did_474().into_global_id_inner(),\n            did_475().into_global_id_inner(),\n            did_476().into_global_id_inner(),\n            did_477().into_global_id_inner(),\n            did_478().into_global_id_inner(),\n            did_479().into_global_id_inner(),\n            did_480().into_global_id_inner(),\n            did_481().into_global_id_inner(),\n            did_482().into_global_id_inner(),\n            did_483().into_global_id_inner(),\n            did_484().into_global_id_inner(),\n            did_485().into_global_id_inner(),\n            did_486().into_global_id_inner(),\n            did_487().into_global_id_inner(),\n            did_488().into_global_id_inner(),\n            did_489().into_global_id_inner(),\n            did_490().into_global_id_inner(),\n            did_491().into_global_id_inner(),\n            did_492().into_global_id_inner(),\n            did_493().into_global_id_inner(),\n            did_494().into_global_id_inner(),\n            did_495().into_global_id_inner(),\n            did_496().into_global_id_inner(),\n            did_497().into_global_id_inner(),\n            did_498().into_global_id_inner(),\n            did_499().into_global_id_inner(),\n            did_500().into_global_id_inner(),\n            did_501().into_global_id_inner(),\n            did_502().into_global_id_inner(),\n            did_503().into_global_id_inner(),\n            did_504().into_global_id_inner(),\n            did_505().into_global_id_inner(),\n            did_506().into_global_id_inner(),\n            did_507().into_global_id_inner(),\n            did_508().into_global_id_inner(),\n            did_509().into_global_id_inner(),\n            did_510().into_global_id_inner(),\n            did_511().into_global_id_inner(),\n            
did_512().into_global_id_inner(),\n            did_513().into_global_id_inner(),\n            did_514().into_global_id_inner(),\n            did_515().into_global_id_inner(),\n            did_516().into_global_id_inner(),\n            did_517().into_global_id_inner(),\n            did_518().into_global_id_inner(),\n            did_519().into_global_id_inner(),\n            did_520().into_global_id_inner(),\n            did_521().into_global_id_inner(),\n            did_522().into_global_id_inner(),\n            did_523().into_global_id_inner(),\n            did_524().into_global_id_inner(),\n            did_525().into_global_id_inner(),\n            did_526().into_global_id_inner(),\n            did_527().into_global_id_inner(),\n            did_528().into_global_id_inner(),\n            did_529().into_global_id_inner(),\n            did_530().into_global_id_inner(),\n            did_531().into_global_id_inner(),\n            did_532().into_global_id_inner(),\n            did_533().into_global_id_inner(),\n            did_534().into_global_id_inner(),\n            did_535().into_global_id_inner(),\n            did_536().into_global_id_inner(),\n            did_537().into_global_id_inner(),\n            did_538().into_global_id_inner(),\n            did_539().into_global_id_inner(),\n            did_540().into_global_id_inner(),\n            did_541().into_global_id_inner(),\n            did_542().into_global_id_inner(),\n            did_543().into_global_id_inner(),\n            did_544().into_global_id_inner(),\n            did_545().into_global_id_inner(),\n            did_546().into_global_id_inner(),\n            did_547().into_global_id_inner(),\n            did_548().into_global_id_inner(),\n            did_549().into_global_id_inner(),\n            did_550().into_global_id_inner(),\n            did_551().into_global_id_inner(),\n            did_552().into_global_id_inner(),\n            did_553().into_global_id_inner(),\n            
did_554().into_global_id_inner(),\n            did_555().into_global_id_inner(),\n            did_556().into_global_id_inner(),\n            did_557().into_global_id_inner(),\n            did_558().into_global_id_inner(),\n            did_559().into_global_id_inner(),\n            did_560().into_global_id_inner(),\n            did_561().into_global_id_inner(),\n            did_562().into_global_id_inner(),\n            did_563().into_global_id_inner(),\n            did_564().into_global_id_inner(),\n            did_565().into_global_id_inner(),\n            did_566().into_global_id_inner(),\n            did_567().into_global_id_inner(),\n            did_568().into_global_id_inner(),\n            did_569().into_global_id_inner(),\n            did_570().into_global_id_inner(),\n            did_571().into_global_id_inner(),\n            did_572().into_global_id_inner(),\n            did_573().into_global_id_inner(),\n            did_574().into_global_id_inner(),\n            did_575().into_global_id_inner(),\n            did_576().into_global_id_inner(),\n            did_577().into_global_id_inner(),\n            did_578().into_global_id_inner(),\n            did_579().into_global_id_inner(),\n            did_580().into_global_id_inner(),\n            did_581().into_global_id_inner(),\n            did_582().into_global_id_inner(),\n            did_583().into_global_id_inner(),\n            did_584().into_global_id_inner(),\n            did_585().into_global_id_inner(),\n            did_586().into_global_id_inner(),\n            did_587().into_global_id_inner(),\n            did_588().into_global_id_inner(),\n            did_589().into_global_id_inner(),\n            did_590().into_global_id_inner(),\n            did_591().into_global_id_inner(),\n            did_592().into_global_id_inner(),\n            did_593().into_global_id_inner(),\n            did_594().into_global_id_inner(),\n            did_595().into_global_id_inner(),\n            
did_596().into_global_id_inner(),\n            did_597().into_global_id_inner(),\n            did_598().into_global_id_inner(),\n            did_599().into_global_id_inner(),\n            did_600().into_global_id_inner(),\n            did_601().into_global_id_inner(),\n            did_602().into_global_id_inner(),\n            did_603().into_global_id_inner(),\n            did_604().into_global_id_inner(),\n            did_605().into_global_id_inner(),\n            did_606().into_global_id_inner(),\n            did_607().into_global_id_inner(),\n            did_608().into_global_id_inner(),\n            did_609().into_global_id_inner(),\n            did_610().into_global_id_inner(),\n            did_611().into_global_id_inner(),\n            did_612().into_global_id_inner(),\n            did_613().into_global_id_inner(),\n            did_614().into_global_id_inner(),\n            did_615().into_global_id_inner(),\n            did_616().into_global_id_inner(),\n            did_617().into_global_id_inner(),\n            did_618().into_global_id_inner(),\n            did_619().into_global_id_inner(),\n            did_620().into_global_id_inner(),\n            did_621().into_global_id_inner(),\n            did_622().into_global_id_inner(),\n            did_623().into_global_id_inner(),\n            did_624().into_global_id_inner(),\n            did_625().into_global_id_inner(),\n            did_626().into_global_id_inner(),\n            did_627().into_global_id_inner(),\n            did_628().into_global_id_inner(),\n            did_629().into_global_id_inner(),\n            did_630().into_global_id_inner(),\n            did_631().into_global_id_inner(),\n            did_632().into_global_id_inner(),\n            did_633().into_global_id_inner(),\n            did_634().into_global_id_inner(),\n            did_635().into_global_id_inner(),\n            did_636().into_global_id_inner(),\n            did_637().into_global_id_inner(),\n            
did_638().into_global_id_inner(),\n            did_639().into_global_id_inner(),\n            did_640().into_global_id_inner(),\n            did_641().into_global_id_inner(),\n            did_642().into_global_id_inner(),\n            did_643().into_global_id_inner(),\n            did_644().into_global_id_inner(),\n            did_645().into_global_id_inner(),\n            did_646().into_global_id_inner(),\n            did_647().into_global_id_inner(),\n            did_648().into_global_id_inner(),\n            did_649().into_global_id_inner(),\n            did_650().into_global_id_inner(),\n            did_651().into_global_id_inner(),\n            did_652().into_global_id_inner(),\n            did_653().into_global_id_inner(),\n            did_654().into_global_id_inner(),\n            did_655().into_global_id_inner(),\n            did_656().into_global_id_inner(),\n            did_657().into_global_id_inner(),\n            did_658().into_global_id_inner(),\n            did_659().into_global_id_inner(),\n        ]\n    })\n};\n\nstatic INTERNED_GLOBAL_IDS: [crate::interning::Interned<\n    crate::ast::identifiers::global_id::GlobalIdInner,\n>; 660] = TABLE_AND_INTERNED_GLOBAL_IDS.1;\n\nimpl crate::interning::Internable for crate::ast::identifiers::global_id::GlobalIdInner {\n    fn interning_table() -> &'static std::sync::Mutex<crate::interning::InterningTable<Self>> {\n        &TABLE_AND_INTERNED_GLOBAL_IDS.0\n    }\n}\n\nuse super::root;\npub mod alloc {\n    #![doc = r##\"This is the module [`::alloc`].\"##]\n    use super::root;\n    pub mod alloc {\n        #![doc = r##\"This is the module [`::alloc::alloc`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the struct [`::alloc::alloc::Global`].\"##]\n        pub const Global: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[4]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__1: 
crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[15]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__3: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[16]);\n    }\n    pub mod boxed {\n        #![doc = r##\"This is the module [`::alloc::boxed`].\"##]\n        use super::root;\n        pub mod Impl {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::alloc::boxed::Impl::new`].\"##]\n            pub const new: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[180]);\n        }\n\n        #[doc = r##\"This is the struct [`::alloc::boxed::Box`].\"##]\n        pub const Box: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[34]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[179]);\n    }\n    pub mod slice {\n        #![doc = r##\"This is the module [`::alloc::slice`].\"##]\n        use super::root;\n        pub mod Impl {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::alloc::slice::Impl::concat`].\"##]\n            pub const concat: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[44]);\n\n            #[doc = r##\"This is the associated function [`::alloc::slice::Impl::into_vec`].\"##]\n            pub const into_vec: crate::ast::identifiers::global_id::GlobalId =\n       
         crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[38]);\n\n            #[doc = r##\"This is the associated function [`::alloc::slice::Impl::to_vec`].\"##]\n            pub const to_vec: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[45]);\n        }\n\n        #[doc = r##\"This is the trait [`::alloc::slice::Concat`].\"##]\n        pub const Concat: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[39]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[37]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__2: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[43]);\n    }\n    pub mod string {\n        #![doc = r##\"This is the module [`::alloc::string`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the struct [`::alloc::string::String`].\"##]\n        pub const String: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[182]);\n    }\n    pub mod vec {\n        #![doc = r##\"This is the module [`::alloc::vec`].\"##]\n        use super::root;\n        pub mod Impl__1 {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::alloc::vec::Impl__1::as_slice`].\"##]\n            pub const as_slice: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[620]);\n\n            #[doc = r##\"This is the associated function 
[`::alloc::vec::Impl__1::truncate`].\"##]\n            pub const truncate: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[29]);\n        }\n        pub mod Impl__2 {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::alloc::vec::Impl__2::extend_from_slice`].\"##]\n            pub const extend_from_slice: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[31]);\n        }\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__1: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[28]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__11: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[17]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__13: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[59]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__2: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[30]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__8: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[26]);\n\n        #[doc = r##\"This is the struct [`::alloc::vec::Vec`].\"##]\n        pub const Vec: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[6]);\n\n        #[doc = r##\"This is the 
function [`::alloc::vec::from_elem`].\"##]\n        pub const from_elem: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[91]);\n    }\n\n    #[doc = r##\"This is the module [`::alloc::alloc`].\"##]\n    pub const alloc: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[3]);\n\n    #[doc = r##\"This is the module [`::alloc::boxed`].\"##]\n    pub const boxed: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[33]);\n\n    #[doc = r##\"This is the module [`::alloc::slice`].\"##]\n    pub const slice: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[36]);\n\n    #[doc = r##\"This is the module [`::alloc::string`].\"##]\n    pub const string: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[181]);\n\n    #[doc = r##\"This is the module [`::alloc::vec`].\"##]\n    pub const vec: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[5]);\n}\npub mod core {\n    #![doc = r##\"This is the module [`::core`].\"##]\n    use super::root;\n    pub mod alloc {\n        #![doc = r##\"This is the module [`::core::alloc`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the trait [`::core::alloc::Allocator`].\"##]\n        pub const Allocator: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[14]);\n    }\n    pub mod array {\n        #![doc = r##\"This is the module [`::core::array`].\"##]\n        use super::root;\n        pub mod iter {\n            #![doc = r##\"This is the module [`::core::array::iter`].\"##]\n            use 
super::root;\n\n            #[doc = r##\"This is an impl block.\"##]\n            pub const Impl__1: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[144]);\n\n            #[doc = r##\"This is the struct [`::core::array::iter::IntoIter`].\"##]\n            pub const IntoIter: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[138]);\n        }\n\n        #[doc = r##\"This is the module [`::core::array::iter`].\"##]\n        pub const iter: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[137]);\n    }\n    pub mod borrow {\n        #![doc = r##\"This is the module [`::core::borrow`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the trait [`::core::borrow::Borrow`].\"##]\n        pub const Borrow: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[41]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__2: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[42]);\n    }\n    pub mod clone {\n        #![doc = r##\"This is the module [`::core::clone`].\"##]\n        use super::root;\n        pub mod Clone {\n            #![doc = r##\"This is the trait [`::core::clone::Clone`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::core::clone::Clone::clone`].\"##]\n            pub const clone: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[10]);\n        }\n        pub mod impls {\n            #![doc = r##\"This is the module [`::core::clone::impls`].\"##]\n            use super::root;\n\n    
        #[doc = r##\"This is an impl block.\"##]\n            pub const Impl__6: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[12]);\n        }\n\n        #[doc = r##\"This is the trait [`::core::clone::Clone`].\"##]\n        pub const Clone: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[9]);\n\n        #[doc = r##\"This is the module [`::core::clone::impls`].\"##]\n        pub const impls: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[11]);\n    }\n    pub mod cmp {\n        #![doc = r##\"This is the module [`::core::cmp`].\"##]\n        use super::root;\n        pub mod PartialEq {\n            #![doc = r##\"This is the trait [`::core::cmp::PartialEq`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::core::cmp::PartialEq::eq`].\"##]\n            pub const eq: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[157]);\n\n            #[doc = r##\"This is the associated function [`::core::cmp::PartialEq::ne`].\"##]\n            pub const ne: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[273]);\n        }\n        pub mod PartialOrd {\n            #![doc = r##\"This is the trait [`::core::cmp::PartialOrd`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::core::cmp::PartialOrd::ge`].\"##]\n            pub const ge: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[272]);\n\n            #[doc = r##\"This is the associated function 
[`::core::cmp::PartialOrd::gt`].\"##]\n            pub const gt: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[269]);\n\n            #[doc = r##\"This is the associated function [`::core::cmp::PartialOrd::le`].\"##]\n            pub const le: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[271]);\n\n            #[doc = r##\"This is the associated function [`::core::cmp::PartialOrd::lt`].\"##]\n            pub const lt: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[268]);\n        }\n\n        #[doc = r##\"This is the trait [`::core::cmp::Eq`].\"##]\n        pub const Eq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[608]);\n\n        #[doc = r##\"This is the trait [`::core::cmp::Ord`].\"##]\n        pub const Ord: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[609]);\n\n        #[doc = r##\"This is the trait [`::core::cmp::PartialEq`].\"##]\n        pub const PartialEq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[156]);\n\n        #[doc = r##\"This is the trait [`::core::cmp::PartialOrd`].\"##]\n        pub const PartialOrd: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[267]);\n    }\n    pub mod convert {\n        #![doc = r##\"This is the module [`::core::convert`].\"##]\n        use super::root;\n        pub mod From {\n            #![doc = r##\"This is the trait [`::core::convert::From`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the 
associated function [`::core::convert::From::from`].\"##]\n            pub const from: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[146]);\n        }\n        pub mod Into {\n            #![doc = r##\"This is the trait [`::core::convert::Into`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::core::convert::Into::into`].\"##]\n            pub const into: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[131]);\n        }\n        pub mod num {\n            #![doc = r##\"This is the module [`::core::convert::num`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is an impl block.\"##]\n            pub const Impl__64: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[134]);\n\n            #[doc = r##\"This is an impl block.\"##]\n            pub const Impl__88: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[187]);\n        }\n\n        #[doc = r##\"This is the trait [`::core::convert::From`].\"##]\n        pub const From: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[132]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__3: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[135]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__4: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[199]);\n\n        #[doc = r##\"This is the enum 
[`::core::convert::Infallible`].\"##]\n        pub const Infallible: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[183]);\n\n        #[doc = r##\"This is the trait [`::core::convert::Into`].\"##]\n        pub const Into: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[130]);\n\n        #[doc = r##\"This is the module [`::core::convert::num`].\"##]\n        pub const num: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[133]);\n    }\n    pub mod fmt {\n        #![doc = r##\"This is the module [`::core::fmt`].\"##]\n        use super::root;\n        pub mod num {\n            #![doc = r##\"This is the module [`::core::fmt::num`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is an impl block.\"##]\n            pub const Impl__54: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[606]);\n        }\n\n        #[doc = r##\"This is the struct [`::core::fmt::Arguments`].\"##]\n        pub const Arguments: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[166]);\n\n        #[doc = r##\"This is the trait [`::core::fmt::Debug`].\"##]\n        pub const Debug: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[167]);\n\n        #[doc = r##\"This is the module [`::core::fmt::num`].\"##]\n        pub const num: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[168]);\n    }\n    pub mod iter {\n        #![doc = r##\"This is the module [`::core::iter`].\"##]\n        use super::root;\n      
  pub mod adapters {\n            #![doc = r##\"This is the module [`::core::iter::adapters`].\"##]\n            use super::root;\n            pub mod enumerate {\n                #![doc = r##\"This is the module [`::core::iter::adapters::enumerate`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the struct [`::core::iter::adapters::enumerate::Enumerate`].\"##]\n                pub const Enumerate: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[232]);\n            }\n            pub mod step_by {\n                #![doc = r##\"This is the module [`::core::iter::adapters::step_by`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the struct [`::core::iter::adapters::step_by::StepBy`].\"##]\n                pub const StepBy: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[235]);\n            }\n\n            #[doc = r##\"This is the module [`::core::iter::adapters::enumerate`].\"##]\n            pub const enumerate: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[231]);\n\n            #[doc = r##\"This is the module [`::core::iter::adapters::step_by`].\"##]\n            pub const step_by: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[234]);\n        }\n        pub mod traits {\n            #![doc = r##\"This is the module [`::core::iter::traits`].\"##]\n            use super::root;\n            pub mod collect {\n                #![doc = r##\"This is the module [`::core::iter::traits::collect`].\"##]\n                use super::root;\n                pub mod IntoIterator {\n                    #![doc = r##\"This is the trait 
[`::core::iter::traits::collect::IntoIterator`].\"##]\n                    use super::root;\n\n                    #[doc = r##\"This is the associated function [`::core::iter::traits::collect::IntoIterator::into_iter`].\"##]\n                    pub const into_iter: crate::ast::identifiers::global_id::GlobalId =\n                        crate::ast::identifiers::global_id::GlobalId(\n                            root::INTERNED_GLOBAL_IDS[143],\n                        );\n                }\n\n                #[doc = r##\"This is the trait [`::core::iter::traits::collect::IntoIterator`].\"##]\n                pub const IntoIterator: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[142]);\n            }\n            pub mod iterator {\n                #![doc = r##\"This is the module [`::core::iter::traits::iterator`].\"##]\n                use super::root;\n                pub mod Iterator {\n                    #![doc = r##\"This is the trait [`::core::iter::traits::iterator::Iterator`].\"##]\n                    use super::root;\n\n                    #[doc = r##\"This is the associated type [`::core::iter::traits::iterator::Iterator::Item`].\"##]\n                    pub const Item: crate::ast::identifiers::global_id::GlobalId =\n                        crate::ast::identifiers::global_id::GlobalId(\n                            root::INTERNED_GLOBAL_IDS[589],\n                        );\n\n                    #[doc = r##\"This is the associated function [`::core::iter::traits::iterator::Iterator::enumerate`].\"##]\n                    pub const enumerate: crate::ast::identifiers::global_id::GlobalId =\n                        crate::ast::identifiers::global_id::GlobalId(\n                            root::INTERNED_GLOBAL_IDS[233],\n                        );\n\n                    #[doc = r##\"This is the associated function 
[`::core::iter::traits::iterator::Iterator::fold`].\"##]\n                    pub const fold: crate::ast::identifiers::global_id::GlobalId =\n                        crate::ast::identifiers::global_id::GlobalId(\n                            root::INTERNED_GLOBAL_IDS[197],\n                        );\n\n                    #[doc = r##\"This is the associated function [`::core::iter::traits::iterator::Iterator::next`].\"##]\n                    pub const next: crate::ast::identifiers::global_id::GlobalId =\n                        crate::ast::identifiers::global_id::GlobalId(\n                            root::INTERNED_GLOBAL_IDS[192],\n                        );\n\n                    #[doc = r##\"This is the associated function [`::core::iter::traits::iterator::Iterator::step_by`].\"##]\n                    pub const step_by: crate::ast::identifiers::global_id::GlobalId =\n                        crate::ast::identifiers::global_id::GlobalId(\n                            root::INTERNED_GLOBAL_IDS[236],\n                        );\n                }\n\n                #[doc = r##\"This is the trait [`::core::iter::traits::iterator::Iterator`].\"##]\n                pub const Iterator: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[191]);\n            }\n\n            #[doc = r##\"This is the module [`::core::iter::traits::collect`].\"##]\n            pub const collect: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[141]);\n\n            #[doc = r##\"This is the module [`::core::iter::traits::iterator`].\"##]\n            pub const iterator: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[190]);\n        }\n\n        #[doc = r##\"This is the module [`::core::iter::adapters`].\"##]\n        pub const 
adapters: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[230]);\n\n        #[doc = r##\"This is the module [`::core::iter::traits`].\"##]\n        pub const traits: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[140]);\n    }\n    pub mod marker {\n        #![doc = r##\"This is the module [`::core::marker`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the trait [`::core::marker::Copy`].\"##]\n        pub const Copy: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[275]);\n\n        #[doc = r##\"This is the trait [`::core::marker::Destruct`].\"##]\n        pub const Destruct: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[607]);\n\n        #[doc = r##\"This is the trait [`::core::marker::StructuralPartialEq`].\"##]\n        pub const StructuralPartialEq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[610]);\n    }\n    pub mod num {\n        #![doc = r##\"This is the module [`::core::num`].\"##]\n        use super::root;\n        pub mod Impl__9 {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::core::num::Impl__9::to_le_bytes`].\"##]\n            pub const to_le_bytes: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[62]);\n        }\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__9: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[61]);\n    
}\n    pub mod ops {\n        #![doc = r##\"This is the module [`::core::ops`].\"##]\n        use super::root;\n        pub mod arith {\n            #![doc = r##\"This is the module [`::core::ops::arith`].\"##]\n            use super::root;\n            pub mod Add {\n                #![doc = r##\"This is the trait [`::core::ops::arith::Add`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::arith::Add::Output`].\"##]\n                pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[590]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::arith::Add::add`].\"##]\n                pub const add: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[195]);\n            }\n            pub mod Div {\n                #![doc = r##\"This is the trait [`::core::ops::arith::Div`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::arith::Div::Output`].\"##]\n                pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[593]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::arith::Div::div`].\"##]\n                pub const div: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[252]);\n            }\n            pub mod Mul {\n                #![doc = r##\"This is the trait [`::core::ops::arith::Mul`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::arith::Mul::Output`].\"##]\n                pub const Output: 
crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[592]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::arith::Mul::mul`].\"##]\n                pub const mul: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[254]);\n            }\n            pub mod Neg {\n                #![doc = r##\"This is the trait [`::core::ops::arith::Neg`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::arith::Neg::Output`].\"##]\n                pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[600]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::arith::Neg::neg`].\"##]\n                pub const neg: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[258]);\n            }\n            pub mod Rem {\n                #![doc = r##\"This is the trait [`::core::ops::arith::Rem`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::arith::Rem::Output`].\"##]\n                pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[594]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::arith::Rem::rem`].\"##]\n                pub const rem: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[260]);\n            }\n            pub mod Sub {\n                #![doc = r##\"This is the trait [`::core::ops::arith::Sub`].\"##]\n 
               use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::arith::Sub::Output`].\"##]\n                pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[591]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::arith::Sub::sub`].\"##]\n                pub const sub: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[256]);\n            }\n\n            #[doc = r##\"This is the trait [`::core::ops::arith::Add`].\"##]\n            pub const Add: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[194]);\n\n            #[doc = r##\"This is the trait [`::core::ops::arith::Div`].\"##]\n            pub const Div: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[251]);\n\n            #[doc = r##\"This is the trait [`::core::ops::arith::Mul`].\"##]\n            pub const Mul: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[253]);\n\n            #[doc = r##\"This is the trait [`::core::ops::arith::Neg`].\"##]\n            pub const Neg: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[257]);\n\n            #[doc = r##\"This is the trait [`::core::ops::arith::Rem`].\"##]\n            pub const Rem: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[259]);\n\n            #[doc = r##\"This is the trait [`::core::ops::arith::Sub`].\"##]\n            pub const Sub: 
crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[255]);\n        }\n        pub mod bit {\n            #![doc = r##\"This is the module [`::core::ops::bit`].\"##]\n            use super::root;\n            pub mod BitAnd {\n                #![doc = r##\"This is the trait [`::core::ops::bit::BitAnd`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::bit::BitAnd::Output`].\"##]\n                pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[596]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::bit::BitAnd::bitand`].\"##]\n                pub const bitand: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[248]);\n            }\n            pub mod BitOr {\n                #![doc = r##\"This is the trait [`::core::ops::bit::BitOr`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::bit::BitOr::Output`].\"##]\n                pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[597]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::bit::BitOr::bitor`].\"##]\n                pub const bitor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[266]);\n            }\n            pub mod BitXor {\n                #![doc = r##\"This is the trait [`::core::ops::bit::BitXor`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::bit::BitXor::Output`].\"##]\n     
           pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[595]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::bit::BitXor::bitxor`].\"##]\n                pub const bitxor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[250]);\n            }\n            pub mod Not {\n                #![doc = r##\"This is the trait [`::core::ops::bit::Not`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::bit::Not::Output`].\"##]\n                pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[601]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::bit::Not::not`].\"##]\n                pub const not: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[160]);\n            }\n            pub mod Shl {\n                #![doc = r##\"This is the trait [`::core::ops::bit::Shl`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::bit::Shl::Output`].\"##]\n                pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[598]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::bit::Shl::shl`].\"##]\n                pub const shl: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[262]);\n            }\n            pub mod Shr {\n                #![doc = r##\"This is the trait 
[`::core::ops::bit::Shr`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::bit::Shr::Output`].\"##]\n                pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[599]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::bit::Shr::shr`].\"##]\n                pub const shr: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[264]);\n            }\n\n            #[doc = r##\"This is the trait [`::core::ops::bit::BitAnd`].\"##]\n            pub const BitAnd: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[247]);\n\n            #[doc = r##\"This is the trait [`::core::ops::bit::BitOr`].\"##]\n            pub const BitOr: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[265]);\n\n            #[doc = r##\"This is the trait [`::core::ops::bit::BitXor`].\"##]\n            pub const BitXor: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[249]);\n\n            #[doc = r##\"This is the trait [`::core::ops::bit::Not`].\"##]\n            pub const Not: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[159]);\n\n            #[doc = r##\"This is the trait [`::core::ops::bit::Shl`].\"##]\n            pub const Shl: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[261]);\n\n            #[doc = r##\"This is the trait [`::core::ops::bit::Shr`].\"##]\n            pub const 
Shr: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[263]);\n        }\n        pub mod control_flow {\n            #![doc = r##\"This is the module [`::core::ops::control_flow`].\"##]\n            use super::root;\n            pub mod ControlFlow {\n                #![doc = r##\"This is the enum [`::core::ops::control_flow::ControlFlow`].\"##]\n                use super::root;\n                pub mod Break {\n                    use super::root;\n\n                    #[doc = r##\"This is the variant [`::core::ops::control_flow::ControlFlow::Break::Constructor`].\"##]\n                    pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                        crate::ast::identifiers::global_id::GlobalId(\n                            root::INTERNED_GLOBAL_IDS[644],\n                        );\n\n                    #[doc = r##\"This is the field [`_0`] from ::core::ops::control_flow::ControlFlow::Break.\"##]\n                    pub const _0: crate::ast::identifiers::global_id::GlobalId =\n                        crate::ast::identifiers::global_id::GlobalId(\n                            root::INTERNED_GLOBAL_IDS[122],\n                        );\n                }\n                pub mod Continue {\n                    use super::root;\n\n                    #[doc = r##\"This is the variant [`::core::ops::control_flow::ControlFlow::Continue::Constructor`].\"##]\n                    pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                        crate::ast::identifiers::global_id::GlobalId(\n                            root::INTERNED_GLOBAL_IDS[645],\n                        );\n\n                    #[doc = r##\"This is the field [`_0`] from ::core::ops::control_flow::ControlFlow::Continue.\"##]\n                    pub const _0: crate::ast::identifiers::global_id::GlobalId =\n                        
crate::ast::identifiers::global_id::GlobalId(\n                            root::INTERNED_GLOBAL_IDS[123],\n                        );\n                }\n            }\n\n            #[doc = r##\"This is the enum [`::core::ops::control_flow::ControlFlow`].\"##]\n            pub const ControlFlow: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[121]);\n        }\n        pub mod deref {\n            #![doc = r##\"This is the module [`::core::ops::deref`].\"##]\n            use super::root;\n            pub mod Deref {\n                #![doc = r##\"This is the trait [`::core::ops::deref::Deref`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::deref::Deref::Target`].\"##]\n                pub const Target: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[278]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::deref::Deref::deref`].\"##]\n                pub const deref: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[25]);\n            }\n            pub mod DerefMut {\n                #![doc = r##\"This is the trait [`::core::ops::deref::DerefMut`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated function [`::core::ops::deref::DerefMut::deref_mut`].\"##]\n                pub const deref_mut: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[280]);\n            }\n\n            #[doc = r##\"This is the trait [`::core::ops::deref::Deref`].\"##]\n            pub const Deref: crate::ast::identifiers::global_id::GlobalId =\n                
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[24]);\n\n            #[doc = r##\"This is the trait [`::core::ops::deref::DerefMut`].\"##]\n            pub const DerefMut: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[279]);\n        }\n        pub mod function {\n            #![doc = r##\"This is the module [`::core::ops::function`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the trait [`::core::ops::function::Fn`].\"##]\n            pub const Fn: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[201]);\n\n            #[doc = r##\"This is the trait [`::core::ops::function::FnMut`].\"##]\n            pub const FnMut: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[196]);\n\n            #[doc = r##\"This is the trait [`::core::ops::function::FnOnce`].\"##]\n            pub const FnOnce: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[152]);\n        }\n        pub mod index {\n            #![doc = r##\"This is the module [`::core::ops::index`].\"##]\n            use super::root;\n            pub mod Index {\n                #![doc = r##\"This is the trait [`::core::ops::index::Index`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::index::Index::Output`].\"##]\n                pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[621]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::index::Index::index`].\"##]\n                pub const index: 
crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[55]);\n            }\n            pub mod IndexMut {\n                #![doc = r##\"This is the trait [`::core::ops::index::IndexMut`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated function [`::core::ops::index::IndexMut::index_mut`].\"##]\n                pub const index_mut: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[623]);\n            }\n\n            #[doc = r##\"This is the trait [`::core::ops::index::Index`].\"##]\n            pub const Index: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[54]);\n\n            #[doc = r##\"This is the trait [`::core::ops::index::IndexMut`].\"##]\n            pub const IndexMut: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[622]);\n        }\n        pub mod range {\n            #![doc = r##\"This is the module [`::core::ops::range`].\"##]\n            use super::root;\n            pub mod Range {\n                #![doc = r##\"This is the struct [`::core::ops::range::Range`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the struct [`::core::ops::range::Range::Constructor`].\"##]\n                pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[642]);\n\n                #[doc = r##\"This is the field [`end`] from ::core::ops::range::Range.\"##]\n                pub const end: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[52]);\n\n         
       #[doc = r##\"This is the field [`start`] from ::core::ops::range::Range.\"##]\n                pub const start: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[51]);\n            }\n            pub mod RangeFrom {\n                #![doc = r##\"This is the struct [`::core::ops::range::RangeFrom`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the struct [`::core::ops::range::RangeFrom::Constructor`].\"##]\n                pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[648]);\n\n                #[doc = r##\"This is the field [`start`] from ::core::ops::range::RangeFrom.\"##]\n                pub const start: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[128]);\n            }\n            pub mod RangeFull {\n                #![doc = r##\"This is the struct [`::core::ops::range::RangeFull`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the struct [`::core::ops::range::RangeFull::Constructor`].\"##]\n                pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[647]);\n            }\n            pub mod RangeTo {\n                #![doc = r##\"This is the struct [`::core::ops::range::RangeTo`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the struct [`::core::ops::range::RangeTo::Constructor`].\"##]\n                pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[646]);\n\n                #[doc = r##\"This is the field [`end`] from 
::core::ops::range::RangeTo.\"##]\n                pub const end: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[125]);\n            }\n\n            #[doc = r##\"This is the struct [`::core::ops::range::Range`].\"##]\n            pub const Range: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[50]);\n\n            #[doc = r##\"This is the struct [`::core::ops::range::RangeFrom`].\"##]\n            pub const RangeFrom: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[127]);\n\n            #[doc = r##\"This is the struct [`::core::ops::range::RangeFull`].\"##]\n            pub const RangeFull: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[126]);\n\n            #[doc = r##\"This is the struct [`::core::ops::range::RangeTo`].\"##]\n            pub const RangeTo: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[124]);\n        }\n        pub mod try_trait {\n            #![doc = r##\"This is the module [`::core::ops::try_trait`].\"##]\n            use super::root;\n            pub mod FromResidual {\n                #![doc = r##\"This is the trait [`::core::ops::try_trait::FromResidual`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated function [`::core::ops::try_trait::FromResidual::from_residual`].\"##]\n                pub const from_residual: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[186]);\n            }\n            pub mod Try {\n                #![doc = r##\"This is the trait 
[`::core::ops::try_trait::Try`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated type [`::core::ops::try_trait::Try::Output`].\"##]\n                pub const Output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[602]);\n\n                #[doc = r##\"This is the associated type [`::core::ops::try_trait::Try::Residual`].\"##]\n                pub const Residual: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[276]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::try_trait::Try::branch`].\"##]\n                pub const branch: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[238]);\n\n                #[doc = r##\"This is the associated function [`::core::ops::try_trait::Try::from_output`].\"##]\n                pub const from_output: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[277]);\n            }\n\n            #[doc = r##\"This is the trait [`::core::ops::try_trait::FromResidual`].\"##]\n            pub const FromResidual: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[185]);\n\n            #[doc = r##\"This is the trait [`::core::ops::try_trait::Try`].\"##]\n            pub const Try: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[237]);\n        }\n\n        #[doc = r##\"This is the module [`::core::ops::arith`].\"##]\n        pub const arith: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[193]);\n\n        #[doc = r##\"This is the module [`::core::ops::bit`].\"##]\n        pub const bit: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[158]);\n\n        #[doc = r##\"This is the module [`::core::ops::control_flow`].\"##]\n        pub const control_flow: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[120]);\n\n        #[doc = r##\"This is the module [`::core::ops::deref`].\"##]\n        pub const deref: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[23]);\n\n        #[doc = r##\"This is the module [`::core::ops::function`].\"##]\n        pub const function: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[151]);\n\n        #[doc = r##\"This is the module [`::core::ops::index`].\"##]\n        pub const index: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[53]);\n\n        #[doc = r##\"This is the module [`::core::ops::range`].\"##]\n        pub const range: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[49]);\n\n        #[doc = r##\"This is the module [`::core::ops::try_trait`].\"##]\n        pub const try_trait: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[184]);\n    }\n    pub mod option {\n        #![doc = r##\"This is the module [`::core::option`].\"##]\n        use super::root;\n        pub mod Impl {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            
#[doc = r##\"This is the associated function [`::core::option::Impl::is_some`].\"##]\n            pub const is_some: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[177]);\n        }\n        pub mod Option {\n            #![doc = r##\"This is the enum [`::core::option::Option`].\"##]\n            use super::root;\n            pub mod None {\n                use super::root;\n\n                #[doc = r##\"This is the variant [`::core::option::Option::None::Constructor`].\"##]\n                pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[658]);\n            }\n            pub mod Some {\n                use super::root;\n\n                #[doc = r##\"This is the variant [`::core::option::Option::Some::Constructor`].\"##]\n                pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[650]);\n\n                #[doc = r##\"This is the field [`_0`] from ::core::option::Option::Some.\"##]\n                pub const _0: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[178]);\n            }\n        }\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[176]);\n\n        #[doc = r##\"This is the enum [`::core::option::Option`].\"##]\n        pub const Option: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[164]);\n    }\n    pub mod panicking {\n        #![doc = r##\"This is the module [`::core::panicking`].\"##]\n        use 
super::root;\n        pub mod AssertKind {\n            #![doc = r##\"This is the enum [`::core::panicking::AssertKind`].\"##]\n            use super::root;\n            pub mod Eq {\n                use super::root;\n\n                #[doc = r##\"This is the variant [`::core::panicking::AssertKind::Eq::Constructor`].\"##]\n                pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[659]);\n            }\n        }\n\n        #[doc = r##\"This is the enum [`::core::panicking::AssertKind`].\"##]\n        pub const AssertKind: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[162]);\n\n        #[doc = r##\"This is the function [`::core::panicking::assert_failed`].\"##]\n        pub const assert_failed: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[169]);\n\n        #[doc = r##\"This is the function [`::core::panicking::panic`].\"##]\n        pub const panic: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[172]);\n    }\n    pub mod ptr {\n        #![doc = r##\"This is the module [`::core::ptr`].\"##]\n        use super::root;\n        pub mod const_ptr {\n            #![doc = r##\"This is the module [`::core::ptr::const_ptr`].\"##]\n            use super::root;\n            pub mod Impl {\n                #![doc = r##\"This is an impl block.\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated function [`::core::ptr::const_ptr::Impl::offset`].\"##]\n                pub const offset: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[95]);\n            }\n\n            #[doc = 
r##\"This is an impl block.\"##]\n            pub const Impl: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[94]);\n        }\n\n        #[doc = r##\"This is the module [`::core::ptr::const_ptr`].\"##]\n        pub const const_ptr: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[93]);\n    }\n    pub mod result {\n        #![doc = r##\"This is the module [`::core::result`].\"##]\n        use super::root;\n        pub mod Impl {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::core::result::Impl::map_err`].\"##]\n            pub const map_err: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[175]);\n        }\n        pub mod Result {\n            #![doc = r##\"This is the enum [`::core::result::Result`].\"##]\n            use super::root;\n            pub mod Err {\n                use super::root;\n\n                #[doc = r##\"This is the variant [`::core::result::Result::Err::Constructor`].\"##]\n                pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[649]);\n\n                #[doc = r##\"This is the field [`_0`] from ::core::result::Result::Err.\"##]\n                pub const _0: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[173]);\n            }\n            pub mod Ok {\n                use super::root;\n\n                #[doc = r##\"This is the variant [`::core::result::Result::Ok::Constructor`].\"##]\n                pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n      
              crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[651]);\n\n                #[doc = r##\"This is the field [`_0`] from ::core::result::Result::Ok.\"##]\n                pub const _0: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[198]);\n            }\n        }\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[174]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__27: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[239]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__28: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[188]);\n\n        #[doc = r##\"This is the enum [`::core::result::Result`].\"##]\n        pub const Result: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[77]);\n    }\n    pub mod slice {\n        #![doc = r##\"This is the module [`::core::slice`].\"##]\n        use super::root;\n        pub mod Impl {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::core::slice::Impl::chunks_exact`].\"##]\n            pub const chunks_exact: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[229]);\n\n            #[doc = r##\"This is the associated function [`::core::slice::Impl::iter`].\"##]\n            pub const iter: crate::ast::identifiers::global_id::GlobalId =\n                
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[227]);\n\n            #[doc = r##\"This is the associated function [`::core::slice::Impl::len`].\"##]\n            pub const len: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[48]);\n        }\n        pub mod index {\n            #![doc = r##\"This is the module [`::core::slice::index`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is an impl block.\"##]\n            pub const Impl__2: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[189]);\n\n            #[doc = r##\"This is an impl block.\"##]\n            pub const Impl__4: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[58]);\n\n            #[doc = r##\"This is the trait [`::core::slice::index::SliceIndex`].\"##]\n            pub const SliceIndex: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[57]);\n        }\n        pub mod iter {\n            #![doc = r##\"This is the module [`::core::slice::iter`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the struct [`::core::slice::iter::ChunksExact`].\"##]\n            pub const ChunksExact: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[228]);\n\n            #[doc = r##\"This is the struct [`::core::slice::iter::Iter`].\"##]\n            pub const Iter: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[226]);\n        }\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl: 
crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[47]);\n\n        #[doc = r##\"This is the module [`::core::slice::index`].\"##]\n        pub const index: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[56]);\n\n        #[doc = r##\"This is the module [`::core::slice::iter`].\"##]\n        pub const iter: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[225]);\n    }\n    pub mod str {\n        #![doc = r##\"This is the module [`::core::str`].\"##]\n        use super::root;\n        pub mod Impl {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::core::str::Impl::as_ptr`].\"##]\n            pub const as_ptr: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[99]);\n        }\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[98]);\n    }\n\n    #[doc = r##\"This is the module [`::core::alloc`].\"##]\n    pub const alloc: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[13]);\n\n    #[doc = r##\"This is the module [`::core::array`].\"##]\n    pub const array: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[136]);\n\n    #[doc = r##\"This is the module [`::core::borrow`].\"##]\n    pub const borrow: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[40]);\n\n    
#[doc = r##\"This is the module [`::core::clone`].\"##]\n    pub const clone: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[8]);\n\n    #[doc = r##\"This is the module [`::core::cmp`].\"##]\n    pub const cmp: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[155]);\n\n    #[doc = r##\"This is the module [`::core::convert`].\"##]\n    pub const convert: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[129]);\n\n    #[doc = r##\"This is the module [`::core::fmt`].\"##]\n    pub const fmt: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[165]);\n\n    #[doc = r##\"This is the module [`::core::iter`].\"##]\n    pub const iter: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[139]);\n\n    #[doc = r##\"This is the module [`::core::marker`].\"##]\n    pub const marker: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[274]);\n\n    #[doc = r##\"This is the module [`::core::num`].\"##]\n    pub const num: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[60]);\n\n    #[doc = r##\"This is the module [`::core::ops`].\"##]\n    pub const ops: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[22]);\n\n    #[doc = r##\"This is the module [`::core::option`].\"##]\n    pub const option: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[163]);\n\n    #[doc = r##\"This is the module 
[`::core::panicking`].\"##]\n    pub const panicking: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[161]);\n\n    #[doc = r##\"This is the module [`::core::ptr`].\"##]\n    pub const ptr: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[92]);\n\n    #[doc = r##\"This is the module [`::core::result`].\"##]\n    pub const result: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[76]);\n\n    #[doc = r##\"This is the module [`::core::slice`].\"##]\n    pub const slice: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[46]);\n\n    #[doc = r##\"This is the module [`::core::str`].\"##]\n    pub const str: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[97]);\n}\npub mod hax_lib {\n    #![doc = r##\"This is the module [`::hax_lib`].\"##]\n    use super::root;\n    pub mod RefineAs {\n        #![doc = r##\"This is the trait [`::hax_lib::RefineAs`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the associated function [`::hax_lib::RefineAs::into_checked`].\"##]\n        pub const into_checked: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[241]);\n    }\n    pub mod Refinement {\n        #![doc = r##\"This is the trait [`::hax_lib::Refinement`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the associated type [`::hax_lib::Refinement::InnerType`].\"##]\n        pub const InnerType: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[244]);\n\n        #[doc = r##\"This is the associated function 
[`::hax_lib::Refinement::get`].\"##]\n        pub const get: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[243]);\n\n        #[doc = r##\"This is the associated function [`::hax_lib::Refinement::get_mut`].\"##]\n        pub const get_mut: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[246]);\n\n        #[doc = r##\"This is the associated function [`::hax_lib::Refinement::new`].\"##]\n        pub const new: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[245]);\n    }\n    pub mod abstraction {\n        #![doc = r##\"This is the module [`::hax_lib::abstraction`].\"##]\n        use super::root;\n        pub mod Abstraction {\n            #![doc = r##\"This is the trait [`::hax_lib::abstraction::Abstraction`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::hax_lib::abstraction::Abstraction::lift`].\"##]\n            pub const lift: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[118]);\n        }\n        pub mod Concretization {\n            #![doc = r##\"This is the trait [`::hax_lib::abstraction::Concretization`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::hax_lib::abstraction::Concretization::concretize`].\"##]\n            pub const concretize: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[108]);\n        }\n\n        #[doc = r##\"This is the trait [`::hax_lib::abstraction::Abstraction`].\"##]\n        pub const Abstraction: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[117]);\n\n        #[doc = r##\"This is the trait [`::hax_lib::abstraction::Concretization`].\"##]\n        pub const Concretization: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[107]);\n    }\n    pub mod int {\n        #![doc = r##\"This is the module [`::hax_lib::int`].\"##]\n        use super::root;\n        pub mod Impl__7 {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::hax_lib::int::Impl__7::_unsafe_from_str`].\"##]\n            pub const _unsafe_from_str: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[111]);\n\n            #[doc = r##\"This is the associated function [`::hax_lib::int::Impl__7::pow2`].\"##]\n            pub const pow2: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[113]);\n        }\n        pub mod ToInt {\n            #![doc = r##\"This is the trait [`::hax_lib::int::ToInt`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::hax_lib::int::ToInt::to_int`].\"##]\n            pub const to_int: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[115]);\n        }\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__16: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[119]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__17: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[116]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__44: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[109]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__7: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[110]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__9: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[112]);\n\n        #[doc = r##\"This is the struct [`::hax_lib::int::Int`].\"##]\n        pub const Int: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[105]);\n\n        #[doc = r##\"This is the trait [`::hax_lib::int::ToInt`].\"##]\n        pub const ToInt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[114]);\n    }\n    pub mod prop {\n        #![doc = r##\"This is the module [`::hax_lib::prop`].\"##]\n        use super::root;\n        pub mod Impl {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::hax_lib::prop::Impl::and`].\"##]\n            pub const and: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[213]);\n\n            #[doc = r##\"This is the associated function [`::hax_lib::prop::Impl::eq`].\"##]\n            pub const eq: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[210]);\n\n            #[doc = 
r##\"This is the associated function [`::hax_lib::prop::Impl::from_bool`].\"##]\n            pub const from_bool: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[214]);\n\n            #[doc = r##\"This is the associated function [`::hax_lib::prop::Impl::implies`].\"##]\n            pub const implies: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[208]);\n\n            #[doc = r##\"This is the associated function [`::hax_lib::prop::Impl::ne`].\"##]\n            pub const ne: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[209]);\n\n            #[doc = r##\"This is the associated function [`::hax_lib::prop::Impl::not`].\"##]\n            pub const not: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[211]);\n\n            #[doc = r##\"This is the associated function [`::hax_lib::prop::Impl::or`].\"##]\n            pub const or: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[212]);\n        }\n        pub mod ToProp {\n            #![doc = r##\"This is the trait [`::hax_lib::prop::ToProp`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::hax_lib::prop::ToProp::to_prop`].\"##]\n            pub const to_prop: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[205]);\n        }\n        pub mod constructors {\n            #![doc = r##\"This is the module [`::hax_lib::prop::constructors`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the function 
[`::hax_lib::prop::constructors::and`].\"##]\n            pub const and: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[223]);\n\n            #[doc = r##\"This is the function [`::hax_lib::prop::constructors::eq`].\"##]\n            pub const eq: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[220]);\n\n            #[doc = r##\"This is the function [`::hax_lib::prop::constructors::exists`].\"##]\n            pub const exists: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[216]);\n\n            #[doc = r##\"This is the function [`::hax_lib::prop::constructors::forall`].\"##]\n            pub const forall: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[217]);\n\n            #[doc = r##\"This is the function [`::hax_lib::prop::constructors::from_bool`].\"##]\n            pub const from_bool: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[224]);\n\n            #[doc = r##\"This is the function [`::hax_lib::prop::constructors::implies`].\"##]\n            pub const implies: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[218]);\n\n            #[doc = r##\"This is the function [`::hax_lib::prop::constructors::ne`].\"##]\n            pub const ne: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[219]);\n\n            #[doc = r##\"This is the function [`::hax_lib::prop::constructors::not`].\"##]\n            pub const not: crate::ast::identifiers::global_id::GlobalId =\n   
             crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[221]);\n\n            #[doc = r##\"This is the function [`::hax_lib::prop::constructors::or`].\"##]\n            pub const or: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[222]);\n        }\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[207]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__2: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[206]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__3: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[149]);\n\n        #[doc = r##\"This is the struct [`::hax_lib::prop::Prop`].\"##]\n        pub const Prop: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[148]);\n\n        #[doc = r##\"This is the trait [`::hax_lib::prop::ToProp`].\"##]\n        pub const ToProp: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[204]);\n\n        #[doc = r##\"This is the module [`::hax_lib::prop::constructors`].\"##]\n        pub const constructors: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[215]);\n\n        #[doc = r##\"This is the function [`::hax_lib::prop::exists`].\"##]\n        pub const exists: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[202]);\n\n     
   #[doc = r##\"This is the function [`::hax_lib::prop::forall`].\"##]\n        pub const forall: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[203]);\n\n        #[doc = r##\"This is the function [`::hax_lib::prop::implies`].\"##]\n        pub const implies: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[200]);\n    }\n\n    #[doc = r##\"This is the trait [`::hax_lib::RefineAs`].\"##]\n    pub const RefineAs: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[240]);\n\n    #[doc = r##\"This is the trait [`::hax_lib::Refinement`].\"##]\n    pub const Refinement: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[242]);\n\n    #[doc = r##\"This is the function [`::hax_lib::_internal_loop_decreases`].\"##]\n    pub const _internal_loop_decreases: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[145]);\n\n    #[doc = r##\"This is the function [`::hax_lib::_internal_loop_invariant`].\"##]\n    pub const _internal_loop_invariant: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[153]);\n\n    #[doc = r##\"This is the function [`::hax_lib::_internal_while_loop_invariant`].\"##]\n    pub const _internal_while_loop_invariant: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[150]);\n\n    #[doc = r##\"This is the module [`::hax_lib::abstraction`].\"##]\n    pub const abstraction: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[106]);\n\n    #[doc = 
r##\"This is the function [`::hax_lib::any_to_unit`].\"##]\n    pub const any_to_unit: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[101]);\n\n    #[doc = r##\"This is the function [`::hax_lib::assert`].\"##]\n    pub const assert: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[154]);\n\n    #[doc = r##\"This is the function [`::hax_lib::inline`].\"##]\n    pub const inline: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[103]);\n\n    #[doc = r##\"This is the function [`::hax_lib::inline_unsafe`].\"##]\n    pub const inline_unsafe: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[102]);\n\n    #[doc = r##\"This is the module [`::hax_lib::int`].\"##]\n    pub const int: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[104]);\n\n    #[doc = r##\"This is the module [`::hax_lib::prop`].\"##]\n    pub const prop: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[147]);\n}\npub mod hax_lib_protocol {\n    #![doc = r##\"This is the module [`::hax_lib_protocol`].\"##]\n    use super::root;\n    pub mod crypto {\n        #![doc = r##\"This is the module [`::hax_lib_protocol::crypto`].\"##]\n        use super::root;\n        pub mod AEADAlgorithm {\n            #![doc = r##\"This is the enum [`::hax_lib_protocol::crypto::AEADAlgorithm`].\"##]\n            use super::root;\n            pub mod Chacha20Poly1305 {\n                use super::root;\n\n                #[doc = r##\"This is the variant [`::hax_lib_protocol::crypto::AEADAlgorithm::Chacha20Poly1305::Constructor`].\"##]\n                pub const 
Constructor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[657]);\n            }\n        }\n        pub mod DHGroup {\n            #![doc = r##\"This is the enum [`::hax_lib_protocol::crypto::DHGroup`].\"##]\n            use super::root;\n            pub mod X25519 {\n                use super::root;\n\n                #[doc = r##\"This is the variant [`::hax_lib_protocol::crypto::DHGroup::X25519::Constructor`].\"##]\n                pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[656]);\n            }\n        }\n        pub mod HMACAlgorithm {\n            #![doc = r##\"This is the enum [`::hax_lib_protocol::crypto::HMACAlgorithm`].\"##]\n            use super::root;\n            pub mod Sha256 {\n                use super::root;\n\n                #[doc = r##\"This is the variant [`::hax_lib_protocol::crypto::HMACAlgorithm::Sha256::Constructor`].\"##]\n                pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[655]);\n            }\n        }\n        pub mod HashAlgorithm {\n            #![doc = r##\"This is the enum [`::hax_lib_protocol::crypto::HashAlgorithm`].\"##]\n            use super::root;\n            pub mod Sha256 {\n                use super::root;\n\n                #[doc = r##\"This is the variant [`::hax_lib_protocol::crypto::HashAlgorithm::Sha256::Constructor`].\"##]\n                pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[654]);\n            }\n        }\n        pub mod Impl {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the 
associated function [`::hax_lib_protocol::crypto::Impl::from_bytes`].\"##]\n            pub const from_bytes: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[72]);\n        }\n        pub mod Impl__1 {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::hax_lib_protocol::crypto::Impl__1::from_bytes`].\"##]\n            pub const from_bytes: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[74]);\n        }\n        pub mod Impl__4 {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::hax_lib_protocol::crypto::Impl__4::from_bytes`].\"##]\n            pub const from_bytes: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[86]);\n        }\n        pub mod Impl__5 {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::hax_lib_protocol::crypto::Impl__5::from_bytes`].\"##]\n            pub const from_bytes: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[88]);\n        }\n        pub mod Impl__6 {\n            #![doc = r##\"This is an impl block.\"##]\n            use super::root;\n\n            #[doc = r##\"This is the associated function [`::hax_lib_protocol::crypto::Impl__6::from_bytes`].\"##]\n            pub const from_bytes: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[81]);\n        }\n\n        #[doc = r##\"This is the enum 
[`::hax_lib_protocol::crypto::AEADAlgorithm`].\"##]\n        pub const AEADAlgorithm: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[84]);\n\n        #[doc = r##\"This is the struct [`::hax_lib_protocol::crypto::AEADIV`].\"##]\n        pub const AEADIV: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[79]);\n\n        #[doc = r##\"This is the struct [`::hax_lib_protocol::crypto::AEADKey`].\"##]\n        pub const AEADKey: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[78]);\n\n        #[doc = r##\"This is the struct [`::hax_lib_protocol::crypto::AEADTag`].\"##]\n        pub const AEADTag: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[82]);\n\n        #[doc = r##\"This is the struct [`::hax_lib_protocol::crypto::DHElement`].\"##]\n        pub const DHElement: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[69]);\n\n        #[doc = r##\"This is the enum [`::hax_lib_protocol::crypto::DHGroup`].\"##]\n        pub const DHGroup: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[65]);\n\n        #[doc = r##\"This is the struct [`::hax_lib_protocol::crypto::DHScalar`].\"##]\n        pub const DHScalar: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[66]);\n\n        #[doc = r##\"This is the enum [`::hax_lib_protocol::crypto::HMACAlgorithm`].\"##]\n        pub const HMACAlgorithm: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[63]);\n\n        #[doc = r##\"This is the enum [`::hax_lib_protocol::crypto::HashAlgorithm`].\"##]\n        pub const HashAlgorithm: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[20]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[71]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__1: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[73]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__4: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[85]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__5: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[87]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__6: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[80]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__9: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[68]);\n\n        #[doc = r##\"This is the function [`::hax_lib_protocol::crypto::aead_decrypt`].\"##]\n        pub const aead_decrypt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[83]);\n\n        #[doc = r##\"This is the function 
[`::hax_lib_protocol::crypto::aead_encrypt`].\"##]\n        pub const aead_encrypt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[90]);\n\n        #[doc = r##\"This is the function [`::hax_lib_protocol::crypto::dh_scalar_multiply`].\"##]\n        pub const dh_scalar_multiply: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[70]);\n\n        #[doc = r##\"This is the function [`::hax_lib_protocol::crypto::dh_scalar_multiply_base`].\"##]\n        pub const dh_scalar_multiply_base: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[67]);\n\n        #[doc = r##\"This is the function [`::hax_lib_protocol::crypto::hash`].\"##]\n        pub const hash: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[27]);\n\n        #[doc = r##\"This is the function [`::hax_lib_protocol::crypto::hmac`].\"##]\n        pub const hmac: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[64]);\n    }\n\n    #[doc = r##\"This is the enum [`::hax_lib_protocol::ProtocolError`].\"##]\n    pub const ProtocolError: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[75]);\n\n    #[doc = r##\"This is the module [`::hax_lib_protocol::crypto`].\"##]\n    pub const crypto: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[19]);\n}\npub mod rust_primitives {\n    #![doc = r##\"This is the module [`::rust_primitives`].\"##]\n    use super::root;\n    pub mod arithmetic {\n        #![doc = r##\"This is the module [`::rust_primitives::arithmetic`].\"##]\n   
     use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::arithmetic::neg`].\"##]\n        pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[616]);\n    }\n    pub mod crypto_abstractions {\n        #![doc = r##\"This is the module [`::rust_primitives::crypto_abstractions`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the use item [`::rust_primitives::crypto_abstractions::Use`].\"##]\n        pub const Use: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[638]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::crypto_abstractions::crypto_abstractions`].\"##]\n        pub const crypto_abstractions: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[305]);\n    }\n    pub mod dummy_hax_concrete_ident_wrapper {\n        #![doc = r##\"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper`].\"##]\n        use super::root;\n        pub mod Foo {\n            #![doc = r##\"This is the struct [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Foo`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the struct [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Foo::Constructor`].\"##]\n            pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[653]);\n        }\n        pub mod ___1 {\n            #![doc = r##\"This is the const [`::rust_primitives::dummy_hax_concrete_ident_wrapper::___1`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::___1::Use`].\"##]\n            pub const Use: 
crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[626]);\n\n            #[doc = r##\"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::___1::Use__1`].\"##]\n            pub const Use__1: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[627]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::___1::f`].\"##]\n            pub const f: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[312]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::___1::g`].\"##]\n            pub const g: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[631]);\n        }\n        pub mod _anonymous {\n            #![doc = r##\"This is the const [`::rust_primitives::dummy_hax_concrete_ident_wrapper::_anonymous`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::_anonymous::Use`].\"##]\n            pub const Use: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[629]);\n\n            #[doc = r##\"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::_anonymous::Use__1`].\"##]\n            pub const Use__1: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[636]);\n\n            #[doc = r##\"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::_anonymous::Use__2`].\"##]\n            pub const Use__2: 
crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[640]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::_anonymous::arith`].\"##]\n            pub const arith: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[519]);\n        }\n        pub mod props {\n            #![doc = r##\"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::props`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::props::Use`].\"##]\n            pub const Use: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[625]);\n        }\n\n        #[doc = r##\"This is the struct [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Foo`].\"##]\n        pub const Foo: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[612]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[613]);\n\n        #[doc = r##\"This is an impl block.\"##]\n        pub const Impl__1: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[611]);\n\n        #[doc = r##\"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Use`].\"##]\n        pub const Use: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[632]);\n\n        #[doc = r##\"This is the use item 
[`::rust_primitives::dummy_hax_concrete_ident_wrapper::Use__1`].\"##]\n        pub const Use__1: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[624]);\n\n        #[doc = r##\"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Use__2`].\"##]\n        pub const Use__2: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[634]);\n\n        #[doc = r##\"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Use__3`].\"##]\n        pub const Use__3: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[641]);\n\n        #[doc = r##\"This is the use item [`::rust_primitives::dummy_hax_concrete_ident_wrapper::Use__4`].\"##]\n        pub const Use__4: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[637]);\n\n        #[doc = r##\"This is the const [`::rust_primitives::dummy_hax_concrete_ident_wrapper::___1`].\"##]\n        pub const ___1: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[292]);\n\n        #[doc = r##\"This is the const [`::rust_primitives::dummy_hax_concrete_ident_wrapper::_anonymous`].\"##]\n        pub const _anonymous: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[300]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::dummy`].\"##]\n        pub const dummy: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[294]);\n\n        #[doc = r##\"This is the function 
[`::rust_primitives::dummy_hax_concrete_ident_wrapper::index_mut`].\"##]\n        pub const index_mut: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[639]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::iterator_functions`].\"##]\n        pub const iterator_functions: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[474]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::props`].\"##]\n        pub const props: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[564]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::question_mark_result`].\"##]\n        pub const question_mark_result: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[556]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper::refinements`].\"##]\n        pub const refinements: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[430]);\n    }\n    pub mod hax {\n        #![doc = r##\"This is the module [`::rust_primitives::hax`].\"##]\n        use super::root;\n        pub mod Failure {\n            #![doc = r##\"This is the struct [`::rust_primitives::hax::Failure`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the struct [`::rust_primitives::hax::Failure::Constructor`].\"##]\n            pub const Constructor: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[652]);\n        }\n        pub(in 
crate::ast::identifiers::global_id) mod Tuple2 {\n            #![doc = r##\"This is the struct [`::rust_primitives::hax::Tuple2`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the struct [`::rust_primitives::hax::Tuple2::Constructor`].\"##]\n            pub(in crate::ast::identifiers::global_id) const Constructor:\n                crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[643]);\n\n            #[doc = r##\"This is the field [`_0`] from ::rust_primitives::hax::Tuple2.\"##]\n            pub(in crate::ast::identifiers::global_id) const _0:\n                crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[390]);\n\n            #[doc = r##\"This is the field [`_1`] from ::rust_primitives::hax::Tuple2.\"##]\n            pub(in crate::ast::identifiers::global_id) const _1:\n                crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[398]);\n        }\n        pub mod control_flow_monad {\n            #![doc = r##\"This is the module [`::rust_primitives::hax::control_flow_monad`].\"##]\n            use super::root;\n            pub mod ControlFlowMonad {\n                #![doc = r##\"This is the trait [`::rust_primitives::hax::control_flow_monad::ControlFlowMonad`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the associated function [`::rust_primitives::hax::control_flow_monad::ControlFlowMonad::lift`].\"##]\n                pub const lift: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[471]);\n            }\n            pub mod mexception {\n                #![doc = r##\"This is the module [`::rust_primitives::hax::control_flow_monad::mexception`].\"##]\n  
              use super::root;\n\n                #[doc = r##\"This is the function [`::rust_primitives::hax::control_flow_monad::mexception::run`].\"##]\n                pub const run: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[541]);\n            }\n            pub mod moption {\n                #![doc = r##\"This is the module [`::rust_primitives::hax::control_flow_monad::moption`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the function [`::rust_primitives::hax::control_flow_monad::moption::run`].\"##]\n                pub const run: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[322]);\n            }\n            pub mod mresult {\n                #![doc = r##\"This is the module [`::rust_primitives::hax::control_flow_monad::mresult`].\"##]\n                use super::root;\n\n                #[doc = r##\"This is the function [`::rust_primitives::hax::control_flow_monad::mresult::run`].\"##]\n                pub const run: crate::ast::identifiers::global_id::GlobalId =\n                    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[419]);\n            }\n\n            #[doc = r##\"This is the trait [`::rust_primitives::hax::control_flow_monad::ControlFlowMonad`].\"##]\n            pub const ControlFlowMonad: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[470]);\n\n            #[doc = r##\"This is the module [`::rust_primitives::hax::control_flow_monad::mexception`].\"##]\n            pub const mexception: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[512]);\n\n            #[doc = r##\"This is the module 
[`::rust_primitives::hax::control_flow_monad::moption`].\"##]\n            pub const moption: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[321]);\n\n            #[doc = r##\"This is the module [`::rust_primitives::hax::control_flow_monad::mresult`].\"##]\n            pub const mresult: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[418]);\n        }\n        pub mod explicit_monadic {\n            #![doc = r##\"This is the module [`::rust_primitives::hax::explicit_monadic`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::explicit_monadic::lift`].\"##]\n            pub const lift: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[605]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::explicit_monadic::pure`].\"##]\n            pub const pure: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[604]);\n        }\n        pub mod folds {\n            #![doc = r##\"This is the module [`::rust_primitives::hax::folds`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_cf`].\"##]\n            pub const fold_cf: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[465]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_chunked_slice`].\"##]\n            pub const fold_chunked_slice: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[447]);\n\n            #[doc = r##\"This 
is the function [`::rust_primitives::hax::folds::fold_chunked_slice_cf`].\"##]\n            pub const fold_chunked_slice_cf: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[382]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_chunked_slice_return`].\"##]\n            pub const fold_chunked_slice_return: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[479]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_enumerated_chunked_slice`].\"##]\n            pub const fold_enumerated_chunked_slice: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[376]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_enumerated_chunked_slice_cf`].\"##]\n            pub const fold_enumerated_chunked_slice_cf:\n                crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[494]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_enumerated_chunked_slice_return`].\"##]\n            pub const fold_enumerated_chunked_slice_return:\n                crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[529]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_enumerated_slice`].\"##]\n            pub const fold_enumerated_slice: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[413]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_enumerated_slice_cf`].\"##]\n       
     pub const fold_enumerated_slice_cf: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[355]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_enumerated_slice_return`].\"##]\n            pub const fold_enumerated_slice_return: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[511]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_range`].\"##]\n            pub const fold_range: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[367]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_range_cf`].\"##]\n            pub const fold_range_cf: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[439]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_range_return`].\"##]\n            pub const fold_range_return: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[304]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_range_step_by`].\"##]\n            pub const fold_range_step_by: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[290]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_range_step_by_cf`].\"##]\n            pub const fold_range_step_by_cf: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[543]);\n\n            #[doc = r##\"This is the function 
[`::rust_primitives::hax::folds::fold_range_step_by_return`].\"##]\n            pub const fold_range_step_by_return: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[508]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::folds::fold_return`].\"##]\n            pub const fold_return: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[440]);\n        }\n        pub mod int {\n            #![doc = r##\"This is the module [`::rust_primitives::hax::int`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::add`].\"##]\n            pub const add: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[482]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::div`].\"##]\n            pub const div: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[381]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::eq`].\"##]\n            pub const eq: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[539]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::from_machine`].\"##]\n            pub const from_machine: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[497]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::ge`].\"##]\n            pub const ge: crate::ast::identifiers::global_id::GlobalId =\n                
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[412]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::gt`].\"##]\n            pub const gt: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[466]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::into_machine`].\"##]\n            pub const into_machine: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[451]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::le`].\"##]\n            pub const le: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[363]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::lt`].\"##]\n            pub const lt: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[513]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::mul`].\"##]\n            pub const mul: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[373]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::ne`].\"##]\n            pub const ne: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[424]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::neg`].\"##]\n            pub const neg: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[490]);\n\n            #[doc = r##\"This is the function 
[`::rust_primitives::hax::int::rem`].\"##]\n            pub const rem: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[483]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::int::sub`].\"##]\n            pub const sub: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[346]);\n        }\n        pub mod machine_int {\n            #![doc = r##\"This is the module [`::rust_primitives::hax::machine_int`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::add`].\"##]\n            pub const add: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[463]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::add_with_overflow`].\"##]\n            pub const add_with_overflow: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[614]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::bitand`].\"##]\n            pub const bitand: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[524]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::bitor`].\"##]\n            pub const bitor: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[575]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::bitxor`].\"##]\n            pub const bitxor: crate::ast::identifiers::global_id::GlobalId =\n                
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[360]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::cmp`].\"##]\n            pub const cmp: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[619]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::div`].\"##]\n            pub const div: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[399]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::eq`].\"##]\n            pub const eq: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[383]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::ge`].\"##]\n            pub const ge: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[303]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::gt`].\"##]\n            pub const gt: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[504]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::le`].\"##]\n            pub const le: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[448]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::lt`].\"##]\n            pub const lt: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[499]);\n\n            #[doc = r##\"This is the 
function [`::rust_primitives::hax::machine_int::mul`].\"##]\n            pub const mul: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[582]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::mul_with_overflow`].\"##]\n            pub const mul_with_overflow: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[618]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::ne`].\"##]\n            pub const ne: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[473]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::not`].\"##]\n            pub const not: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[327]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::rem`].\"##]\n            pub const rem: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[488]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::shl`].\"##]\n            pub const shl: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[502]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::shr`].\"##]\n            pub const shr: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[484]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::sub`].\"##]\n            pub const 
sub: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[537]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::machine_int::sub_with_overflow`].\"##]\n            pub const sub_with_overflow: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[617]);\n        }\n        pub mod monomorphized_update_at {\n            #![doc = r##\"This is the module [`::rust_primitives::hax::monomorphized_update_at`].\"##]\n            use super::root;\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::monomorphized_update_at::update_at_range`].\"##]\n            pub const update_at_range: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[348]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::monomorphized_update_at::update_at_range_from`].\"##]\n            pub const update_at_range_from: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[318]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::monomorphized_update_at::update_at_range_full`].\"##]\n            pub const update_at_range_full: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[375]);\n\n            #[doc = r##\"This is the function [`::rust_primitives::hax::monomorphized_update_at::update_at_range_to`].\"##]\n            pub const update_at_range_to: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[581]);\n\n            #[doc = r##\"This is the function 
[`::rust_primitives::hax::monomorphized_update_at::update_at_usize`].\"##]\n            pub const update_at_usize: crate::ast::identifiers::global_id::GlobalId =\n                crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[311]);\n        }\n\n        #[doc = r##\"This is the struct [`::rust_primitives::hax::Failure`].\"##]\n        pub const Failure: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[366]);\n\n        #[doc = r##\"This is the enum [`::rust_primitives::hax::MutRef`].\"##]\n        pub const MutRef: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[334]);\n\n        #[doc = r##\"This is the enum [`::rust_primitives::hax::Never`].\"##]\n        pub const Never: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[170]);\n\n        #[doc = r##\"This is the struct [`::rust_primitives::hax::Tuple2`].\"##]\n        pub(in crate::ast::identifiers::global_id) const Tuple2:\n            crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[89]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::array_of_list`].\"##]\n        pub const array_of_list: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[371]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::box_new`].\"##]\n        pub const box_new: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[32]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::cast_op`].\"##]\n        pub const cast_op: crate::ast::identifiers::global_id::GlobalId =\n         
   crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[96]);\n\n        #[doc = r##\"This is the module [`::rust_primitives::hax::control_flow_monad`].\"##]\n        pub const control_flow_monad: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[320]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::deref_op`].\"##]\n        pub const deref_op: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[21]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::dropped_body`].\"##]\n        pub const dropped_body: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[544]);\n\n        #[doc = r##\"This is the module [`::rust_primitives::hax::explicit_monadic`].\"##]\n        pub const explicit_monadic: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[603]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::failure`].\"##]\n        pub const failure: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[550]);\n\n        #[doc = r##\"This is the module [`::rust_primitives::hax::folds`].\"##]\n        pub const folds: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[289]);\n\n        #[doc = r##\"This is the module [`::rust_primitives::hax::int`].\"##]\n        pub const int: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[345]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::logical_op_and`].\"##]\n        pub const 
logical_op_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[270]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::logical_op_or`].\"##]\n        pub const logical_op_or: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[542]);\n\n        #[doc = r##\"This is the module [`::rust_primitives::hax::machine_int`].\"##]\n        pub const machine_int: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[302]);\n\n        #[doc = r##\"This is the module [`::rust_primitives::hax::monomorphized_update_at`].\"##]\n        pub const monomorphized_update_at: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[310]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::never_to_any`].\"##]\n        pub const never_to_any: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[171]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::repeat`].\"##]\n        pub const repeat: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[420]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::update_at`].\"##]\n        pub const update_at: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[370]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::while_loop`].\"##]\n        pub const while_loop: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[432]);\n\n      
  #[doc = r##\"This is the function [`::rust_primitives::hax::while_loop_cf`].\"##]\n        pub const while_loop_cf: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[555]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::hax::while_loop_return`].\"##]\n        pub const while_loop_return: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[586]);\n    }\n    pub mod i128 {\n        #![doc = r##\"This is the module [`::rust_primitives::i128`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::add`].\"##]\n        pub const add: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[386]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::bit_and`].\"##]\n        pub const bit_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[531]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::bit_or`].\"##]\n        pub const bit_or: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[583]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::bit_xor`].\"##]\n        pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[557]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::div`].\"##]\n        pub const div: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[462]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::eq`].\"##]\n      
  pub const eq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[580]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::ge`].\"##]\n        pub const ge: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[569]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::gt`].\"##]\n        pub const gt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[409]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::le`].\"##]\n        pub const le: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[533]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::lt`].\"##]\n        pub const lt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[515]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::mul`].\"##]\n        pub const mul: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[584]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::ne`].\"##]\n        pub const ne: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[578]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::neg`].\"##]\n        pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[568]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::rem`].\"##]\n        pub const rem: 
crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[354]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::shl`].\"##]\n        pub const shl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[353]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::shr`].\"##]\n        pub const shr: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[364]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i128::sub`].\"##]\n        pub const sub: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[393]);\n    }\n    pub mod i16 {\n        #![doc = r##\"This is the module [`::rust_primitives::i16`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::add`].\"##]\n        pub const add: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[336]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::bit_and`].\"##]\n        pub const bit_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[523]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::bit_or`].\"##]\n        pub const bit_or: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[331]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::bit_xor`].\"##]\n        pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[574]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::div`].\"##]\n        pub const div: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[553]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::eq`].\"##]\n        pub const eq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[565]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::ge`].\"##]\n        pub const ge: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[332]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::gt`].\"##]\n        pub const gt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[563]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::le`].\"##]\n        pub const le: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[534]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::lt`].\"##]\n        pub const lt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[532]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::mul`].\"##]\n        pub const mul: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[408]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::ne`].\"##]\n        pub const ne: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[388]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::neg`].\"##]\n        pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[356]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::rem`].\"##]\n        pub const rem: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[518]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::shl`].\"##]\n        pub const shl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[316]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::shr`].\"##]\n        pub const shr: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[358]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i16::sub`].\"##]\n        pub const sub: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[478]);\n    }\n    pub mod i32 {\n        #![doc = r##\"This is the module [`::rust_primitives::i32`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::add`].\"##]\n        pub const add: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[361]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::bit_and`].\"##]\n        pub const bit_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[464]);\n\n        #[doc = r##\"This is the function 
[`::rust_primitives::i32::bit_or`].\"##]\n        pub const bit_or: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[489]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::bit_xor`].\"##]\n        pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[501]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::div`].\"##]\n        pub const div: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[423]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::eq`].\"##]\n        pub const eq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[510]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::ge`].\"##]\n        pub const ge: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[587]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::gt`].\"##]\n        pub const gt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[514]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::le`].\"##]\n        pub const le: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[333]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::lt`].\"##]\n        pub const lt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[323]);\n\n        #[doc = r##\"This is the function 
[`::rust_primitives::i32::mul`].\"##]\n        pub const mul: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[457]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::ne`].\"##]\n        pub const ne: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[476]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::neg`].\"##]\n        pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[309]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::rem`].\"##]\n        pub const rem: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[342]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::shl`].\"##]\n        pub const shl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[562]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::shr`].\"##]\n        pub const shr: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[368]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i32::sub`].\"##]\n        pub const sub: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[350]);\n    }\n    pub mod i64 {\n        #![doc = r##\"This is the module [`::rust_primitives::i64`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::add`].\"##]\n        pub const add: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[498]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::bit_and`].\"##]\n        pub const bit_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[588]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::bit_or`].\"##]\n        pub const bit_or: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[485]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::bit_xor`].\"##]\n        pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[329]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::div`].\"##]\n        pub const div: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[436]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::eq`].\"##]\n        pub const eq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[571]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::ge`].\"##]\n        pub const ge: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[554]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::gt`].\"##]\n        pub const gt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[437]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::le`].\"##]\n        pub const le: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[567]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::lt`].\"##]\n        pub const lt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[446]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::mul`].\"##]\n        pub const mul: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[491]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::ne`].\"##]\n        pub const ne: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[414]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::neg`].\"##]\n        pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[397]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::rem`].\"##]\n        pub const rem: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[433]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::shl`].\"##]\n        pub const shl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[459]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::shr`].\"##]\n        pub const shr: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[480]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i64::sub`].\"##]\n        pub const sub: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[527]);\n    }\n    pub mod i8 {\n        #![doc = r##\"This is the module [`::rust_primitives::i8`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::add`].\"##]\n        pub const add: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[577]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::bit_and`].\"##]\n        pub const bit_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[528]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::bit_or`].\"##]\n        pub const bit_or: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[340]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::bit_xor`].\"##]\n        pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[552]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::div`].\"##]\n        pub const div: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[325]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::eq`].\"##]\n        pub const eq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[411]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::ge`].\"##]\n        pub const ge: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[405]);\n\n        #[doc = r##\"This is the function 
[`::rust_primitives::i8::gt`].\"##]\n        pub const gt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[347]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::le`].\"##]\n        pub const le: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[576]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::lt`].\"##]\n        pub const lt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[417]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::mul`].\"##]\n        pub const mul: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[435]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::ne`].\"##]\n        pub const ne: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[351]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::neg`].\"##]\n        pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[385]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::rem`].\"##]\n        pub const rem: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[551]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::shl`].\"##]\n        pub const shl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[357]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::shr`].\"##]\n        pub const 
shr: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[429]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::i8::sub`].\"##]\n        pub const sub: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[401]);\n    }\n    pub mod isize {\n        #![doc = r##\"This is the module [`::rust_primitives::isize`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::add`].\"##]\n        pub const add: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[507]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::bit_and`].\"##]\n        pub const bit_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[410]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::bit_or`].\"##]\n        pub const bit_or: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[365]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::bit_xor`].\"##]\n        pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[493]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::div`].\"##]\n        pub const div: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[416]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::eq`].\"##]\n        pub const eq: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[516]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::ge`].\"##]\n        pub const ge: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[438]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::gt`].\"##]\n        pub const gt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[530]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::le`].\"##]\n        pub const le: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[573]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::lt`].\"##]\n        pub const lt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[288]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::mul`].\"##]\n        pub const mul: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[344]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::ne`].\"##]\n        pub const ne: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[387]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::neg`].\"##]\n        pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[549]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::rem`].\"##]\n        pub const rem: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[579]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::shl`].\"##]\n        pub const shl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[317]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::shr`].\"##]\n        pub const shr: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[500]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::isize::sub`].\"##]\n        pub const sub: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[472]);\n    }\n    pub mod u128 {\n        #![doc = r##\"This is the module [`::rust_primitives::u128`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::add`].\"##]\n        pub const add: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[509]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::bit_and`].\"##]\n        pub const bit_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[427]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::bit_or`].\"##]\n        pub const bit_or: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[526]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::bit_xor`].\"##]\n        pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[548]);\n\n        #[doc = r##\"This is the function 
[`::rust_primitives::u128::div`].\"##]\n        pub const div: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[521]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::eq`].\"##]\n        pub const eq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[560]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::ge`].\"##]\n        pub const ge: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[496]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::gt`].\"##]\n        pub const gt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[445]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::le`].\"##]\n        pub const le: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[338]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::lt`].\"##]\n        pub const lt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[547]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::mul`].\"##]\n        pub const mul: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[384]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::ne`].\"##]\n        pub const ne: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[450]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::neg`].\"##]\n    
    pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[570]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::rem`].\"##]\n        pub const rem: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[282]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::shl`].\"##]\n        pub const shl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[326]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::shr`].\"##]\n        pub const shr: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[395]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u128::sub`].\"##]\n        pub const sub: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[406]);\n    }\n    pub mod u16 {\n        #![doc = r##\"This is the module [`::rust_primitives::u16`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::add`].\"##]\n        pub const add: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[335]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::bit_and`].\"##]\n        pub const bit_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[299]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::bit_or`].\"##]\n        pub const bit_or: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[525]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::bit_xor`].\"##]\n        pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[475]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::div`].\"##]\n        pub const div: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[535]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::eq`].\"##]\n        pub const eq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[536]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::ge`].\"##]\n        pub const ge: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[343]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::gt`].\"##]\n        pub const gt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[403]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::le`].\"##]\n        pub const le: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[572]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::lt`].\"##]\n        pub const lt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[444]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::mul`].\"##]\n        pub const mul: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[313]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::ne`].\"##]\n        pub const ne: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[428]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::neg`].\"##]\n        pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[284]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::rem`].\"##]\n        pub const rem: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[422]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::shl`].\"##]\n        pub const shl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[520]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::shr`].\"##]\n        pub const shr: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[449]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u16::sub`].\"##]\n        pub const sub: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[540]);\n    }\n    pub mod u32 {\n        #![doc = r##\"This is the module [`::rust_primitives::u32`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::add`].\"##]\n        pub const add: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[339]);\n\n        #[doc = r##\"This is the function 
[`::rust_primitives::u32::bit_and`].\"##]\n        pub const bit_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[585]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::bit_or`].\"##]\n        pub const bit_or: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[558]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::bit_xor`].\"##]\n        pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[456]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::div`].\"##]\n        pub const div: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[460]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::eq`].\"##]\n        pub const eq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[330]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::ge`].\"##]\n        pub const ge: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[495]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::gt`].\"##]\n        pub const gt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[400]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::le`].\"##]\n        pub const le: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[337]);\n\n        #[doc = r##\"This is the function 
[`::rust_primitives::u32::lt`].\"##]\n        pub const lt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[559]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::mul`].\"##]\n        pub const mul: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[486]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::ne`].\"##]\n        pub const ne: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[307]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::neg`].\"##]\n        pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[314]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::rem`].\"##]\n        pub const rem: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[298]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::shl`].\"##]\n        pub const shl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[454]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::shr`].\"##]\n        pub const shr: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[561]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u32::sub`].\"##]\n        pub const sub: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[378]);\n    }\n    pub mod u64 {\n        #![doc = r##\"This is the module 
[`::rust_primitives::u64`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::add`].\"##]\n        pub const add: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[487]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::bit_and`].\"##]\n        pub const bit_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[452]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::bit_or`].\"##]\n        pub const bit_or: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[374]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::bit_xor`].\"##]\n        pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[362]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::div`].\"##]\n        pub const div: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[359]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::eq`].\"##]\n        pub const eq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[545]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::ge`].\"##]\n        pub const ge: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[391]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::gt`].\"##]\n        pub const gt: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[301]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::le`].\"##]\n        pub const le: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[467]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::lt`].\"##]\n        pub const lt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[306]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::mul`].\"##]\n        pub const mul: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[286]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::ne`].\"##]\n        pub const ne: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[492]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::neg`].\"##]\n        pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[481]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::rem`].\"##]\n        pub const rem: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[546]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::shl`].\"##]\n        pub const shl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[425]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::shr`].\"##]\n        pub const shr: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[505]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u64::sub`].\"##]\n        pub const sub: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[431]);\n    }\n    pub mod u8 {\n        #![doc = r##\"This is the module [`::rust_primitives::u8`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::add`].\"##]\n        pub const add: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[349]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::bit_and`].\"##]\n        pub const bit_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[506]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::bit_or`].\"##]\n        pub const bit_or: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[421]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::bit_xor`].\"##]\n        pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[455]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::div`].\"##]\n        pub const div: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[369]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::eq`].\"##]\n        pub const eq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[379]);\n\n        #[doc = r##\"This is the function 
[`::rust_primitives::u8::ge`].\"##]\n        pub const ge: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[392]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::gt`].\"##]\n        pub const gt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[407]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::le`].\"##]\n        pub const le: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[372]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::lt`].\"##]\n        pub const lt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[503]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::mul`].\"##]\n        pub const mul: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[402]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::ne`].\"##]\n        pub const ne: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[377]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::neg`].\"##]\n        pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[461]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::rem`].\"##]\n        pub const rem: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[434]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::shl`].\"##]\n        pub const 
shl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[469]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::shr`].\"##]\n        pub const shr: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[296]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::u8::sub`].\"##]\n        pub const sub: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[394]);\n    }\n    pub mod usize {\n        #![doc = r##\"This is the module [`::rust_primitives::usize`].\"##]\n        use super::root;\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::add`].\"##]\n        pub const add: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[380]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::bit_and`].\"##]\n        pub const bit_and: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[477]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::bit_or`].\"##]\n        pub const bit_or: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[517]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::bit_xor`].\"##]\n        pub const bit_xor: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[404]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::div`].\"##]\n        pub const div: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[522]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::eq`].\"##]\n        pub const eq: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[566]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::ge`].\"##]\n        pub const ge: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[341]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::gt`].\"##]\n        pub const gt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[453]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::le`].\"##]\n        pub const le: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[468]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::lt`].\"##]\n        pub const lt: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[426]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::mul`].\"##]\n        pub const mul: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[458]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::ne`].\"##]\n        pub const ne: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[538]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::neg`].\"##]\n        pub const neg: crate::ast::identifiers::global_id::GlobalId =\n            
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[396]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::rem`].\"##]\n        pub const rem: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[443]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::shl`].\"##]\n        pub const shl: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[442]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::shr`].\"##]\n        pub const shr: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[415]);\n\n        #[doc = r##\"This is the function [`::rust_primitives::usize::sub`].\"##]\n        pub const sub: crate::ast::identifiers::global_id::GlobalId =\n            crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[441]);\n    }\n\n    #[doc = r##\"This is the use item [`::rust_primitives::Use`].\"##]\n    pub const Use: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[628]);\n\n    #[doc = r##\"This is the extern crate [`::rust_primitives::alloc`].\"##]\n    pub const alloc: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[633]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::arithmetic`].\"##]\n    pub const arithmetic: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[615]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::crypto_abstractions`].\"##]\n    pub const crypto_abstractions: crate::ast::identifiers::global_id::GlobalId =\n        
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[293]);\n\n    #[doc = r##\"This is the function [`::rust_primitives::dummy_hax_concrete_ident_wrapper`].\"##]\n    pub const dummy_hax_concrete_ident_wrapper: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[291]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::hax`].\"##]\n    pub const hax: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[1]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::i128`].\"##]\n    pub const i128: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[352]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::i16`].\"##]\n    pub const i16: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[315]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::i32`].\"##]\n    pub const i32: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[308]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::i64`].\"##]\n    pub const i64: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[328]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::i8`].\"##]\n    pub const i8: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[324]);\n\n    #[doc = r##\"This is the macro [`::rust_primitives::impl_arith`].\"##]\n    pub const impl_arith: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[630]);\n\n    #[doc = r##\"This is the module 
[`::rust_primitives::isize`].\"##]\n    pub const isize: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[287]);\n\n    #[doc = r##\"This is the function [`::rust_primitives::offset`].\"##]\n    pub const offset: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[389]);\n\n    #[doc = r##\"This is the extern crate [`::rust_primitives::std`].\"##]\n    pub const std: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[635]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::u128`].\"##]\n    pub const u128: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[281]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::u16`].\"##]\n    pub const u16: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[283]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::u32`].\"##]\n    pub const u32: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[297]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::u64`].\"##]\n    pub const u64: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[285]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::u8`].\"##]\n    pub const u8: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[295]);\n\n    #[doc = r##\"This is the function [`::rust_primitives::unsize`].\"##]\n    pub const unsize: crate::ast::identifiers::global_id::GlobalId =\n        
crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[35]);\n\n    #[doc = r##\"This is the module [`::rust_primitives::usize`].\"##]\n    pub const usize: crate::ast::identifiers::global_id::GlobalId =\n        crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[319]);\n}\n\n#[doc = r##\"This is the module [`::alloc`].\"##]\npub const alloc: crate::ast::identifiers::global_id::GlobalId =\n    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[2]);\n\n#[doc = r##\"This is the module [`::core`].\"##]\npub const core: crate::ast::identifiers::global_id::GlobalId =\n    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[7]);\n\n#[doc = r##\"This is the module [`::hax_lib`].\"##]\npub const hax_lib: crate::ast::identifiers::global_id::GlobalId =\n    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[100]);\n\n#[doc = r##\"This is the module [`::hax_lib_protocol`].\"##]\npub const hax_lib_protocol: crate::ast::identifiers::global_id::GlobalId =\n    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[18]);\n\n#[doc = r##\"This is the module [`::rust_primitives`].\"##]\npub const rust_primitives: crate::ast::identifiers::global_id::GlobalId =\n    crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[0]);\n"
  },
  {
    "path": "rust-engine/src/ast/identifiers/global_id/generated_names.rs",
    "content": "/// We allow:\n///  - `unused`: we don't use all the names present in the `engine/names` crate.\n///    Filtering which `DefId` should be exposed would be complicated, and\n///    dependent library may use some names. (for instance, the backend for\n///    ProVerif may use names from `hax_lib_protocol` that are not needed\n///    anywhere else in the engine)\n///  - `non_snake_case`: we produce faithful names with respect to their\n///    original definitions in Rust. We generate for instance `fn Some() ->\n///    DefID {...}` that provides the `DefId` for the\n///    `std::option::Option::Some`. We want the function to be named `Some`\n///    here, not `some`.\n///  - `broken_intra_doc_links`: we produce documentation that link the function\n///    providing the `DefId` of a item to the item itself. Sometimes, we refer\n///    to private items, to re-exported items or to items that are not in the\n///    dependency closure of the engine: in such cases, `rustdoc` cannot link\n///    properly.\n#[allow(\n    unused,\n    non_snake_case,\n    rustdoc::broken_intra_doc_links,\n    missing_docs,\n    clippy::module_inception,\n    unused_qualifications,\n    non_upper_case_globals\n)]\npub mod root {\n    include!(\"generated.rs\");\n}\n\n/// Global identifiers are built around `DefId` that comes out of the hax\n/// frontend. 
We use the Rust engine itself to produce the names: we run hax on\n/// the `engine/names` crate, we extract identifiers from the resulting AST, and\n/// we expose them back as Rust functions here.\npub mod codegen {\n    use itertools::*;\n    use std::iter;\n\n    use crate::ast::Item;\n    use crate::ast::identifiers::{\n        GlobalId,\n        global_id::{ExplicitDefId, compact_serialization},\n    };\n    use hax_frontend_exporter::DefKind;\n\n    use std::collections::{HashMap, HashSet};\n\n    /// Replace the crate name `\"hax_engine_names\"` with `\"rust_primitives\"` in the given `DefId`.\n    fn rename_krate(def_id: &mut ExplicitDefId) {\n        if def_id.def_id.krate == \"hax_engine_names\" {\n            def_id.rename_krate(\"rust_primitives\");\n        }\n    }\n\n    /// Visit items and collect all the `DefId`s\n    fn collect_def_ids(items: Vec<Item>) -> Vec<ExplicitDefId> {\n        #[derive(Default)]\n        struct DefIdCollector(HashSet<ExplicitDefId>);\n        use crate::ast::visitors::*;\n        impl AstVisitor for DefIdCollector {\n            fn visit_global_id(&mut self, x: &GlobalId) {\n                let mut current = x.0.explicit_def_id();\n                while let Some(def_id) = current {\n                    self.0.insert(def_id.clone());\n                    current = def_id.parent();\n                }\n            }\n        }\n\n        // Collect names\n        let mut names: Vec<_> = DefIdCollector::default()\n            .visit_by_val(&items)\n            .0\n            .into_iter()\n            .collect();\n\n        // In the OCaml engine, `hax_engine_names` is renamed to `rust_primitives`.\n        names.iter_mut().for_each(rename_krate);\n\n        // We consume names after import by the OCaml engine. Thus, the OCaml\n        // engine may have introduced already some hax-specific Rust names,\n        // directly in `rust_primitives`. 
After renaming from `hax_engine_names`\n        // to `rust_primitives`, such names may be duplicated. For instance,\n        // that's the case of `unsize`: the crate `hax_engine_names` contains\n        // expression with implicit unsize operations, thus the OCaml engine\n        // inserts `rust_primitives::unsize`. In the same time,\n        // `hax_engine_names::unsize` exists and was renamed to\n        // `rust_primitives::unsize`. Whence the need to dedup here.\n        names.sort();\n        names.dedup();\n        names\n    }\n\n    /// Crafts a docstring for a `DefId`, hopefully (rustdoc) linking it back to\n    /// its origin.\n    fn docstring(explicit_id: &ExplicitDefId) -> String {\n        let id = &explicit_id.def_id;\n        let path = path_of_def_id(explicit_id);\n        let (parent_path, def) = match &path[..] {\n            [init @ .., last] => (init, last.clone()),\n            _ => (&[] as &[_], id.krate.to_string()),\n        };\n        let parent_path_str = format!(\"::{}\", parent_path.join(\"::\"));\n        let path_str = format!(\"::{}\", path_of_def_id(explicit_id).join(\"::\"));\n        let subject = match &id.kind {\n            DefKind::Mod => format!(\"module [`{path_str}`]\"),\n            DefKind::Struct => format!(\"struct [`{path_str}`]\"),\n            DefKind::Union => format!(\"union [`{path_str}`]\"),\n            DefKind::Enum => format!(\"enum [`{path_str}`]\"),\n            DefKind::Variant => format!(\"variant [`{path_str}`]\"),\n            DefKind::Trait => format!(\"trait [`{path_str}`]\"),\n            DefKind::TyAlias => format!(\"type alias [`{path_str}`]\"),\n            DefKind::ForeignTy => format!(\"foreign type [`{path_str}`]\"),\n            DefKind::TraitAlias => format!(\"trait alias [`{path_str}`]\"),\n            DefKind::AssocTy => format!(\"associated type [`{path_str}`]\"),\n            DefKind::TyParam => format!(\"type parameter from [`{parent_path_str}`]\"),\n            DefKind::Fn => 
format!(\"function [`{path_str}`]\"),\n            DefKind::Const => format!(\"const [`{path_str}`]\"),\n            DefKind::ConstParam => format!(\"const parameter from [`{parent_path_str}`]\"),\n            DefKind::Static { .. } => format!(\"static [`{path_str}`]\"),\n            DefKind::Ctor { .. } => format!(\"constructor for [`{parent_path_str}`]\"),\n            DefKind::AssocFn => format!(\"associated function [`{path_str}`]\"),\n            DefKind::AssocConst => format!(\"associated constant [`{path_str}`]\"),\n            DefKind::Macro { .. } => format!(\"macro [`{path_str}`]\"),\n            DefKind::ExternCrate => format!(\"extern crate [`{path_str}`]\"),\n            DefKind::Use => format!(\"use item [`{path_str}`]\"),\n            DefKind::ForeignMod => format!(\"foreign module [`{path_str}`]\"),\n            DefKind::AnonConst => return \"This is an anonymous constant.\".to_string(),\n            DefKind::PromotedConst | DefKind::InlineConst => {\n                format!(\"This is an inline const from [`{parent_path_str}`]\")\n            }\n            DefKind::OpaqueTy => {\n                return format!(\"This is an opaque type for [`{parent_path_str}`]\");\n            }\n            DefKind::Field => format!(\"field [`{def}`] from {parent_path_str}\"),\n            DefKind::LifetimeParam => return \"This is a lifetime parameter.\".to_string(),\n            DefKind::GlobalAsm => return \"This is a global ASM block.\".to_string(),\n            DefKind::Impl { .. 
} => return \"This is an impl block.\".to_string(),\n            DefKind::Closure => return \"This is a closure.\".to_string(),\n            DefKind::SyntheticCoroutineBody => return \"This is a coroutine body.\".to_string(),\n        };\n        format!(\"This is the {subject}.\")\n    }\n\n    /// Computes a string path for a `DefId`.\n    fn path_of_def_id(explicit_id: &ExplicitDefId) -> Vec<String> {\n        let id = &explicit_id.def_id;\n        fn name_to_string(mut s: String) -> String {\n            if s == \"_\" {\n                s = \"_anonymous\".into();\n            };\n            if s.parse::<i32>().is_ok() {\n                s = format!(\"_{s}\");\n            }\n            s\n        }\n        iter::once(id.krate.to_string())\n            .chain(id.path.iter().map(|item| {\n                let data = match item.data.clone() {\n                    hax_frontend_exporter::DefPathItem::CrateRoot { name } => name,\n                    hax_frontend_exporter::DefPathItem::TypeNs(s)\n                    | hax_frontend_exporter::DefPathItem::ValueNs(s)\n                    | hax_frontend_exporter::DefPathItem::MacroNs(s)\n                    | hax_frontend_exporter::DefPathItem::LifetimeNs(s) => s,\n                    data => format!(\"{data:?}\"),\n                };\n                if item.disambiguator == 0 {\n                    data\n                } else {\n                    format!(\"{data}__{}\", item.disambiguator)\n                }\n            }))\n            .chain(if explicit_id.is_constructor {\n                Some(\"Constructor\".to_string())\n            } else {\n                None\n            })\n            .chain(if matches!(id.kind, DefKind::Ctor(..)) {\n                // TODO: get rid of `ctor` #1657\n                Some(\"ctor\".to_string())\n            } else {\n                None\n            })\n            .map(name_to_string)\n            .collect()\n    }\n\n    /// Given a list of `DefId`, this will create a 
Rust code source that provides those names.\n    ///\n    /// For example, given `krate::module::f` and `krate::g`, this will produce something like:\n    /// ```rust,ignore\n    /// mod krate {\n    ///    mod module {\n    ///       fn f() -> DefId {...}\n    ///    }\n    ///    fn g() -> DefId {...}\n    /// }\n    /// ```\n    fn generate_names_hierachy(def_ids: Vec<ExplicitDefId>) -> String {\n        /// Helper struct: a graph of module and definitions.\n        #[derive(Debug, Default)]\n        struct Module {\n            attached_def_id: Option<ExplicitDefId>,\n            submodules: HashMap<String, Module>,\n            definitions: Vec<(String, ExplicitDefId)>,\n        }\n        impl Module {\n            fn new(def_ids: Vec<ExplicitDefId>) -> Self {\n                let mut node = Self::default();\n                for def_id in &def_ids {\n                    node.insert(def_id);\n                }\n                for def_id in def_ids {\n                    let modpath = path_of_def_id(&def_id);\n                    if let Some(module) = node.find_module(&modpath) {\n                        module.attached_def_id = Some(def_id.clone());\n                    }\n                }\n                node\n            }\n            /// Insert a `DefId` in our module tree\n            fn insert(&mut self, def_id: &ExplicitDefId) {\n                let fullpath = path_of_def_id(def_id);\n                let [modpath @ .., def] = &fullpath[..] 
else {\n                    return;\n                };\n\n                let mut node = self;\n                for chunk in modpath {\n                    node = node.submodules.entry(chunk.clone()).or_default();\n                }\n\n                node.definitions.push((def.clone(), def_id.clone()));\n            }\n            /// Get a mutable borrow to the submodule denoted by `modpath`, if it exists\n            fn find_module(&mut self, modpath: &Vec<String>) -> Option<&mut Self> {\n                let mut node = self;\n                for chunk in modpath {\n                    node = node.submodules.get_mut(chunk)?;\n                }\n                Some(node)\n            }\n            /// Render the module tree as a string\n            fn render(self, path: String, indexes: &HashMap<ExplicitDefId, usize>) -> String {\n                /// Computes the visibility restriction for a given path.\n                fn restriction(path: &str) -> &'static str {\n                    // Tuples are encoded directly in `GlobalIdInner::Tuple`.\n                    // The names here exist so that tuple identifiers can be handled in the exact same way as other identifiers.\n                    // But the canonical representation of tuples is not `names::rust_primitives::hax::Tuple*`.\n                    // Whence this visibility restriction.\n                    if path.starts_with(\"::rust_primitives::hax::Tuple\") {\n                        \"(in crate::ast::identifiers::global_id)\"\n                    } else {\n                        \"\"\n                    }\n                }\n                let Self {\n                    submodules,\n                    definitions,\n                    attached_def_id,\n                } = self;\n                let submodules = submodules\n                    .into_iter()\n                    .sorted_by(|(a, _), (b, _)| a.cmp(b))\n                    .map(|(name, contents)| {\n                        let path = 
format!(\"{path}::{name}\");\n                        let restriction = restriction(&path);\n                        format!(\n                            r###\"pub{restriction} mod {name} {{ {} }}\"###,\n                            contents.render(path, indexes)\n                        )\n                    });\n                let definitions = definitions\n                    .into_iter()\n                    .sorted_by(|(a, _), (b, _)| a.cmp(b))\n                    .map(|(name, def_id)| {\n                        let docstring = docstring(&def_id);\n                        let index = indexes.get(&def_id).unwrap();\n                        let restriction = restriction(&format!(\"{path}::{name}\"));\n                        format!(r###\"\n                            #[doc = r##\"{docstring}\"##]\n                            pub{restriction} const {name}: crate::ast::identifiers::global_id::GlobalId = crate::ast::identifiers::global_id::GlobalId(root::INTERNED_GLOBAL_IDS[{index}]);\n                        \"###)\n                    });\n                let docstring = attached_def_id\n                    .iter()\n                    .map(docstring)\n                    .map(|s| format!(r###\"#![doc=r##\"{s}\"##]\"###));\n                docstring\n                    .chain(iter::once(\"use super::root;\".to_string()))\n                    .chain(submodules)\n                    .chain(definitions)\n                    .collect::<Vec<_>>()\n                    .join(\"\\n\")\n            }\n        }\n        let enumerated_def_ids = def_ids\n            .iter()\n            .cloned()\n            .enumerate()\n            .map(|(n, def_id)| (def_id, n))\n            .collect::<Vec<_>>();\n        let indexes = HashMap::from_iter(enumerated_def_ids.iter().cloned());\n        let tree = Module::new(def_ids).render(String::new(), &indexes);\n        let functions = {\n            enumerated_def_ids.iter().map(|(did, i)| {\n                let serialized = 
compact_serialization::serialize(did);\n                let parent = did.parent().as_ref().map(|parent| *indexes.get(parent).unwrap()).map(|parent| format!(\"Some(did_{parent}())\")).unwrap_or(\"None\".into());\n                format!(r###\"fn did_{i}() -> ExplicitDefId {{deserialize(r##\"{serialized}\"##, {parent})}}\"###)\n            }).collect::<Vec<_>>().join(\"\\n\")\n        };\n        let array_literal = enumerated_def_ids\n            .iter()\n            .map(|(_, i)| format!(\"did_{i}().into_global_id_inner()\"))\n            .collect::<Vec<_>>()\n            .join(\",\");\n        let n = indexes.len();\n        format!(\n            r#\"// This file was generated by `cargo hax into generate-rust-engine-names`.\n// To regenerate it, please use `just regenerate-names`. Under the hood, `cargo\n// hax into generate-rust-engine-names` runs the Rust engine, which in turn\n// calls `rust_engine::names::export_def_ids_to_mod`.\n\nstatic TABLE_AND_INTERNED_GLOBAL_IDS: (crate::interning::LazyLockNewWithValue<crate::ast::identifiers::global_id::GlobalIdInner, {n}>, [crate::interning::Interned<crate::ast::identifiers::global_id::GlobalIdInner>; {n}]) = {{\n    crate::interning::InterningTable::new_with_values(|| {{\n        use crate::ast::identifiers::global_id::ExplicitDefId;\n        use crate::ast::identifiers::global_id::compact_serialization::deserialize;\n        {functions}\n        [{array_literal}]\n    }})\n}};\n\nstatic INTERNED_GLOBAL_IDS: [crate::interning::Interned<crate::ast::identifiers::global_id::GlobalIdInner>; {n}] = TABLE_AND_INTERNED_GLOBAL_IDS.1;\n\nimpl crate::interning::Internable for crate::ast::identifiers::global_id::GlobalIdInner {{\n    fn interning_table() -> &'static std::sync::Mutex<crate::interning::InterningTable<Self>> {{\n        &TABLE_AND_INTERNED_GLOBAL_IDS.0\n    }}\n}}\n\n{tree}\n\"#\n        )\n    }\n\n    /// Finds all `DefId`s in `items`, and produce a Rust module exposing them.\n    pub fn 
export_def_ids_to_mod(items: Vec<Item>) -> String {\n        generate_names_hierachy(collect_def_ids(items))\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/ast/identifiers/global_id/view.rs",
    "content": "//! Helpers to view and reason about **path segments** in Rust items.\n//!\n//! This module encodes a number of rustc invariants about which items are named vs.\n//! unnamed and which items have parents. Those invariants are enforced at runtime and\n//! will emit diagnostic if the invariants are broken.\n//!\n//! # What is a path segment?\n//!\n//! In Rust, every item lives inside a path. Every path begins with a crate root\n//! (the crate where the item is defined).\n//!\n//! For example, imagine a crate called `my_crate` with this code:\n//!\n//! ```ignore\n//! mod a {\n//!     mod b {\n//!         fn hello() {}\n//!     }\n//! }\n//! ```\n//!\n//! The function `hello` has the full path `my_crate::a::b::hello`.\n//! This path is made up of segments: `my_crate`, `a`, `b`, and `hello`.\n//!\n//! This module represents those segments as typed values, enriched with extra\n//! information such as:\n//! - whether the segment is named or unnamed (e.g. anonymous const or impl blocks),\n//! - what kind of item it points to (a crate, module, struct, field, associated fn, etc.),\n//! - its parent segment in the hierarchy (e.g. a field belongs to a constructor,\n//!   which belongs to a type, which belongs to a module, which belongs to a crate).\n//!\n//! # Hierarchical nature of segments\n//!\n//! Segments form a hierarchy of ownership, starting from the crate root.\n//! For example, in a crate called `my_crate`:\n//!\n//! ```ignore\n//! struct Foo {\n//!     bar: u32,\n//! }\n//! ```\n//!\n//! The field `bar` is represented as a `Field` segment. It knows its parent is\n//! the constructor of `Foo`, which knows its parent is the type definition `Foo`,\n//! which in turn belongs to the crate `my_crate`.\n//!\n//! The hierarchy looks like this:\n//!\n//! ```text\n//! my_crate (crate)\n//!  └── Foo (type)\n//!       └── Foo (constructor)\n//!            └── bar (field)\n//! ```\n//!\n//! Similarly, with associated items in a crate `my_crate`:\n//!\n//! 
```ignore\n//! trait T {\n//!     fn f();\n//! }\n//! ```\n//!\n//! The function `f` is represented as an `AssocItem` segment, whose parent is the\n//! container `T` (a `Trait` segment), and ultimately the crate root:\n//!\n//! ```text\n//! my_crate (crate)\n//!  └── T (trait)\n//!       └── f (assoc fn)\n//! ```\n//!\n//! This hierarchical model makes it possible to:\n//! - reliably find the **parent** of any segment (`bar` → constructor → type → crate),\n//! - disambiguate names in backends (e.g. when two crates define constructors with the\n//!   same name, the crate root keeps them separate),\n//! - traverse full paths in a strongly-typed way (using [`View`] or [`PathSegment::parents`]).\n//!\n//! # Examples\n//!\n//! For this Rust code in crate `my_crate`:\n//!\n//! ```ignore\n//! mod a {\n//!     trait Foo {\n//!         fn f() {\n//!             enum T {\n//!                 C { field: u8 },\n//!             }\n//!         }\n//!     }\n//! }\n//! ```\n//!\n//! We can represent various identifiers as hierarchical segments:\n//!\n//! | Path                               | Segments                                     |\n//! |------------------------------------|----------------------------------------------|\n//! | `my_crate`                         | `[my_crate]`                                 |\n//! | `my_crate::a`                      | `[my_crate], [a]`                            |\n//! | `my_crate::a::b::hello`            | `[my_crate], [a], [b], [hello]`              |\n//! | `my_crate::a::Foo`                 | `[my_crate], [a], [Foo]`                     |\n//! | `my_crate::a::Foo::f`              | `[my_crate], [a], [Foo::f]`                  |\n//! | `my_crate::a::Foo::f::T`           | `[my_crate], [a], [Foo::f], [T]`             |\n//! | `my_crate::a::Foo::f::T::C`        | `[my_crate], [a], [Foo::f], [T::C]`          |\n//! 
| `my_crate::a::Foo::f::T::C::field` | `[my_crate], [a], [Foo::f], [T::C::field]`   |\n//!\n\nuse hax_frontend_exporter::{CtorOf, DefKind, DefPathItem, ImplInfos};\n\nuse crate::{\n    ast::identifiers::global_id::{DefId, ExplicitDefId},\n    symbol::Symbol,\n};\n\n#[derive(Debug, Clone)]\n/// The kind of a type definition: `struct`, `enum`, or `union`.\npub enum TypeDefKind {\n    /// A `struct` definition.\n    Struct,\n    /// An `enum` definition.\n    Enum,\n    /// A `union` definition.\n    Union,\n}\n\n#[derive(Debug, Clone)]\n/// The kind of a container for associated items (i.e., a `trait` or an `impl` block).\npub enum AssocItemContainerKind {\n    /// An `impl` block.\n    Impl {\n        /// `true` if this is an inherent `impl` (no trait), `false` if it implements a trait.\n        inherent: bool,\n        /// Optional extra information about the impl (if available from the frontend).\n        ///\n        /// `None` when such information is not provided/collected.\n        impl_infos: Option<ImplInfos>,\n    },\n    /// A `trait` definition.\n    Trait {\n        /// `true` if this is a trait alias (a type alias to a trait).\n        trait_alias: bool,\n    },\n}\n\n#[derive(Debug, Clone)]\n/// The kind of a constructor (tuple struct/variant/struct-ctor function).\npub enum ConstructorKind {\n    /// A constructor associated to a concrete type definition `ty`.\n    Constructor {\n        /// The type constructed\n        ty: PathSegment<TypeDefKind>,\n    },\n}\n\n#[derive(Debug, Clone)]\n/// The kind of an associated item within a trait or impl.\npub enum AssocItemKind {\n    /// An associated function.\n    Fn,\n    /// An associated constant.\n    Const,\n    /// An associated type.\n    Ty,\n}\n\n#[derive(Debug, Clone)]\n/// The kind of any item that can occur as a path segment.\n///\n/// This is a sum type that makes [`PathSegment<AnyKind>`] expressive enough to encode\n/// precise parents (e.g., a field always has a constructor parent, an 
associated item\n/// always has a trait/impl container, etc.).\npub enum AnyKind {\n    /// A type definition (`struct`, `enum`, or `union`).\n    TypeDef(TypeDefKind),\n    /// A container of associated items (`trait` or `impl`).\n    AssocItemContainer(AssocItemContainerKind),\n    /// A constructor (for a struct or enum variant).\n    Constructor(ConstructorKind),\n\n    /// An associated item.\n    AssocItem {\n        /// Which associated item kind this is.\n        kind: AssocItemKind,\n        /// The parent container (trait or impl) of this associated item.\n        container: PathSegment<AssocItemContainerKind>,\n    },\n\n    /// A standalone function.\n    Fn,\n    /// A standalone constant.\n    Const,\n    /// A `use` item.\n    Use,\n    /// An anonymous constant (e.g., `const _: T = ...;`).\n    AnonConst,\n    /// An inline constant (e.g., `let x = { const Y: i32 = 0; Y };`).\n    InlineConst,\n    /// A trait alias.\n    TraitAlias,\n    /// A foreign module (`extern \"C\" { ... 
}`).\n    Foreign,\n    /// A foreign type (`extern type T;`).\n    ForeignTy,\n    /// A type alias (`type Foo = Bar;`).\n    TyAlias,\n    /// An `extern crate` item.\n    ExternCrate,\n    /// An opaque item (e.g., `type Foo = impl Trait;`).\n    Opaque,\n    /// A `static` item.\n    Static,\n    /// A macro definition or export.\n    Macro,\n    /// A module or crate.\n    Mod,\n    /// A global assembly block.\n    GlobalAsm,\n\n    /// A field of a struct or a struct-like enum variant.\n    Field {\n        /// `true` if the field is *named* (e.g., `x` in `struct S { x: u8 }`);\n        /// `false` if it is *unnamed* (tuple field like `0` in `struct T(u8)`).\n        named: bool,\n        /// The parent constructor that owns this field.\n        ///\n        /// Example: The parent of `x` is the constructor of `Foo` in:\n        /// `struct Foo { x: u8 }`.\n        parent: PathSegment<ConstructorKind>,\n    },\n\n    /// A closure expression item.\n    Closure,\n}\n\n#[derive(Debug, Clone)]\n/// Payloads used when a path segment is **unnamed**.\n///\n/// These correspond to items that do not contribute a user-facing identifier in the path.\npub enum UnnamedPathSegmentPayload {\n    /// An `impl` block.\n    Impl,\n    /// An anonymous constant.\n    AnonConst,\n    /// An inline constant.\n    InlineConst,\n    /// A foreign module or crate.\n    Foreign,\n    /// A global assembly code block.\n    GlobalAsm,\n    /// A `use` item.\n    Use,\n    /// An opaque item (e.g., `type Foo = impl Trait;`).\n    Opaque,\n    /// A closure.\n    Closure,\n}\n\n/// Each path segment carries a payload:\n/// - [`PathSegmentPayload::Named`] with a user-decided name, or\n/// - [`PathSegmentPayload::Unnamed`] for items that are anonymous in the path.\n#[derive(Debug, Clone)]\npub enum PathSegmentPayload {\n    /// A named segment (holds the name as a [`Symbol`]).\n    Named(Symbol),\n    /// An unnamed segment with a categorized payload.\n    
Unnamed(UnnamedPathSegmentPayload),\n}\n\nmod rustc_invariant_handling {\n    //! This module provides the function `error_dummy_value`, which emits errors.\n\n    use std::any::{Any, type_name};\n    use std::fmt::Debug;\n\n    use super::*;\n    use crate::{\n        ast::{\n            diagnostics::{Context, DiagnosticInfo},\n            span::Span,\n        },\n        names,\n    };\n    use hax_types::diagnostics::Kind;\n\n    #[derive(Clone, Copy)]\n    /// Restrict [`ErrorDummyValue`] callers\n    pub struct Permit(());\n\n    pub trait ErrorDummyValue {\n        fn error_dummy_value(_: Permit) -> Self;\n    }\n\n    impl ErrorDummyValue for PathSegmentPayload {\n        fn error_dummy_value(_: Permit) -> Self {\n            Self::Named(Symbol::new(\"hax_engine_view_fatal_error\"))\n        }\n    }\n\n    impl ErrorDummyValue for TypeDefKind {\n        fn error_dummy_value(_: Permit) -> Self {\n            TypeDefKind::Enum\n        }\n    }\n    impl ErrorDummyValue for ConstructorKind {\n        fn error_dummy_value(permit: Permit) -> Self {\n            ConstructorKind::Constructor {\n                ty: PathSegment::<TypeDefKind>::error_dummy_value(permit),\n            }\n        }\n    }\n\n    impl<K: ErrorDummyValue> ErrorDummyValue for PathSegment<K> {\n        fn error_dummy_value(permit: Permit) -> Self {\n            Self {\n                identifier: DefId::error_dummy_value(permit),\n                payload: PathSegmentPayload::error_dummy_value(permit),\n                disambiguator: 0,\n                kind: K::error_dummy_value(permit),\n            }\n        }\n    }\n\n    impl ErrorDummyValue for AnyKind {\n        fn error_dummy_value(_: Permit) -> Self {\n            Self::Fn\n        }\n    }\n\n    impl ErrorDummyValue for DefId {\n        fn error_dummy_value(_: Permit) -> Self {\n            match names::rust_primitives::hax::failure.0.get() {\n                
crate::ast::identifiers::global_id::GlobalIdInner::Concrete(concrete_id) => {\n                    concrete_id.def_id.def_id\n                }\n                // The error dummy value is generated by hax, with a concrete identifier\n                _ => unreachable!(\"Hax generated name for failure is concrete\"),\n            }\n        }\n    }\n\n    impl ErrorDummyValue for AssocItemContainerKind {\n        fn error_dummy_value(_: Permit) -> Self {\n            AssocItemContainerKind::Trait { trait_alias: false }\n        }\n    }\n\n    impl ErrorDummyValue for bool {\n        fn error_dummy_value(_: Permit) -> Self {\n            true\n        }\n    }\n\n    pub(super) fn error_dummy_value<T: ErrorDummyValue, V: Debug + Any>(\n        message: &str,\n        value: &V,\n    ) -> T {\n        let details = format!(\n            \"A rustc invariant about `DefId` was violated.\\nContext: {message}.\\nValue (type {}) is:\\n{value:#?}\",\n            type_name::<T>()\n        );\n        DiagnosticInfo {\n            context: Context::NameView,\n            span: Span::dummy(),\n            kind: Kind::AssertionFailure { details },\n        }\n        .emit();\n        T::error_dummy_value(Permit(()))\n    }\n}\nuse rustc_invariant_handling::error_dummy_value;\n\nimpl PathSegmentPayload {\n    /// Constructs a [`PathSegmentPayload`] from an [`ExplicitDefId`], assuming its last\n    /// path segment is named.\n    fn from_named(def_id: &ExplicitDefId) -> Self {\n        Self::Named(match def_id.def_id.path.last() {\n            Some(last) => match &last.data {\n                DefPathItem::TypeNs(s)\n                | DefPathItem::ValueNs(s)\n                | DefPathItem::MacroNs(s)\n                | DefPathItem::LifetimeNs(s) => Symbol::new(s),\n                _ => return error_dummy_value(\"PathSegmentPayload::from_named\", def_id),\n            },\n            None => Symbol::new(&def_id.def_id.krate),\n        })\n    }\n\n    /// Constructs a 
[`PathSegmentPayload`] from an [`ExplicitDefId`], assuming its last\n    /// path segment is unnamed.\n    fn from_unnamed(def_id: &ExplicitDefId) -> Result<Self, &'static str> {\n        match def_id.def_id.path.last() {\n            Some(last) => match &last.data {\n                DefPathItem::TypeNs(_)\n                | DefPathItem::ValueNs(_)\n                | DefPathItem::MacroNs(_)\n                | DefPathItem::LifetimeNs(_) => {\n                    return Err(\"PathSegmentPayload::from_unnamed, got name\");\n                }\n\n                _ => (),\n            },\n            None => return Err(\"PathSegmentPayload::from_unnamed, got a root crate\"),\n        };\n        Ok(Self::Unnamed(match &def_id.def_id.kind {\n            DefKind::Use => UnnamedPathSegmentPayload::Use,\n            DefKind::ForeignMod => UnnamedPathSegmentPayload::Foreign,\n            DefKind::AnonConst => UnnamedPathSegmentPayload::AnonConst,\n            DefKind::InlineConst => UnnamedPathSegmentPayload::InlineConst,\n            DefKind::OpaqueTy => UnnamedPathSegmentPayload::Opaque,\n            DefKind::GlobalAsm => UnnamedPathSegmentPayload::GlobalAsm,\n            DefKind::Impl { .. 
} => UnnamedPathSegmentPayload::Impl,\n            DefKind::Closure => UnnamedPathSegmentPayload::Closure,\n            _ => return Err(\"PathSegmentPayload::from_unnamed, bad kind\"),\n        }))\n    }\n\n    /// Constructs a [`PathSegmentPayload`] from an [`ExplicitDefId`], dispatching to\n    /// `from_named` or `from_unnamed` according to the item's [`DefKind`].\n    ///\n    /// This encodes rustc invariants about which kinds are name-bearing in paths.\n    fn from_def_id(def_id: &ExplicitDefId) -> Self {\n        match &def_id.def_id.kind {\n            DefKind::Mod\n            | DefKind::Struct\n            | DefKind::Union\n            | DefKind::Enum\n            | DefKind::Variant\n            | DefKind::Trait\n            | DefKind::TyAlias\n            | DefKind::ForeignTy\n            | DefKind::TraitAlias\n            | DefKind::AssocTy\n            | DefKind::Fn\n            | DefKind::Const\n            | DefKind::Static { .. }\n            | DefKind::Ctor { .. }\n            | DefKind::AssocFn\n            | DefKind::AssocConst\n            | DefKind::Macro { .. }\n            | DefKind::ExternCrate\n            | DefKind::Field => Self::from_named(def_id),\n\n            DefKind::Use\n            | DefKind::ForeignMod\n            | DefKind::AnonConst\n            | DefKind::InlineConst\n            | DefKind::OpaqueTy\n            | DefKind::GlobalAsm\n            | DefKind::Impl { .. 
}\n            | DefKind::Closure => Self::from_unnamed(def_id)\n                .unwrap_or_else(|message| error_dummy_value(message, def_id)),\n\n            DefKind::TyParam\n            | DefKind::ConstParam\n            | DefKind::PromotedConst\n            | DefKind::LifetimeParam\n            | DefKind::SyntheticCoroutineBody => error_dummy_value(\n                \"PathSegmentPayload::from_def_id, kinds should never appear\",\n                def_id,\n            ),\n        }\n    }\n}\n\n#[derive(Debug, Clone)]\n/// A typed path segment: one \"piece\" of a Rust path, with extra structure.\n///\n/// # What does that mean?\n///\n/// In Rust, every item (function, type, trait, field...) has a path starting at\n/// its crate root. For example, in a crate called `my_crate`:\n///\n/// ```ignore\n/// mod a {\n///     mod b {\n///         fn hello() {}\n///     }\n///     trait Foo {\n///         fn f() {\n///             enum T {\n///                 C { field: u8 },\n///             }\n///         }\n///     }\n/// }\n/// ```\n///\n/// Some paths and their **segments** are:\n///\n/// | Path                               | Segments                                     |\n/// |------------------------------------|----------------------------------------------|\n/// | `my_crate`                         | `[my_crate]`                                 |\n/// | `my_crate::a`                      | `[my_crate], [a]`                            |\n/// | `my_crate::a::b::hello`            | `[my_crate], [a], [b], [hello]`              |\n/// | `my_crate::a::Foo`                 | `[my_crate], [a], [Foo]`                     |\n/// | `my_crate::a::Foo::f`              | `[my_crate], [a], [Foo::f]`                  |\n/// | `my_crate::a::Foo::f::T`           | `[my_crate], [a], [Foo::f], [T]`             |\n/// | `my_crate::a::Foo::f::T::C`        | `[my_crate], [a], [Foo::f], [T::C]`          |\n/// | `my_crate::a::Foo::f::T::C::field` | `[my_crate], [a], [Foo::f], 
[T::C::field]`   |\n///\n/// Each `[X]` here is a **path segment**.\n///\n/// # Hierarchy\n///\n/// Path segments form a hierarchy: each one knows its parent. For example, the\n/// field `my_field` is inside the constructor of `MyVariant`, which is inside\n/// the enum `MyEnum`, which lives inside the function `f`, and so on -- all the\n/// way up to the crate root.\n///\n/// This parenthood is important:\n/// - a field segment always has a constructor parent\n///   (e.g. `my_field → MyVariant`).\n/// - an associated item always has a trait/impl container parent\n///   (e.g. `f → Foo`).\n/// - everything ultimately has a **crate** as its top parent.\n///\n/// # Why does this matter?\n///\n/// This strong typing of segments lets tools:\n/// - disambiguate names across contexts (e.g. two types with the same\n///   constructor name),\n/// - generate unique, human-readable names in other languages/backends,\n/// - walk up the chain of parents to reconstruct full paths.\n///\n/// For example, with the F\\* backend, constructors are not namespaced under the\n/// name of their type, but live directly at top-level. Thus, they need to be\n/// unique. Using the hierarchy, we can print them as `Foo_MyVariant` instead of\n/// `Foo.MyVariant`.\npub struct PathSegment<Kind = AnyKind> {\n    identifier: DefId,\n    payload: PathSegmentPayload,\n    disambiguator: u32,\n    kind: Kind,\n}\n\nimpl<K> PathSegment<K> {\n    /// Returns the payload of this path segment (named vs. 
unnamed and why).\n    pub fn payload(&self) -> PathSegmentPayload {\n        self.payload.clone()\n    }\n\n    /// Returns the rustc path disambiguator for this segment.\n    pub fn disambiguator(&self) -> u32 {\n        self.disambiguator\n    }\n\n    /// Returns the kind of this segment as a [`K`].\n    pub fn kind(&self) -> &K {\n        &self.kind\n    }\n\n    /// Maps the segment's `kind` while preserving all other fields.\n    fn map<U>(self, f: impl Fn(K, &DefId) -> U) -> PathSegment<U> {\n        let Self {\n            identifier,\n            payload,\n            disambiguator,\n            kind,\n        } = self;\n        let kind = f(kind, &identifier);\n        PathSegment {\n            identifier,\n            payload,\n            disambiguator,\n            kind,\n        }\n    }\n}\n\nimpl PathSegment<ConstructorKind> {\n    /// Lift a `PathSegment` of kind `ConstructorKind` to a `PathSegment` of kind `AnyKind`.\n    pub fn lift(&self) -> PathSegment<AnyKind> {\n        self.clone().map(|kind, _| AnyKind::Constructor(kind))\n    }\n}\nimpl PathSegment<TypeDefKind> {\n    /// Lift a `PathSegment` of kind `TypeDefKind` to a `PathSegment` of kind `AnyKind`.\n    pub fn lift(&self) -> PathSegment<AnyKind> {\n        self.clone().map(|kind, _| AnyKind::TypeDef(kind))\n    }\n}\nimpl PathSegment<AssocItemContainerKind> {\n    /// Lift a `PathSegment` of kind `AssocItemContainerKind` to a `PathSegment` of kind `AnyKind`.\n    pub fn lift(&self) -> PathSegment<AnyKind> {\n        self.clone()\n            .map(|kind, _| AnyKind::AssocItemContainer(kind))\n    }\n}\n\nimpl PartialEq<PathSegment> for PathSegment {\n    fn eq(&self, other: &PathSegment) -> bool {\n        self.identifier == other.identifier && self.disambiguator == other.disambiguator\n    }\n}\n\nimpl PathSegment {\n    /// Asserts that this segment is a [`TypeDefKind`] and narrows the type.\n    ///\n    /// Emits a diagnostic if it doesn't.\n    fn assert_type_def(self) -> 
PathSegment<TypeDefKind> {\n        self.map(|kind, did| match kind {\n            AnyKind::TypeDef(inner) => inner,\n            _ => error_dummy_value(&format!(\"expected TypeDefKind, got {kind:#?}\"), did),\n        })\n    }\n\n    /// Asserts that this segment is an [`AssocItemContainerKind`] and narrows the type.\n    fn assert_assoc_item_container(self) -> PathSegment<AssocItemContainerKind> {\n        self.map(|kind, did| match kind {\n            AnyKind::AssocItemContainer(inner) => inner,\n            _ => error_dummy_value(\n                &format!(\"expected AssocItemContainerKind, got {kind:#?}\"),\n                did,\n            ),\n        })\n    }\n\n    /// Asserts that this segment is a [`ConstructorKind`] and narrows the type.\n    fn assert_constructor(self) -> PathSegment<ConstructorKind> {\n        self.map(|kind, did| match kind {\n            AnyKind::Constructor(inner) => inner,\n            _ => error_dummy_value(&format!(\"expected ConstructorKind, got {kind:#?}\"), did),\n        })\n    }\n\n    /// Internal constructor that consumes an iterator of [`ExplicitDefId`]s (from child\n    /// to parents) and builds a single [`PathSegment`] at a time, honoring rustc\n    /// invariants and wiring proper parents for kinds that require them\n    /// (constructors, fields, associated items).\n    ///\n    /// Returns `None` when the iterator is exhausted.\n    fn from_iterator(it: &mut impl Iterator<Item = ExplicitDefId>) -> Option<Self> {\n        let def_id = it.next()?;\n        let mut from_iterator = |context: &str| match Self::from_iterator(it) {\n            Some(value) => value,\n            None => error_dummy_value(\n                &format!(\"PathSegment::from_iterator, expected parent for {context}.\"),\n                &def_id,\n            ),\n        };\n        let payload = PathSegmentPayload::from_def_id(&def_id);\n\n        let kind = match &def_id.def_id.kind {\n            // Struct constructor path segment 
special-casing (struct-as-ctor).\n            DefKind::Ctor(CtorOf::Struct, _) | DefKind::Struct if def_id.is_constructor => {\n                let parent_def_id = ExplicitDefId {\n                    is_constructor: false,\n                    def_id: def_id.def_id,\n                };\n                let parent = match Self::from_iterator(&mut std::iter::once(parent_def_id)) {\n                    Some(value) => value,\n                    None => error_dummy_value(\n                        \"PathSegment::from_iterator, expected parent for Struct/Ctor.\",\n                        &def_id,\n                    ),\n                };\n                AnyKind::Constructor(ConstructorKind::Constructor {\n                    ty: parent.assert_type_def(),\n                })\n            }\n            // Non-ctor struct item.\n            DefKind::Ctor(CtorOf::Struct, _) => AnyKind::TypeDef(TypeDefKind::Struct),\n            // Enum variants and non-struct ctors.\n            DefKind::Variant | DefKind::Ctor(_, _) => {\n                AnyKind::Constructor(ConstructorKind::Constructor {\n                    ty: from_iterator(\"Variant/Ctor\").assert_type_def(),\n                })\n            }\n            DefKind::Struct => AnyKind::TypeDef(TypeDefKind::Struct),\n            DefKind::Union => AnyKind::TypeDef(TypeDefKind::Union),\n            DefKind::Enum => AnyKind::TypeDef(TypeDefKind::Enum),\n            DefKind::Trait => {\n                AnyKind::AssocItemContainer(AssocItemContainerKind::Trait { trait_alias: false })\n            }\n            DefKind::Impl { of_trait } => AnyKind::AssocItemContainer(\n                AssocItemContainerKind::Impl { inherent: !of_trait, impl_infos: /* intentionally left None; fill where available */ None },\n            ),\n\n            // Simple leaf kinds.\n            DefKind::Mod => AnyKind::Mod,\n            DefKind::Fn => AnyKind::Fn,\n            DefKind::Const => AnyKind::Const,\n            DefKind::Static { .. 
} => AnyKind::Static,\n            DefKind::Use => AnyKind::Use,\n            DefKind::TyAlias => AnyKind::TyAlias,\n            DefKind::TraitAlias => AnyKind::TraitAlias,\n            DefKind::ForeignTy => AnyKind::ForeignTy,\n            DefKind::ForeignMod => AnyKind::Foreign,\n            DefKind::Macro { .. } => AnyKind::Macro,\n            DefKind::AnonConst => AnyKind::AnonConst,\n            DefKind::OpaqueTy => AnyKind::Opaque,\n            DefKind::GlobalAsm => AnyKind::GlobalAsm,\n            DefKind::Closure => AnyKind::Closure,\n            DefKind::ExternCrate => AnyKind::ExternCrate,\n\n            // Field: requires a constructor parent and conveys whether it's named.\n            DefKind::Field => AnyKind::Field {\n                parent: from_iterator(\"Field\").assert_constructor(),\n                named: match &payload {\n                    PathSegmentPayload::Named(symbol) => {\n                        // Tuple fields are numbered; parse success => unnamed field.\n                        str::parse::<usize>(symbol.as_ref()).is_ok()\n                    }\n                    PathSegmentPayload::Unnamed(_) => {\n                        error_dummy_value(\"Field should carry a ValueNs payload.\", &def_id)\n                    }\n                },\n            },\n\n            // Associated items: require a container parent.\n            DefKind::AssocTy => AnyKind::AssocItem {\n                container: from_iterator(\"AssocTy\").assert_assoc_item_container(),\n                kind: AssocItemKind::Ty,\n            },\n            DefKind::AssocFn => AnyKind::AssocItem {\n                container: from_iterator(\"AssocFn\").assert_assoc_item_container(),\n                kind: AssocItemKind::Fn,\n            },\n            DefKind::AssocConst => AnyKind::AssocItem {\n                container: from_iterator(\"AssocConst\").assert_assoc_item_container(),\n                kind: AssocItemKind::Const,\n            },\n\n            _ => 
error_dummy_value(\"PathSegment::from_iterator_opt\", &def_id),\n        };\n        let identifier = def_id.def_id;\n        let disambiguator = identifier.path.last().map(|d| d.disambiguator).unwrap_or(0);\n        Some(Self {\n            identifier,\n            payload,\n            disambiguator,\n            kind,\n        })\n    }\n}\n\nimpl PathSegment {\n    /// Returns the parent path segment, if any.\n    ///\n    /// Parents exist only for:\n    /// - [`AnyKind::Constructor`] (parent is its [`TypeDefKind`]),\n    /// - [`AnyKind::AssocItem`] (parent is its container `trait`/`impl`),\n    /// - [`AnyKind::Field`] (parent is its constructor).\n    ///\n    /// All other kinds return `None`.\n    pub fn parent(&self) -> Option<PathSegment> {\n        Some(match self.kind.clone() {\n            AnyKind::Constructor(ConstructorKind::Constructor { ty }) => {\n                ty.map(|kind, _| AnyKind::TypeDef(kind))\n            }\n            AnyKind::AssocItem { container, .. } => {\n                container.map(|kind, _| AnyKind::AssocItemContainer(kind))\n            }\n            AnyKind::Field { parent, .. } => parent.map(|kind, _| AnyKind::Constructor(kind)),\n            _ => return None,\n        })\n    }\n\n    /// Returns an iterator over `self` and all its ancestors, walking up via\n    /// [`Self::parent`] until no parent remains.\n    pub fn parents(&self) -> impl Iterator<Item = Self> {\n        std::iter::successors(Some(self.clone()), |seg| seg.parent())\n    }\n}\n\nmod view_encapsulation {\n    //! 
Encapsulation module to scope [`View`]'s invariants\n    use crate::ast::{\n        identifiers::global_id::{FreshModule, ReservedSuffix},\n        span::Span,\n    };\n\n    use super::*;\n    /// A view for an [`ExplicitDefId`], materialized as a list of typed\n    /// [`PathSegment`]s ordered from the crate root/module towards the item.\n    pub struct View(Vec<PathSegment>, Option<ReservedSuffix>);\n\n    impl View {\n        /// Returns the full list of segments (non-empty).\n        pub fn segments(&self) -> &[PathSegment] {\n            &self.0\n        }\n\n        /// Returns the last (most specific) segment.\n        pub fn last(&self) -> &PathSegment {\n            self.0\n                .last()\n                .expect(\"Broken invariant: a view always contains at least one path segment.\")\n        }\n\n        /// Returns the first (outermost) segment.\n        pub fn first(&self) -> &PathSegment {\n            self.0\n                .first()\n                .expect(\"Broken invariant: a view always contains at least one path segment.\")\n        }\n\n        /// Splits the view at the boundary between (Rust) modules and the first non-module\n        /// segment.\n        ///\n        /// Returns `(modules, rest)`, where `modules` is the (non empty) prefix of\n        /// `mod` segments (e.g., the crate/module path), and `rest` is the remaining\n        /// segments starting at the first non-`mod`.\n        pub fn split_at_module(&self) -> (&[PathSegment], &[PathSegment]) {\n            let position = self\n                .segments()\n                .iter()\n                .enumerate()\n                .find(|(_, seg)| !matches!(seg.kind(), AnyKind::Mod))\n                .map(|(i, _)| i)\n                .unwrap_or(self.segments().len());\n            self.segments().split_at(position)\n        }\n\n        /// Get the first parent which is a proper module (all its parents are modules as well).\n        pub fn module(&self) -> 
&PathSegment {\n            self.0\n                .iter()\n                .take_while(|seg| !matches!(seg.kind(), AnyKind::Mod))\n                .last()\n                .expect(\"Broken invariant, a name has at least a crate\")\n        }\n\n        /// Get the optional suffix of this view\n        pub fn suffix(&self) -> &Option<ReservedSuffix> {\n            &self.1\n        }\n\n        /// Add a suffix to a view\n        pub fn with_suffix(mut self, suffix: Option<ReservedSuffix>) -> Self {\n            self.1 = suffix;\n            self\n        }\n    }\n\n    impl From<ExplicitDefId> for View {\n        /// Builds a [`View`] from an [`ExplicitDefId`], reconstructing segments by walking\n        /// up the parent chain and then reversing to obtain the canonical outer→inner order.\n        fn from(value: ExplicitDefId) -> Self {\n            let mut it = value.parents();\n            let mut inner =\n                std::iter::from_fn(|| PathSegment::from_iterator(&mut it)).collect::<Vec<_>>();\n            inner.reverse();\n            debug_assert!(!inner.is_empty()); // invariant: non-empty\n            Self(inner, None)\n        }\n    }\n\n    impl From<FreshModule> for View {\n        fn from(value: FreshModule) -> Self {\n            use crate::ast::diagnostics::{Context, DiagnosticInfo};\n            (DiagnosticInfo {\n                context: Context::NameView,\n                span: Span::dummy(),\n                kind: hax_types::diagnostics::Kind::Unimplemented {\n                    issue_id: Some(1779),\n                    details: Some(\n                        \"Fresh modules are not implemented yet in the Rust engine\".into(),\n                    ),\n                },\n            })\n            .emit();\n            // dummy value\n            value\n                .hints\n                .first()\n                .expect(\"The list of hints should be non-empty\")\n                .clone()\n                .into()\n        }\n    
}\n}\npub use view_encapsulation::View;\n"
  },
  {
    "path": "rust-engine/src/ast/identifiers/global_id.rs",
    "content": "//! The global identifiers of hax.\n//!\n//! ## Public API\n//! The main type provided by this module is `GlobalId`.\n//!\n//! A global identifier is either:\n//!  - a concrete identifier, something that could be represented as a Rust path\n//!  - a tuple identifier\n//!\n//! To print a global identifier, you have to use the method [`GlobalId::view`],\n//! which will output a [`view::View`].\n//!\n//! You can also try to interpret a global identifier as a tuple identifier\n//! ([`TupleId`]) via the method [`GlobalId::expect_tuple`].\n//!\n//! ## Internal representations\n//! [`GlobalId`] is a wrapper for an interned [`GlobalIdInner`].\n//!\n//! A [`GlobalIdInner`] is either a [`ConcreteId`] or a [`TupleId`]. A\n//! [`GlobalId`] can always be turned into a [`ConcreteId`].\n//!\n//! A [`ConcreteId`] is an [`ExplicitDefId`] that can be moved to fresh\n//! namespaces or suffixed with reserved suffixes.\n//!\n//! An [`ExplicitDefId`] is a [`DefId`] that adds one piece of information: is\n//! the identifier refering to a constructor or not. This information is\n//! ambiguous in Rust's `DefId`s.\n//!\n//! A [`DefId`] is an interned [`DefIdInner`], which in turn is a datatype\n//! isomorphic to the raw representation of `DefId`s in the frontend.\n//!\n//! 
A [`DefIdInner`] is basically a definition kind, a krate name and a path.\n\nuse hax_frontend_exporter::{DefKind, DefPathItem, DisambiguatedDefPathItem};\nuse hax_rust_engine_macros::*;\n\nuse crate::interning::{Internable, Interned, InterningTable};\n\nmod compact_serialization;\npub(crate) mod generated_names;\npub mod view;\n\n/// A Rust `DefId`: a lighter version of [`hax_frontend_exporter::DefId`].\n#[derive_group_for_ast]\nstruct DefIdInner {\n    /// The crate of the definition\n    krate: String,\n    /// The full path for this definition, under the crate `krate`\n    path: Vec<DisambiguatedDefPathItem>,\n    /// The parent `DefId`, if any.\n    /// `parent` is `None` if and only if `path` is empty\n    parent: Option<DefId>,\n    /// What kind is this definition? (e.g. an `enum`, a `const`, an assoc. `fn`...)\n    kind: DefKind,\n}\n\nimpl From<hax_frontend_exporter::DefId> for DefIdInner {\n    fn from(value: hax_frontend_exporter::DefId) -> Self {\n        Self {\n            krate: value.krate.clone(),\n            path: value.path.clone(),\n            parent: value\n                .parent\n                .clone()\n                .map(|def_id| DefIdInner::from(def_id).intern()),\n            kind: value.kind.clone(),\n        }\n    }\n}\n\nimpl DefIdInner {\n    /// Change the krate field of `self` and propagate the change into all parents.\n    fn rename_krate(&self, name: &str) -> Self {\n        let mut def_id = self.clone();\n        def_id.krate = name.into();\n        def_id.parent = def_id.parent.map(|parent: DefId| parent.rename_krate(name));\n        def_id\n    }\n\n    fn to_debug_string(&self) -> String {\n        fn disambiguator_suffix(disambiguator: u32) -> String {\n            if disambiguator == 0 {\n                \"\".into()\n            } else {\n                format!(\"__{disambiguator}\")\n            }\n        }\n        use itertools::Itertools;\n        std::iter::once(self.krate.clone())\n            
.chain(self.path.iter().map(|item| match &item.data {\n                DefPathItem::TypeNs(s)\n                | DefPathItem::ValueNs(s)\n                | DefPathItem::MacroNs(s)\n                | DefPathItem::LifetimeNs(s) => s.clone(),\n                DefPathItem::Impl => \"impl\".into(),\n                other => format!(\"{other:?}\"),\n            } + &disambiguator_suffix(item.disambiguator)))\n            .join(\"::\")\n    }\n}\n\nuse std::{\n    cell::{LazyCell, RefCell},\n    collections::HashMap,\n    sync::{LazyLock, Mutex},\n};\nimpl Internable for DefIdInner {\n    fn interning_table() -> &'static Mutex<InterningTable<Self>> {\n        static TABLE: LazyLock<Mutex<InterningTable<DefIdInner>>> =\n            LazyLock::new(|| Mutex::new(InterningTable::default()));\n        &TABLE\n    }\n}\n\n/// An interned Rust `DefId`: a lighter version of [`hax_frontend_exporter::DefId`].\ntype DefId = Interned<DefIdInner>;\n\nimpl DefId {\n    /// Change the krate name to `name`.\n    fn rename_krate(&self, name: &str) -> Self {\n        (*self).get().rename_krate(name).intern()\n    }\n}\n\n/// An [`ExplicitDefId`] is a Rust [`DefId`] tagged with some disambiguation metadata.\n///\n/// [`DefId`] can be ambiguous, consider the following Rust code:\n///\n/// ```rust\n/// struct S;\n/// fn f() -> S { S }\n/// ```\n///\n/// Here, the return type of `f` (that is, `S`) and the constructor `S` in the body of `f` refer to the exact same identifier `mycrate::S`.\n/// Yet, they denote two very different objects: a type versus a constructor.\n///\n/// [`ExplicitDefId`] clears up this ambiguity, making constructors and types two separate things.\n///\n/// Also, an [`ExplicitDefId`] always points to an item: an [`ExplicitDefId`] is never pointing to a crate alone.\n#[derive_group_for_ast]\nstruct ExplicitDefId {\n    /// Is this `DefId` a constructor?\n    is_constructor: bool,\n    /// The `DefId` itself\n    def_id: DefId,\n}\n\nimpl ExplicitDefId {\n    /// Get the 
parent of an `ExplicitDefId`.\n    fn parent(&self) -> Option<Self> {\n        let def_id = &self.def_id;\n        let is_constructor = matches!(&def_id.kind, DefKind::Field);\n        Some(Self {\n            is_constructor,\n            def_id: def_id.parent?,\n        })\n    }\n    /// Returns an iterator that yields `self`, then `self.parent()`, etc.\n    /// This iterator is non-empty.\n    fn parents(&self) -> impl Iterator<Item = Self> {\n        std::iter::successors(Some(self.clone()), |id| id.parent())\n    }\n\n    /// Change the krate name to `name`.\n    fn rename_krate(&mut self, name: &str) {\n        self.def_id = self.def_id.rename_krate(name);\n    }\n\n    /// Helper to get a `GlobalIdInner` out of an `ExplicitDefId`.\n    fn into_global_id_inner(self) -> GlobalIdInner {\n        GlobalIdInner::Concrete(ConcreteId {\n            def_id: self,\n            moved: None,\n            suffix: None,\n        })\n    }\n}\n\n/// Represents a fresh module: a module generated by hax and guaranteed to be fresh.\n#[derive_group_for_ast]\npub struct FreshModule {\n    /// Internal (unique) identifier\n    id: usize,\n    /// Non-empty list of identifiers that will be used to decide the name of the fresh module.\n    hints: Vec<ExplicitDefId>,\n    /// A decoration label that will be also used to decide the name of the fresh module.\n    label: String,\n}\n\nimpl FreshModule {\n    /// Renders a view of the fresh module identifier.\n    fn view(&self) -> view::View {\n        self.clone().into()\n    }\n\n    /// Change the krate name in all hints.\n    fn rename_krate(&self, name: &str) -> Self {\n        let hints = self\n            .hints\n            .iter()\n            .map(|hint| {\n                let mut hint = hint.clone();\n                hint.rename_krate(name);\n                hint\n            })\n            .collect();\n        Self {\n            hints,\n            id: self.id,\n            label: self.label.clone(),\n        }\n    
}\n\n    fn to_debug_string(&self) -> String {\n        format!(\"fresh_module_{}_{}\", self.id, self.label)\n    }\n}\n\n/// [`ReservedSuffix`] helps at deriving fresh identifiers out of existing (Rust) ones.\n#[derive_group_for_ast]\npub enum ReservedSuffix {\n    /// Precondition of a function-like item.\n    Pre,\n    /// Postcondition of a function-like item.\n    Post,\n    /// Cast function for an `enum` discriminant.\n    Cast,\n}\n\n/// An identifier that we call concrete: it exists concretely somewhere in Rust.\n#[derive_group_for_ast]\npub struct ConcreteId {\n    /// The explicit `def_id`.\n    def_id: ExplicitDefId,\n    /// A fresh module if this definition was moved to a fresh module.\n    moved: Option<FreshModule>,\n    /// An optional suffix.\n    suffix: Option<ReservedSuffix>,\n}\n\n/// A global identifier in hax.\n#[derive_group_for_ast]\nenum GlobalIdInner {\n    /// A concrete identifier that exists in Rust.\n    Concrete(ConcreteId),\n    /// A fresh module introduced by Hax (typically, a bundle)\n    FreshModule(FreshModule),\n    /// A projector.\n    Tuple(TupleId),\n}\n\n#[derive_group_for_ast]\n#[derive(Copy)]\n/// Represents tuple-related identifier in Rust.\n///\n/// Since Rust tuples do not have user-defined names, this type is used to\n/// represent synthesized identifiers for tuple types, their constructors, and\n/// fields. 
This is necessary in cases where we need to refer to these\n/// components in a structured and identifiable way.\n///\n/// For ergonomic purposes, `TupleId` can be transformed into `ConcreteId`s.\n/// After such a conversion, we lose structure, but we end up with a standard\n/// concrete identifier, which can be printed in a generic way.\n/// See [`ConcreteId::from_global_id`].\npub enum TupleId {\n    /// Represents a tuple type with the given number of elements.\n    ///\n    /// For example, a tuple like `(i32, bool, String)` would have `length = 3`.\n    Type {\n        /// Number of elements in the tuple.\n        length: usize,\n    },\n\n    /// Represents the constructor function for a tuple with the given arity.\n    ///\n    /// This refers to the tuple expression itself (e.g., `(x, y, z)`), which constructs\n    /// a value of the tuple type.\n    Constructor {\n        /// Number of elements in the tuple.\n        length: usize,\n    },\n\n    /// Represents a field within a tuple, addressed by position.\n    ///\n    /// For instance, accessing `.0` or `.1` on a tuple corresponds to a specific field.\n    Field {\n        /// Number of elements in the tuple.\n        length: usize,\n        /// Index of the field (zero-based).\n        field: usize,\n    },\n}\n\nimpl From<TupleId> for GlobalId {\n    fn from(tuple_id: TupleId) -> Self {\n        Self(GlobalIdInner::Tuple(tuple_id).intern())\n    }\n}\n\nimpl TupleId {\n    /// Creates a ConcreteId from a TupleId: `Tuple(1)` returns `Tuple1`\n    fn into_owned_concrete_id(self) -> ConcreteId {\n        fn patch_def_id(template: GlobalId, length: usize, field: usize) -> ConcreteId {\n            let GlobalIdInner::Concrete(mut concrete_id) = template.0.get().clone() else {\n                // `patch_def_id` is called with constant values (`hax::Tuple2`\n                // and friends are constants) Those are of the shape\n                // `GlobalIdInner::Concrete(_)`, *not*\n                // 
`GlobalIdInner::Tuple(_)`. The tuple identifiers we deal with\n                // in this functions are private identifiers used only in this\n                // module, to provide normal concrete identifiers even for\n                // tuples.\n                unreachable!()\n            };\n            fn inner(did: &mut DefIdInner, length: usize, field: usize) {\n                for DisambiguatedDefPathItem { data, .. } in &mut did.path {\n                    // Patch field\n                    if let DefPathItem::ValueNs(s) = data\n                        && s == \"1\"\n                    {\n                        *s = field.to_string()\n                    }\n                    // Patch constructor / type name\n                    if let DefPathItem::TypeNs(s) = data\n                        && s.starts_with(\"Tuple\")\n                    {\n                        *s = format!(\"Tuple{length}\")\n                    }\n                }\n                if let Some(parent) = did.parent {\n                    let mut parent = parent.get().clone();\n                    inner(&mut parent, length, field);\n                    did.parent = Some(parent.intern());\n                }\n            }\n            let mut did = concrete_id.def_id.def_id.get().clone();\n            inner(&mut did, length, field);\n            concrete_id.def_id.def_id = did.intern();\n            concrete_id\n        }\n\n        use crate::names::rust_primitives::hax;\n\n        match self {\n            TupleId::Type { length } => patch_def_id(hax::Tuple2, length, 0),\n            TupleId::Constructor { length } => patch_def_id(hax::Tuple2::Constructor, length, 0),\n            TupleId::Field { length, field } => patch_def_id(hax::Tuple2::_1, length, field),\n        }\n    }\n\n    /// Creates a static [`ConcreteId`] from a [`TupleId`]: `Tuple(1)` returns `Tuple1`. 
The function is\n    /// memoized (as the same tuple ids may appear a lot in a program), and inserts identifiers in\n    /// the GlobalId table to return a static lifetime.\n    pub fn as_concreteid(self) -> &'static ConcreteId {\n        thread_local! {\n            static MEMO: LazyCell<RefCell<HashMap<TupleId, &'static ConcreteId>>> =\n                LazyCell::new(|| RefCell::new(HashMap::new()));\n        }\n\n        MEMO.with(|memo| {\n            let mut memo = memo.borrow_mut();\n            let reference: &'static ConcreteId = memo.entry(self).or_insert_with(|| {\n                match GlobalIdInner::Concrete(self.into_owned_concrete_id())\n                    .intern()\n                    .get()\n                {\n                    GlobalIdInner::Concrete(concrete_id) => concrete_id,\n                    GlobalIdInner::FreshModule(_) | GlobalIdInner::Tuple(_) => {\n                        // This is a match on the Id that was just inserted in the table as a\n                        // ConcreteId\n                        unreachable!()\n                    }\n                }\n            });\n            reference\n        })\n    }\n}\n\n/// A interned global identifier in hax.\n#[derive_group_for_ast]\n#[derive(Copy)]\npub struct GlobalId(Interned<GlobalIdInner>);\n\nimpl GlobalId {\n    /// Import a def_id from the frontend\n    pub fn from_frontend(id: hax_frontend_exporter::DefId, is_value: bool) -> Self {\n        let mut def_id: DefIdInner = id.into();\n        use hax_frontend_exporter::DefKind as DK;\n\n        let mut popped_ctor = false;\n        if let Some(last) = def_id.path.last()\n            && matches!(&last.data, DefPathItem::Ctor)\n        {\n            def_id.path.pop();\n            popped_ctor = true;\n            if let Some(parent) = def_id.parent.as_ref() {\n                def_id.parent = parent.parent;\n            }\n        }\n\n        let is_constructor = is_value\n            && (matches!(&def_id.kind, DK::Variant | 
DK::Union | DK::Struct) || popped_ctor);\n        let inner = GlobalIdInner::Concrete(ConcreteId {\n            def_id: ExplicitDefId {\n                is_constructor,\n                def_id: def_id.intern(),\n            },\n            moved: None,\n            suffix: None,\n        });\n        Self(inner.intern())\n    }\n\n    /// Extracts the Crate info\n    pub fn krate(self) -> &'static str {\n        match self.0.get() {\n            GlobalIdInner::FreshModule(fresh_module) => {\n                &fresh_module\n                    .hints\n                    .first()\n                    .expect(\"The hint list should always be non-empty\")\n                    .def_id\n                    .krate\n            }\n            GlobalIdInner::Concrete(concrete_id) => &concrete_id.def_id.def_id.krate,\n            GlobalIdInner::Tuple(tuple_id) => &tuple_id.as_concreteid().def_id.def_id.krate,\n        }\n    }\n\n    /// Debug printing of identifiers, for testing purposes only.\n    /// Prints path in a Rust-like way, as a `::` separated dismabiguated path.\n    pub fn to_debug_string(self) -> String {\n        match self.0.get() {\n            GlobalIdInner::Concrete(id) => id.to_debug_string(),\n            GlobalIdInner::FreshModule(id) => id.to_debug_string(),\n            GlobalIdInner::Tuple(id) => id.as_concreteid().to_debug_string(),\n        }\n    }\n\n    /// Returns true if the underlying identifier is a constructor\n    pub fn is_constructor(self) -> bool {\n        self.0.get().is_constructor()\n    }\n\n    /// Returns true if the underlying identifier is a projector\n    pub fn is_projector(self) -> bool {\n        self.0.get().is_projector()\n    }\n\n    /// Returns true if the underlying identifier is a precondition (trait/impl item)\n    /// Should be removed once https://github.com/cryspen/hax/issues/1646 has been fixed\n    pub fn is_precondition(self) -> bool {\n        self.0.get().is_precondition()\n    }\n\n    /// Returns true if 
the underlying identifier is a postcondition (trait/impl item)\n    /// Should be removed once https://github.com/cryspen/hax/issues/1646 has been fixed\n    pub fn is_postcondition(self) -> bool {\n        self.0.get().is_postcondition()\n    }\n\n    /// Renders a view of the global identifier.\n    pub fn view(self) -> view::View {\n        match self.0.get() {\n            GlobalIdInner::FreshModule(id) => id.view(),\n            GlobalIdInner::Concrete(id) => id.view(),\n            GlobalIdInner::Tuple(id) => id.as_concreteid().view(),\n        }\n    }\n\n    /// Returns a tuple identifier if `self` is indeed a tuple.\n    pub fn expect_tuple(self) -> Option<TupleId> {\n        match self.0.get() {\n            GlobalIdInner::Tuple(tuple_id) => Some(*tuple_id),\n            _ => None,\n        }\n    }\n\n    /// Gets the closest module only parent identifier, that is, the closest parent whose path\n    /// contains only path chunks of kind `DefKind::Mod`. Can be itself (for fresh modules).\n    pub fn mod_only_closest_parent(self) -> Self {\n        match self.0.get() {\n            GlobalIdInner::FreshModule(_) => self,\n            GlobalIdInner::Concrete(concrete_id) => concrete_id.mod_only_closest_parent().into(),\n            GlobalIdInner::Tuple(tuple_id) => {\n                tuple_id.as_concreteid().mod_only_closest_parent().into()\n            }\n        }\n    }\n\n    /// Change the krate name (the first element of the `GlobalId`) to `name`.\n    pub fn rename_krate(self, name: &str) -> Self {\n        match self.0.get() {\n            GlobalIdInner::FreshModule(fresh_module) => {\n                Self(GlobalIdInner::FreshModule(fresh_module.rename_krate(name)).intern())\n            }\n            GlobalIdInner::Concrete(concrete_id) => {\n                let mut concrete_id = concrete_id.clone();\n                concrete_id.rename_krate(name);\n                Self(GlobalIdInner::Concrete(concrete_id).intern())\n            }\n            
GlobalIdInner::Tuple(tuple_id) => {\n                let mut concrete_id = tuple_id.as_concreteid().clone();\n                concrete_id.rename_krate(name);\n                Self(GlobalIdInner::Concrete(concrete_id).intern())\n            }\n        }\n    }\n\n    /// Add a suffix to a GlobalId\n    pub fn with_suffix(self, suffix: ReservedSuffix) -> Self {\n        match self.0.get() {\n            GlobalIdInner::Concrete(concrete_id) => Self(\n                GlobalIdInner::Concrete(ConcreteId {\n                    suffix: Some(suffix),\n                    ..concrete_id.clone()\n                })\n                .intern(),\n            ),\n            GlobalIdInner::Tuple(_) | GlobalIdInner::FreshModule(_) => self,\n        }\n    }\n}\n\nimpl GlobalIdInner {\n    /// Extract the `ExplicitDefId` from a `GlobalId`.\n    fn explicit_def_id(&self) -> Option<ExplicitDefId> {\n        match self {\n            GlobalIdInner::Concrete(concrete_id) => Some(concrete_id.def_id.clone()),\n            _ => None,\n        }\n    }\n\n    /// Returns true if the underlying identifier is a constructor\n    pub fn is_constructor(&self) -> bool {\n        match self {\n            GlobalIdInner::Concrete(concrete_id) => concrete_id.def_id.is_constructor,\n            GlobalIdInner::Tuple(TupleId::Constructor { .. }) => true,\n            _ => false,\n        }\n    }\n\n    /// Returns true if the underlying identifier is a projector\n    pub fn is_projector(&self) -> bool {\n        match self {\n            GlobalIdInner::Concrete(concrete_id) => {\n                matches!(concrete_id.def_id.def_id.get().kind, DefKind::Field)\n            }\n            GlobalIdInner::Tuple(TupleId::Field { .. 
}) => true,\n            _ => false,\n        }\n    }\n\n    /// Returns true if the underlying identifier has the precondition suffix\n    /// Should be removed once https://github.com/cryspen/hax/issues/1646 has been fixed\n    pub fn is_precondition(&self) -> bool {\n        matches!(self, GlobalIdInner::Concrete(concrete_id) if matches!(concrete_id.suffix, Some(ReservedSuffix::Pre)))\n    }\n\n    /// Returns true if the underlying identifier has the postcondition suffix\n    /// Should be removed once https://github.com/cryspen/hax/issues/1646 has been fixed\n    pub fn is_postcondition(&self) -> bool {\n        matches!(self, GlobalIdInner::Concrete(concrete_id) if matches!(concrete_id.suffix, Some(ReservedSuffix::Post)))\n    }\n}\n\nimpl From<ConcreteId> for GlobalId {\n    fn from(concrete_id: ConcreteId) -> Self {\n        Self(GlobalIdInner::Concrete(concrete_id).intern())\n    }\n}\n\nimpl ConcreteId {\n    /// Renders a view of the concrete identifier.\n    fn view(&self) -> view::View {\n        view::View::from(self.def_id.clone()).with_suffix(self.suffix.clone())\n    }\n\n    /// Gets the closest module only parent identifier, that is, the closest\n    /// parent whose path contains only path chunks of kind `DefKind::Mod`.\n    fn mod_only_closest_parent(&self) -> Self {\n        let mut parents = self.def_id.parents().collect::<Vec<_>>();\n        parents.reverse();\n        let def_id = parents\n            .into_iter()\n            .take_while(|id| matches!(id.def_id.kind, DefKind::Mod))\n            .last()\n            .expect(\"Invariant broken: a DefId must always contain at least on `mod` segment (the crate)\");\n        Self {\n            def_id,\n            moved: self.moved.clone(),\n            suffix: None,\n        }\n    }\n\n    fn rename_krate(&mut self, name: &str) {\n        self.def_id.rename_krate(name);\n    }\n\n    fn to_debug_string(&self) -> String {\n        self.def_id.def_id.get().to_debug_string()\n    }\n}\n\nimpl 
PartialEq<DefId> for GlobalId {\n    fn eq(&self, other: &DefId) -> bool {\n        if let GlobalIdInner::Concrete(concrete) = self.0.get() {\n            &concrete.def_id.def_id == other\n        } else {\n            false\n        }\n    }\n}\nimpl PartialEq<GlobalId> for DefId {\n    fn eq(&self, other: &GlobalId) -> bool {\n        other == self\n    }\n}\n\nimpl PartialEq<ExplicitDefId> for GlobalId {\n    fn eq(&self, other: &ExplicitDefId) -> bool {\n        self == &other.def_id\n    }\n}\n\nimpl PartialEq<GlobalId> for ExplicitDefId {\n    fn eq(&self, other: &GlobalId) -> bool {\n        other == &self.def_id\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/ast/identifiers.rs",
    "content": "//! Identifier types used throughout the AST.\n//!\n//! This module provides two kinds of identifiers:\n//! - `GlobalId`: fully-qualified paths like `std::mem::drop`\n//! - `LocalId`: local identifiers\n\nuse crate::symbol::Symbol;\nuse hax_rust_engine_macros::*;\nuse std::fmt;\n\npub mod global_id;\n/// Local identifier\n// TODO(issue #1874): local identifiers should have unique indexes\n#[derive_group_for_ast]\npub struct LocalId(pub Symbol);\n\nimpl LocalId {\n    /// Returns true if `self` is a local identifier named `self`: the Rust keyword `self`.\n    pub fn is_self(&self) -> bool {\n        self.0.as_ref() == \"self\"\n    }\n}\n\nimpl fmt::Display for LocalId {\n    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n        write!(f, \"{}\", self.0)\n    }\n}\nimpl From<&hax_frontend_exporter::LocalIdent> for LocalId {\n    fn from(value: &hax_frontend_exporter::LocalIdent) -> Self {\n        Self(Symbol::new(&value.name))\n    }\n}\nimpl From<&str> for LocalId {\n    fn from(name: &str) -> Self {\n        Self(Symbol::new(name))\n    }\n}\n\npub use global_id::GlobalId;\n"
  },
  {
    "path": "rust-engine/src/ast/literals.rs",
    "content": "//! Literal and numeric type kinds used in constant expressions.\n\nuse crate::symbol::Symbol;\nuse hax_rust_engine_macros::*;\n\n/// Size of an integer type\n#[derive_group_for_ast]\npub enum IntSize {\n    /// 8 bits integer type\n    S8,\n    /// 16 bits integer type\n    S16,\n    /// 32 bits integer type\n    S32,\n    /// 64 bits integer type\n    S64,\n    /// 128 bits integer type\n    S128,\n    /// Pointer-sized integer type\n    SSize,\n}\n\nuse hax_frontend_exporter::{FloatTy, IntTy, UintTy};\nimpl From<&IntTy> for IntSize {\n    fn from(value: &IntTy) -> Self {\n        match value {\n            IntTy::I128 => Self::S128,\n            IntTy::I64 => Self::S64,\n            IntTy::I32 => Self::S32,\n            IntTy::I16 => Self::S16,\n            IntTy::I8 => Self::S8,\n            IntTy::Isize => Self::SSize,\n        }\n    }\n}\nimpl From<&UintTy> for IntSize {\n    fn from(value: &UintTy) -> Self {\n        match value {\n            UintTy::U128 => Self::S128,\n            UintTy::U64 => Self::S64,\n            UintTy::U32 => Self::S32,\n            UintTy::U16 => Self::S16,\n            UintTy::U8 => Self::S8,\n            UintTy::Usize => Self::SSize,\n        }\n    }\n}\nimpl From<&IntTy> for IntKind {\n    fn from(value: &IntTy) -> Self {\n        IntKind {\n            size: value.into(),\n            signedness: Signedness::Signed,\n        }\n    }\n}\nimpl From<&UintTy> for IntKind {\n    fn from(value: &UintTy) -> Self {\n        IntKind {\n            size: value.into(),\n            signedness: Signedness::Unsigned,\n        }\n    }\n}\nimpl From<&FloatTy> for FloatKind {\n    fn from(value: &FloatTy) -> Self {\n        match value {\n            FloatTy::F128 => Self::F128,\n            FloatTy::F64 => Self::F64,\n            FloatTy::F32 => Self::F32,\n            FloatTy::F16 => Self::F16,\n        }\n    }\n}\n\n/// Signedness of a numeric type\n#[derive_group_for_ast]\npub enum Signedness {\n    /// Signed type 
(`i32`, `i64`, ...)\n    Signed,\n    /// Unsigned type (`u32`, `u64`, ...)\n    Unsigned,\n}\n\n/// Describes a Rust integer type (`u64`, `i32`, ...)\n#[derive_group_for_ast]\npub struct IntKind {\n    /// Size of this integer type\n    pub size: IntSize,\n    /// Whether this integer type is signed or unsigned\n    pub signedness: Signedness,\n}\n\n/// Float types\n#[derive_group_for_ast]\npub enum FloatKind {\n    /// 16 bits float\n    F16,\n    /// 32 bits float\n    F32,\n    /// 64 bits float\n    F64,\n    /// 128 bits float\n    F128,\n}\n\n/// Rust literal\n#[derive_group_for_ast]\npub enum Literal {\n    /// String literal\n    String(Symbol),\n    /// Character literal\n    Char(char),\n    /// Boolean literal\n    Bool(bool),\n    /// Integer literal\n    Int {\n        /// Value as u128\n        value: Symbol,\n        /// True if `-`\n        negative: bool,\n        /// Rust int type description (size + signedness)\n        kind: IntKind,\n    },\n    /// Float literal\n    Float {\n        /// Value as a string\n        value: Symbol,\n        /// True if `-`\n        negative: bool,\n        /// Size\n        kind: FloatKind,\n    },\n}\n"
  },
  {
    "path": "rust-engine/src/ast/resugared.rs",
    "content": "//! This module defines *resugared fragments* for the Hax Rust engine's AST.\n//!\n//! A resugared fragment is an additional AST node used solely for pretty-printing purposes.\n//! These nodes carry no semantic meaning in hax core logic but enable more accurate\n//! or backend-specific surface syntax reconstruction.\n//!\n//! For example, the engine represents the `unit` type as a zero-sized tuple `()`,\n//! mirroring Rust's internal representation. However, this may not suit all backends:\n//! in F*, `unit` is explicitly written as `unit`, not `()`.\n//!\n//! To accommodate such differences, we introduce resugared fragments (e.g. `UnitType`) that\n//! allow the printer to emit the expected syntax while maintaining the same internal semantics.\n\nuse hax_rust_engine_macros::*;\n\nuse super::*;\n\n/// Resugared variants for items. This represent extra printing-only items, see [`super::ItemKind::Resugared`].\n#[derive_group_for_ast]\npub enum ResugaredItemKind {\n    /// A `const` item, for example `const NAME: T = body;`.\n    /// The type of the constant is `body.ty`.\n    Constant {\n        /// The identifier of the constant, for example `krate::module::NAME`.\n        name: GlobalId,\n        /// The body of the constant, for example `body`.\n        body: Expr,\n        /// The generic arguments and constraints of the constant.\n        /// Note: constant supporting generics is a nightly feature (generic_const_items).\n        generics: Generics,\n    },\n    /// A recursive function definition. 
Detected by checking whether the function\n    /// body contains a reference to its own name.\n    RecursiveFn {\n        /// The identifier of the function.\n        name: GlobalId,\n        /// The generic arguments and constraints of the function.\n        generics: Generics,\n        /// The body of the function.\n        body: Expr,\n        /// The parameters of the function.\n        params: Vec<Param>,\n        /// The safety of the function.\n        safety: SafetyKind,\n    },\n}\n\n/// Resugared variants for expressions. This represent extra printing-only expressions, see [`super::ExprKind::Resugared`].\n#[derive_group_for_ast]\n// TODO: drop `clippy::large_enum_variant` when https://github.com/cryspen/hax/issues/1666 is addressed.\n#[allow(clippy::large_enum_variant)]\npub enum ResugaredExprKind {\n    /// A tuple constructor.\n    ///\n    /// # Example:\n    /// `(a, b)`\n    Tuple(Vec<Expr>),\n    /// A let-binding of a \"pure\" (non-panicking) expression\n    ///\n    /// # Example:\n    /// `let x = 9; x + 0`\n    LetPure {\n        /// The left-hand side of the `let` expression. (`x` in the example)\n        lhs: Pat,\n        /// The right-hand side of the `let` expression. (`9` in the example)\n        rhs: Expr,\n        /// The body of the `let`. (`x + 0` in the example)\n        body: Expr,\n    },\n}\n\n/// Resugared variants for patterns. This represent extra printing-only patterns, see [`super::PatKind::Resugared`].\n#[derive_group_for_ast]\npub enum ResugaredPatKind {\n    /// A record constructor pattern where wildcard fields are replaced by `..`.\n    ConstructWithEllipsis {\n        /// The identifier of the constructor we are matching.\n        constructor: GlobalId,\n        /// Is this a struct? (meaning, *not* a variant from an enum)\n        is_struct: bool,\n        /// Only the explicitly-bound (non-wildcard) fields.\n        fields: Vec<(GlobalId, Pat)>,\n    },\n}\n\n/// Resugared variants for types. 
This represent extra printing-only types, see [`super::TyKind::Resugared`].\n#[derive_group_for_ast]\npub enum ResugaredTyKind {\n    /// A tuple tupe.\n    ///\n    /// # Example:\n    /// `(i32, bool)`\n    Tuple(Vec<Ty>),\n}\n\n/// Resugared variants for impl. items. This represent extra printing-only impl. items, see [`super::ImplItemKind::Resugared`].\n#[derive_group_for_ast]\npub enum ResugaredImplItemKind {\n    /// An associated `const` impl item, for example `const NAME: T = body;`.\n    /// The type of the constant is `body.ty`.\n    Constant {\n        /// The body of the constant, for example `body`.\n        body: Expr,\n    },\n}\n\n/// Resugared variants for trait items. This represent extra printing-only trait items, see [`super::TraitItemKind::Resugared`].\n#[derive_group_for_ast]\npub enum ResugaredTraitItemKind {}\n\n/// Marks a type as a resugar fragment of the AST.\npub trait ResugaredFragment {\n    /// What fragment of the AST this resugar is extending?\n    type ParentFragment;\n}\n\n/// Convenience macro which implements [`ResugaredFragment`] on `$ty`, setting\n/// `$parent` as the `ParentFragment`, as well as `From<$ty>` for `$parent`, by\n/// wrapping the `$ty` in `$parent::Resugared(..)`.\nmacro_rules! derive_from {\n    ($($ty:ty => $parent:ty),*) => {\n        $(impl ResugaredFragment for $ty {\n            type ParentFragment = $parent;\n        }\n        impl From<$ty> for <$ty as ResugaredFragment>::ParentFragment {\n            fn from(value: $ty) -> Self {\n                Self::Resugared(value)\n            }\n        })*\n    };\n}\n\nderive_from!(\n    ResugaredItemKind => ItemKind,\n    ResugaredExprKind => ExprKind,\n    ResugaredPatKind => PatKind,\n    ResugaredTyKind => TyKind,\n    ResugaredImplItemKind => ImplItemKind,\n    ResugaredTraitItemKind => TraitItemKind\n);\n"
  },
  {
    "path": "rust-engine/src/ast/span.rs",
    "content": "//! Source positions.\n\nuse crate::interning::{Internable, Interned, InterningTable};\nuse hax_rust_engine_macros::*;\nuse std::sync::{LazyLock, Mutex};\n\n/// Creates a fresh identifier for a span.\nfn fresh_id() -> u32 {\n    use std::sync::atomic::{AtomicU32 as AtomicInt, Ordering};\n    static CURRENT_ID: AtomicInt = AtomicInt::new(0);\n    CURRENT_ID.fetch_add(1, Ordering::Relaxed)\n}\n\n/// Position of a Rust source\n#[derive_group_for_ast]\nstruct SpanData {\n    /// A vector of spans as defined by the frontend.\n    /// This is useful for supporting in a trivial way union of spans.\n    data: Vec<hax_frontend_exporter::Span>,\n    /// A reference to the item in which this span lives. This information is\n    /// used for debugging and profiling purposes, e.g. for `cargo hax into\n    /// --stats backend`.\n    owner_hint: Option<Interned<hax_frontend_exporter::DefId>>,\n}\n\nimpl SpanData {\n    /// Creates a dummy span.\n    fn dummy() -> Self {\n        let lo: hax_frontend_exporter::Loc = hax_frontend_exporter::Loc { line: 0, col: 0 };\n        let hi = lo.clone();\n        SpanData {\n            data: vec![hax_frontend_exporter::Span {\n                lo,\n                hi,\n                filename: hax_frontend_exporter::FileName::Custom(\"dumny\".into()),\n                rust_span_data: None,\n            }],\n            owner_hint: None,\n        }\n    }\n\n    /// Creates a [`Span`] given information from the hax exporter.\n    fn from_exporter(\n        span: hax_frontend_exporter::Span,\n        owner_hint: Option<&hax_frontend_exporter::DefId>,\n    ) -> Self {\n        Self {\n            data: vec![span],\n            owner_hint: owner_hint.map(Interned::intern),\n        }\n    }\n}\n\n/// Position of a Rust source\n#[derive_group_for_ast]\n#[derive(Copy)]\npub struct Span {\n    #[serde(flatten)]\n    data: Interned<SpanData>,\n    /// A unique identifier. 
Since we store spans almost for every node of the\n    /// AST, having a unique identifier for spans gives us a fine-grained way of\n    /// refering to sub-nodes in debugging context. This id is indeed mostly\n    /// used by the web debugger.\n    id: u32,\n}\n\nimpl Internable for SpanData {\n    fn interning_table() -> &'static Mutex<InterningTable<Self>> {\n        static TABLE: LazyLock<Mutex<InterningTable<SpanData>>> =\n            LazyLock::new(|| Mutex::new(InterningTable::default()));\n        &TABLE\n    }\n}\n\nimpl Span {\n    /// Creates a dummy span.\n    pub fn dummy() -> Self {\n        static DUMMY_SPAN: LazyLock<Span> = LazyLock::new(|| {\n            let data = Interned::intern(&SpanData::dummy());\n            Span {\n                data,\n                id: fresh_id(),\n            }\n        });\n        *DUMMY_SPAN\n    }\n\n    /// Creates a [`Span`] given information from the hax exporter.\n    pub fn from_exporter(\n        span: hax_frontend_exporter::Span,\n        owner_hint: Option<&hax_frontend_exporter::DefId>,\n    ) -> Self {\n        let data = Interned::intern(&SpanData::from_exporter(span, owner_hint));\n        Self {\n            data,\n            id: fresh_id(),\n        }\n    }\n\n    /// Get a vector of frontend spans given a [`Span`].\n    pub fn as_frontend_spans(self) -> &'static [hax_frontend_exporter::Span] {\n        &self.data.get().data\n    }\n}\n\nimpl Internable for hax_frontend_exporter::DefId {\n    fn interning_table() -> &'static Mutex<InterningTable<Self>> {\n        static TABLE: LazyLock<Mutex<InterningTable<hax_frontend_exporter::DefId>>> =\n            LazyLock::new(|| Mutex::new(InterningTable::default()));\n        &TABLE\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/ast/utils.rs",
    "content": "//! This module provides a collection of utilities to work on AST.\n\nuse super::visitors::*;\nuse super::*;\nuse identifiers::*;\nuse std::collections::HashMap;\n\n/// Useful visitor to map AST fragments.\npub mod mappers {\n    use super::*;\n\n    /// Visitor that substitutes local identifiers in ASTs.\n    pub struct SubstLocalIds(HashMap<LocalId, LocalId>);\n\n    impl SubstLocalIds {\n        /// Create a substituer given one replacement couple.\n        pub fn one(from: LocalId, to: LocalId) -> Self {\n            Self::many([(from, to)])\n        }\n        /// Create a substituer given a bunch of replacement couples.\n        pub fn many(replacements: impl IntoIterator<Item = (LocalId, LocalId)>) -> Self {\n            Self(replacements.into_iter().collect())\n        }\n    }\n\n    impl AstVisitorMut for SubstLocalIds {\n        fn visit_local_id(&mut self, local_id: &mut LocalId) {\n            if let Some(replacement) = self.0.get(local_id) {\n                *local_id = replacement.clone();\n            }\n        }\n    }\n}\n\nimpl Expr {\n    /// Create a tuple expression out of components.\n    pub fn tuple(components: Vec<Expr>, span: Span) -> Self {\n        let ty = TyKind::tuple(\n            components\n                .iter()\n                .map(Typed::ty)\n                .cloned()\n                .map(GenericValue::Ty)\n                .collect(),\n        )\n        .promote();\n        ExprKind::tuple(components).promote(ty, span)\n    }\n\n    /// Create a unit (tuple of size 0) expression.\n    pub fn unit(span: Span) -> Self {\n        ExprKind::GlobalId(global_id::TupleId::Constructor { length: 0 }.into())\n            .promote(TyKind::unit().promote(), span)\n    }\n\n    /// Creates a `App` node for a standalone function.\n    pub fn standalone_fn_app(\n        head: impl Into<FnAppHead>,\n        generic_args: Vec<GenericValue>,\n        args: Vec<Expr>,\n        output_type: Ty,\n        span: Span,\n    ) -> 
Self {\n        ExprKind::standalone_fn_app(head, generic_args, args, output_type.clone(), span)\n            .promote(output_type, span)\n    }\n\n    /// Creates a `App` node.\n    pub fn fn_app(\n        head: impl Into<FnAppHead>,\n        generic_args: Vec<GenericValue>,\n        args: Vec<Expr>,\n        output_type: Ty,\n        bounds_impls: Vec<ImplExpr>,\n        trait_: Option<(ImplExpr, Vec<GenericValue>)>,\n        span: Span,\n    ) -> Self {\n        ExprKind::fn_app(\n            head,\n            generic_args,\n            args,\n            output_type.clone(),\n            bounds_impls,\n            trait_,\n            span,\n        )\n        .promote(output_type, span)\n    }\n\n    /// Removes a box\n    pub fn unbox_once(&self) -> Option<&Expr> {\n        if let ExprKind::App { head, args, .. } = self.kind()\n            && let [arg] = &**args\n            && let ExprKind::GlobalId(head) = head.kind()\n            && let crate::names::alloc::boxed::Impl::new\n            | crate::names::rust_primitives::hax::box_new = *head\n        {\n            Some(arg)\n        } else {\n            None\n        }\n    }\n\n    /// Removes a deref\n    pub fn underef_once(&self) -> Option<&Expr> {\n        if let ExprKind::App { head, args, .. 
} = self.kind()\n            && let [arg] = &**args\n            && let ExprKind::GlobalId(head) = head.kind()\n            && let crate::names::rust_primitives::hax::deref_op = *head\n        {\n            Some(arg)\n        } else {\n            None\n        }\n    }\n\n    /// Removes all boxes and derefs wrapping the expression\n    pub fn unbox_underef(&self) -> &Expr {\n        let mut current = self;\n        while let Some(e) = current.unbox_once().or_else(|| current.underef_once()) {\n            current = e\n        }\n        current\n    }\n}\n\nimpl ExprKind {\n    /// Creates a `App` node for a standalone function.\n    pub fn standalone_fn_app(\n        head: impl Into<FnAppHead>,\n        generic_args: Vec<GenericValue>,\n        args: Vec<Expr>,\n        output_type: Ty,\n        span: Span,\n    ) -> Self {\n        Self::fn_app(head, generic_args, args, output_type, vec![], None, span)\n    }\n\n    /// Creates a `App` node.\n    pub fn fn_app(\n        head: impl Into<FnAppHead>,\n        generic_args: Vec<GenericValue>,\n        args: Vec<Expr>,\n        output_type: Ty,\n        bounds_impls: Vec<ImplExpr>,\n        trait_: Option<(ImplExpr, Vec<GenericValue>)>,\n        span: Span,\n    ) -> Self {\n        let head = 'head: {\n            let kind = match head.into() {\n                FnAppHead::GlobalId(global_id) => ExprKind::GlobalId(global_id),\n                FnAppHead::ExprKind(expr_kind) => expr_kind,\n                FnAppHead::Expr(expr) => break 'head expr,\n            };\n            let head_ty = TyKind::Arrow {\n                inputs: args.iter().map(Typed::ty).cloned().collect(),\n                output: output_type.clone(),\n            }\n            .promote();\n            kind.promote(head_ty, span)\n        };\n\n        Self::App {\n            head,\n            args,\n            generic_args,\n            bounds_impls,\n            trait_,\n        }\n    }\n\n    /// Creates a tuple out of a vector of 
components.\n    pub fn tuple(components: Vec<Expr>) -> Self {\n        let length = components.len();\n        ExprKind::Construct {\n            constructor: global_id::TupleId::Constructor { length }.into(),\n            is_record: false,\n            is_struct: true,\n            fields: components\n                .into_iter()\n                .enumerate()\n                .map(|(field, expr)| (global_id::TupleId::Field { length, field }.into(), expr))\n                .collect(),\n            base: None,\n        }\n    }\n\n    /// Promote to an `Expr`\n    pub fn promote(self, ty: Ty, span: Span) -> Expr {\n        Expr {\n            kind: Box::new(self),\n            ty,\n            meta: Metadata {\n                span,\n                attributes: Vec::new(),\n            },\n        }\n    }\n}\n\nimpl Metadata {\n    /// Get an iterator over hax attributes for this AST fragment.\n    pub fn hax_attributes(&self) -> impl Iterator<Item = &hax_lib_macros_types::AttrPayload> {\n        crate::attributes::hax_attributes(&self.attributes)\n    }\n}\n\nimpl Pat {\n    /// Expects the pattern to be a simple binding `self`.\n    pub fn expect_self(&self) -> Option<LocalId> {\n        if let PatKind::Binding { var, .. 
} = self.kind()\n            && var.is_self()\n        {\n            Some(var.clone())\n        } else {\n            None\n        }\n    }\n}\n\n/// Helper enum that describes what can serve as function application heads.\n/// This is an helper that is useful for [`ExprKind::fn_application`].\npub enum FnAppHead {\n    /// A global identifier\n    GlobalId(GlobalId),\n    /// An expression kind\n    ExprKind(ExprKind),\n    /// A full blown expression\n    Expr(Expr),\n}\n\nimpl From<GlobalId> for FnAppHead {\n    fn from(value: GlobalId) -> Self {\n        Self::GlobalId(value)\n    }\n}\nimpl From<ExprKind> for FnAppHead {\n    fn from(value: ExprKind) -> Self {\n        Self::ExprKind(value)\n    }\n}\nimpl From<Expr> for FnAppHead {\n    fn from(value: Expr) -> Self {\n        Self::Expr(value)\n    }\n}\n\nimpl Generics {\n    /// Concatenate two generics\n    pub fn concat(mut self, other: Self) -> Self {\n        self.constraints.extend(other.constraints);\n        self.params.extend(other.params);\n        use std::cmp::Ordering;\n        self.params.sort_by(|a, b| match (a.kind(), b.kind()) {\n            (GenericParamKind::Lifetime, GenericParamKind::Lifetime) => Ordering::Equal,\n            (GenericParamKind::Lifetime, _) => Ordering::Less,\n            (_, GenericParamKind::Lifetime) => Ordering::Greater,\n            _ => Ordering::Equal,\n        });\n        self\n    }\n    /// Empty generics\n    pub fn empty() -> Self {\n        Self {\n            params: Vec::new(),\n            constraints: Vec::new(),\n        }\n    }\n}\n\nimpl Item {\n    /// Returns a `LocalId` named `self` if the item is a standalone function\n    /// whose first argument is the keyword `self`. In other words, this\n    /// function returns a local identifier only for associated methods from\n    /// inherent `impl` blocks.\n    pub fn self_id(&self) -> Option<LocalId> {\n        if let ItemKind::Fn { params, .. } = self.kind()\n            && let [first, ..] 
= &params[..]\n            && let Some(self_id) = first.pat.expect_self()\n        {\n            Some(self_id.clone())\n        } else {\n            None\n        }\n    }\n}\n\nimpl ItemKind {\n    /// Promote to an item\n    pub fn promote(self, ident: GlobalId, span: Span) -> Item {\n        Item {\n            ident,\n            kind: self,\n            meta: Metadata {\n                span,\n                attributes: Vec::new(),\n            },\n        }\n    }\n}\n\nimpl GenericValue {\n    /// Tries to extract a [`Ty`] out of a [`GenericValue`].\n    pub fn expect_ty(&self) -> Option<&Ty> {\n        let Self::Ty(ty) = self else { return None };\n        Some(ty)\n    }\n}\n\nimpl TyKind {\n    /// Tuple type\n    pub fn tuple(args: Vec<GenericValue>) -> Self {\n        let head = global_id::TupleId::Type { length: args.len() }.into();\n        Self::App { head, args }\n    }\n    /// Unit type\n    pub fn unit() -> Self {\n        Self::tuple(Vec::new())\n    }\n    /// Promote to a Ty\n    pub fn promote(self) -> Ty {\n        Ty(Box::new(self))\n    }\n}\n\nimpl Arm {\n    /// Create a non-guarded arm\n    pub fn non_guarded(pat: Pat, body: Expr, span: Span) -> Self {\n        Self {\n            pat,\n            body,\n            guard: None,\n            meta: Metadata {\n                span,\n                attributes: Vec::new(),\n            },\n        }\n    }\n}\n\nimpl PatKind {\n    /// Pattern for binding to a single variable\n    pub fn var_pat(var: LocalId) -> Self {\n        Self::Binding {\n            mutable: false,\n            var,\n            mode: BindingMode::ByValue,\n            sub_pat: None,\n        }\n    }\n    /// Promote to a `Pat`\n    pub fn promote(self, ty: Ty, span: Span) -> Pat {\n        Pat {\n            kind: Box::new(self),\n            ty,\n            meta: Metadata {\n                span,\n                attributes: Vec::new(),\n            },\n        }\n    }\n}\n\nimpl Variant {\n    /// Whether 
a variant has fields or not.\n    /// See https://doc.rust-lang.org/reference/items/enumerations.html#field-less-enum.\n    pub fn is_fieldless(&self) -> bool {\n        self.arguments.is_empty()\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/ast/visitors.rs",
    "content": "//! Syntax tree traversals to walk a shared or mutable borrow of the syntax tree\n//! of Hax. The visitors are generated using the [`derive_generic_visitor`]\n//! library.\n//!\n//! This module provides visitors of different flavors of visitors, and visitor\n//! wrappers that can enhance the default behavior of a visitor.\n//!\n//! We provide four main visitors.\n//!  - [`AstVisitor`] and [`AstVisitorMut`]: visitor that never early exit.\n//!  - [`AstEarlyExitVisitor`] and [`AstEarlyExitVisitorMut`]: visitor that can early exit.\n//!\n//! Each trait provides methods `visit_expr`, `visit_ty`, etc. enabling easy AST\n//! traversal.\n//!\n//! Importantly, we also provide visitor wrappers that enhance visitors with\n//! common useful behavior. See the module [`wrappers`] for more information.\n\nuse super::*;\nuse derive_generic_visitor::*;\nuse hax_lib_macros_types::AttrPayload;\n\npub mod wrappers {\n    //! This module provides a visitor wrappers, or transformer of visitors.\n    //! Such wrappers transform the behavior of a visitor.\n    //!\n    //! For example, [`SpanWrapper`] takes care of keeping track of [`Span`]s\n    //! while travesing an AST.\n\n    use std::ops::Deref;\n\n    use super::{infallible::AstVisitable as AstVisitableInfallible, *};\n    use diagnostics::*;\n\n    /// A visitor wrapper that tracks span while visiting the AST. 
Whenever an\n    /// AST node that carries a span is visited, using this wrapper, the ambient\n    /// span is mutated and accessible via the `HasSpan` trait.\n    pub struct SpanWrapper<'a, V>(pub &'a mut V);\n\n    impl<'a, V: HasSpan> SpanWrapper<'a, V> {\n        /// Performs a spanned action: calls the function `action` on\n        /// `ast_fragment`, with the contextual span information in `self` being\n        /// the span found in `ast_fragment`.\n        fn spanned_action<T: Deref, U>(\n            &mut self,\n            ast_fragment: T,\n            action: impl Fn(&mut Self, T) -> U,\n        ) -> U\n        where\n            T::Target: HasSpan,\n        {\n            let span_before = self.0.span();\n            *self.0.span_mut() = ast_fragment.span();\n            // Perform the provided action on `ast_fragment` with `ast_fragment`'s span as contextual span.\n            let result = action(self, ast_fragment);\n            *self.0.span_mut() = span_before;\n            result\n        }\n    }\n\n    impl<'a, V: AstVisitorMut + HasSpan> AstVisitorMut for SpanWrapper<'a, V> {\n        fn visit_inner<T>(&mut self, x: &mut T)\n        where\n            T: AstVisitableInfallible,\n            T: for<'s> DriveMut<'s, AstVisitableInfallibleWrapper<Self>>,\n        {\n            x.drive_map(self.0)\n        }\n        fn visit_item(&mut self, x: &mut Item) {\n            self.spanned_action(x, Self::visit_inner)\n        }\n        fn visit_expr(&mut self, x: &mut Expr) {\n            self.spanned_action(x, Self::visit_inner)\n        }\n        fn visit_pat(&mut self, x: &mut Pat) {\n            self.spanned_action(x, Self::visit_inner)\n        }\n        fn visit_guard(&mut self, x: &mut Guard) {\n            self.spanned_action(x, Self::visit_inner)\n        }\n        fn visit_arm(&mut self, x: &mut Arm) {\n            self.spanned_action(x, Self::visit_inner)\n        }\n        fn visit_impl_item(&mut self, x: &mut ImplItem) {\n            
self.spanned_action(x, Self::visit_inner)\n        }\n        fn visit_trait_item(&mut self, x: &mut TraitItem) {\n            self.spanned_action(x, Self::visit_inner)\n        }\n        fn visit_generic_param(&mut self, x: &mut GenericParam) {\n            self.spanned_action(x, Self::visit_inner)\n        }\n        fn visit_attribute(&mut self, x: &mut Attribute) {\n            self.spanned_action(x, Self::visit_inner)\n        }\n        fn visit_spanned_ty(&mut self, x: &mut SpannedTy) {\n            self.spanned_action(x, Self::visit_inner)\n        }\n    }\n\n    /// A visitor wrapper that automatically collects errors in `ErrorNode`s.\n    /// Coupled with the trait `VisitorWithErrors`, this provides an `error`\n    /// method on a visitor that can be used to throw errors, which will be\n    /// automatically inlined in the AST on the closest error-capable node.\n    pub struct ErrorWrapper<'a, V>(pub &'a mut V);\n\n    /// An opaque error vault. This is the state manipulated by the visitor wrapper [`ErrorWrapper`].\n    /// It is purposefully not-inspectable.\n    #[derive(Default)]\n    pub struct ErrorVault(Vec<Diagnostic>);\n    impl ErrorVault {\n        fn add(&mut self, diagnostic: Diagnostic) {\n            self.0.push(diagnostic);\n        }\n    }\n\n    /// Helper struct that contains error-handling related state information.\n    /// This is used internally by [`setup_error_handling_struct`].\n    pub struct ErrorHandlingState(pub Span, pub ErrorVault);\n    impl Default for ErrorHandlingState {\n        fn default() -> Self {\n            Self(Span::dummy(), Default::default())\n        }\n    }\n\n    #[macro_export]\n    /// Use this macro in an implementation of `AstVisitorMut` to get automatic spans and error handling.\n    macro_rules! 
setup_error_handling_impl {\n        () => {\n            fn visit<T: $crate::ast::visitors::AstVisitableInfallible>(&mut self, x: &mut T) {\n                $crate::ast::visitors::wrappers::SpanWrapper(\n                    &mut $crate::ast::visitors::wrappers::ErrorWrapper(self),\n                )\n                .visit(x)\n            }\n        };\n    }\n    pub use setup_error_handling_impl;\n\n    /// Mark a visitor with a specific diagnostic context.\n    pub trait VisitorWithContext {\n        /// Returns the diagnostic context for this visitor.\n        fn context(&self) -> Context;\n    }\n\n    impl<T: HasSpan> HasSpan for ErrorWrapper<'_, T> {\n        fn span(&self) -> Span {\n            self.0.span()\n        }\n\n        fn span_mut(&mut self) -> &mut Span {\n            self.0.span_mut()\n        }\n    }\n\n    /// A visitor that can throw errors. It should be used in combination with\n    /// `ErrorWrapper`, which will take care of bubbling error up to the nearest\n    /// parent capable of representing errors. 
For instance, if you error out in\n    /// a literal, the error will be represented in the parent expression or the\n    /// parent type, as nodes [`ExprKind::Error`] or [`TyKind ::Error`].\n    pub trait VisitorWithErrors: HasSpan + VisitorWithContext {\n        /// Projects the error vault.\n        fn error_vault(&mut self) -> &mut ErrorVault;\n        /// Send an error.\n        fn error(&mut self, node: impl Into<Fragment>, kind: DiagnosticInfoKind) {\n            let context = self.context();\n            let span = self.span();\n            self.error_vault().add(Diagnostic::new(\n                node,\n                DiagnosticInfo {\n                    context,\n                    span,\n                    kind,\n                },\n            ));\n        }\n    }\n\n    impl<'a, V: VisitorWithErrors> ErrorWrapper<'a, V> {\n        fn error_handled_action<\n            T: FallibleAstNode + Clone + std::fmt::Debug + Into<Fragment>,\n            U,\n        >(\n            &mut self,\n            x: &mut T,\n            action: impl Fn(&mut Self, &mut T) -> U,\n        ) -> U {\n            let diagnostics_snapshot = self.0.error_vault().0.clone();\n            self.0.error_vault().0.clear();\n            let result = action(self, x);\n            let diagnostics: Vec<_> = self.0.error_vault().0.drain(..).collect();\n            if !diagnostics.is_empty() {\n                x.set_error(ErrorNode {\n                    fragment: Box::new(x.clone().into()),\n                    diagnostics,\n                });\n            }\n            self.0.error_vault().0 = diagnostics_snapshot;\n            result\n        }\n    }\n\n    impl<'a, V: AstVisitorMut + VisitorWithErrors> AstVisitorMut for ErrorWrapper<'a, V> {\n        fn visit_inner<T>(&mut self, x: &mut T)\n        where\n            T: AstVisitableInfallible,\n            T: for<'s> DriveMut<'s, AstVisitableInfallibleWrapper<Self>>,\n        {\n            x.drive_map(self.0)\n        }\n        
fn visit_item(&mut self, x: &mut Item) {\n            self.error_handled_action(x, Self::visit_inner)\n        }\n        fn visit_pat(&mut self, x: &mut Pat) {\n            self.error_handled_action(x, Self::visit_inner)\n        }\n        fn visit_expr(&mut self, x: &mut Expr) {\n            self.error_handled_action(x, Self::visit_inner)\n        }\n        fn visit_ty(&mut self, x: &mut Ty) {\n            self.error_handled_action(x, Self::visit_inner)\n        }\n    }\n}\n\n#[hax_rust_engine_macros::replace(AstNodes => include(VisitableAstNodes))]\nmod replaced {\n    use super::*;\n    pub mod infallible {\n        use super::*;\n\n        #[visitable_group(\n            visitor(drive_map(\n                /// An mutable visitor that visits the AST for hax.\n                ///\n                /// ```rust,ignore\n                /// use crate::ast::{diagnostics::*, visitors::*};\n                /// #[setup_error_handling_struct]\n                /// #[derive(Default)]\n                /// struct MyVisitor;\n                ///\n                /// impl VisitorWithContext for MyVisitor {\n                ///     fn context(&self) -> Context {\n                ///         Context::Import\n                ///     }\n                /// }\n                ///\n                /// impl AstVisitorMut for MyVisitor {\n                ///     setup_error_handling_impl!();\n                /// }\n                ///\n                /// // MyVisitor::visit(my_ast_node)\n                /// ```\n                &mut AstVisitorMut\n            ), infallible),\n            visitor(drive(\n                /// An immutable visitor that visits the AST for hax.\n                &AstVisitor\n            ), infallible),\n            skip(\n                String, bool, char, hax_frontend_exporter::Span,\n                for<T> crate::interning::Interned<T>,\n            ),\n            drive(\n                for<T: AstVisitable> Box<T>, for<T: AstVisitable> Option<T>, 
for<T: AstVisitable> Vec<T>,\n                for<A: AstVisitable, B: AstVisitable> (A, B),\n                for<A: AstVisitable, B: AstVisitable, C: AstVisitable> (A, B, C),\n                usize\n            ),\n            override(AstNodes),\n            override_skip(\n                Span, Fragment, GlobalId, Diagnostic, AttrPayload,\n            ),\n        )]\n        /// Helper trait to drive visitor.\n        pub trait AstVisitable {}\n    }\n\n    #[allow(missing_docs)]\n    pub mod fallible {\n        use super::*;\n\n        #[visitable_group(\n            visitor(drive(\n                /// An immutable visitor that can exit early.\n                &AstEarlyExitVisitor\n            )),\n            visitor(drive_mut(\n                /// An immutable visitor that can exit early and mutate the AST fragments.\n                &mut AstEarlyExitVisitorMut\n            )),\n            skip(\n                String, bool, char, hax_frontend_exporter::Span,\n                for<T> crate::interning::Interned<T>,\n            ),\n            drive(\n                for<T: AstVisitable> Box<T>, for<T: AstVisitable> Option<T>, for<T: AstVisitable> Vec<T>,\n                for<A: AstVisitable, B: AstVisitable> (A, B),\n                for<A: AstVisitable, B: AstVisitable, C: AstVisitable> (A, B, C),\n                usize\n            ),\n            override(AstNodes),\n            override_skip(\n                Span, Fragment, GlobalId, Diagnostic, AttrPayload,\n            ),\n        )]\n        /// Helper trait to drive visitor.\n        pub trait AstVisitable {}\n    }\n\n    /// This modules provides `dyn` compatible trait for visitors.\n    pub mod dyn_compatible {\n        use super::*;\n\n        macro_rules! 
derive_erased_ast_visitors {\n            ({$($attrs:tt)*}, $name: ident, $helper: ident, $($ty:ty),*) => {\n                $($attrs)*\n                pub trait $name<'a>: $($helper<'a, $ty> + )* {}\n            };\n        }\n\n        macro_rules! render_path {\n            ($head:ident) => {stringify!($head)};\n            ($head:ident $(::$tail:ident)*) => {\n                concat!(stringify!($head), \"::\", render_path!($($tail)::*))\n            };\n        }\n\n        macro_rules! make_dyn_compatible {\n            ($($visitable_trait:ident)::*, $($visitor_trait:ident)::*, $helper_name: ident, $name: ident, mut:{$($mut:tt)?}, super:{$($super:ident)::*}, $ret:ty) => {\n                #[doc = concat!(\"A [dyn-compatible](https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility) trait similar to [`\", render_path!($($visitor_trait)::*),\"`].\")]\n                #[doc = concat!(\"This trait provides one `visit` method to visit a given type `T` with a given visitor.\")]\n                pub trait $helper_name<'a, T: ?Sized>: $($super)::* {\n                    /// Visit a value with the visitor.\n                    fn visit(&mut self, _: &'a $($mut)? T) -> $ret;\n                }\n\n                impl<'a, T: $($visitable_trait)::*, V: $($visitor_trait)::*> $helper_name<'a, T> for V {\n                    fn visit(&mut self, e: &'a $($mut)? 
T) -> $ret {\n                        <Self as $($visitor_trait)::*>::visit(self, e)\n                    }\n                }\n                derive_erased_ast_visitors!({\n                    #[doc = concat!(\"A [dyn-compatible](https://doc.rust-lang.org/reference/items/traits.html#dyn-compatibility) trait similar to [`\", render_path!($($visitor_trait)::*),\"`].\")]\n                    #[doc = concat!(\"This trait is empty, but it implies a super bound for every type in the AST, so that you can use [`\", stringify!($helper_name), \"::visit\", \"`] with the entire AST.\")]\n                }, $name, $helper_name, AstNodes);\n\n                impl<'a, V: $($visitor_trait)::*> $name<'a> for V {}\n            };\n        }\n\n        make_dyn_compatible!(\n            infallible::AstVisitable,\n            infallible::AstVisitorMut,\n            AstVisitableMut,\n            AstVisitorMut,\n            mut:{mut},\n            super:{},\n            ()\n        );\n        make_dyn_compatible!(\n            infallible::AstVisitable,\n            infallible::AstVisitor,\n            AstVisitable,\n            AstVisitor,\n            mut:{},\n            super:{},\n            ()\n        );\n\n        make_dyn_compatible!(\n            fallible::AstVisitable,\n            fallible::AstEarlyExitVisitorMut,\n            AstEarlyExitVisitableMut,\n            AstEarlyExitVisitorMut,\n            mut:{mut},\n            super:{Visitor},\n            ControlFlow<<Self as Visitor>::Break>\n        );\n        make_dyn_compatible!(\n            fallible::AstVisitable,\n            fallible::AstEarlyExitVisitor,\n            AstEarlyExitVisitable,\n            AstEarlyExitVisitor,\n            mut:{},\n            super:{Visitor},\n            ControlFlow<<Self as Visitor>::Break>\n        );\n    }\n}\n\npub use replaced::dyn_compatible;\nuse replaced::{fallible, infallible};\n\npub use fallible::{\n    AstEarlyExitVisitor, AstEarlyExitVisitorMut, AstVisitable as 
AstVisitableFallible,\n    AstVisitableWrapper,\n};\npub use hax_rust_engine_macros::setup_error_handling_struct;\npub use infallible::{\n    AstVisitable as AstVisitableInfallible, AstVisitableInfallibleWrapper, AstVisitor,\n    AstVisitorMut,\n};\npub use wrappers::{VisitorWithContext, VisitorWithErrors, setup_error_handling_impl};\n\n#[test]\nfn double_literals_in_ast() {\n    use crate::ast::diagnostics::*;\n\n    #[setup_error_handling_struct]\n    #[derive(Default)]\n    struct DoubleU8Literals;\n\n    impl VisitorWithContext for DoubleU8Literals {\n        fn context(&self) -> Context {\n            Context::Import\n        }\n    }\n\n    impl AstVisitorMut for DoubleU8Literals {\n        setup_error_handling_impl!();\n\n        fn visit_literal(&mut self, x: &mut Literal) {\n            let Literal::Int { value, .. } = x else {\n                return;\n            };\n            let Ok(n): Result<u8, _> = str::parse(value) else {\n                return self.error(\n                    x.clone(),\n                    DiagnosticInfoKind::AssertionFailure {\n                        details: \"Bad literal\".into(),\n                    },\n                );\n            };\n            let n = (n as u16) * 2;\n            if n >= u8::MAX as u16 {\n                return self.error(\n                    x.clone(),\n                    DiagnosticInfoKind::AssertionFailure {\n                        details: \"Literal too big\".into(),\n                    },\n                );\n            }\n            *value = Symbol::new(&format!(\"{}\", n));\n        }\n    }\n\n    // Syntax helpers\n    let int_kind = IntKind {\n        size: IntSize::S8,\n        signedness: Signedness::Signed,\n    };\n    let mk_lit = |n: isize| Literal::Int {\n        value: Symbol::new(&format!(\"{}\", n)),\n        negative: false,\n        kind: int_kind.clone(),\n    };\n    let meta = Metadata {\n        span: Span::dummy(),\n        attributes: vec![],\n    };\n    let 
mk_lit_expr = |n| Expr {\n        kind: Box::new(ExprKind::Literal(mk_lit(n))),\n        ty: Ty(Box::new(TyKind::Primitive(PrimitiveTy::Int(\n            int_kind.clone(),\n        )))),\n        meta: meta.clone(),\n    };\n    let mk_array = |exprs| Expr {\n        kind: Box::new(ExprKind::Array(exprs)),\n        ty: Ty(Box::new(TyKind::RawPointer)), // wrong type, but this is not important for this test.\n        meta: meta.clone(),\n    };\n    let mut lit_expr_200 = mk_lit_expr(200);\n\n    // Creates the expression `[50u8, 100u8, 200u8]`: the last one cannot be doubled, and will cause an error.\n    let mut e = mk_array(vec![\n        mk_lit_expr(50),\n        mk_lit_expr(100),\n        lit_expr_200.clone(),\n    ]);\n\n    // Visit the expression.\n    DoubleU8Literals::default().visit(&mut e);\n\n    // Transform `lit_expr_200` into the error `DoubleU8Literal` should produce\n    lit_expr_200.set_error(ErrorNode {\n        fragment: Box::new(lit_expr_200.clone().into()),\n        diagnostics: vec![Diagnostic::new(\n            mk_lit(200),\n            DiagnosticInfo {\n                span: lit_expr_200.span(),\n                context: Context::Import,\n                kind: DiagnosticInfoKind::AssertionFailure {\n                    details: \"Literal too big\".into(),\n                },\n            },\n        )],\n    });\n\n    // Check that the visitor works as expected\n    assert_eq!(\n        e,\n        mk_array(vec![mk_lit_expr(100), mk_lit_expr(200), lit_expr_200])\n    );\n}\n"
  },
  {
    "path": "rust-engine/src/ast.rs",
    "content": "//! The core abstract syntax tree (AST) representation for hax.\n//!\n//! This module defines the primary data structures used to represent\n//! typed syntax.\n//!\n//! The design of this AST is designed under the following constraints:\n//!  1. Valid (cargo check) pretty-printed Rust can be produced out of it.\n//!  2. The Rust THIR AST from the frontend can be imported into this AST.\n//!  3. The AST defined in the OCaml engine can be imported into this AST.\n//!  4. This AST can be exported to the OCaml engine.\n//!  5. This AST should be suitable for AST transformations.\n\npub mod diagnostics;\npub mod fragment;\npub mod identifiers;\npub mod literals;\npub mod resugared;\npub mod span;\npub mod utils;\npub mod visitors;\n\nuse crate::{ast::diagnostics::Context, symbol::Symbol};\nuse diagnostics::Diagnostic;\nuse fragment::Fragment;\nuse hax_rust_engine_macros::*;\npub use identifiers::*;\nuse literals::*;\nuse resugared::*;\nuse span::Span;\n\n/// Represents a generic value used in type applications (e.g., `T` in `Vec<T>`).\n#[derive_group_for_ast]\npub enum GenericValue {\n    /// A type-level generic value.\n    ///\n    /// # Example:\n    /// `i32` in `Vec<i32>`\n    Ty(Ty),\n    /// A const-level generic value.\n    ///\n    /// # Example:\n    /// `12` in `Foo<12>`\n    Expr(Expr),\n    /// A lifetime.\n    ///\n    /// # Example:\n    /// `'a` in `foo<'a>`\n    Lifetime,\n}\n\n/// Built-in primitive types.\n#[derive_group_for_ast]\npub enum PrimitiveTy {\n    /// The `bool` type.\n    Bool,\n    /// An integer type (e.g., `i32`, `u8`).\n    Int(IntKind),\n    /// A float type (e.g. 
`f32`)\n    Float(FloatKind),\n    /// The `char` type\n    Char,\n    /// The `str` type\n    Str,\n}\n\n/// Represent a Rust lifetime region.\n#[derive_group_for_ast]\npub struct Region;\n\n/// A indirection for the representation of types.\n#[derive_group_for_ast]\npub struct Ty(pub(crate) Box<TyKind>);\n\nimpl Ty {\n    /// The type `bool`\n    pub fn bool() -> Self {\n        Self(Box::new(TyKind::Primitive(PrimitiveTy::Bool)))\n    }\n    /// The type `int`\n    pub fn int(size: IntSize, signedness: Signedness) -> Self {\n        Self(Box::new(TyKind::Primitive(PrimitiveTy::Int(IntKind {\n            size,\n            signedness,\n        }))))\n    }\n    /// The `int` check\n    pub fn is_int(&self) -> bool {\n        let Self(b) = self;\n        matches!(\n            &**b,\n            TyKind::Primitive(PrimitiveTy::Int(IntKind {\n                size: _,\n                signedness: _,\n            }))\n        )\n    }\n    /// The (hax) type `Prop`\n    pub fn prop() -> Self {\n        Self(Box::new(TyKind::App {\n            head: crate::names::hax_lib::prop::Prop,\n            args: vec![],\n        }))\n    }\n}\n\n/// Describes any Rust type (e.g., `i32`, `Vec<T>`, `fn(i32) -> bool`).\n#[derive_group_for_ast]\npub enum TyKind {\n    /// A primitive type.\n    ///\n    /// # Example:\n    /// `i32`, `bool`\n    Primitive(PrimitiveTy),\n\n    /// A type application (generic type).\n    ///\n    /// # Example:\n    /// `Vec<i32>`\n    App {\n        /// The type being applied (`Vec` in the example).\n        head: GlobalId,\n        /// The arguments (`[i32]` in the example).\n        args: Vec<GenericValue>,\n    },\n\n    /// A function or closure type.\n    ///\n    /// # Example:\n    /// `fn(i32) -> bool` or `Fn(i32) -> bool`\n    Arrow {\n        /// `i32` in the example\n        inputs: Vec<Ty>,\n        /// `bool` in the example\n        output: Ty,\n    },\n\n    // TODO: Should we keep this type?\n    /// A reference type.\n    ///\n    /// 
# Example:\n    /// `&i32`, `&mut i32`\n    Ref {\n        /// The type inside the reference\n        inner: Ty,\n        /// Is the reference mutable?\n        mutable: bool,\n        /// The region of this reference\n        region: Region,\n    },\n\n    /// A parameter type\n    Param(LocalId),\n\n    // TODO: Should we keep this type?\n    /// A slice type.\n    ///\n    /// # Example:\n    /// `&[i32]`\n    Slice(Ty),\n\n    /// An array type.\n    ///\n    /// # Example:\n    /// `&[i32; 10]`\n    Array {\n        /// The type of the items of the array\n        ty: Ty,\n        /// The length of the array\n        length: Box<Expr>,\n    },\n\n    /// A raw pointer type\n    RawPointer,\n\n    /// An associated type\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    ///     fn f<T: Tr>() -> T::A {...}\n    /// ```\n    AssociatedType {\n        /// Impl expr for `Tr<T>` in the example\n        impl_: ImplExpr,\n        /// `Tr::A` in the example\n        item: GlobalId,\n    },\n\n    /// An opaque type\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// type Foo = impl Bar;\n    /// ```\n    Opaque(GlobalId),\n\n    /// A `dyn` type\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// dyn Tr\n    /// ```\n    Dyn(Vec<DynTraitGoal>),\n\n    /// A resugared type.\n    /// This variant is introduced before printing only.\n    /// Phases must not produce this variant.\n    Resugared(ResugaredTyKind),\n\n    /// Fallback constructor to carry errors.\n    Error(ErrorNode),\n}\n\n#[derive_group_for_ast]\n/// Represent a node of the AST where an error occurred.\npub struct ErrorNode {\n    /// The node from the AST at the time something failed\n    pub fragment: Box<Fragment>,\n    /// The error(s) encountered.\n    pub diagnostics: Vec<Diagnostic>,\n}\n\nimpl ErrorNode {\n    /// Creates an assertion failure out of an AST fragment and a message.\n    pub fn assertion_failure(\n        fragment: impl Into<Fragment> + HasMetadata,\n        
context: Context,\n        message: impl Into<String>,\n    ) -> Self {\n        let span = fragment.span();\n        let fragment = fragment.into();\n        ErrorNode {\n            diagnostics: vec![Diagnostic::new(\n                fragment.clone(),\n                diagnostics::DiagnosticInfo {\n                    context,\n                    span,\n                    kind: hax_types::diagnostics::Kind::AssertionFailure {\n                        details: message.into(),\n                    },\n                },\n            )],\n            fragment: Box::new(fragment),\n        }\n    }\n}\n\n/// A `dyn` trait. The generic arguments are known but the actual type\n/// implementing the trait is known dynamically.\n///\n/// # Example:\n/// ```rust,ignore\n/// dyn Tr<A, B>\n/// ```\n#[derive_group_for_ast]\npub struct DynTraitGoal {\n    /// `Tr` in the example above\n    pub trait_: GlobalId,\n    /// `A, B` in the example above\n    pub non_self_args: Vec<GenericValue>,\n}\n\n/// Extra information attached to syntax nodes.\n#[derive_group_for_ast]\npub struct Metadata {\n    /// The location in the source code.\n    pub span: Span,\n    /// Rust attributes.\n    pub attributes: Attributes,\n    // TODO: add phase/desugar informations\n}\n\n/// A typed expression with metadata.\n#[derive_group_for_ast]\npub struct Expr {\n    /// The kind of expression.\n    pub kind: Box<ExprKind>,\n    /// The type of this expression.\n    pub ty: Ty,\n    /// Source span and attributes.\n    pub meta: Metadata,\n}\n\n/// A typed pattern with metadata.\n#[derive_group_for_ast]\npub struct Pat {\n    /// The kind of pattern.\n    pub kind: Box<PatKind>,\n    /// The type of this pattern.\n    pub ty: Ty,\n    /// Source span and attributes.\n    pub meta: Metadata,\n}\n\n/// A pattern matching arm with metadata.\n#[derive_group_for_ast]\npub struct Arm {\n    /// The pattern of the arm.\n    pub pat: Pat,\n    /// The body of the arm.\n    pub body: Expr,\n    /// The 
optional guard of the arm.\n    pub guard: Option<Guard>,\n    /// Source span and attributes.\n    pub meta: Metadata,\n}\n\n/// A pattern matching arm guard with metadata.\n#[derive_group_for_ast]\npub struct Guard {\n    /// The kind of guard.\n    pub kind: GuardKind,\n    /// Source span and attributes.\n    pub meta: Metadata,\n}\n\n/// Represents different levels of borrowing.\n#[derive_group_for_ast]\npub enum BorrowKind {\n    /// Shared reference\n    ///\n    /// # Example:\n    /// `&x`\n    Shared,\n    /// Unique reference: this is internal to rustc\n    Unique,\n    /// Mutable reference\n    ///\n    /// # Example:\n    /// `&mut x`\n    Mut,\n}\n\n/// Binding modes used in patterns.\n#[derive_group_for_ast]\npub enum BindingMode {\n    /// Binding by value\n    ///\n    /// # Example:\n    /// `x`\n    ByValue,\n    /// Binding by reference\n    ///\n    /// # Example:\n    /// `ref x`, `ref mut x`\n    ByRef(BorrowKind),\n}\n\n/// Represents the various kinds of patterns.\n#[derive_group_for_ast]\npub enum PatKind {\n    /// Wildcard pattern\n    ///\n    /// # Example:\n    /// `_`\n    Wild,\n\n    /// An ascription pattern\n    ///\n    /// # Example:\n    /// `p : ty`\n    Ascription {\n        /// The inner pattern (`p` in the example)\n        pat: Pat,\n        /// The (spanned) type ascription (`ty` in the example)\n        ty: SpannedTy,\n    },\n\n    /// An or pattern\n    ///\n    /// # Example:\n    /// `p | q`\n    /// Always contains at least 2 sub-patterns\n    Or {\n        /// A vector of sub-patterns\n        sub_pats: Vec<Pat>,\n    },\n\n    /// An array pattern\n    ///\n    /// # Example:\n    /// `[p, q]`\n    Array {\n        /// A vector of patterns\n        args: Vec<Pat>,\n    },\n\n    /// A dereference pattern\n    ///\n    /// # Example:\n    /// `&p`\n    Deref {\n        /// The inner pattern\n        sub_pat: Pat,\n    },\n\n    /// A constant pattern\n    ///\n    /// # Example:\n    /// `1`\n    Constant {\n     
   /// The literal\n        lit: Literal,\n    },\n\n    /// A variable binding.\n    ///\n    /// # Examples:\n    /// - `x` → `mutable: false`\n    /// - `mut x` → `mutable: true`\n    /// - `ref x` → `mode: ByRef(Shared)`\n    Binding {\n        /// Is the binding mutable? E.g. `x` is not mutable, `mut x` is.\n        mutable: bool,\n        /// The variable introduced by the binding pattern.\n        var: LocalId,\n        /// The binding mode, e.g. [`BindingMode::Shared`] for `ref x`.\n        mode: BindingMode,\n        /// The sub-pattern, if any.\n        /// For example, this is `Some(inner_pat)` for the pattern `variable @ inner_pat`.\n        sub_pat: Option<Pat>,\n    },\n\n    /// A constructor pattern\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// Foo(x)\n    /// ```\n    Construct {\n        /// The identifier of the constructor we are matching\n        constructor: GlobalId,\n        /// Are we constructing a record? E.g. a struct or a variant with named fields.\n        is_record: bool,\n        /// Is this a struct? (meaning, *not* a variant from an enum)\n        is_struct: bool,\n        /// A list of fields.\n        fields: Vec<(GlobalId, Pat)>,\n    },\n\n    /// A resugared pattern.\n    /// This variant is introduced before printing only.\n    /// Phases must not produce this variant.\n    Resugared(ResugaredPatKind),\n\n    /// Fallback constructor to carry errors.\n    Error(ErrorNode),\n}\n\n/// Represents the various kinds of pattern guards.\n#[derive_group_for_ast]\npub enum GuardKind {\n    /// An `if let` guard.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// match x {\n    ///   Some(value) if let Some(x) = f(value) => x,\n    ///   _ => ...,\n    /// }\n    /// ```\n    IfLet {\n        /// The left-hand side of the guard. `Some(x)` in the example.\n        lhs: Pat,\n        /// The right-hand side of the guard. 
`f(value)` in the example.\n        rhs: Expr,\n    },\n}\n\n// TODO: Replace by places, or just expressions\n/// The left-hand side of an assignment.\n#[derive_group_for_ast]\n#[allow(missing_docs)]\npub enum Lhs {\n    LocalVar {\n        var: LocalId,\n        ty: Ty,\n    },\n    VecRef {\n        e: Box<Lhs>,\n        ty: Ty,\n    },\n    ArbitraryExpr(Box<Expr>),\n    FieldAccessor {\n        e: Box<Lhs>,\n        ty: Ty,\n        field: GlobalId,\n    },\n    ArrayAccessor {\n        e: Box<Lhs>,\n        ty: Ty,\n        index: Expr,\n    },\n}\n\n/// An `ImplExpr` describes the full data of a trait implementation. Because of\n/// generics, this may need to combine several concrete trait implementation\n/// items. For example, `((1u8, 2u8), \"hello\").clone()` combines the generic\n/// implementation of `Clone` for `(A, B)` with the concrete implementations for\n/// `u8` and `&str`, represented as a tree.\n#[derive_group_for_ast]\npub struct ImplExpr {\n    /// The impl. expression itself.\n    pub kind: Box<ImplExprKind>,\n    /// The trait being implemented.\n    pub goal: TraitGoal,\n}\n\n/// Represents all the kinds of impl expr.\n///\n/// # Example:\n/// In the snippet below, the `clone` method on `x` corresponds to the implementation\n/// of `Clone` derived for `Vec<T>` (`ImplApp`) given the `LocalBound` on `T`.\n/// ```rust,ignore\n/// fn f<T: Clone>(x: Vec<T>) -> Vec<T> {\n///   x.clone()\n/// }\n/// ```\n#[derive_group_for_ast]\npub enum ImplExprKind {\n    /// The trait implementation being defined.\n    ///\n    /// # Example:\n    /// The impl expr for `Type: Trait` used in `self.f()` is `Self_`.\n    /// ```rust,ignore\n    /// impl Trait for Type {\n    ///     fn f(&self) {...}\n    ///     fn g(&self) {self.f()}\n    /// }\n    /// ```\n    Self_,\n    /// A concrete `impl` block.\n    ///\n    /// # Example\n    /// ```rust,ignore\n    /// impl Clone for Type { // Consider this `impl` is called `impl0`\n    ///     ...\n    /// }\n    /// 
fn f(x: Type) {\n    ///     x.clone() // Here `clone` comes from `Concrete(impl0)`\n    /// }\n    /// ```\n    Concrete(TraitGoal),\n    /// A bound introduced by a generic clause.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// fn f<T: Clone>(x: T) -> T {\n    ///   x.clone() // Here the method comes from the bound `T: Clone`\n    /// }\n    /// ```\n    LocalBound {\n        /// Local identifier to a bound.\n        id: Symbol,\n    },\n    /// A parent implementation.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// trait SubTrait: Clone {}\n    /// fn f<T: SubTrait>(x: T) -> T {\n    ///   x.clone() // Here the method comes from the parent of the bound `T: SubTrait`\n    /// }\n    /// ```\n    Parent {\n        /// Parent implementation\n        impl_: ImplExpr,\n        /// Which implementation to pick in the parent\n        ident: ImplIdent,\n    },\n    /// A projected associated implementation.\n    ///\n    /// # Example:\n    /// In this snippet, `T::Item` is an `AssociatedType` where the subsequent `ImplExpr`\n    /// is a type projection of `ITerator`.\n    /// ```rust,ignore\n    /// fn f<T: Iterator>(x: T) -> Option<T::Item> {\n    ///     x.next()\n    /// }\n    /// ```\n    Projection {\n        /// The base implementation from which we project\n        impl_: ImplExpr,\n        /// The item in the trait implemented by `impl_`\n        item: GlobalId,\n        /// Which implementation to pick on the item\n        ident: ImplIdent,\n    },\n    /// An instantiation of a generic implementation.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// fn f<T: Clone>(x: Vec<T>) -> Vec<T> {\n    ///   x.clone() // The `Clone` implementation for `Vec` is instantiated with the local bound `T: Clone`\n    /// }\n    /// ```\n    ImplApp {\n        /// The head of the application\n        impl_: ImplExpr,\n        /// The arguments of the application\n        args: Vec<ImplExpr>,\n    },\n    /// The implementation 
provided by a dyn.\n    Dyn,\n    /// A trait implemented natively by rust.\n    Builtin(TraitGoal),\n    /// Fallback constructor to carry errors.\n    Error(ErrorNode),\n}\n\n/// Represents an impl item (associated type or function)\n///\n/// # Example:\n/// ```rust,ignore\n/// impl ... {\n///   fn assoc_fn<T>(...) {...}\n/// }\n/// ```\n#[derive_group_for_ast]\npub struct ImplItem {\n    /// Metadata (span and attributes) for the impl item.\n    pub meta: Metadata,\n    /// Generics for this associated item. `T` in the example.\n    pub generics: Generics,\n    /// The associated item itself.\n    pub kind: ImplItemKind,\n    /// The unique identifier for this associated item.\n    pub ident: GlobalId,\n}\n\n/// Represents the kinds of impl items\n#[derive_group_for_ast]\npub enum ImplItemKind {\n    /// An instantiation of associated type\n    ///\n    /// # Example:\n    /// The associated type `Error` in the following example.\n    /// ```rust,ignore\n    /// impl TryInto for ... {\n    ///   type Error = u8;\n    /// }\n    /// ```\n    Type {\n        /// The type expression, `u8` in the example.\n        ty: Ty,\n        /// The parent bounds. 
In the example, there are none (in the definition\n        /// of `TryInto`, there is no `Error: Something` in the associated type\n        /// definition).\n        parent_bounds: Vec<(ImplExpr, ImplIdent)>,\n    },\n    /// A definition for a trait function\n    ///\n    /// # Example:\n    /// The associated function `into` in the following example.\n    /// ```rust,ignore\n    /// impl Into for T {\n    ///   fn into(&self) -> T {...}\n    /// }\n    /// ```\n    Fn {\n        /// The body of the associated function (`...` in the example)\n        body: Expr,\n        /// The list of the argument for the associated function (`&self` in the example).\n        params: Vec<Param>,\n    },\n\n    /// A resugared impl item.\n    /// This variant is introduced before printing only.\n    /// Phases must not produce this variant.\n    Resugared(ResugaredImplItemKind),\n\n    /// Fallback constructor to carry errors.\n    Error(ErrorNode),\n}\n\n/// Represents a trait item (associated type, fn, or default)\n#[derive_group_for_ast]\npub struct TraitItem {\n    /// Source span and attributes.\n    pub meta: Metadata,\n    /// The kind of trait item we are dealing with (an associated type or function).\n    pub kind: TraitItemKind,\n    /// The generics this associated item carries.\n    ///\n    /// # Example:\n    /// The generics `<B>` on `f`, **not** `<A>`.\n    /// ```rust,ignore\n    /// trait<A> ... 
{\n    ///    fn f<B>(){}\n    /// }\n    /// ```\n    pub generics: Generics,\n    /// The identifier of the associateed item.\n    pub ident: GlobalId,\n}\n\n/// Represents the kinds of trait items\n#[derive_group_for_ast]\npub enum TraitItemKind {\n    /// An associated type\n    Type(Vec<ImplIdent>),\n    /// An associated function\n    Fn(Ty),\n    /// An associated function with a default body.\n    /// A arrow type (like what is given in `TraitItemKind::Ty`) can be\n    /// reconstructed using the types of the parameters and of the body.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// impl ... {\n    ///   fn f(x: u8) -> u8 { x + 2 }\n    /// }\n    /// ```\n    Default {\n        /// The parameters of the associated function (`[x: u8]` in the example).\n        params: Vec<Param>,\n        /// The default body of the associated function (`x + 2` in the example).\n        body: Expr,\n    },\n\n    /// A resugared trait item.\n    /// This variant is introduced before printing only.\n    /// Phases must not produce this variant.\n    Resugared(ResugaredTraitItemKind),\n\n    /// Fallback constructor to carry errors.\n    Error(ErrorNode),\n}\n\n/// A QuoteContent is a component of a quote: it can be a verbatim string, a Rust expression to embed in the quote, a pattern etc.\n///\n/// # Example:\n/// ```rust,ignore\n/// fstar!(\"f ${x + 3} + 10\")\n/// ```\n/// results in `[Verbatim(\"f\"), Expr([[x + 3]]), Verbatim(\" + 10\")]`\n#[derive_group_for_ast]\npub enum QuoteContent {\n    /// A verbatim chunk of backend code.\n    Verbatim(String),\n    /// A Rust expression to inject in the quote.\n    Expr(Expr),\n    /// A Rust pattern to inject in the quote.\n    Pattern(Pat),\n    /// A Rust type to inject in the quote.\n    Ty(Ty),\n}\n\n/// Represents an inlined piece of backend code\n#[derive_group_for_ast]\npub struct Quote(pub Vec<QuoteContent>);\n\n/// The origin of a quote item.\n#[derive_group_for_ast]\npub struct ItemQuoteOrigin {\n    
/// From which kind of item this quote was placed on?\n    pub item_kind: ItemQuoteOriginKind,\n    /// From what item this quote was placed on?\n    pub item_ident: GlobalId,\n    /// What was the position of the quote?\n    pub position: ItemQuoteOriginPosition,\n}\n\n/// The kind of a quote item's origin\n#[derive_group_for_ast]\npub enum ItemQuoteOriginKind {\n    /// A function\n    Fn,\n    /// A type alias\n    TyAlias,\n    /// A type definition (`enum`, `union`, `struct`)\n    Type,\n    /// A macro invocation\n    /// TODO: drop\n    MacroInvocation,\n    /// A trait definition\n    Trait,\n    /// An `impl` block\n    Impl,\n    /// An alias\n    Alias,\n    /// A `use`\n    Use,\n    /// A quote\n    Quote,\n    /// An error\n    HaxError,\n    /// Something unknown\n    NotImplementedYet,\n}\n\n/// The position of a quote item relative to its origin\n#[derive_group_for_ast]\npub enum ItemQuoteOriginPosition {\n    /// The quote was placed before an item\n    Before,\n    /// The quote was placed after an item\n    After,\n    /// The quote replaces an item\n    Replace,\n}\n\n/// The kind of a loop (resugared by respective `Reconstruct...Loops` phases).\n/// Useful for `FunctionalizeLoops`.\n#[derive_group_for_ast]\npub enum LoopKind {\n    /// An unconditional loop.\n    ///\n    /// # Example:\n    /// `loop { ... }`\n    UnconditionalLoop,\n    /// A while loop.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// while(condition) { ... }\n    /// ```\n    WhileLoop {\n        /// The boolean condition\n        condition: Expr,\n    },\n    /// A for loop.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// for i in iterator { ... 
}\n    /// ```\n    ForLoop {\n        /// The pattern of the for loop (`i` in the example).\n        pat: Pat,\n        /// The iterator we're looping on (`iterator` in the example).\n        iterator: Expr,\n    },\n    /// A specialized for loop on a range.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// for i in start..end {\n    ///   ...\n    /// }\n    /// ```\n    ForIndexLoop {\n        /// Where the range begins (`start` in the example).\n        start: Expr,\n        /// Where the range ends (`end` in the example).\n        end: Expr,\n        /// The binding used for the iteration.\n        var: LocalId,\n        /// The type of the binding `var`.\n        var_ty: Ty,\n    },\n}\n\n/// This is a marker to describe what control flow is present in a loop.\n/// It is added by phase `DropReturnBreakContinue` and the information is used in\n/// `FunctionalizeLoops`. We need it to replace the control flow nodes of the AST\n/// by an encoding in the `ControlFlow` enum.\n#[derive_group_for_ast]\npub enum ControlFlowKind {\n    /// Contains no `return`, maybe some `break`s\n    BreakOnly,\n    /// Contains both at least one `return` and maybe some `break`s\n    BreakOrReturn,\n}\n\n/// Represent explicit mutation context for a loop.\n/// This is useful to make loops pure.\n#[derive_group_for_ast]\npub struct LoopState {\n    /// The initial state of the loop.\n    pub init: Expr,\n    /// The pattern that destructures the state of the loop.\n    pub body_pat: Pat,\n}\n\n// TODO: Kill some nodes (e.g. 
`Array`)?\n/// Describes the shape of an expression.\n#[derive_group_for_ast]\npub enum ExprKind {\n    /// If expression.\n    ///\n    /// # Example:\n    /// `if x > 0 { 1 } else { 2 }`\n    If {\n        /// The boolean condition (`x > 0` in the example).\n        condition: Expr,\n        /// The then branch (`1` in the example).\n        then: Expr,\n        /// An optional else branch (`Some(2)`in the example).\n        else_: Option<Expr>,\n    },\n\n    /// Function application.\n    ///\n    /// # Example:\n    /// `f(x, y)`\n    App {\n        /// The head of the function application (or, which function do we apply?).\n        head: Expr,\n        /// The arguments applied to the function.\n        args: Vec<Expr>,\n        /// The generic arguments applied to the function.\n        generic_args: Vec<GenericValue>,\n        /// If the function requires generic bounds to be called, `bounds_impls`\n        /// is a vector of impl. expressions for those bounds.\n        bounds_impls: Vec<ImplExpr>,\n        /// If we apply an associated function, contains the impl. expr used.\n        trait_: Option<(ImplExpr, Vec<GenericValue>)>,\n    },\n\n    /// A literal value.\n    ///\n    /// # Example:\n    /// `42`, `\"hello\"`\n    Literal(Literal),\n\n    /// An array literal.\n    ///\n    /// # Example:\n    /// `[1, 2, 3]`\n    Array(Vec<Expr>),\n\n    /// A constructor application\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// MyEnum::MyVariant { x : 1, ...base }\n    /// ``````\n    Construct {\n        /// The identifier of the constructor we are building (`MyEnum::MyVariant` in the example).\n        constructor: GlobalId,\n        /// Are we constructing a record? E.g. a struct or a variant with named fields. (`true` in the example)\n        is_record: bool,\n        /// Is this a struct? Neaning, *not* a variant from an enum. 
(`false` in the example)\n        is_struct: bool,\n        /// A list of fields (`[(x, 1)]` in the example).\n        fields: Vec<(GlobalId, Expr)>,\n        /// The base expression, if any. (`Some(base)` in the example)\n        base: Option<Expr>,\n    },\n\n    /// A `match`` expression.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// match x {\n    ///     pat1 => expr1,\n    ///     pat2 => expr2,\n    /// }\n    /// ```\n    Match {\n        /// The expression on which we are matching. (`x` in the example)\n        scrutinee: Expr,\n        /// The arms of the match. (`pat1 => expr1` and `pat2 => expr2` in the example)\n        arms: Vec<Arm>,\n    },\n\n    /// A reference expression.\n    ///\n    /// # Examples:\n    /// - `&x` → `mutable: false`\n    /// - `&mut x` → `mutable: true`\n    Borrow {\n        /// Is the borrow mutable?\n        mutable: bool,\n        /// The expression we are borrowing\n        inner: Expr,\n    },\n\n    /// Raw borrow\n    ///\n    /// # Example:\n    /// `*const u8`\n    AddressOf {\n        /// Is the raw pointer mutable?\n        mutable: bool,\n        /// The expression on which we take a pointer\n        inner: Expr,\n    },\n\n    /// A `let` expression used in expressions.\n    ///\n    /// # Example:\n    /// `let x = 1; x + 1`\n    Let {\n        /// The left-hand side of the `let` expression. (`x` in the example)\n        lhs: Pat,\n        /// The right-hand side of the `let` expression. (`1` in the example)\n        rhs: Expr,\n        /// The body of the `let`. 
(`x + 1` in the example)\n        body: Expr,\n    },\n\n    /// A global identifier.\n    ///\n    /// # Example:\n    /// `std::mem::drop`\n    GlobalId(GlobalId),\n\n    /// A local variable.\n    ///\n    /// # Example:\n    /// `x`\n    LocalId(LocalId),\n\n    /// Type ascription\n    Ascription {\n        /// The expression being ascribed.\n        e: Expr,\n        /// The type\n        ty: Ty,\n    },\n\n    /// Variable mutation\n    ///\n    /// # Example:\n    /// `x = 1`\n    Assign {\n        /// the left-hand side (place) of the assign\n        lhs: Lhs,\n        /// The value we are assigning\n        value: Expr,\n    },\n\n    /// Loop\n    ///\n    /// # Example:\n    /// `'label: loop { body }`\n    Loop {\n        /// The body of the loop.\n        body: Expr,\n        /// The kind of loop (e.g. `while`, `loop`, `for`...).\n        kind: Box<LoopKind>,\n        /// An optional loop state, that makes explicit the state mutated by the\n        /// loop.\n        state: Option<LoopState>,\n        /// What kind of control flow is performed by this loop?\n        control_flow: Option<ControlFlowKind>,\n        /// Optional loop label.\n        label: Option<Symbol>,\n    },\n\n    /// The `break` exppression, that breaks out of a loop.\n    ///\n    /// # Example:\n    /// `break 'label 3`\n    Break {\n        /// The value we break with. By default, this is `()`.\n        ///\n        /// # Example:\n        /// ```rust,ignore\n        /// loop { break 3; } + 3\n        /// ```\n        value: Expr,\n        /// What loop shall we break? By default, the parent enclosing loop.\n        label: Option<Symbol>,\n        /// When a loop has a state (see [`ExprKind::Loop::state`]), this field\n        /// `state` is `Some(_)`. 
This carries the updated state for the loop.\n        state: Option<Expr>,\n    },\n\n    /// Return from a function.\n    ///\n    /// # Example:\n    /// `return 1`\n    Return {\n        /// The expression we return (`1` in the example).\n        value: Expr,\n    },\n\n    /// Continue (go to next loop iteration)\n    ///\n    /// # Example:\n    /// `continue 'label`\n    Continue {\n        /// The loop we continue.\n        label: Option<Symbol>,\n        /// When a loop has a state (see [`ExprKind::Loop::state`]), this field\n        /// `state` is `Some(_)`. This carries the updated state for the loop.\n        state: Option<Expr>,\n    },\n\n    /// Closure (anonymous function)\n    ///\n    /// # Example:\n    /// `|x| x`\n    Closure {\n        /// The parameters of the closure\n        params: Vec<Pat>,\n        /// The body of the closure\n        body: Expr,\n        /// The captured expressions\n        captures: Vec<Expr>,\n    },\n\n    /// Block of safe or unsafe expression\n    ///\n    /// # Example:\n    /// `unsafe { ... 
}`\n    Block {\n        /// The body of the block.\n        body: Expr,\n        /// The safety of the block.\n        safety_mode: SafetyKind,\n    },\n\n    /// A quote is an inlined piece of backend code.\n    Quote {\n        /// The contents of the quote.\n        contents: Quote,\n    },\n\n    /// A resugared expression.\n    /// This variant is introduced before printing only.\n    /// Phases must not produce this variant.\n    Resugared(ResugaredExprKind),\n\n    /// Fallback constructor to carry errors.\n    Error(ErrorNode),\n}\n\n/// Represents the kinds of generic parameters\n#[derive_group_for_ast]\npub enum GenericParamKind {\n    /// A generic lifetime\n    Lifetime,\n    /// A generic type\n    Type,\n    /// A generic constant\n    Const {\n        /// The type of the generic constant\n        ty: Ty,\n    },\n}\n\n/// Represents an instantiated trait that needs to be implemented.\n///\n/// # Example:\n/// A bound `_: std::ops::Add<u8>`\n#[derive_group_for_ast]\npub struct TraitGoal {\n    /// `std::ops::Add` in the example.\n    pub trait_: GlobalId,\n    /// `[u8]` in the example.\n    pub args: Vec<GenericValue>,\n}\n\n/// Represents a trait bound in a generic constraint\n#[derive_group_for_ast]\npub struct ImplIdent {\n    /// The trait goal of this impl identifier\n    pub goal: TraitGoal,\n    /// The name itself\n    pub name: Symbol,\n}\n\n/// A projection predicate expresses a constraint over an associated type:\n/// ```rust,ignore\n/// fn f<T: Foo<S = String>>(...)\n/// ```\n/// In this example `Foo` has an associated type `S`.\n#[derive_group_for_ast]\npub struct ProjectionPredicate {\n    /// The impl expression we project from\n    pub impl_: ImplExpr,\n    /// The associated type being projected\n    pub assoc_item: GlobalId,\n    /// The equality constraint on the associated type\n    pub ty: Ty,\n}\n\n/// A generic constraint (lifetime, type-class or equality)\n#[derive_group_for_ast]\npub enum GenericConstraint {\n    /// A 
lifetime\n    Lifetime(String), // TODO: Remove `String`\n    /// A type-class constraint (e.g. `T: Foo`)\n    TypeClass(ImplIdent),\n    /// An equality constraint on an associated type (e.g. `T::Assoc = u8`)\n    Equality(ProjectionPredicate),\n}\n\n/// A generic parameter (lifetime, type parameter or const parameter)\n#[derive_group_for_ast]\npub struct GenericParam {\n    /// The local identifier for the generic parameter\n    pub ident: LocalId,\n    /// Metadata (span and attributes) for the generic parameter.\n    pub meta: Metadata,\n    /// The kind of generic parameter.\n    pub kind: GenericParamKind,\n}\n\n/// Generic parameters and constraints (contained between `<>` in function declarations)\n#[derive_group_for_ast]\npub struct Generics {\n    /// A vector of generic parameters.\n    pub params: Vec<GenericParam>,\n    /// A vector of generic constraints.\n    pub constraints: Vec<GenericConstraint>,\n}\n\n/// Safety level of a function.\n#[derive_group_for_ast]\npub enum SafetyKind {\n    /// Safe function (default).\n    Safe,\n    /// Unsafe function.\n    Unsafe,\n}\n\n/// Represents a single attribute.\n#[derive_group_for_ast]\npub struct Attribute {\n    /// The kind of attribute (a comment, a tool attribute?).\n    pub kind: AttributeKind,\n    /// The span of the attribute.\n    pub span: Span,\n}\n\n/// Represents the kind of an attribute.\n#[derive_group_for_ast]\npub enum AttributeKind {\n    /// A tool attribute `#[path(tokens)]`\n    Tool {\n        /// The path to the tool\n        path: String,\n        /// The payload\n        tokens: String,\n    },\n    /// A doc comment\n    DocComment {\n        /// What kind of comment? 
(single lines, block)\n        kind: DocCommentKind,\n        /// The contents of the comment\n        body: String,\n    },\n    /// Hax attribute\n    Hax(hax_lib_macros_types::AttrPayload),\n}\n\n/// Represents the kind of a doc comment.\n#[derive_group_for_ast]\npub enum DocCommentKind {\n    /// Single line comment (`//...`)\n    Line,\n    /// Block comment (`/*...*/`)\n    Block,\n}\n\n/// A list of attributes.\npub type Attributes = Vec<Attribute>;\n\n/// A type with its associated span.\n#[derive_group_for_ast]\npub struct SpannedTy {\n    /// The span of the type\n    pub span: Span,\n    /// The type itself\n    pub ty: Ty,\n}\n\n/// A function or closure parameter.\n///\n/// # Example:\n/// ```rust,ignore\n/// (mut x, y): (T, u8)\n/// ```\n#[derive_group_for_ast]\npub struct Param {\n    /// The pattern part (left-hand side) of a parameter (`(mut x, y)` in the example).\n    pub pat: Pat,\n    /// The type part (right-rand side) of a parameter (`(T, u8)` in the example).\n    pub ty: Ty,\n    /// The span of the type part (if available).\n    pub ty_span: Option<Span>,\n    /// Optionally, some attributes present on the parameter.\n    pub attributes: Attributes,\n}\n\n/// A variant of an enum or struct.\n/// In our representation structs always have one variant with an argument for each field.\n#[derive_group_for_ast]\npub struct Variant {\n    /// Name of the variant\n    pub name: GlobalId,\n    /// Fields of this variant (named or anonymous)\n    pub arguments: Vec<(GlobalId, Ty, Attributes)>,\n    /// True if fields are named\n    pub is_record: bool,\n    // TODO Missing span\n    /// Attributes of the variant\n    pub attributes: Attributes,\n}\n\n/// A top-level item in the module.\n#[derive_group_for_ast]\npub enum ItemKind {\n    /// A function or constant item.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// fn add<T: Clone>(x: i32, y: i32) -> i32 {\n    ///     x + y\n    /// }\n    /// ```\n    /// Constants are represented as 
functions of arity zero, while functions always have a non-zero arity.\n    Fn {\n        /// The identifier of the function.\n        ///\n        /// # Example:\n        /// `add`\n        name: GlobalId,\n\n        /// The generic arguments and constraints of the function.\n        ///\n        /// # Example:\n        /// the generic type `T` and the constraint `T: Clone`\n        generics: Generics,\n\n        /// The body of the function\n        ///\n        /// # Example:\n        /// `x + y`\n        body: Expr,\n\n        /// The parameters of the function.\n        ///\n        /// # Example:\n        /// `x: i32, y: i32`\n        params: Vec<Param>,\n\n        /// The safety of the function.\n        safety: SafetyKind,\n    },\n\n    /// A type alias.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// type A = u8;\n    /// ```\n    TyAlias {\n        /// Name of the alias\n        ///\n        /// # Example:\n        /// `A`\n        name: GlobalId,\n\n        /// Generic arguments and constraints\n        generics: Generics,\n\n        /// Original type\n        ///\n        /// # Example:\n        /// `u8`\n        ty: Ty,\n    },\n\n    /// A type definition (struct or enum)\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// enum A {B, C}\n    /// struct S {f: u8}\n    /// ```\n    Type {\n        /// Name of this type\n        ///\n        /// # Example:\n        /// `A`, `S`\n        name: GlobalId,\n\n        /// Generic parameters and constraints\n        generics: Generics,\n\n        /// Variants\n        ///\n        /// # Example:\n        /// `{B, C}`\n        variants: Vec<Variant>,\n\n        /// Is this a struct (or an enum)\n        is_struct: bool,\n    },\n\n    /// A trait definition.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// trait T<A> {\n    ///     type Assoc;\n    ///     fn m(x: Self::Assoc, y: Self) -> A;\n    /// }\n    /// ```\n    Trait {\n        /// Name of this trait\n        ///\n 
       /// # Example:\n        /// `T`\n        name: GlobalId,\n\n        /// Generic parameters and constraints\n        ///\n        /// # Example:\n        /// `<A>`\n        generics: Generics,\n\n        /// Items required to implement the trait\n        ///\n        /// # Example:\n        /// `type Assoc;`, `fn m ...;`\n        items: Vec<TraitItem>,\n\n        /// Safe or unsafe\n        safety: SafetyKind,\n    },\n\n    /// A trait implementation.\n    ///\n    /// # Example:\n    /// ```rust,ignore\n    /// impl T<u8> for u16 {\n    ///     type Assoc = u32;\n    ///     fn m(x: u32, y: u16) -> u8 {\n    ///         (x as u8) + (y as u8)\n    ///     }\n    /// }\n    /// ```\n    Impl {\n        /// Generic arguments and constraints\n        generics: Generics,\n\n        /// The type we implement the trait for\n        ///\n        /// # Example:\n        /// `u16`\n        self_ty: Ty,\n\n        /// Instantiated trait that is being implemented\n        ///\n        /// # Example:\n        /// `T<u8>`\n        of_trait: (GlobalId, Vec<GenericValue>),\n\n        /// Items in this impl\n        ///\n        /// # Example:\n        /// `fn m ...`, `type Assoc ...`\n        items: Vec<ImplItem>,\n\n        /// Implementations of traits required for this impl\n        parent_bounds: Vec<(ImplExpr, ImplIdent)>,\n    },\n\n    /// Internal node introduced by phases, corresponds to an alias to any item.\n    Alias {\n        /// New name\n        name: GlobalId,\n        /// Original name\n        item: GlobalId,\n    },\n\n    // TODO: Should we keep `Use`?\n    /// A `use` statement\n    Use {\n        /// Path to used item(s)\n        path: Vec<String>,\n\n        /// Comes from external crate\n        is_external: bool,\n\n        /// Optional `as`\n        rename: Option<String>,\n    },\n\n    /// A `Quote` node is inserted by phase TransformHaxLibInline to deal with some `hax_lib` features.\n    /// For example insertion of verbatim backend code.\n    
Quote {\n        /// Content of the quote\n        quote: Quote,\n\n        /// Description of the quote target position\n        origin: ItemQuoteOrigin,\n    },\n\n    /// A Rust module (`mod`, inline or not).\n    /// This exists solely because modules can have attributes relevant to the hax engine.\n    RustModule,\n\n    /// Fallback constructor to carry errors.\n    Error(ErrorNode),\n\n    /// A resugared item.\n    /// This variant is introduced before printing only.\n    /// Phases must not produce this variant.\n    Resugared(ResugaredItemKind),\n\n    /// Item that is not implemented yet\n    NotImplementedYet,\n}\n\n/// A top-level item with metadata.\n#[derive_group_for_ast]\npub struct Item {\n    /// The global identifier of the item.\n    pub ident: GlobalId,\n    /// The kind of the item.\n    pub kind: ItemKind,\n    /// Source span and attributes.\n    pub meta: Metadata,\n}\n\nimpl Item {\n    /// Checks whether the item was marked opaque using `hax_lib::opaque`\n    pub fn is_opaque(&self) -> bool {\n        self.meta.attributes.iter().any(|a| {\n            matches!(\n                a.kind,\n                AttributeKind::Hax(hax_lib_macros_types::AttrPayload::Erased)\n            )\n        })\n    }\n}\n\n/// A \"flat\" module: this contains only non-module items.\n#[derive_group_for_ast]\npub struct Module {\n    /// The global identifier of the module.\n    pub ident: GlobalId,\n    /// The list of items that belongs to this module.\n    pub items: Vec<Item>,\n    /// Source span and attributes.\n    pub meta: Metadata,\n}\n\nimpl Generics {\n    /// Returns Iterator over all type-class constraints (`GenericConstraint::TypeClass`)\n    pub fn type_class_constraints(&self) -> impl Iterator<Item = &ImplIdent> {\n        self.constraints.iter().filter_map(|c| match c {\n            GenericConstraint::TypeClass(impl_id) => Some(impl_id),\n            _ => None,\n        })\n    }\n    /// Returns Iterator over all equality constraints 
(`GenericConstraint::Equality`)\n    pub fn equality_constraints(&self) -> impl Iterator<Item = &ProjectionPredicate> {\n        self.constraints.iter().filter_map(|c| match c {\n            GenericConstraint::Equality(pp) => Some(pp),\n            _ => None,\n        })\n    }\n}\n\n/// Traits for utilities on AST data types\npub mod traits {\n    use super::*;\n    /// Marks AST data types that carry metadata (span + attributes)\n    pub trait HasMetadata {\n        /// Get metadata\n        fn metadata(&self) -> &Metadata;\n        /// Get mutable borrow on metadata\n        fn metadata_mut(&mut self) -> &mut Metadata;\n    }\n    /// Marks AST data types that carry a span\n    pub trait HasSpan {\n        /// Get span\n        fn span(&self) -> Span;\n        /// Mutable borrow on the span\n        fn span_mut(&mut self) -> &mut Span;\n    }\n    /// Marks AST data types that carry a Type\n    pub trait Typed {\n        /// Get type\n        fn ty(&self) -> &Ty;\n    }\n    impl<T: HasMetadata> HasSpan for T {\n        fn span(&self) -> Span {\n            self.metadata().span\n        }\n        fn span_mut(&mut self) -> &mut Span {\n            &mut self.metadata_mut().span\n        }\n    }\n\n    /// Marks types of the AST that carry a kind (an enum for the actual content)\n    pub trait HasKind {\n        /// Type carrying the kind, should be named `<Self>Kind`\n        type Kind;\n        /// Get kind\n        fn kind(&self) -> &Self::Kind;\n        /// Get mutable borrow on kind\n        fn kind_mut(&mut self) -> &mut Self::Kind;\n    }\n\n    macro_rules! derive_has_metadata {\n        ($($ty:ty),*) => {\n            $(impl HasMetadata for $ty {\n                fn metadata(&self) -> &Metadata {\n                    &self.meta\n                }\n                fn metadata_mut(&mut self) -> &mut Metadata {\n                    &mut self.meta\n                }\n            })*\n        };\n    }\n    macro_rules! 
derive_has_kind {\n        ($($ty:ty => $kind:ty),*) => {\n            $(impl HasKind for $ty {\n                type Kind = $kind;\n                fn kind(&self) -> &Self::Kind {\n                    &self.kind\n                }\n                fn kind_mut(&mut self) -> &mut Self::Kind {\n                    &mut self.kind\n                }\n            })*\n        };\n    }\n\n    derive_has_metadata!(\n        Item,\n        Expr,\n        Pat,\n        Guard,\n        Arm,\n        ImplItem,\n        TraitItem,\n        GenericParam\n    );\n    derive_has_kind!(\n        Item => ItemKind, Expr => ExprKind, Pat => PatKind, Guard => GuardKind,\n        GenericParam => GenericParamKind, ImplItem => ImplItemKind, TraitItem => TraitItemKind, ImplExpr => ImplExprKind\n    );\n\n    impl HasSpan for Attribute {\n        fn span(&self) -> Span {\n            self.span\n        }\n        fn span_mut(&mut self) -> &mut Span {\n            &mut self.span\n        }\n    }\n\n    impl Typed for Expr {\n        fn ty(&self) -> &Ty {\n            &self.ty\n        }\n    }\n    impl Typed for Pat {\n        fn ty(&self) -> &Ty {\n            &self.ty\n        }\n    }\n    impl Typed for SpannedTy {\n        fn ty(&self) -> &Ty {\n            &self.ty\n        }\n    }\n\n    impl HasSpan for SpannedTy {\n        fn span(&self) -> Span {\n            self.span\n        }\n        fn span_mut(&mut self) -> &mut Span {\n            &mut self.span\n        }\n    }\n\n    impl ExprKind {\n        /// Convert to full `Expr` with type, span and attributes\n        pub fn into_expr(self, span: Span, ty: Ty, attributes: Vec<Attribute>) -> Expr {\n            Expr {\n                kind: Box::new(self),\n                ty,\n                meta: Metadata { span, attributes },\n            }\n        }\n    }\n\n    /// Manual implementation of HasKind as the Ty struct contains a Box<TyKind>\n    /// instead of a TyKind directly.\n    impl HasKind for Ty {\n        type Kind 
= TyKind;\n\n        fn kind(&self) -> &Self::Kind {\n            &self.0\n        }\n        fn kind_mut(&mut self) -> &mut Self::Kind {\n            &mut self.0\n        }\n    }\n\n    /// Fragments of the AST on which we can store an `ErrorNode`.\n    pub trait FallibleAstNode {\n        /// Replace the current node with an error.\n        fn set_error(&mut self, error_node: ErrorNode);\n        /// Extract an error if any.\n        fn get_error(&self) -> Option<&ErrorNode>;\n    }\n    macro_rules! derive_error_node {\n        ($($ty:ident => $kind:ident),*) => {$(\n            impl FallibleAstNode for $ty {\n                fn set_error(&mut self, mut error_node: ErrorNode) {\n                    if let Some(base) = self.get_error().cloned() {\n                        error_node.diagnostics.extend_from_slice(&base.diagnostics);\n                    }\n                    *self.kind_mut() = $kind::Error(error_node)\n                }\n                fn get_error(&self) -> Option<&ErrorNode> {\n                    match &self.kind() {\n                        $kind::Error(error_node) => Some(error_node),\n                        _ => None,\n                    }\n                }\n            }\n        )*};\n    }\n\n    derive_error_node!(Item => ItemKind, Pat => PatKind, Expr => ExprKind, Ty => TyKind);\n}\npub use traits::*;\n"
  },
  {
    "path": "rust-engine/src/attributes.rs",
    "content": "//! Work with hax attributes.\n\nuse std::collections::HashMap;\n\nuse hax_lib_macros_types::{AssociationRole, AttrPayload, ItemUid, ProofMethod};\n\nuse crate::ast::diagnostics::{Context, DiagnosticInfo, DiagnosticInfoKind};\n\nuse super::ast::*;\nuse visitors::AstVisitorMut;\n\n/// A graph of items connected via the hax attribute [`AttrPayload::AssociatedItem`] and UUIDs.\n#[derive(Clone)]\npub struct LinkedItemGraph {\n    items: HashMap<ItemUid, Item>,\n    context: Context,\n}\n\nimpl Default for LinkedItemGraph {\n    fn default() -> Self {\n        Self {\n            items: Default::default(),\n            context: Context::Unknown,\n        }\n    }\n}\n\n/// Get an iterator over hax attributes contained in the given attributes.\npub fn hax_attributes(attrs: &Attributes) -> impl Iterator<Item = &AttrPayload> {\n    attrs.iter().flat_map(|attr| match &attr.kind {\n        AttributeKind::Hax(attr_payload) => Some(attr_payload),\n        _ => None,\n    })\n}\n\n/// Get proof attributes attached to the item\npub fn hax_proof_attributes(item: &Item) -> Result<ProofAttributes, String> {\n    let mut proofs = hax_attributes(&item.meta.attributes).flat_map(|attr| match attr {\n        AttrPayload::Proof(proof) => Some(proof.clone()),\n        _ => None,\n    });\n    let proof = proofs.next();\n    if proofs.next().is_some() {\n        return Err(\"At most one `proof` attribute per item is allowed.\".into());\n    }\n    let mut pure_requires_proofs =\n        hax_attributes(&item.meta.attributes).flat_map(|attr| match attr {\n            AttrPayload::PureRequiresProof(proof) => Some(proof.clone()),\n            _ => None,\n        });\n    let pure_requires_proof = pure_requires_proofs.next();\n    if pure_requires_proofs.next().is_some() {\n        return Err(\"At most one `pure_requires_proof` attribute per item is allowed.\".into());\n    }\n    let mut pure_ensures_proofs =\n        hax_attributes(&item.meta.attributes).flat_map(|attr| match 
attr {\n            AttrPayload::PureEnsuresProof(proof) => Some(proof.clone()),\n            _ => None,\n        });\n    let pure_ensures_proof = pure_ensures_proofs.next();\n    if pure_ensures_proofs.next().is_some() {\n        return Err(\"At most one `pure_ensures_proof` attribute per item is allowed.\".into());\n    }\n    let mut proof_methods = hax_attributes(&item.meta.attributes).flat_map(|attr| match attr {\n        AttrPayload::ProofMethod(method) => Some(*method),\n        _ => None,\n    });\n    let proof_method = proof_methods.next();\n    if proof_methods.next().is_some() {\n        return Err(\"At most one `proof_method` attribute per item is allowed.\".into());\n    }\n    Ok(ProofAttributes {\n        proof,\n        pure_requires_proof,\n        pure_ensures_proof,\n        proof_method,\n    })\n}\n\nfn uuid(context: Context, item: &Item) -> Option<ItemUid> {\n    let mut uuids = hax_attributes(&item.meta.attributes).flat_map(|attr| match attr {\n        AttrPayload::Uid(item_uid) => Some(item_uid),\n        _ => None,\n    });\n    let uuid = uuids.next()?;\n    if let Some(other) = uuids.next() {\n        emit_assertion_failure(\n            context,\n            item.span(),\n            format!(\n                \"Found more than one UUID hax attribute on this item. 
The two first UUIDs are {uuid} and {other}.\"\n            ),\n        );\n        None\n    } else {\n        Some(uuid.clone())\n    }\n}\n\nfn emit_assertion_failure(context: Context, span: span::Span, message: impl Into<String>) {\n    DiagnosticInfo {\n        context,\n        span,\n        kind: DiagnosticInfoKind::AssertionFailure {\n            details: message.into(),\n        },\n    }\n    .emit();\n}\n\nimpl std::fmt::Debug for LinkedItemGraph {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        f.debug_struct(\"LinkedItemGraph\")\n            .field(\n                \"items\",\n                &self\n                    .items\n                    .iter()\n                    .map(|(id, item)| (id.to_string(), item.ident.to_debug_string()))\n                    .collect::<Vec<_>>(),\n            )\n            .field(\"context\", &self.context)\n            .finish()\n    }\n}\n\nimpl LinkedItemGraph {\n    /// Clone items marked with UUIDs attributes to build a graph of linked items.\n    /// This graph clones the items that represent linked items: e.g. 
pre and post conditions.\n    pub fn new(items: &[Item], context: Context) -> Self {\n        Self {\n            items: HashMap::from_iter(\n                items\n                    .iter()\n                    .filter_map(|item| Some((uuid(context.clone(), item)?, item.clone()))),\n            ),\n            context,\n        }\n    }\n\n    fn emit_assertion_failure(&self, span: span::Span, message: impl Into<String>) {\n        emit_assertion_failure(self.context.clone(), span, message)\n    }\n\n    fn emit_unimplemented(&self, span: span::Span, issue_id: u32, message: impl Into<String>) {\n        DiagnosticInfo {\n            context: self.context.clone(),\n            span,\n            kind: DiagnosticInfoKind::Unimplemented {\n                issue_id: Some(issue_id),\n                details: Some(message.into()),\n            },\n        }\n        .emit();\n    }\n\n    /// Given a graph and an item `item`, returns an iterator of the various items that are linked with `item`.\n    pub fn linked_items_iter(\n        &self,\n        item: &impl HasMetadata,\n    ) -> impl Iterator<Item = (AssociationRole, Result<&Item, DiagnosticInfo>)> {\n        let item_attributes = &item.metadata().attributes;\n        hax_attributes(item_attributes).flat_map(move |attr| match attr {\n            AttrPayload::AssociatedItem { role, item: target } => {\n                let target = self.items.get(target).map(Ok).unwrap_or_else(|| {\n                    Err(DiagnosticInfo {\n                        context: self.context.clone(),\n                        span: item.span(),\n                        kind: DiagnosticInfoKind::AssertionFailure {\n                            details: format!(\"An item linked via hax attributes could not be found. The UUID is {target:?}. 
The graph is {:#?}.\", self),\n                        },\n                    })\n                });\n                Some((*role, target))\n            }\n            _ => None,\n        })\n    }\n\n    /// Returns the items linked to a given item.\n    pub fn linked_items(\n        &self,\n        item: &impl HasMetadata,\n    ) -> HashMap<AssociationRole, Vec<Result<&Item, DiagnosticInfo>>> {\n        let mut map: HashMap<AssociationRole, Vec<_>> = HashMap::new();\n        for (role, item) in self.linked_items_iter(item) {\n            map.entry(role).or_default().push(item);\n        }\n        map\n    }\n\n    /// Returns the precondition, postcondition and decreases clause, if any, for a given item.\n    /// When operating on a linked function, `self_id` is the local identifier of `self`.\n    pub fn fn_like_linked_expressions(\n        &self,\n        item: &impl HasMetadata,\n        self_id: Option<LocalId>,\n    ) -> FnLikeAssocatedExpressions {\n        let assoc_items = self.linked_items(item);\n        let get = |role| {\n            assoc_items\n                .get(&role)\n                .iter()\n                .flat_map(|vec| vec.iter())\n                .flat_map(|item| match item {\n                    Ok(item) => Some(item),\n                    Err(err) => {\n                        err.emit();\n                        None\n                    }\n                })\n                .map(|item| extract_expr(&self.context, item, self_id.clone()))\n                .collect::<Vec<_>>()\n        };\n        let precondition = {\n            let mut preconditions = get(AssociationRole::Requires).into_iter();\n            preconditions.next().map(|(e, _)| {\n                for extra in preconditions {\n                    self.emit_unimplemented(extra.0.span(), 1270, \"multiple pre-conditions\");\n                }\n                e\n            })\n        };\n        let decreases = {\n            let mut decreases = 
get(AssociationRole::Decreases).into_iter();\n            decreases.next().map(|(e, _)| {\n                for extra in decreases {\n                    self.emit_unimplemented(extra.0.span(), 1270, \"multiple decreases\");\n                }\n                e\n            })\n        };\n        let postcondition = {\n            let mut postconditions = get(AssociationRole::Ensures).into_iter();\n            postconditions.next().and_then(|(e, params)| {\n                for extra in postconditions {\n                    self.emit_unimplemented(extra.0.span(), 1270, \"multiple post-conditions\");\n                }\n                if let Some(last_param) = params.last() {\n                    Some(Postcondition {\n                        result_binder: last_param.pat.clone(),\n                        body: e.clone(),\n                    })\n                } else {\n                    self.emit_assertion_failure(\n                        e.span(),\n                        \"hax ensures attribute: could not find output binder\",\n                    );\n                    None\n                }\n            })\n        };\n        FnLikeAssocatedExpressions {\n            decreases,\n            precondition,\n            postcondition,\n        }\n    }\n\n    /// Is there a specification that we should prove for this item?\n    pub fn has_spec(&self, item: &Item) -> bool {\n        let spec = self.fn_like_linked_expressions(item, item.self_id());\n        spec.precondition.is_some() || spec.postcondition.is_some()\n    }\n}\n\nfn extract_expr<'a>(\n    context: &Context,\n    item: &'a Item,\n    self_id: Option<LocalId>,\n) -> (Expr, Vec<&'a Param>) {\n    let ItemKind::Fn { body, params, .. 
} = item.kind() else {\n        return (\n            ExprKind::Error(ErrorNode::assertion_failure(\n                item.clone(),\n                context.clone(),\n                \"Expected an function\",\n            ))\n            .into_expr(item.span(), Ty::prop(), vec![]),\n            vec![],\n        );\n    };\n    let mut body = body.clone();\n    if let Some(self_id) = self_id\n        && let [maybe_self, ..] = params.as_slice()\n        && let PatKind::Binding {\n            var, sub_pat: None, ..\n        } = &*maybe_self.pat.kind\n    {\n        // Here, we expect `self_id` is `self`, thus we cannot have any shadowing.\n        utils::mappers::SubstLocalIds::one(var.clone(), self_id.clone()).visit(&mut body)\n    }\n    (body, params.iter().collect())\n}\n\n/// A postcondition.\n///\n/// ## Example\n/// The expression `result != x` in the following is a postcondition.\n/// Note that `result` is an extra binder that represent the result of `f`, whose type is `u8` in this case: the return type of `f`.\n///\n/// ```rust\n/// #[hax_lib::ensures(|result| result != x)]\n/// fn f(x: u8) -> u8 { x.wrapping_add(1) }\n/// ```\npub struct Postcondition {\n    /// In the example, this is `|result|`.\n    pub result_binder: Pat,\n    /// The formula of the postcondition, `result != x` in the example.\n    pub body: Expr,\n}\n\n/// The various linked expressions one can usually find on a (linked or not) function.\npub struct FnLikeAssocatedExpressions {\n    /// A decreases clause, see [`hax_lib::decreases`]\n    pub decreases: Option<Expr>,\n    /// A precondition, see [`hax_lib::requires`]\n    pub precondition: Option<Expr>,\n    /// A postcondition, see [`hax_lib::ensures`]\n    pub postcondition: Option<Postcondition>,\n}\n\n/// The various linked expressions one can usually find on a (linked or not) function.\npub struct ProofAttributes {\n    /// A custom proof, see [`hax_lib::lean::proof`]\n    pub proof: Option<String>,\n    /// A proof that the 
precondition is pure, see [`hax_lib::lean::pure_requires_proof`]\n    pub pure_requires_proof: Option<String>,\n    /// A proof that the postcondition is pure, see [`hax_lib::lean::pure_ensures_proof`]\n    pub pure_ensures_proof: Option<String>,\n    /// A proof method, see [`hax_lib::lean::proof_method`]\n    pub proof_method: Option<ProofMethod>,\n}\n"
  },
  {
    "path": "rust-engine/src/backends/fstar.rs",
    "content": "//! The F* backend. The F* printer is still implemented in Ocaml but the phase driver uses this infrastructure\n\n/// The F* backend\npub struct FStarBackend;\n\nimpl super::Backend for FStarBackend {\n    // TODO Replace by an empty printer\n    // This is a dummy value. The fstar backend's printer is implemented in OCaml\n    type Printer = super::lean::LeanPrinter;\n\n    fn module_path(&self, _module: &super::Module) -> camino::Utf8PathBuf {\n        todo!(\"The fstar backend's printer is implemented in OCaml\")\n    }\n\n    fn phases(&self) -> Vec<crate::phase::PhaseKind> {\n        use crate::phase::legacy::LegacyOCamlPhase::*;\n        vec![\n            RejectRawOrMutPointer.into(),\n            RewriteLocalSelf.into(),\n            TransformHaxLibInline.into(),\n            Specialize.into(),\n            DropSizedTrait.into(),\n            SimplifyQuestionMarks.into(),\n            AndMutDefsite.into(),\n            ReconstructAsserts.into(),\n            ReconstructForLoops.into(),\n            ReconstructWhileLoops.into(),\n            DirectAndMut.into(),\n            RejectArbitraryLhs.into(),\n            DropBlocks.into(),\n            DropMatchGuards.into(),\n            DropReferences.into(),\n            ExplicitConversions.into(),\n            TrivializeAssignLhs.into(),\n            HoistSideEffects.into(),\n            HoistDisjunctivePatterns.into(),\n            SimplifyMatchReturn.into(),\n            LocalMutation.into(),\n            RewriteControlFlow.into(),\n            DropReturnBreakContinue.into(),\n            FunctionalizeLoops.into(),\n            RejectQuestionMark.into(),\n            RejectAsPattern.into(),\n            TraitsSpecs.into(),\n            SimplifyHoisting.into(),\n            NewtypeAsRefinement.into(),\n            RejectTraitItemDefault.into(),\n            BundleCycles.into(),\n            ReorderFields.into(),\n            SortItems.into(),\n        ]\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/backends/lean.rs",
    "content": "//! The Lean backend\n//!\n//! This module defines the trait implementations to export the rust ast to\n//! Pretty::Doc type, which can in turn be exported to string (or, eventually,\n//! source maps).\n\nuse std::collections::HashSet;\nuse std::sync::OnceLock;\n\nuse super::prelude::*;\nuse crate::{\n    ast::{\n        identifiers::global_id::view::{ConstructorKind, PathSegment, TypeDefKind},\n        span::Span,\n    },\n    attributes::hax_proof_attributes,\n    names::rust_primitives::hax::{\n        cast_op,\n        explicit_monadic::{lift, pure},\n    },\n    phase::*,\n};\nuse camino::Utf8PathBuf;\nuse hax_lib_macros_types::ProofMethod;\nuse hax_types::engine_api::File;\n\nmod binops {\n    pub use crate::names::core::cmp::PartialEq;\n    pub use crate::names::core::ops::bit::*;\n    pub use crate::names::core::ops::index::*;\n    pub use crate::names::rust_primitives::arithmetic::neg;\n    pub use crate::names::rust_primitives::hax::machine_int::*;\n    pub use crate::names::rust_primitives::hax::{logical_op_and, logical_op_or};\n}\n\nconst LIFT: GlobalId = lift;\nconst PURE: GlobalId = pure;\nconst CAST_OP: GlobalId = cast_op;\n\n/// The Lean printer\n#[setup_printer_struct]\n#[derive(Default, Clone)]\npub struct LeanPrinter {\n    current_namespace: Option<GlobalId>,\n}\n\nconst INDENT: isize = 2;\n\nconst HEADER: &str = \"\n-- Experimental lean backend for Hax\n-- The Hax prelude library can be found in hax/proof-libs/lean\nimport Hax\nimport Std.Tactic.Do\nimport Std.Do.Triple\nimport Std.Tactic.Do.Syntax\nopen Std.Do\nopen Std.Tactic\n\nset_option mvcgen.warning false\nset_option linter.unusedVariables false\n\n\n\";\n\nimpl RenderView for LeanPrinter {\n    fn reserved_keywords() -> &'static HashSet<String> {\n        static SET: OnceLock<HashSet<String>> = OnceLock::new();\n        SET.get_or_init(|| {\n            [\n                // reserved for Lean:\n                \"end\",\n                \"def\",\n                
\"abbrev\",\n                \"theorem\",\n                \"example\",\n                \"inductive\",\n                \"structure\",\n                \"from\",\n                // reserved for hax encoding:\n                \"associatedTypes\",\n                \"AssociatedTypes\",\n            ]\n            .into_iter()\n            .map(|s| s.to_string())\n            .collect()\n        })\n    }\n\n    fn should_escape(id: &str) -> bool {\n        Self::is_reserved_keyword(id)\n            || id.starts_with(|c: char| c.is_ascii_digit())\n            || id.starts_with(\"trait_constr_\")\n    }\n\n    fn separator(&self) -> &str {\n        \".\"\n    }\n\n    fn relativize_module_path<'a>(&self, module_path: &'a [PathSegment]) -> &'a [PathSegment] {\n        if let Some(namespace) = self.current_namespace\n            && namespace.view().segments() == module_path\n        {\n            &[]\n        } else {\n            module_path\n        }\n    }\n\n    fn render_path_segment(&self, chunk: &PathSegment) -> Vec<String> {\n        // Returning None indicates that the default rendering should be used\n        (match chunk.kind() {\n            AnyKind::Constructor(ConstructorKind::Constructor { ty })\n                if matches!(ty.kind(), TypeDefKind::Struct) =>\n            {\n                Some(vec![\n                    Self::escape(&self.render_path_segment_payload(chunk.payload())),\n                    \"mk\".to_string(),\n                ])\n            }\n            AnyKind::Field { named: _, parent } => match parent.kind() {\n                ConstructorKind::Constructor { ty }\n                    if matches!(&ty.kind(), TypeDefKind::Struct) =>\n                {\n                    chunk.parent().map(|parent| {\n                        vec![\n                            Self::escape(&self.render_path_segment_payload(parent.payload())),\n                            Self::escape(&self.render_path_segment_payload(chunk.payload())),\n              
          ]\n                    })\n                }\n                _ => None,\n            },\n            _ => None,\n        })\n        .unwrap_or(default::render_path_segment(self, chunk))\n    }\n}\n\nimpl Printer for LeanPrinter {}\n\n/// The Lean backend\npub struct LeanBackend;\n\nimpl Backend for LeanBackend {\n    type Printer = LeanPrinter;\n\n    fn module_path(&self, module: &Module) -> Utf8PathBuf {\n        let krate = module.ident.krate();\n        Utf8PathBuf::from(krate).with_extension(\"lean\")\n    }\n\n    fn phases(&self) -> Vec<PhaseKind> {\n        use crate::phase::{PhaseKind::*, legacy::LegacyOCamlPhase::*};\n        vec![\n            RejectRawOrMutPointer.into(),\n            RejectImplTypeMethod.into(),\n            RewriteLocalSelf.into(),\n            TransformHaxLibInline.into(),\n            Specialize.into(),\n            DropSizedTrait.into(),\n            SimplifyQuestionMarks.into(),\n            AndMutDefsite.into(),\n            ReconstructAsserts.into(),\n            ReconstructForLoops.into(),\n            ReconstructWhileLoops.into(),\n            DirectAndMut.into(),\n            RejectArbitraryLhs.into(),\n            DropBlocks.into(),\n            DropMatchGuards.into(),\n            DropReferences.into(),\n            TrivializeAssignLhs.into(),\n            HoistSideEffects.into(),\n            HoistDisjunctivePatterns.into(),\n            SimplifyMatchReturn.into(),\n            LocalMutation.into(),\n            RewriteControlFlow.into(),\n            DropReturnBreakContinue.into(),\n            FunctionalizeLoops.into(),\n            RejectQuestionMark.into(),\n            TraitsSpecs.into(),\n            SimplifyHoisting.into(),\n            NewtypeAsRefinement.into(),\n            ReorderFields.into(),\n            SortItems.into(),\n            FilterUnprintableItems,\n            ExplicitMonadic,\n        ]\n    }\n\n    fn resugaring_phases() -> Vec<Box<dyn Resugaring>> {\n        vec![\n            
Box::new(RecursiveFunctions),\n            Box::new(FunctionsToConstants),\n            Box::new(LetPure),\n            Box::new(RecordEllipsis),\n        ]\n    }\n\n    fn items_to_module(&self, items: Vec<Item>) -> Vec<Module> {\n        let mut modules: Vec<Module> = Vec::new();\n\n        for item in items {\n            let module_ident = item.ident.mod_only_closest_parent();\n\n            if let Some(last_module) = modules.last_mut()\n                && last_module.ident == module_ident\n            {\n                last_module.items.push(item);\n            } else {\n                modules.push(Module {\n                    ident: module_ident,\n                    items: vec![item],\n                    meta: Metadata {\n                        span: Span::dummy(),\n                        attributes: vec![],\n                    },\n                });\n            }\n        }\n        modules\n    }\n\n    fn modules_to_files(&self, modules: Vec<Module>, mut printer: Self::Printer) -> Vec<File> {\n        if modules.is_empty() {\n            return vec![];\n        }\n        let path = self.module_path(modules.first().unwrap()).to_string();\n        let contents = modules\n            .into_iter()\n            .map(|module: Module| {\n                let (c, _) = printer.print(module);\n                c\n            })\n            .collect::<Vec<String>>()\n            .join(\"\\n\");\n        vec![File {\n            path,\n            contents: format!(\"{}{}\", HEADER, contents),\n            sourcemap: None,\n        }]\n    }\n}\n\nimpl LeanPrinter {\n    /// Checks if we are extracting core models to be able to use different namespeacing when\n    /// referring to core.\n    pub fn is_hax_core_models_extraction_mode(&self) -> bool {\n        std::env::var(\"HAX_CORE_MODELS_EXTRACTION_MODE\")\n            .map(|v| v == \"on\")\n            .unwrap_or(false)\n    }\n\n    /// Render a global id using the Rendering strategy of the Lean 
printer. Works for both concrete\n    /// and projector ids. TODO: https://github.com/cryspen/hax/issues/1660\n    pub fn render_id(&self, id: &GlobalId) -> String {\n        let id = if !self.is_hax_core_models_extraction_mode() && id.krate() == \"core\" {\n            id.rename_krate(\"core_models\")\n        } else {\n            *id\n        };\n        self.render_string(&id.view())\n    }\n\n    /// Renders the last, most local part of an id. Used for named arguments of constructors.\n    pub fn render_last(&self, id: &GlobalId) -> String {\n        self.render(&id.view())\n            .path\n            .last()\n            // TODO: Should be ensured by the rendering engine; see\n            // https://github.com/cryspen/hax/issues/1660\n            .expect(\"Segments should always be non-empty\")\n            .clone()\n    }\n\n    /// Inject an identifier in before-last position while rendering\n    /// TODO: use `DefIdInner::kind` for this instead (https://github.com/cryspen/hax/issues/1877)\n    pub fn render_with_injection(&self, id: &GlobalId, injection: &String) -> String {\n        let rendered = self.render(&id.view());\n        let (last, butlast) = rendered\n            .path\n            .split_last()\n            // TODO: Should be ensured by the rendering engine; see\n            // https://github.com/cryspen/hax/issues/1660\n            .expect(\"Segments should always be non-empty\");\n        let path: Vec<String> = butlast\n            .iter()\n            .chain(std::iter::once(injection))\n            .chain(std::iter::once(last))\n            .map(String::clone)\n            .collect();\n        self.rendered_to_string(Rendered {\n            module: rendered.module,\n            path,\n        })\n    }\n\n    /// Escape a string for use in Lean string literals.\n    /// Handles newlines, quotes, backslashes, and other special characters.\n    fn escape_string(&self, s: &str) -> String {\n        let mut result = 
String::with_capacity(s.len());\n        for c in s.chars() {\n            match c {\n                '\"' => result.push_str(\"\\\\\\\"\"),\n                '\\'' => result.push_str(\"\\\\'\"),\n                '\\\\' => result.push_str(\"\\\\\\\\\"),\n                '\\n' => result.push_str(\"\\\\n\"),\n                '\\r' => result.push_str(\"\\\\r\"),\n                '\\t' => result.push_str(\"\\\\t\"),\n                c if c.is_ascii_control() => {\n                    result.push_str(&format!(\"\\\\x{:02x}\", c as u8));\n                }\n                c => result.push(c),\n            }\n        }\n        result\n    }\n}\n\n/// Render parameters, adding a line after each parameter\nimpl<A: 'static + Clone> ToDocument<LeanPrinter, A> for Vec<Param> {\n    fn to_document(&self, printer: &LeanPrinter) -> DocBuilder<A> {\n        printer.params(self)\n    }\n}\n\n#[prepend_associated_functions_with(install_pretty_helpers!(self: Self))]\nconst _: () = {\n    // Emits a CLI error with a github issue number, and prints \"sorry\" in the lean output\n    macro_rules! emit_error {($($tt:tt)*) => {disambiguated_todo!($($tt)*)};}\n\n    // Insert a new line in a doc (pretty)\n    macro_rules! line {($($tt:tt)*) => {disambiguated_line!($($tt)*)};}\n\n    // Concatenate docs (pretty )\n    macro_rules! concat {($($tt:tt)*) => {disambiguated_concat!($($tt)*)};}\n\n    // Given an iterable `[A,B, ... , C]` and a separator `S`, create the doc `ASBS...CS`\n    macro_rules! zip_right {\n        ($a:expr, $sep:expr) => {\n            docs![concat!($a.into_iter().map(|a| docs![a, $sep]))]\n        };\n    }\n\n    // Given an iterable `[A,B, ... , C]` and a separator `S`, create the doc `SASB...SC`\n    macro_rules! zip_left {\n        ($sep:expr, $a:expr) => {\n            docs![concat!($a.into_iter().map(|a| docs![$sep, a]))]\n        };\n    }\n\n    // Prints a one-line comment\n    macro_rules! 
comment {\n        ($e:expr) => {\n            docs![\"-- \", $e]\n        };\n    }\n\n    // Extra methods, specific to the LeanPrinter\n    impl LeanPrinter {\n        /// Prints arguments a variant or constructor of struct, using named or unamed arguments based\n        /// on the `is_record` flag. Used for both expressions and patterns\n        pub fn arguments<A: 'static + Clone, D>(\n            &self,\n            fields: &[(GlobalId, D)],\n            is_record: &bool,\n        ) -> DocBuilder<A>\n        where\n            D: ToDocument<Self, A>,\n        {\n            if *is_record {\n                self.named_arguments(fields)\n            } else {\n                self.positional_arguments(fields)\n            }\n        }\n\n        /// Prints fields of structures (when in braced notation)\n        fn struct_fields<A: 'static + Clone, D>(&self, fields: &[(GlobalId, D)]) -> DocBuilder<A>\n        where\n            D: ToDocument<Self, A>,\n        {\n            docs![intersperse!(\n                fields\n                    .iter()\n                    .map(|(id, e)| { docs![self.render_last(id), reflow!(\" := \"), e].group() }),\n                docs![\",\", line!()]\n            )]\n            .group()\n        }\n        /// Prints named arguments (record) of a variant or constructor of struct\n        fn named_arguments<A: 'static + Clone, D>(&self, fields: &[(GlobalId, D)]) -> DocBuilder<A>\n        where\n            D: ToDocument<Self, A>,\n        {\n            docs![zip_left!(\n                line!(),\n                fields.iter().map(|(id, e)| {\n                    docs![self.render_last(id), reflow!(\" := \"), e]\n                        .parens()\n                        .group()\n                })\n            )]\n            .group()\n        }\n\n        /// Prints positional arguments (tuple) of a variant or constructor of struct\n        fn positional_arguments<A: 'static + Clone, D>(\n            &self,\n            fields: 
&[(GlobalId, D)],\n        ) -> DocBuilder<A>\n        where\n            D: ToDocument<Self, A>,\n        {\n            docs![zip_left!(line!(), fields.iter().map(|(_, e)| e))].group()\n        }\n\n        /// Prints parameters of functions (items, trait items, impl items)\n        fn params<A: 'static + Clone>(&self, params: &Vec<Param>) -> DocBuilder<A> {\n            zip_left!(line!(), params)\n        }\n\n        /// Print parameters as function arguments\n        fn params_as_args<A: 'static + Clone>(&self, params: &[Param]) -> DocBuilder<A> {\n            zip_left!(\n                line!(),\n                params.iter().map(|param| {\n                    let Ty(ty_kind) = &param.ty;\n                    // We need to print arguments of type `Tuple0` as `⟨⟩` instead of `_`\n                    // https://github.com/cryspen/hax/issues/1856\n                    if let TyKind::App { head, .. } = **ty_kind\n                        && let Some(global_id::TupleId::Type { length: 0 }) = head.expect_tuple()\n                    {\n                        docs![\"⟨⟩\"]\n                    } else {\n                        docs![param]\n                    }\n                })\n            )\n        }\n\n        /// Renders expressions with an explicit ascription `(e : RustM ty)`. 
Used for the body of closure, for\n        /// numeric literals, etc.\n        fn expr_typed_result<A: 'static + Clone>(&self, expr: &Expr) -> DocBuilder<A> {\n            docs![\n                expr,\n                softline!(),\n                \":\",\n                line!(),\n                docs![\"RustM\", line!(), &expr.ty].group()\n            ]\n            .group()\n        }\n\n        fn pat_typed<A: 'static + Clone>(&self, pat: &Pat) -> DocBuilder<A> {\n            docs![pat, reflow!(\" :\"), line!(), &pat.ty].parens().group()\n        }\n\n        fn do_block<A: 'static + Clone, D: ToDocument<Self, A>>(&self, body: D) -> DocBuilder<A> {\n            docs![\"do\", line!(), body].group()\n        }\n\n        /// Produces a name for a constraint on an trait-level constraint, or an associated\n        /// type. The name is obtained by combining the type it applies to and the name of the\n        /// constraint (and should be unique)\n        fn constraint_name(&self, type_name: &String, constraint: &ImplIdent) -> String {\n            format!(\"trait_constr_{}_{}\", type_name, constraint.name)\n        }\n\n        /// Renders a named argument for associated types with equality constraints\n        /// (aka projections). 
If there are no equality constraints, returns None.\n        fn associated_type_projections<A: 'static + Clone>(\n            &self,\n            impl_ident: &ImplIdent,\n            projections: Vec<DocBuilder<A>>,\n        ) -> Option<DocBuilder<A>> {\n            (!projections.is_empty()).then_some(\n                docs![\n                    \"(associatedTypes := {\",\n                    line!(),\n                    docs![\n                        \"show\",\n                        line!(),\n                        impl_ident.goal.trait_,\n                        \".AssociatedTypes\",\n                        zip_left!(line!(), impl_ident.goal.args.iter()),\n                    ]\n                    .group()\n                    .nest(INDENT),\n                    line!(),\n                    reflow!(\"by infer_instance\"),\n                    line!(),\n                    docs![\n                        \"with\",\n                        line!(),\n                        intersperse!(projections, docs![\",\", line!()]),\n                    ]\n                    .group()\n                    .nest(INDENT),\n                    \"})\"\n                ]\n                .group()\n                .nest(INDENT),\n            )\n        }\n\n        /// Turns an expression of type `RustM T` into one of type `T` (out of the monad), providing\n        /// reflexivity as a proof witness.\n        fn monad_extract<A: 'static + Clone>(&self, expr: &Expr) -> DocBuilder<A> {\n            if let ExprKind::App { head, args, .. } = expr.kind()\n                && let ExprKind::GlobalId(PURE) = head.kind()\n                && let [pure_expr] = &args[..]\n                && let ExprKind::Literal(_) | ExprKind::GlobalId(_) | ExprKind::LocalId(_) =\n                    pure_expr.kind()\n            {\n                // Pure values are displayed directly. 
Note that constructors, while pure, may\n                // contain sub-expressions that are not, so they must be wrapped in a do-block\n                docs![pure_expr]\n            } else {\n                // All other expressions are wrapped in a do-block, and extracted out of the monad\n                docs![\n                    \"RustM.of_isOk\",\n                    line!(),\n                    self.do_block(expr).parens(),\n                    line!(),\n                    \"(by rfl)\"\n                ]\n                .group()\n                .nest(INDENT)\n            }\n        }\n\n        /// Print trait items, adding trait-level params as extra arguments\n        fn trait_item_with_trait_params<A: 'static + Clone>(\n            &self,\n            trait_generics: &[GenericParam],\n            TraitItem {\n                meta: _,\n                kind,\n                generics: item_generics,\n                ident,\n            }: &TraitItem,\n        ) -> DocBuilder<A> {\n            {\n                let name = self.render_last(ident);\n                let trait_generics = zip_left!(\n                    softline!(),\n                    trait_generics\n                        .iter()\n                        .map(|GenericParam { ident, .. 
}| docs![ident].parens())\n                );\n                docs![match kind {\n                    TraitItemKind::Fn(ty) => {\n                        docs![\n                            name,\n                            trait_generics,\n                            self.generics(item_generics, &self.render_last(ident)),\n                            softline!(),\n                            \":\",\n                            line!(),\n                            ty\n                        ]\n                        .group()\n                        .nest(INDENT)\n                    }\n                    TraitItemKind::Type(_) => {\n                        docs![name, softline!(), \":\", line!(), \"Type\"]\n                            .group()\n                            .nest(INDENT)\n                    }\n                    TraitItemKind::Default { params, body } => docs![\n                        docs![\n                            name,\n                            trait_generics,\n                            self.generics(item_generics, &self.render_last(ident)),\n                            zip_left!(line!(), params).group(),\n                            softline!(),\n                            \":\",\n                            if params.is_empty() {\n                                docs![body.ty, softline!(), reflow!(\":=\")]\n                            } else {\n                                docs![\"RustM\", softline!(), body.ty, softline!(), reflow!(\":= do\")]\n                                    .group()\n                            }\n                        ]\n                        .group(),\n                        line!(),\n                        if params.is_empty() {\n                            self.monad_extract(body)\n                        } else {\n                            docs![body]\n                        },\n                    ]\n                    .group()\n                    .nest(INDENT),\n                    
TraitItemKind::Resugared(_) => {\n                        unreachable!(\"This backend has no resugaring for trait items\")\n                    }\n                    TraitItemKind::Error(e) => docs![e],\n                }]\n            }\n        }\n\n        // Print generics, using `name` as a prefix for constraint names\n        fn generics<A: 'static + Clone>(\n            &self,\n            generics: &Generics,\n            name: &String,\n        ) -> DocBuilder<A> {\n            docs![\n                zip_left!(line!(), &generics.params),\n                zip_left!(\n                    line!(),\n                    generics.type_class_constraints().map(|impl_ident| {\n                        let projections = generics\n                            .equality_constraints()\n                            .filter(|p| !matches!(&*p.impl_.kind, ImplExprKind::LocalBound { id } if *id != impl_ident.name ))\n                            .map(|p| {\n                                if let ImplExprKind::LocalBound { .. } = &*p.impl_.kind {\n                                    docs![p]\n                                } else if let ImplExprKind::Parent { .. 
} = &*p.impl_.kind {\n                                    emit_error!(issue 1923, \"Unsupported equality constraints on associated types of parent trait\")\n                                } else {\n                                    emit_error!(issue 1924, \"Unsupported variant of associated type projection\")\n                                }\n                            })\n                            .collect::<Vec<_>>();\n                        docs![\n                            docs![\n                                self.constraint_name(&format!(\"{}_associated_type\", name), impl_ident),\n                                reflow!(\" : \"),\n                                impl_ident.goal.trait_,\n                                \".AssociatedTypes\",\n                                concat!(\n                                    impl_ident.goal.args.iter().map(|arg| docs![line!(), arg])\n                                )\n                            ]\n                            .brackets()\n                            .group()\n                            .nest(INDENT),\n                            line!(),\n                            docs![\n                                self.constraint_name(name, impl_ident),\n                                reflow!(\" : \"),\n                                impl_ident.goal.trait_,\n                                concat!(\n                                    impl_ident.goal.args.iter().map(|arg| docs![line!(), arg])\n                                ),\n                                line!(),\n                                self.associated_type_projections(impl_ident, projections)\n                            ]\n                            .brackets()\n                            .nest(INDENT)\n                            .group()\n                        ]\n                        .group()\n                    })\n                ),\n            ]\n            .group()\n        }\n\n        /// Print spec of an 
item\n        fn spec<A: 'static + Clone>(\n            &self,\n            item: &Item,\n            name: &GlobalId,\n            generics: &Generics,\n            params: &Vec<Param>,\n        ) -> DocBuilder<A> {\n            let linked_items = HasLinkedItemGraph::linked_item_graph(self);\n            let spec = linked_items.fn_like_linked_expressions(item, item.self_id());\n            if !linked_items.has_spec(item) {\n                nil!()\n            } else {\n                match hax_proof_attributes(item) {\n                    Err(message) => emit_error!(\"{message}\"),\n                    Ok(proof_attributes) => {\n                        let (tactic, specset) = match proof_attributes.proof_method {\n                            Some(ProofMethod::Grind) => (\"grind\", \"int\"),\n                            Some(ProofMethod::BvDecide) | None => (\"bv_decide\", \"bv\"),\n                        };\n                        let pure_requires_proof = proof_attributes\n                            .pure_requires_proof\n                            .unwrap_or(format!(\"by hax_construct_pure <;> {tactic}\"));\n                        let pure_ensures_proof = proof_attributes\n                            .pure_ensures_proof\n                            .unwrap_or(format!(\"by hax_construct_pure <;> {tactic}\"));\n                        let proof = proof_attributes.proof.map(|s| docs![s]).unwrap_or(docs![\n                            \"by hax_mvcgen [\",\n                            name,\n                            \"] <;> \",\n                            tactic\n                        ]);\n                        {\n                            docs![\n                                hardline!(),\n                                hardline!(),\n                                docs![\"set_option hax_mvcgen.specset \\\"\", specset, \"\\\" in\"],\n                                hardline!(),\n                                \"@[hax_spec]\",\n                      
          hardline!(),\n                                docs![\n                                    docs![\n                                        \"def\",\n                                        line!(),\n                                        name,\n                                        \".spec\",\n                                        self.generics(generics, &self.render_last(name)),\n                                        params,\n                                        softline!(),\n                                        \":\"\n                                    ]\n                                    .group()\n                                    .nest(INDENT),\n                                    line!(),\n                                    docs![\n                                        \"Spec\",\n                                        line!(),\n                                        docs![\n                                            \"requires\",\n                                            softline!(),\n                                            \":= do\",\n                                            line!(),\n                                            spec.precondition\n                                                .map_or(reflow!(\"pure True\"), |p| docs![p])\n                                        ]\n                                        .parens()\n                                        .group()\n                                        .nest(INDENT),\n                                        line!(),\n                                        docs![\n                                            \"ensures := \",\n                                            spec.postcondition.map_or(\n                                                reflow!(\"fun _ => pure True\"),\n                                                |p| docs![\n                                                    \"fun\",\n                                                    
line!(),\n                                                    p.result_binder,\n                                                    softline!(),\n                                                    \"=> do\",\n                                                    line!(),\n                                                    p.body,\n                                                ]\n                                                .group()\n                                                .nest(INDENT)\n                                            ),\n                                        ]\n                                        .parens()\n                                        .group()\n                                        .nest(INDENT),\n                                        line!(),\n                                        docs![\n                                            name,\n                                            zip_left!(line!(), &generics.params),\n                                            self.params_as_args(params)\n                                        ]\n                                        .parens()\n                                        .group()\n                                        .nest(INDENT)\n                                    ]\n                                    .group()\n                                    .nest(INDENT),\n                                    softline!(),\n                                    \":=\",\n                                ]\n                                .group()\n                                .nest(2 * INDENT),\n                                softline!(),\n                                docs![\n                                    hardline!(),\n                                    docs![\"pureRequires :=\", softline!(), pure_requires_proof],\n                                    hardline!(),\n                                    docs![\"pureEnsures :=\", softline!(), 
pure_ensures_proof],\n                                    hardline!(),\n                                    docs![\"contract :=\", softline!(), proof]\n                                        .group()\n                                        .nest(INDENT),\n                                    hardline!(),\n                                ]\n                                .nest(INDENT)\n                                .braces(),\n                            ]\n                        }\n                    }\n                }\n            }\n        }\n    }\n\n    impl<A: 'static + Clone> ToDocument<LeanPrinter, A> for (Vec<GenericParam>, &TraitItem) {\n        fn to_document(&self, printer: &LeanPrinter) -> DocBuilder<A> {\n            printer.trait_item_with_trait_params(&self.0, self.1)\n        }\n    }\n\n    impl<A: 'static + Clone> PrettyAst<A> for LeanPrinter {\n        const NAME: &'static str = \"Lean\";\n\n        /// Produce a non-panicking placeholder document. In general, prefer the use of the helper macro [`todo_document!`].\n        fn todo_document(&self, message: &str, issue_id: Option<u32>) -> DocBuilder<A> {\n            <Self as PrettyAst<A>>::emit_diagnostic(\n                self,\n                hax_types::diagnostics::Kind::Unimplemented {\n                    issue_id,\n                    details: Some(message.into()),\n                },\n            );\n            text!(\"sorry\")\n        }\n\n        fn module(&self, module: &Module) -> DocBuilder<A> {\n            let current_namespace = module.ident;\n            let new_printer = LeanPrinter {\n                current_namespace: Some(current_namespace),\n                ..self.clone()\n            };\n            let items = &module.items;\n            docs![\n                \"namespace \",\n                current_namespace,\n                hardline!(),\n                hardline!(),\n                intersperse!(\n                    items.iter().map(|item| { 
item.to_document(&new_printer) }),\n                    docs![hardline!(), hardline!()]\n                ),\n                hardline!(),\n                hardline!(),\n                \"end \",\n                current_namespace,\n                hardline!(),\n                hardline!(),\n            ]\n        }\n\n        fn global_id(&self, global_id: &GlobalId) -> DocBuilder<A> {\n            docs![self.render_id(global_id)]\n        }\n\n        fn generics(&self, generics: &Generics) -> DocBuilder<A> {\n            self.generics(generics, &String::new())\n        }\n\n        fn generic_constraint(&self, _: &GenericConstraint) -> DocBuilder<A> {\n            unreachable!(\n                \"Generic constraints are rendered inline because they must contain associated type projections.\"\n            )\n        }\n\n        fn generic_param(&self, generic_param: &GenericParam) -> DocBuilder<A> {\n            match generic_param.kind() {\n                GenericParamKind::Type => docs![&generic_param.ident, reflow!(\" : Type\")]\n                    .parens()\n                    .group(),\n                GenericParamKind::Lifetime => unreachable_by_invariant!(Drop_references),\n                GenericParamKind::Const { ty } => docs![&generic_param.ident, reflow!(\" : \"), ty]\n                    .parens()\n                    .group(),\n            }\n        }\n\n        fn generic_value(&self, generic_value: &GenericValue) -> DocBuilder<A> {\n            match generic_value {\n                GenericValue::Ty(ty) => docs![ty],\n                GenericValue::Expr(expr) => docs![expr].parens(),\n                GenericValue::Lifetime => unreachable_by_invariant!(Drop_references),\n            }\n        }\n\n        fn expr(&self, Expr { kind, ty, meta: _ }: &Expr) -> DocBuilder<A> {\n            match &**kind {\n                ExprKind::If {\n                    condition,\n                    then,\n                    else_,\n                } => {\n    
                if let Some(else_branch) = else_ {\n                        docs![\n                            docs![\"if\", line!(), condition, reflow!(\" then do\")].group(),\n                            docs![line!(), then].nest(INDENT),\n                            line!(),\n                            reflow!(\"else do\"),\n                            docs![line!(), else_branch].nest(INDENT)\n                        ]\n                        .group()\n                    } else {\n                        unreachable_by_invariant!(Local_mutation)\n                    }\n                }\n                ExprKind::App {\n                    head,\n                    args,\n                    generic_args,\n                    bounds_impls: _,\n                    trait_,\n                } => {\n                    match (&args[..], &generic_args[..], head.kind()) {\n                        ([arg], [], ExprKind::GlobalId(LIFT)) => docs![reflow!(\"← \"), arg].parens(),\n                        ([arg], [], ExprKind::GlobalId(PURE)) => {\n                            docs![reflow!(\"pure \"), arg].parens()\n                        }\n                        ([arg], [], ExprKind::GlobalId(CAST_OP)) => docs![\n                            // Add type annotation for `cast_op`:\n                            docs![head, line!(), arg],\n                            softline!(),\n                            \":\",\n                            line!(),\n                            \"RustM\",\n                            softline!(),\n                            ty\n                        ]\n                        .parens()\n                        .group()\n                        .nest(INDENT),\n                        // TODO: Replace this match pattern with an `if let` guard when the feature stabilizes\n                        // Tracking PR: https://github.com/rust-lang/rust/pull/141295\n                        (\n                            [arg],\n                 
           [],\n                            ExprKind::GlobalId(op @ (binops::neg | binops::not | binops::Not::not)),\n                        ) if arg.ty == Ty::bool() || arg.ty.is_int() => {\n                            let symbol = match *op {\n                                binops::neg => \"-?\",\n                                binops::not => \"~?\",\n                                binops::Not::not => \"!?\",\n                                _ => unreachable!(),\n                            };\n                            docs![symbol, softline!(), arg].parens()\n                        }\n                        ([lhs, rhs], [], ExprKind::GlobalId(binops::Index::index)) => {\n                            docs![lhs, \"[\", line_!(), rhs, line_!(), \"]_?\"]\n                                .nest(INDENT)\n                                .group()\n                        }\n                        // TODO: Replace this match pattern with an `if let` guard when the feature stabilizes\n                        // Tracking PR: https://github.com/rust-lang/rust/pull/141295\n                        (\n                            [lhs, rhs],\n                            [],\n                            ExprKind::GlobalId(\n                                op @ (binops::add\n                                | binops::sub\n                                | binops::mul\n                                | binops::div\n                                | binops::rem\n                                | binops::shr\n                                | binops::shl\n                                | binops::bitand\n                                | binops::BitAnd::bitand\n                                | binops::bitor\n                                | binops::BitOr::bitor\n                                | binops::bitxor\n                                | binops::BitXor::bitxor\n                                | binops::logical_op_and\n                                | 
binops::logical_op_or\n                                | binops::eq\n                                | binops::PartialEq::eq\n                                | binops::lt\n                                | binops::le\n                                | binops::gt\n                                | binops::ge\n                                | binops::ne\n                                | binops::PartialEq::ne),\n                            ),\n                        ) if (lhs.ty == Ty::bool() && rhs.ty == Ty::bool())\n                            || (rhs.ty.is_int() && lhs.ty.is_int()) =>\n                        {\n                            let symbol = match *op {\n                                binops::add => \"+?\",\n                                binops::sub => \"-?\",\n                                binops::mul => \"*?\",\n                                binops::div => \"/?\",\n                                binops::rem => \"%?\",\n                                binops::shr => \">>>?\",\n                                binops::shl => \"<<<?\",\n                                binops::bitand => \"&&&?\",\n                                binops::BitAnd::bitand => \"&&?\",\n                                binops::bitor => \"|||?\",\n                                binops::BitOr::bitor => \"||?\",\n                                binops::bitxor => \"^^^?\",\n                                binops::BitXor::bitxor => \"^^?\",\n                                binops::logical_op_and => \"&&?\",\n                                binops::logical_op_or => \"||?\",\n                                binops::eq => \"==?\",\n                                binops::PartialEq::eq => \"==?\",\n                                binops::lt => \"<?\",\n                                binops::le => \"<=?\",\n                                binops::gt => \">?\",\n                                binops::ge => \">=?\",\n                                binops::ne => \"!=?\",\n       
                         binops::PartialEq::ne => \"!=?\",\n                                _ => unreachable!(),\n                            };\n                            docs![lhs, line!(), docs![symbol, softline!(), rhs].group()]\n                                .group()\n                                .nest(INDENT)\n                                .parens()\n                        }\n                        _ => {\n                            // Fallback for any application\n                            docs![\n                                head,\n                                trait_\n                                    .as_ref()\n                                    .map(|(impl_expr, _)| zip_left!(line!(), &impl_expr.goal.args)),\n                                zip_left!(line!(), generic_args).group(),\n                                zip_left!(line!(), args).group(),\n                            ]\n                            .parens()\n                            .nest(INDENT)\n                            .group()\n                        }\n                    }\n                }\n                ExprKind::Literal(numeric_lit @ (Literal::Float { .. } | Literal::Int { .. 
})) => {\n                    docs![numeric_lit, reflow!(\" : \"), ty].parens().group()\n                }\n                ExprKind::Literal(literal) => docs![literal],\n                ExprKind::Array(exprs) => docs![\n                    \"RustArray.ofVec #v[\",\n                    intersperse!(exprs, docs![\",\", line!()])\n                        .nest(INDENT)\n                        .group()\n                        .align(),\n                    \"]\"\n                ]\n                .parens()\n                .group(),\n                ExprKind::Construct {\n                    constructor,\n                    is_record,\n                    is_struct,\n                    fields,\n                    base,\n                } => {\n                    if fields.is_empty() && base.is_none() {\n                        docs![constructor]\n                    } else if let Some(base) = base {\n                        if !(*is_record && *is_struct) {\n                            unreachable!(\n                                \"Constructors with base expressions are necessarily structs with record-like arguments\"\n                            )\n                        }\n                        docs![base, line!(), reflow!(\"with \"), self.struct_fields(fields)]\n                            .braces()\n                            .group()\n                    } else {\n                        docs![constructor, self.arguments(fields, is_record)]\n                            .nest(INDENT)\n                            .parens()\n                            .group()\n                    }\n                }\n                ExprKind::Let { lhs, rhs, body }\n                | ExprKind::Resugared(ResugaredExprKind::LetPure { lhs, rhs, body }) => {\n                    let binder = if matches!(**kind, ExprKind::Let { .. 
}) {\n                        \" ←\"\n                    } else {\n                        \" :=\"\n                    };\n                    docs![\n                        docs![\n                            docs![\n                                \"let\",\n                                line!(),\n                                // TODO: Improve treatment of patterns in general. see\n                                // https://github.com/cryspen/hax/issues/1712\n                                match *lhs.kind.clone() {\n                                    PatKind::Ascription { .. } =>\n                                        docs![lhs, reflow!(\" : \"), &lhs.ty],\n                                    PatKind::Binding {\n                                        mutable: false,\n                                        var,\n                                        mode: BindingMode::ByValue,\n                                        sub_pat: None,\n                                    } => docs![&var, reflow!(\" : \"), &lhs.ty],\n                                    _ => docs![lhs],\n                                },\n                            ]\n                            .group(),\n                            binder,\n                            line!(),\n                            rhs,\n                            \";\"\n                        ]\n                        .nest(INDENT)\n                        .group(),\n                        line!(),\n                        body,\n                    ]\n                }\n                ExprKind::GlobalId(global_id) => docs![global_id],\n                ExprKind::LocalId(local_id) => docs![local_id],\n                ExprKind::Ascription { e, ty } => docs![e, reflow!(\" : \"), ty].parens().group(),\n                ExprKind::Closure {\n                    params,\n                    body,\n                    captures: _,\n                } => docs![\n                    docs![\n                        
reflow!(\"fun\"),\n                        zip_left!(line!(), params),\n                        softline!(),\n                        \"=>\"\n                    ]\n                    .group(),\n                    line!(),\n                    self.do_block(self.expr_typed_result(body)).parens()\n                ]\n                .parens()\n                .group()\n                .nest(INDENT),\n\n                ExprKind::Resugared(ResugaredExprKind::Tuple { .. }) => {\n                    unreachable!(\"This printer doesn't use the tuple resugaring\")\n                }\n                ExprKind::Match { scrutinee, arms } => docs![\n                    docs![\n                        \"match\",\n                        docs![line!(), scrutinee].nest(INDENT),\n                        line!(),\n                        \"with\"\n                    ]\n                    .group(),\n                    docs![line!(), intersperse!(arms, line!())]\n                        .group()\n                        .nest(INDENT),\n                ]\n                .group(),\n\n                ExprKind::Borrow { .. } => {\n                    unreachable_by_invariant!(Drop_references)\n                }\n                ExprKind::AddressOf { .. } => unreachable_by_invariant!(Reject_raw_or_mut_pointer),\n                ExprKind::Assign { .. } => unreachable_by_invariant!(Local_mutation),\n                ExprKind::Loop { .. } => unreachable_by_invariant!(Functionalize_loops),\n                ExprKind::Break { .. } | ExprKind::Return { .. } | ExprKind::Continue { .. } => {\n                    unreachable_by_invariant!(Drop_break_continue_return)\n                }\n                ExprKind::Block { .. 
} => unreachable_by_invariant!(Drop_blocks),\n                ExprKind::Quote { contents } => docs![contents],\n                ExprKind::Error(error_node) => docs![error_node],\n            }\n        }\n\n        fn arm(&self, arm: &Arm) -> DocBuilder<A> {\n            if let Some(_guard) = &arm.guard {\n                unreachable_by_invariant!(Drop_match_guards)\n            } else {\n                docs![\n                    reflow!(\"| \"),\n                    &arm.pat,\n                    softline!(),\n                    \"=>\",\n                    softline!(),\n                    \"do\",\n                    line!(),\n                    &arm.body\n                ]\n                .nest(INDENT)\n                .group()\n            }\n        }\n\n        fn pat(&self, pat: &Pat) -> DocBuilder<A> {\n            match &*pat.kind {\n                PatKind::Wild => docs![\"_\"],\n                PatKind::Ascription { pat, ty: _ } => docs![pat],\n                PatKind::Binding {\n                    mutable,\n                    var,\n                    mode,\n                    sub_pat,\n                } => match (mutable, mode, sub_pat) {\n                    (true, _, _) => unreachable_by_invariant!(Local_mutation),\n                    (false, BindingMode::ByRef(_), _) => unreachable_by_invariant!(Drop_references),\n                    (false, BindingMode::ByValue, None) => docs![var],\n                    (false, BindingMode::ByValue, Some(pat)) => {\n                        docs![var, \"@\", softline_!(), pat].group()\n                    }\n                },\n                PatKind::Or { sub_pats } => docs![intersperse!(sub_pats, reflow!(\" | \"))].group(),\n                PatKind::Array { .. } => {\n                    emit_error!(issue 1712, \"Unsupported pattern-matching on arrays\")\n                }\n                PatKind::Deref { .. 
} => unreachable_by_invariant!(Drop_references),\n                PatKind::Constant {\n                    lit: Literal::Float { .. },\n                } => {\n                    emit_error!(issue 1788, \"Unsupported pattern-matching on floats\")\n                }\n                PatKind::Constant { lit } => docs![lit],\n                PatKind::Construct {\n                    constructor,\n                    is_record,\n                    is_struct,\n                    fields,\n                } => {\n                    if *is_struct {\n                        if !*is_record {\n                            // Tuple-like structure, using positional arguments\n                            docs![\n                                \"⟨\",\n                                intersperse!(\n                                    fields.iter().map(|field| { docs![&field.1] }),\n                                    docs![\",\", line!()]\n                                )\n                                .align()\n                                .group(),\n                                \"⟩\"\n                            ]\n                            .align()\n                            .group()\n                        } else {\n                            // Record-like structure, using named arguments\n                            docs![intersperse!(\n                                fields.iter().map(|(id, pat)| {\n                                    docs![self.render_last(id), reflow!(\" :=\"), line!(), pat]\n                                        .group()\n                                }),\n                                docs![\",\", line!()]\n                            )]\n                            .align()\n                            .braces()\n                            .group()\n                        }\n                    } else {\n                        // Variant\n                        docs![\n                            constructor,\n              
              line!(),\n                            self.arguments(fields, is_record).align()\n                        ]\n                        .parens()\n                        .group()\n                        .nest(INDENT)\n                    }\n                }\n                PatKind::Resugared(ResugaredPatKind::ConstructWithEllipsis {\n                    constructor,\n                    is_struct,\n                    fields,\n                }) => {\n                    if *is_struct {\n                        // Struct: render as `{f1 := pat, f2 := pat, ..}` or `_`\n                        if fields.is_empty() {\n                            docs![\"_\"]\n                        } else {\n                            docs![intersperse!(\n                                fields\n                                    .iter()\n                                    .map(|(id, pat)| {\n                                        docs![self.render_last(id), reflow!(\" :=\"), line!(), pat]\n                                            .group()\n                                    })\n                                    .chain(std::iter::once(docs![\"..\"])),\n                                docs![\",\", line!()]\n                            )]\n                            .align()\n                            .braces()\n                            .group()\n                        }\n                    } else {\n                        // Enum variant with named fields: (f1 := pat) (f2 := pat) ..\n                        let record_part = if fields.is_empty() {\n                            docs![\"_\"]\n                        } else {\n                            docs![intersperse!(\n                                fields.iter().map(|(id, pat)| {\n                                    docs![self.render_last(id), reflow!(\" :=\"), line!(), pat]\n                                        .group()\n                                        .parens()\n                         
       }),\n                                line!()\n                            )]\n                            .align()\n                            .group()\n                        };\n                        docs![constructor, line!(), record_part, \" ..\"]\n                            .parens()\n                            .group()\n                            .nest(INDENT)\n                    }\n                }\n                PatKind::Error(_) => {\n                    // TODO : Should be made unreachable by https://github.com/cryspen/hax/pull/1672\n                    text!(\"sorry\")\n                }\n            }\n        }\n\n        fn ty(&self, ty: &Ty) -> DocBuilder<A> {\n            match ty.kind() {\n                TyKind::Primitive(primitive_ty) => docs![primitive_ty],\n                TyKind::App { head, args } => {\n                    if args.is_empty() {\n                        docs![head]\n                    } else {\n                        docs![head, zip_left!(line!(), args)]\n                            .parens()\n                            .group()\n                            .nest(INDENT)\n                    }\n                }\n                TyKind::Arrow { inputs, output } => docs![\n                    zip_right!(inputs, docs![softline!(), \"->\", line!()]),\n                    \"RustM\",\n                    softline!(),\n                    output\n                ]\n                .parens()\n                .group(),\n                TyKind::Param(local_id) => docs![local_id],\n                TyKind::Slice(ty) => docs![\"RustSlice\", line!(), ty].parens().group(),\n                TyKind::Array { ty, length } => docs![\"RustArray\", line!(), ty, line!(), {\n                    if let ExprKind::Literal(int_lit @ Literal::Int { .. 
}) = length.kind() {\n                        docs![int_lit]\n                    } else if let ExprKind::LocalId(local_id) = length.kind() {\n                        docs![local_id]\n                    } else {\n                        unreachable!(\n                            \"Only arrays with integer literal or const param size are supported\"\n                        )\n                    }\n                }]\n                .parens()\n                .group(),\n                TyKind::AssociatedType { impl_, item } => {\n                    let kind = impl_.kind();\n                    match &kind {\n                        ImplExprKind::Self_ => docs![\"associatedTypes.\", self.render_last(item)],\n                        ImplExprKind::Parent { ident, .. }\n                        | ImplExprKind::Projection { ident, .. } => {\n                            docs![item, zip_left!(line!(), ident.goal.args.iter())]\n                                .parens()\n                                .group()\n                                .nest(INDENT)\n                        }\n                        ImplExprKind::LocalBound { .. } => {\n                            docs![item, zip_left!(line!(), impl_.goal.args.iter())]\n                                .parens()\n                                .group()\n                                .nest(INDENT)\n                        }\n                        _ => {\n                            emit_error!(issue 1922, \"Unsupported variant of associated type\")\n                        }\n                    }\n                }\n                TyKind::Ref { .. 
} => unreachable_by_invariant!(Drop_references),\n                TyKind::RawPointer => unreachable_by_invariant!(Reject_raw_or_mut_pointer),\n                TyKind::Opaque(_) => emit_error!(issue 1714, \"Unsupported opaque type definitions\"),\n                TyKind::Dyn(_) => emit_error!(issue 1708, \"Unsupported `dyn` traits\"),\n                TyKind::Resugared(resugared_ty_kind) => match resugared_ty_kind {\n                    ResugaredTyKind::Tuple(_) => {\n                        unreachable!(\"This backend does not use tuple resugaring (yet)\")\n                    }\n                },\n                TyKind::Error(e) => docs![e],\n            }\n        }\n\n        fn literal(&self, literal: &Literal) -> DocBuilder<A> {\n            docs![match literal {\n                Literal::String(symbol) => format!(\"\\\"{}\\\"\", self.escape_string(symbol)),\n                Literal::Char(c) => format!(\"'{c}'\"),\n                Literal::Bool(b) => format!(\"{b}\"),\n                Literal::Int {\n                    value,\n                    negative,\n                    kind: _,\n                } => format!(\"{}{value}\", if *negative { \"-\" } else { \"\" }),\n                Literal::Float {\n                    value,\n                    negative,\n                    kind: _,\n                } => format!(\"{}{value}\", if *negative { \"-\" } else { \"\" }),\n            }]\n        }\n\n        fn local_id(&self, local_id: &LocalId) -> DocBuilder<A> {\n            // TODO: should be done by name rendering, see https://github.com/cryspen/hax/issues/1630\n            docs![Self::escape(&local_id.0)]\n        }\n\n        fn spanned_ty(&self, spanned_ty: &SpannedTy) -> DocBuilder<A> {\n            docs![&spanned_ty.ty]\n        }\n\n        fn primitive_ty(&self, primitive_ty: &PrimitiveTy) -> DocBuilder<A> {\n            match primitive_ty {\n                PrimitiveTy::Bool => docs![\"Bool\"],\n                PrimitiveTy::Int(int_kind) => 
docs![int_kind],\n                PrimitiveTy::Float(float_kind) => docs![float_kind],\n                PrimitiveTy::Char => docs![\"Char\"],\n                PrimitiveTy::Str => docs![\"String\"],\n            }\n        }\n\n        fn int_kind(&self, int_kind: &IntKind) -> DocBuilder<A> {\n            docs![match (&int_kind.signedness, &int_kind.size) {\n                (Signedness::Signed, IntSize::S8) => \"i8\",\n                (Signedness::Signed, IntSize::S16) => \"i16\",\n                (Signedness::Signed, IntSize::S32) => \"i32\",\n                (Signedness::Signed, IntSize::S64) => \"i64\",\n                (Signedness::Signed, IntSize::S128) => \"i128\",\n                (Signedness::Signed, IntSize::SSize) => \"isize\",\n                (Signedness::Unsigned, IntSize::S8) => \"u8\",\n                (Signedness::Unsigned, IntSize::S16) => \"u16\",\n                (Signedness::Unsigned, IntSize::S32) => \"u32\",\n                (Signedness::Unsigned, IntSize::S64) => \"u64\",\n                (Signedness::Unsigned, IntSize::S128) => \"u128\",\n                (Signedness::Unsigned, IntSize::SSize) => \"usize\",\n            }]\n        }\n\n        fn float_kind(&self, float_kind: &FloatKind) -> DocBuilder<A> {\n            docs![match float_kind {\n                FloatKind::F32 => \"f32\",\n                FloatKind::F64 => \"f64\",\n                _ => emit_error!(issue 1787, \"The only supported float types are `f32` and `f64`.\"),\n            }]\n        }\n\n        fn quote_content(&self, quote_content: &QuoteContent) -> DocBuilder<A> {\n            match quote_content {\n                QuoteContent::Verbatim(s) => {\n                    intersperse!(s.lines().map(|x| x.to_string()), hardline!())\n                }\n                QuoteContent::Expr(expr) => docs![expr],\n                QuoteContent::Pattern(pat) => docs![pat],\n                QuoteContent::Ty(ty) => docs![ty],\n            }\n        }\n\n        fn quote(&self, 
quote: &Quote) -> DocBuilder<A> {\n            concat![&quote.0]\n        }\n\n        fn param(&self, param: &Param) -> DocBuilder<A> {\n            if matches!(\n                *param.pat.kind,\n                PatKind::Wild | PatKind::Ascription { .. } | PatKind::Binding { sub_pat: None, .. }\n            ) {\n                self.pat_typed(&param.pat)\n            } else {\n                emit_error!(issue 1791, \"Function parameters must not contain patterns\")\n            }\n        }\n\n        fn item(&self, item @ Item { ident, kind, meta }: &Item) -> DocBuilder<A> {\n            let body = match kind {\n                ItemKind::Fn {\n                    name,\n                    generics,\n                    body,\n                    params,\n                    safety: _,\n                } => {\n                    let opaque = item.is_opaque();\n                    let linked_items = HasLinkedItemGraph::linked_item_graph(self);\n                    docs![\n                        if opaque || linked_items.has_spec(item) {\n                            nil!()\n                        } else {\n                            // Function should be unfolded by `mvcgen`\n                            docs![\"@[spec]\", hardline!()]\n                        },\n                        docs![\n                            docs![\n                                docs![\n                                    docs![if opaque { \"opaque\" } else { \"def\" }, line!(), name]\n                                        .group(),\n                                    self.generics(generics, &self.render_last(name)),\n                                    params,\n                                    softline!(),\n                                    \":\"\n                                ]\n                                .group(),\n                                line!(),\n                                docs![\n                                    \"RustM\",\n                 
                   line!(),\n                                    &body.ty,\n                                    if opaque {\n                                        nil!()\n                                    } else {\n                                        docs![line!(), \":= do\"]\n                                    }\n                                ]\n                                .group(),\n                            ]\n                            .group()\n                            .nest(INDENT),\n                            if opaque { nil!() } else { docs![line!(), body] }\n                        ]\n                        .group()\n                        .nest(INDENT),\n                        if opaque {\n                            nil!()\n                        } else {\n                            docs![&self.spec(item, name, generics, params)]\n                        }\n                    ]\n                }\n                ItemKind::TyAlias { name, generics, ty } => docs![\n                    \"abbrev \",\n                    name,\n                    self.generics(generics, &self.render_last(name)),\n                    softline!(),\n                    \":\",\n                    line!(),\n                    \"Type\",\n                    softline!(),\n                    \":=\",\n                    line!(),\n                    ty\n                ]\n                .nest(INDENT)\n                .group(),\n                ItemKind::RustModule | ItemKind::Use { .. 
} => nil!(),\n                ItemKind::Quote { quote, origin: _ } => docs![quote],\n                ItemKind::NotImplementedYet => {\n                    emit_error!(issue 1706, \"Item unsupported by the Hax engine (unimplemented yet)\")\n                }\n                ItemKind::Type {\n                    name,\n                    generics,\n                    variants,\n                    is_struct,\n                } => {\n                    if item.is_opaque() {\n                        docs![\n                            reflow!(\"opaque \"),\n                            name,\n                            self.generics(generics, &self.render_last(name)),\n                            softline!(),\n                            \":\",\n                            line!(),\n                            \"Type\"\n                        ]\n                        .group()\n                        .nest(INDENT)\n                    }\n                    // TODO: use a resugaring, see https://github.com/cryspen/hax/issues/1668\n                    else if *is_struct {\n                        // Structures\n                        let Some(variant) = variants.first() else {\n                            unreachable!(\n                                \"Structures should always have a constructor (even empty ones)\"\n                            )\n                        };\n                        let args = if variant.arguments.is_empty() {\n                            comment![\"no fields\"]\n                        } else if !variant.is_record {\n                            // Tuple-like structure, using positional arguments\n                            intersperse!(\n                                variant.arguments.iter().enumerate().map(|(i, (_, ty, _))| {\n                                    docs![format!(\"_{i} :\"), line!(), ty].group().nest(INDENT)\n                                }),\n                                hardline!()\n                      
      )\n                        } else {\n                            // Structure-like structure, using named arguments\n                            intersperse!(\n                                variant.arguments.iter().map(|(id, ty, _)| {\n                                    docs![self.render_last(id), reflow!(\" : \"), ty]\n                                        .group()\n                                        .nest(INDENT)\n                                }),\n                                hardline!()\n                            )\n                        };\n                        docs![\n                            docs![\n                                reflow!(\"structure \"),\n                                name,\n                                self.generics(generics, &self.render_last(name)),\n                                line!(),\n                                \"where\"\n                            ]\n                            .group(),\n                            docs![hardline!(), args],\n                        ]\n                        .nest(INDENT)\n                        .group()\n                    } else {\n                        // Enums\n                        let applied_name: DocBuilder<A> = if generics.params.is_empty()\n                            && generics.constraints.is_empty()\n                        {\n                            docs![name]\n                        } else {\n                            docs![name, self.generics(generics, &self.render_last(name))].group()\n                        };\n                        docs![\n                            docs![\n                                \"inductive \",\n                                name,\n                                self.generics(generics, &self.render_last(name)),\n                                softline!(),\n                                \":\",\n                                line!(),\n                                \"Type\"\n          
                  ]\n                            .group(),\n                            hardline!(),\n                            intersperse!(\n                                variants.iter().map(|variant| docs![\n                                    \"| \",\n                                    variant,\n                                    applied_name.clone()\n                                ]\n                                .group()\n                                .nest(INDENT)),\n                                hardline!()\n                            ),\n                        ]\n                    }\n                }\n                ItemKind::Trait {\n                    name,\n                    generics,\n                    items,\n                    safety: _,\n                } => {\n                    let generic_types = generics.type_class_constraints().collect::<Vec<_>>();\n                    if generic_types.len() < generics.constraints.len() {\n                        emit_error!(issue 1921, \"Unsupported equality constraints on associated types\")\n                    }\n                    docs![\n                        // A trait is encoded as two Lean type classes: one holding the associated types,\n                        // and one holding all other fields.\n                        // This is the type class holding the associated types:\n                        docs![\n                            docs![\n                                docs![reflow!(\"class \"), name, \".AssociatedTypes\"],\n                                zip_left!(line!(), &generics.params).group(),\n                                line!(),\n                                \"where\"\n                            ]\n                            .group(),\n                            zip_left!(\n                                hardline!(),\n                                generic_types.iter().map(|impl_ident| docs![\n                                    
self.constraint_name(&self.render_last(name), impl_ident),\n                                    \" :\",\n                                    line!(),\n                                    &impl_ident.goal.trait_,\n                                    \".AssociatedTypes\",\n                                    line!(),\n                                    intersperse!(&impl_ident.goal.args, line!())\n                                ]\n                                .group()\n                                .brackets())\n                            ),\n                            zip_left!(\n                                hardline!(),\n                                items\n                                    .iter()\n                                    .filter(|item| { matches!(item.kind, TraitItemKind::Type(_)) })\n                                    .map(|item| docs![(generics.params.clone(), item)])\n                            ),\n                        ]\n                        .nest(INDENT),\n                        // We add the `[instance]` attribute to the contained constraints to make\n                        // them available for type inference:\n                        zip_left!(\n                            docs![hardline!(), hardline!()],\n                            generic_types.iter().map(|impl_ident| docs![\n                                \"attribute [instance_reducible, instance]\",\n                                line!(),\n                                name,\n                                \".AssociatedTypes.\",\n                                self.constraint_name(&self.render_last(name), impl_ident),\n                            ]\n                            .group()\n                            .nest(INDENT))\n                        ),\n                        zip_left!(\n                            docs![hardline!(), hardline!()],\n                            items\n                                .iter()\n                             
   .filter(|item| { matches!(item.kind, TraitItemKind::Type(_)) })\n                                .map(|item| docs![\n                                    \"attribute [reducible]\",\n                                    line!(),\n                                    self.render_with_injection(\n                                        &item.ident,\n                                        &\"AssociatedTypes\".to_string()\n                                    )\n                                ]\n                                .group()\n                                .nest(INDENT))\n                        ),\n                        // When referencing associated types, we would like to refer to them as\n                        // `TraitName.TypeName` instead of `TraitName.AssociatedTypes.TypeName`:\n                        zip_left!(\n                            docs![hardline!(), hardline!()],\n                            items\n                                .iter()\n                                .filter(|item| { matches!(item.kind, TraitItemKind::Type(_)) })\n                                .map(|item| {\n                                    docs![\n                                        \"abbrev \",\n                                        name,\n                                        \".\",\n                                        self.render_last(&item.ident),\n                                        \" :=\",\n                                        line!(),\n                                        name,\n                                        \".AssociatedTypes\",\n                                        \".\",\n                                        self.render_last(&item.ident),\n                                    ]\n                                    .nest(INDENT)\n                                })\n                        ),\n                        hardline!(),\n                        hardline!(),\n                        // This is the type 
class holding all other fields:\n                        docs![\n                            docs![\n                                docs![reflow!(\"class \"), name],\n                                docs![\n                                    // Type parameters are also parameters of the class, but constraints are fields of the class\n                                    docs![zip_left!(line!(), &generics.params)].group(),\n                                    line!(),\n                                    // The collection of associated types is an extra parameter so that we can encode\n                                    // equality constraints on associated types.\n                                    docs![\n                                        reflow!(\"associatedTypes :\"),\n                                        softline!(),\n                                        \"outParam\",\n                                        softline!(),\n                                        docs![\n                                            name,\n                                            \".AssociatedTypes\",\n                                            softline!(),\n                                            intersperse!(&generics.params, softline!()),\n                                        ]\n                                        .parens()\n                                        .nest(INDENT)\n                                    ]\n                                    .brackets()\n                                    .nest(INDENT)\n                                ]\n                                .group(),\n                                line!(),\n                                \"where\"\n                            ]\n                            .group(),\n                            // Lean's `extends` does not work for us because one cannot implement\n                            // different functions of the same name on the super- and on the\n                 
           // subclass. So we treat supertraits like any other constraint:\n                            zip_left!(\n                                hardline!(),\n                                generic_types.iter().map(|impl_ident| docs![\n                                    self.constraint_name(&self.render_last(name), impl_ident),\n                                    softline!(),\n                                    \":\",\n                                    line!(),\n                                    impl_ident.goal.trait_,\n                                    zip_left!(line!(), impl_ident.goal.args.iter())\n                                ]\n                                .group()\n                                .brackets())\n                            ),\n                            // We also add constraints on associated types here:\n                            concat!(\n                                items\n                                    .iter()\n                                    .filter(|item| { matches!(item.kind, TraitItemKind::Type(_)) })\n                                    .map(|item| docs![\n                                        self.generics(\n                                            &item.generics,\n                                            &self.render_last(&item.ident)\n                                        )\n                                    ])\n                            ),\n                            // Finally the regular trait items:\n                            zip_left!(\n                                hardline!(),\n                                items.iter().filter(|item| {!(\n                                    // TODO: should be treated directly by name rendering, see :\n                                    // https://github.com/cryspen/hax/issues/1646\n                                    item.ident.is_precondition() || item.ident.is_postcondition() ||\n                                    // Associated types 
are encoded in a separate type class.\n                                    matches!(item.kind, TraitItemKind::Type(_))\n                                )}).map(|item| docs![(generics.params.clone(), item)] )\n                            ),\n                        ]\n                        .nest(INDENT),\n                        // We add the `[instance]` attribute to the contained constraints to make\n                        // them available for type inference:\n                        zip_left!(\n                            docs![hardline!(), hardline!()],\n                            generic_types.iter().map(|impl_ident| docs![\n                                \"attribute [instance_reducible, instance]\",\n                                line!(),\n                                name,\n                                \".\",\n                                self.constraint_name(&self.render_last(name), impl_ident),\n                            ]\n                            .group()\n                            .nest(INDENT))\n                        ),\n                    ]\n                }\n                ItemKind::Impl {\n                    generics,\n                    self_ty: _,\n                    of_trait: (trait_, args),\n                    items,\n                    parent_bounds: _,\n                } => {\n                    let opaque = item.is_opaque();\n                    docs![\n                        // An impl is encoded as two Lean instances:\n                        // One for the associated types...\n                        docs![\n                            docs![\n                                if opaque {\n                                    reflow!(\"@[instance] opaque \")\n                                } else {\n                                    reflow!(\"@[reducible] instance \")\n                                },\n                                ident,\n                                \".AssociatedTypes\",\n        
                        self.generics(generics, &self.render_last(ident)),\n                                softline!(),\n                                \":\"\n                            ]\n                            .group(),\n                            line!(),\n                            docs![trait_, \".AssociatedTypes\", zip_left!(line!(), args)].group(),\n                            if opaque {\n                                docs![\n                                    softline!(),\n                                    \":=\",\n                                    line!(),\n                                    reflow!(\"by constructor <;> exact Inhabited.default\")\n                                ]\n                            } else {\n                                docs![line!(), \"where\"]\n                            },\n                        ]\n                        .group()\n                        .nest(INDENT),\n                        if opaque {\n                            nil!()\n                        } else {\n                            docs![zip_left!(\n                                hardline!(),\n                                items.iter().filter(|item| {\n                                    matches!(item.kind, ImplItemKind::Type { .. 
})\n                                })\n                            )]\n                            .nest(INDENT)\n                        },\n                        hardline!(),\n                        hardline!(),\n                        // ...and one for all other fields:\n                        docs![\n                            docs![\n                                if opaque {\n                                    reflow!(\"@[instance] opaque \")\n                                } else {\n                                    reflow!(\"instance \")\n                                },\n                                ident,\n                                self.generics(generics, &self.render_last(ident)),\n                                softline!(),\n                                \":\"\n                            ]\n                            .group(),\n                            line!(),\n                            docs![trait_, zip_left!(line!(), args.iter())].group(),\n                            if opaque {\n                                docs![\n                                    softline!(),\n                                    \":=\",\n                                    line!(),\n                                    reflow!(\"by constructor <;> exact Inhabited.default\")\n                                ]\n                            } else {\n                                docs![line!(), \"where\"]\n                            },\n                        ]\n                        .group()\n                        .nest(INDENT),\n                        if opaque {\n                            nil!()\n                        } else {\n                            docs![zip_left!(\n                                hardline!(),\n                                items.iter().filter(|item| {\n                                    !(\n                                        // TODO: should be treated directly by name rendering, see :\n              
                          // https://github.com/cryspen/hax/issues/1646\n                                        item.ident.is_precondition() || item.ident.is_postcondition() ||\n                                        // Associated types are encoded into a separate type class\n                                        matches!(item.kind, ImplItemKind::Type { .. })\n                                    )\n                                })\n                            )]\n                            .nest(INDENT)\n                        },\n                    ]\n                }\n                ItemKind::Resugared(resugared_item_kind) => match resugared_item_kind {\n                    ResugaredItemKind::Constant {\n                        name,\n                        body,\n                        generics,\n                    } => docs![\n                        docs![\n                            docs![\n                                docs![\"def\", line!(), name].group(),\n                                self.generics(generics, &self.render_last(ident)),\n                                softline!(),\n                                \":\",\n                            ]\n                            .group(),\n                            line!(),\n                            &body.ty,\n                            line!(),\n                            \":=\"\n                        ]\n                        .group(),\n                        line!(),\n                        self.monad_extract(body),\n                    ]\n                    .group()\n                    .nest(INDENT),\n                    ResugaredItemKind::RecursiveFn {\n                        name,\n                        generics,\n                        body,\n                        params,\n                        safety,\n                    } => {\n                        // Render the item with an appended `partial_fixpoint`:\n                        let item = Item {\n       
                     ident: item.ident,\n                            kind: ItemKind::Fn {\n                                name: *name,\n                                generics: generics.clone(),\n                                body: body.clone(),\n                                params: params.clone(),\n                                safety: safety.clone(),\n                            },\n                            meta: item.meta.clone(),\n                        };\n                        return docs![item, hardline!(), \"partial_fixpoint\"];\n                    }\n                },\n                ItemKind::Alias { .. } => {\n                    // aliases are introduced when creating bundles. Those should not appear in\n                    // Lean, as items can be named correctly in any file.\n                    emit_error!(issue 1658, \"Unsupported alias item\")\n                }\n                ItemKind::Error(e) => docs![e],\n            };\n            docs![meta, body]\n        }\n\n        fn impl_item(\n            &self,\n            ImplItem {\n                meta: _,\n                generics,\n                kind,\n                ident,\n            }: &ImplItem,\n        ) -> DocBuilder<A> {\n            let name = self.render_last(ident);\n            match kind {\n                ImplItemKind::Type {\n                    ty,\n                    parent_bounds: _,\n                } => docs![name, reflow!(\" := \"), ty],\n                ImplItemKind::Fn { body, params } => docs![\n                    docs![\n                        name,\n                        softline!(),\n                        \":=\",\n                        line!(),\n                        docs![\n                            \"fun\",\n                            self.generics(generics, &self.render_last(ident)),\n                            zip_left!(line!(), params).group(),\n                            softline!(),\n                            \"=>\",\n  
                          softline!(),\n                            \"do\"\n                        ]\n                        .group()\n                        .nest(INDENT)\n                    ]\n                    .group(),\n                    line!(),\n                    body\n                ]\n                .group()\n                .nest(INDENT),\n                ImplItemKind::Resugared(ResugaredImplItemKind::Constant { body }) => {\n                    docs![\n                        name,\n                        softline!(),\n                        \":=\",\n                        softline!(),\n                        self.monad_extract(body)\n                    ]\n                }\n                ImplItemKind::Error(err) => docs!(err),\n            }\n        }\n\n        fn impl_ident(&self, ImplIdent { .. }: &ImplIdent) -> DocBuilder<A> {\n            unreachable!(\n                \"`ImplIdent`s are rendered inline because we have multiple variants of how they must be rendered.\"\n            )\n        }\n\n        fn trait_goal(&self, TraitGoal { .. 
}: &TraitGoal) -> DocBuilder<A> {\n            unreachable!(\n                \"`TraitGoal`s are rendered inline because we have multiple variants of how they must be rendered.\"\n            )\n        }\n\n        fn variant(\n            &self,\n            Variant {\n                name,\n                arguments,\n                is_record,\n                attributes,\n            }: &Variant,\n        ) -> DocBuilder<A> {\n            docs![\n                concat!(attributes),\n                self.render_last(name),\n                softline!(),\n                // args\n                if *is_record {\n                    // Use named the arguments, keeping only the head of the identifier\n                    docs![\n                        intersperse!(\n                            arguments.iter().map(|(id, ty, _)| {\n                                docs![self.render_last(id), reflow!(\" : \"), ty]\n                                    .parens()\n                                    .group()\n                            }),\n                            line!()\n                        )\n                        .align()\n                        .nest(INDENT),\n                        line!(),\n                        reflow!(\": \"),\n                    ]\n                    .group()\n                } else {\n                    // Use anonymous arguments\n                    docs![\n                        reflow!(\": \"),\n                        concat!(\n                            arguments\n                                .iter()\n                                .map(|(_, ty, _)| { docs![ty, reflow!(\" -> \")] })\n                        )\n                    ]\n                }\n            ]\n            .group()\n            .nest(INDENT)\n        }\n\n        fn symbol(&self, symbol: &Symbol) -> DocBuilder<A> {\n            docs![Self::escape(symbol)]\n        }\n\n        fn metadata(\n            &self,\n            Metadata {\n        
        span: _,\n                attributes,\n            }: &Metadata,\n        ) -> DocBuilder<A> {\n            concat!(attributes)\n        }\n\n        fn lhs(&self, _lhs: &Lhs) -> DocBuilder<A> {\n            unreachable_by_invariant!(Local_mutation)\n        }\n\n        fn safety_kind(&self, _safety_kind: &SafetyKind) -> DocBuilder<A> {\n            nil!()\n        }\n\n        fn binding_mode(&self, _binding_mode: &BindingMode) -> DocBuilder<A> {\n            unreachable!(\"This backend handle binding modes directly inside patterns\")\n        }\n\n        fn region(&self, _region: &Region) -> DocBuilder<A> {\n            unreachable_by_invariant!(Drop_references)\n        }\n\n        fn dyn_trait_goal(&self, _dyn_trait_goal: &DynTraitGoal) -> DocBuilder<A> {\n            emit_error!(issue 1708, \"`dyn` traits are unsupported\")\n        }\n\n        fn attribute(&self, Attribute { kind, span: _ }: &Attribute) -> DocBuilder<A> {\n            match kind {\n                AttributeKind::Tool { .. } | AttributeKind::Hax { .. 
} => {\n                    nil!()\n                }\n                AttributeKind::DocComment {\n                    kind: DocCommentKind::Line,\n                    body,\n                } => comment!(body.clone()).append(hardline!()),\n                AttributeKind::DocComment {\n                    kind: DocCommentKind::Block,\n                    body,\n                } => docs![\n                    \"/--\",\n                    line!(),\n                    intersperse!(body.lines().map(|line| line.to_string()), line!()),\n                    line!(),\n                    \"-/\"\n                ]\n                .nest(INDENT)\n                .group()\n                .append(hardline!()),\n            }\n        }\n\n        fn borrow_kind(&self, _borrow_kind: &BorrowKind) -> DocBuilder<A> {\n            unreachable_by_invariant!(Drop_references)\n        }\n\n        fn guard(&self, _guard: &Guard) -> DocBuilder<A> {\n            unreachable_by_invariant!(Drop_match_guards)\n        }\n\n        fn projection_predicate(\n            &self,\n            projection_predicate: &ProjectionPredicate,\n        ) -> DocBuilder<A> {\n            docs![\n                self.render_last(&projection_predicate.assoc_item),\n                softline!(),\n                \":=\",\n                line!(),\n                projection_predicate.ty,\n            ]\n            .group()\n            .nest(INDENT)\n        }\n\n        fn error_node(&self, _error_node: &ErrorNode) -> DocBuilder<A> {\n            // TODO : Should be made unreachable by https://github.com/cryspen/hax/pull/1672\n            text!(\"sorry\")\n        }\n\n        // Impl expressions\n\n        fn impl_expr(&self, _impl_expr: &ImplExpr) -> DocBuilder<A> {\n            emit_error!(issue 1716, \"Explicit impl expressions are unsupported\")\n        }\n    }\n};\n"
  },
  {
    "path": "rust-engine/src/backends/rust/renamings",
    "content": "core:alloc:global:GlobalAlloc core:alloc:GlobalAlloc\ncore:marker:Sync core:prelude:v1:Sync\ncore:iter:adapters:SourceIter core:iter:SourceIter\ncore:ops:arith:Sub core:ops:Sub\ncore:slice:index:range core:slice:range\ncore:sync:exclusive:Exclusive core:sync:Exclusive\ncore:ops:bit:Shr core:ops:Shr\ncore:ffi:primitives:c_uchar core:ffi:c_uchar\ncore:range:iter:IterRangeFrom core:range:IterRangeFrom\ncore:fmt:builders:DebugStruct core:fmt:DebugStruct\ncore:macros:builtin:trace_macros core:prelude:v1:trace_macros\ncore:fmt:builders:from_fn core:fmt:from_fn\ncore:macros:builtin:assert core:prelude:v1:assert\ncore:macros:builtin:bench core:prelude:v1:bench\ncore:iter:adapters:chain:Chain core:iter:Chain\ncore:iter:traits:marker:TrustedStep core:iter:TrustedStep\ncore:slice:iter:RSplitMut core:slice:RSplitMut\ncore:iter:traits:collect:Extend core:iter:Extend\ncore:str:error:Utf8Error core:str:Utf8Error\ncore:ptr:metadata:Thin core:ptr:Thin\ncore:ops:arith:AddAssign core:ops:AddAssign\ncore:mem:drop core:prelude:v1:drop\ncore:async_iter:async_iter:IntoAsyncIterator core:async_iter:IntoAsyncIterator\ncore:iter:adapters:flatten:Flatten core:iter:Flatten\ncore:ops:unsize:DispatchFromDyn core:ops:DispatchFromDyn\ncore:marker:Send core:prelude:v1:Send\ncore:iter:traits:marker:InPlaceIterable core:iter:InPlaceIterable\ncore:panic:unwind_safe:UnwindSafe core:panic:UnwindSafe\ncore:hash:sip:SipHasher core:hash:SipHasher\ncore:slice:iter:ArrayChunks core:slice:ArrayChunks\ncore:slice:iter:SplitMut core:slice:SplitMut\ncore:num:nonzero:NonZeroUsize core:num:NonZeroUsize\ncore:slice:index:try_range core:slice:try_range\ncore:task:wake:RawWaker core:task:RawWaker\ncore:iter:adapters:rev:Rev core:iter:Rev\ncore:ffi:primitives:c_float core:ffi:c_float\ncore:convert:num:FloatToInt core:convert:FloatToInt\ncore:async_iter:from_iter:from_iter core:async_iter:from_iter\ncore:pin:unsafe_pinned:UnsafePinned core:pin:UnsafePinned\ncore:slice:iter:ChunksExact 
core:slice:ChunksExact\ncore:ffi:primitives:c_short core:ffi:c_short\ncore:iter:adapters:skip_while:SkipWhile core:iter:SkipWhile\ncore:macros:builtin:alloc_error_handler core:prelude:v1:alloc_error_handler\ncore:ops:arith:Rem core:ops:Rem\ncore:ops:range:RangeToInclusive core:range:RangeToInclusive\ncore:slice:iter:SplitN core:slice:SplitN\ncore:num:nonzero:NonZeroU32 core:num:NonZeroU32\ncore:convert:From core:prelude:v1:From\ncore:ops:async_function:AsyncFnOnce core:ops:AsyncFnOnce\ncore:marker:variance:PhantomInvariant core:marker:PhantomInvariant\ncore:io:borrowed_buf:BorrowedBuf core:io:BorrowedBuf\ncore:iter:sources:repeat_n:RepeatN core:iter:RepeatN\ncore:marker:Copy core:prelude:v1:Copy\ncore:ops:function:FnOnce core:ops:FnOnce\ncore:ffi:primitives:c_ulong core:ffi:c_ulong\ncore:iter:sources:successors:Successors core:iter:Successors\ncore:ptr:non_null:NonNull core:ptr:NonNull\ncore:iter:adapters:fuse:Fuse core:iter:Fuse\ncore:macros:builtin:line core:prelude:v1:line\ncore:panic:panic_info:PanicMessage core:panic:PanicMessage\ncore:slice:iter:Chunks core:slice:Chunks\ncore:slice:iter:ChunksMut core:slice:ChunksMut\ncore:ffi:primitives:c_size_t core:ffi:c_size_t\ncore:slice:iter:RChunksExactMut core:slice:RChunksExactMut\ncore:slice:iter:RChunksMut core:slice:RChunksMut\ncore:future:join:join core:future:join\ncore:iter:adapters:chain:chain core:iter:chain\ncore:ub_checks:assert_unsafe_precondition core:ub_checks:assert_unsafe_precondition\ncore:ops:bit:BitOr core:ops:BitOr\ncore:convert:AsMut core:prelude:v1:AsMut\ncore:mem:transmutability:TransmuteFrom core:mem:TransmuteFrom\ncore:cell:lazy:LazyCell core:cell:LazyCell\ncore:macros:builtin:stringify core:prelude:v1:stringify\ncore:iter:adapters:map_windows:MapWindows core:iter:MapWindows\ncore:net:ip_addr:Ipv4Addr core:net:Ipv4Addr\ncore:ffi:va_list:VaListImpl core:ffi:VaListImpl\ncore:iter:adapters:take_while:TakeWhile core:iter:TakeWhile\ncore:slice:iter:RChunks 
core:slice:RChunks\ncore:slice:raw:from_raw_parts_mut core:slice:from_raw_parts_mut\ncore:str:converts:from_raw_parts core:str:from_raw_parts\ncore:ops:try_trait:Residual core:ops:Residual\ncore:iter:adapters:cycle:Cycle core:iter:Cycle\ncore:fmt:builders:DebugSet core:fmt:DebugSet\ncore:ops:range:RangeTo core:range:RangeTo\ncore:ops:bit:ShlAssign core:ops:ShlAssign\ncore:ops:function:FnMut core:ops:FnMut\ncore:str:iter:EscapeDebug core:str:EscapeDebug\ncore:ffi:c_str:CStr core:ffi:CStr\ncore:ops:deref:Receiver core:ops:Receiver\ncore:ffi:primitives:c_int core:ffi:c_int\ncore:iter:traits:iterator:Iterator core:iter:Iterator\ncore:ops:coroutine:CoroutineState core:ops:CoroutineState\ncore:macros:builtin:concat_bytes core:prelude:v1:concat_bytes\ncore:mem:transmutability:Assume core:mem:Assume\ncore:option:Option core:prelude:v1:Option\ncore:ops:range:Bound core:range:Bound\ncore:ffi:primitives:c_double core:ffi:c_double\ncore:macros:builtin:include_str core:prelude:v1:include_str\ncore:bstr:traits:impl_partial_eq core:bstr:impl_partial_eq\ncore:ops:range:RangeBounds core:range:RangeBounds\ncore:future:poll_fn:PollFn core:future:PollFn\ncore:slice:iter:SplitInclusiveMut core:slice:SplitInclusiveMut\ncore:hash:sip:SipHasher13 core:hash:SipHasher13\ncore:macros:builtin:autodiff_forward core:autodiff:autodiff_forward\ncore:convert:TryFrom core:prelude:rust_future:TryFrom\ncore:slice:iter:RSplit core:slice:RSplit\ncore:iter:sources:repeat:repeat core:iter:repeat\ncore:future:pending:pending core:future:pending\ncore:slice:index:SliceIndex core:slice:SliceIndex\ncore:macros:cfg_select core:cfg_select\ncore:ptr:metadata:from_raw_parts_mut core:ptr:from_raw_parts_mut\ncore:char:decode:DecodeUtf16Error core:char:DecodeUtf16Error\ncore:ops:arith:MulAssign core:ops:MulAssign\ncore:future:async_drop:async_drop_in_place core:future:async_drop_in_place\ncore:num:nonzero:NonZeroI8 core:num:NonZeroI8\ncore:ops:deref:DerefMut core:ops:DerefMut\ncore:iter:traits:marker:TrustedLen 
core:iter:TrustedLen\ncore:num:nonzero:NonZeroU128 core:num:NonZeroU128\ncore:ptr:unique:Unique core:ptr:Unique\ncore:marker:variance:PhantomCovariant core:marker:PhantomCovariant\ncore:ops:arith:Div core:ops:Div\ncore:iter:adapters:map_while:MapWhile core:iter:MapWhile\ncore:net:ip_addr:Ipv6Addr core:net:Ipv6Addr\ncore:slice:iter:ArrayWindows core:slice:ArrayWindows\ncore:iter:adapters:zip:TrustedRandomAccess core:iter:TrustedRandomAccess\ncore:ptr:metadata:Pointee core:ptr:Pointee\ncore:ops:range:IntoBounds core:range:IntoBounds\ncore:ops:control_flow:ControlFlow core:ops:ControlFlow\ncore:ops:coroutine:Coroutine core:ops:Coroutine\ncore:macros:builtin:global_allocator core:prelude:v1:global_allocator\ncore:macros:builtin:define_opaque core:prelude:v1:define_opaque\ncore:macros:builtin:option_env core:prelude:v1:option_env\ncore:ptr:alignment:Alignment core:ptr:Alignment\ncore:ops:bit:BitAndAssign core:ops:BitAndAssign\ncore:io:borrowed_buf:BorrowedCursor core:io:BorrowedCursor\ncore:iter:adapters:intersperse:IntersperseWith core:iter:IntersperseWith\ncore:iter:sources:from_fn:FromFn core:iter:FromFn\ncore:intrinsics:transmute core:mem:transmute\ncore:ffi:primitives:c_long core:ffi:c_long\ncore:iter:sources:repeat_n:repeat_n core:iter:repeat_n\ncore:num:flt2dec:decoder:DecodableFloat core:num:flt2dec:DecodableFloat\ncore:iter:adapters:copied:Copied core:iter:Copied\ncore:ops:range:RangeFrom core:range:legacy:RangeFrom\ncore:num:flt2dec:decoder:FullDecoded core:num:flt2dec:FullDecoded\ncore:bstr:traits:impl_partial_eq_n core:bstr:impl_partial_eq_n\ncore:iter:adapters:step_by:StepBy core:iter:StepBy\ncore:slice:iter:Iter core:slice:Iter\ncore:slice:raw:from_mut core:slice:from_mut\ncore:unicode:unicode_data:conversions core:unicode:conversions\ncore:future:poll_fn:poll_fn core:future:poll_fn\ncore:ascii:ascii_char:AsciiChar core:ascii:AsciiChar\ncore:panic:location:Location core:panic:Location\ncore:macros:builtin:compile_error 
core:prelude:v1:compile_error\ncore:future:async_drop:AsyncDrop core:future:AsyncDrop\ncore:default:Default core:prelude:v1:Default\ncore:ops:arith:Mul core:ops:Mul\ncore:ffi:primitives:c_ptrdiff_t core:ffi:c_ptrdiff_t\ncore:iter:sources:successors:successors core:iter:successors\ncore:net:socket_addr:SocketAddrV4 core:net:SocketAddrV4\ncore:alloc:layout:Layout core:alloc:Layout\ncore:iter:adapters:skip:Skip core:iter:Skip\ncore:future:ready:ready core:future:ready\ncore:str:converts:from_utf8 core:str:from_utf8\ncore:num:nonzero:NonZeroI128 core:num:NonZeroI128\ncore:iter:sources:from_fn:from_fn core:iter:from_fn\ncore:ops:try_trait:FromResidual core:ops:FromResidual\ncore:iter:range:Step core:range:Step\ncore:macros:builtin:env core:prelude:v1:env\ncore:str:iter:SplitAsciiWhitespace core:str:SplitAsciiWhitespace\ncore:core_simd:simd core:simd:simd\ncore:marker:variance:PhantomInvariantLifetime core:marker:PhantomInvariantLifetime\ncore:ops:arith:Add core:ops:Add\ncore:marker:variance:PhantomContravariantLifetime core:marker:PhantomContravariantLifetime\ncore:num:flt2dec:decoder:Decoded core:num:flt2dec:Decoded\ncore:ops:bit:BitOrAssign core:ops:BitOrAssign\ncore:async_iter:async_iter:AsyncIterator core:async_iter:AsyncIterator\ncore:slice:iter:RSplitNMut core:slice:RSplitNMut\ncore:mem:manually_drop:ManuallyDrop core:mem:ManuallyDrop\ncore:ops:arith:Neg core:ops:Neg\ncore:ops:deref:LegacyReceiver core:ops:LegacyReceiver\ncore:str:iter:EncodeUtf16 core:str:EncodeUtf16\ncore:num:error:ParseIntError core:num:ParseIntError\ncore:ops:async_function:AsyncFnMut core:ops:AsyncFnMut\ncore:macros:builtin:cfg_accessible core:prelude:v1:cfg_accessible\ncore:ops:arith:RemAssign core:ops:RemAssign\ncore:iter:sources:from_coroutine:from_coroutine core:iter:from_coroutine\ncore:slice:ascii:is_ascii_simple core:slice:is_ascii_simple\ncore:ops:arith:SubAssign core:ops:SubAssign\ncore:mem:size_of core:prelude:v1:size_of\ncore:task:poll:Poll 
core:task:Poll\ncore:ops:unsize:CoerceUnsized core:ops:CoerceUnsized\ncore:char:methods:encode_utf8_raw core:char:encode_utf8_raw\ncore:fmt:macros:Debug core:fmt:Debug\ncore:future:into_future:IntoFuture core:future:IntoFuture\ncore:ffi:primitives:c_uint core:ffi:c_uint\ncore:iter:sources:generator:iter core:iter:iter\ncore:net:ip_addr:Ipv6MulticastScope core:net:Ipv6MulticastScope\ncore:panic:unwind_safe:RefUnwindSafe core:panic:RefUnwindSafe\ncore:fmt:num_buffer:NumBuffer core:fmt:NumBuffer\ncore:slice:iter:ChunksExactMut core:slice:ChunksExactMut\ncore:marker:Unpin core:prelude:v1:Unpin\ncore:ops:deref:Deref core:ops:Deref\ncore:num:nonzero:NonZeroU64 core:num:NonZeroU64\ncore:iter:traits:double_ended:DoubleEndedIterator core:iter:DoubleEndedIterator\ncore:ops:index:Index core:ops:Index\ncore:ops:range:Range core:range:legacy:Range\ncore:str:validations:utf8_char_width core:str:utf8_char_width\ncore:convert:TryInto core:prelude:rust_future:TryInto\ncore:fmt:builders:DebugList core:fmt:DebugList\ncore:ffi:c_str:FromBytesUntilNulError core:ffi:FromBytesUntilNulError\ncore:slice:iter:SplitNMut core:slice:SplitNMut\ncore:slice:ascii:EscapeAscii core:slice:EscapeAscii\ncore:iter:sources:once_with:once_with core:iter:once_with\ncore:str:iter:SplitWhitespace core:str:SplitWhitespace\ncore:ops:range:OneSidedRangeBound core:ops:OneSidedRangeBound\ncore:iter:traits:accum:Product core:iter:Product\ncore:async_iter:from_iter:FromIter core:async_iter:FromIter\ncore:future:ready:Ready core:future:Ready\ncore:mem:align_of_val core:prelude:v1:align_of_val\ncore:macros:builtin:deref core:prelude:v1:deref\ncore:ops:bit:BitXor core:ops:BitXor\ncore:clone:Clone core:prelude:v1:Clone\ncore:ops:bit:Not core:ops:Not\ncore:marker:Sized core:prelude:v1:Sized\ncore:ops:index:IndexMut core:ops:IndexMut\ncore:macros:builtin:format_args_nl core:prelude:v1:format_args_nl\ncore:ffi:primitives:c_ushort core:ffi:c_ushort\ncore:iter:adapters:scan:Scan core:iter:Scan\ncore:fmt:builders:DebugTuple 
core:fmt:DebugTuple\ncore:iter:sources:once:once core:iter:once\ncore:ptr:metadata:DynMetadata core:ptr:DynMetadata\ncore:slice:iter:Split core:slice:Split\ncore:slice:iter:ChunkBy core:slice:ChunkBy\ncore:ffi:primitives:c_char core:ffi:c_char\ncore:iter:sources:once_with:OnceWith core:iter:OnceWith\ncore:iter:sources:repeat_with:repeat_with core:iter:repeat_with\ncore:str:converts:from_utf8_unchecked_mut core:str:from_utf8_unchecked_mut\ncore:task:wake:LocalWaker core:task:LocalWaker\ncore:panic:panic_info:PanicInfo core:panic:PanicInfo\ncore:marker:variance:PhantomContravariant core:marker:PhantomContravariant\ncore:iter:adapters:cloned:Cloned core:iter:Cloned\ncore:task:wake:Waker core:task:Waker\ncore:iter:traits:collect:FromIterator core:iter:FromIterator\ncore:num:nonzero:NonZeroU16 core:num:NonZeroU16\ncore:future:pending:Pending core:future:Pending\ncore:ops:function:Fn core:ops:Fn\ncore:macros:builtin:file core:prelude:v1:file\ncore:intrinsics:ub_checks core:ub_checks:ub_checks\ncore:iter:adapters:filter:Filter core:iter:Filter\ncore:iter:traits:exact_size:ExactSizeIterator core:iter:ExactSizeIterator\ncore:marker:variance:variance core:marker:variance\ncore:iter:sources:repeat_with:RepeatWith core:iter:RepeatWith\ncore:char:methods:encode_utf16_raw core:char:encode_utf16_raw\ncore:iter:adapters:intersperse:Intersperse core:iter:Intersperse\ncore:iter:traits:accum:Sum core:iter:Sum\ncore:str:iter:CharIndices core:str:CharIndices\ncore:task:ready:ready core:task:ready\ncore:cmp:Eq core:prelude:v1:Eq\ncore:iter:sources:repeat:Repeat core:iter:Repeat\ncore:macros:builtin:format_args core:prelude:v1:format_args\ncore:ops:try_trait:Try core:ops:Try\ncore:ops:arith:DivAssign core:ops:DivAssign\ncore:result:Result core:prelude:v1:Result\ncore:iter:adapters:enumerate:Enumerate core:iter:Enumerate\ncore:net:ip_addr:IpAddr core:net:IpAddr\ncore:iter:traits:marker:FusedIterator core:iter:FusedIterator\ncore:convert:AsRef 
core:prelude:v1:AsRef\ncore:macros:builtin:test core:prelude:v1:test\ncore:iter:sources:empty:Empty core:iter:Empty\ncore:future:future:Future core:future:Future\ncore:fmt:builders:DebugMap core:fmt:DebugMap\ncore:str:lossy:Utf8Chunk core:str:Utf8Chunk\ncore:task:wake:RawWakerVTable core:task:RawWakerVTable\ncore:iter:adapters:zip:zip core:iter:zip\ncore:ptr:metadata:from_raw_parts core:ptr:from_raw_parts\ncore:ptr:metadata:metadata core:ptr:metadata\ncore:net:socket_addr:SocketAddr core:net:SocketAddr\ncore:slice:iter:SplitInclusive core:slice:SplitInclusive\ncore:ops:bit:BitAnd core:ops:BitAnd\ncore:iter:adapters:zip:TrustedRandomAccessNoCoerce core:iter:TrustedRandomAccessNoCoerce\ncore:iter:adapters:filter_map:FilterMap core:iter:FilterMap\ncore:slice:raw:from_ref core:slice:from_ref\ncore:core_arch:arch core:arch:arch\ncore:iter:adapters:peekable:Peekable core:iter:Peekable\ncore:cmp:Ord core:prelude:v1:Ord\ncore:ffi:primitives:c_longlong core:ffi:c_longlong\ncore:num:nonzero:ZeroablePrimitive core:num:ZeroablePrimitive\ncore:slice:iter:IterMut core:slice:IterMut\ncore:str:iter:Chars core:str:Chars\ncore:macros:builtin:cfg_eval core:prelude:v1:cfg_eval\ncore:range:iter:IterRangeInclusive core:range:IterRangeInclusive\ncore:macros:builtin:include core:prelude:v1:include\ncore:cell:once:OnceCell core:cell:OnceCell\ncore:macros:builtin:autodiff_reverse core:autodiff:autodiff_reverse\ncore:mem:size_of_val core:prelude:v1:size_of_val\ncore:marker:variance:PhantomCovariantLifetime core:marker:PhantomCovariantLifetime\ncore:macros:assert_matches core:assert_matches:assert_matches\ncore:ops:bit:ShrAssign core:ops:ShrAssign\ncore:macros:debug_assert_matches core:assert_matches:debug_assert_matches\ncore:ffi:va_list:VaList core:ffi:VaList\ncore:iter:sources:once:Once core:iter:Once\ncore:char:methods:encode_utf8_raw_unchecked core:char:encode_utf8_raw_unchecked\ncore:iter:sources:empty:empty core:iter:empty\ncore:iter:adapters:array_chunks:ArrayChunks 
core:iter:ArrayChunks\ncore:iter:traits:marker:TrustedFused core:iter:TrustedFused\ncore:array:iter:IntoIter core:array:IntoIter\ncore:ops:range:RangeInclusive core:range:legacy:RangeInclusive\ncore:slice:iter:RChunksExact core:slice:RChunksExact\ncore:macros:builtin:test_case core:prelude:v1:test_case\ncore:slice:raw:from_ptr_range core:slice:from_ptr_range\ncore:char:decode:DecodeUtf16 core:char:DecodeUtf16\ncore:marker:variance:Variance core:marker:Variance\ncore:range:iter:IterRange core:range:IterRange\ncore:macros:builtin:type_ascribe core:prelude:v1:type_ascribe\ncore:net:parser:AddrParseError core:net:AddrParseError\ncore:str:converts:from_raw_parts_mut core:str:from_raw_parts_mut\ncore:str:converts:from_utf8_mut core:str:from_utf8_mut\ncore:fmt:num_buffer:NumBufferTrait core:fmt:NumBufferTrait\ncore:ffi:primitives:c_ulonglong core:ffi:c_ulonglong\ncore:prelude:v1 core:prelude:rust_future:v1\ncore:mem:maybe_uninit:MaybeUninit core:mem:MaybeUninit\ncore:ops:bit:Shl core:ops:Shl\ncore:ops:range:OneSidedRange core:range:OneSidedRange\ncore:iter:traits:collect:IntoIterator core:iter:IntoIterator\ncore:slice:raw:from_raw_parts core:slice:from_raw_parts\ncore:str:iter:EscapeDefault core:str:EscapeDefault\ncore:macros:builtin:contracts_ensures core:contracts:contracts_ensures\ncore:slice:iter:ChunkByMut core:slice:ChunkByMut\ncore:str:validations:next_code_point core:str:next_code_point\ncore:num:error:IntErrorKind core:num:IntErrorKind\ncore:num:nonzero:NonZeroI16 core:num:NonZeroI16\ncore:num:nonzero:NonZeroI64 core:num:NonZeroI64\ncore:ops:async_function:AsyncFn core:ops:AsyncFn\ncore:char:convert:ParseCharError core:char:ParseCharError\ncore:num:nonzero:NonZeroIsize core:num:NonZeroIsize\ncore:ops:drop:Drop core:ops:Drop\ncore:char:convert:CharTryFromError core:char:CharTryFromError\ncore:ffi:va_list:VaArgSafe core:ffi:VaArgSafe\ncore:iter:sources:from_coroutine:FromCoroutine core:iter:FromCoroutine\ncore:slice:iter:ArrayChunksMut 
core:slice:ArrayChunksMut\ncore:bstr:traits:impl_partial_eq_ord core:bstr:impl_partial_eq_ord\ncore:str:converts:from_utf8_unchecked core:str:from_utf8_unchecked\ncore:str:iter:EscapeUnicode core:str:EscapeUnicode\ncore:str:traits:FromStr core:str:FromStr\ncore:task:wake:Context core:task:Context\ncore:ffi:primitives:c_ssize_t core:ffi:c_ssize_t\ncore:macros:builtin:derive_const core:prelude:v1:derive_const\ncore:alloc:layout:LayoutErr core:alloc:LayoutErr\ncore:fmt:builders:FromFn core:fmt:FromFn\ncore:slice:iter:Windows core:slice:Windows\ncore:ffi:primitives:c_schar core:ffi:c_schar\ncore:num:nonzero:NonZeroU8 core:num:NonZeroU8\ncore:num:dec2flt:ParseFloatError core:num:ParseFloatError\ncore:unicode:unicode_data:case_ignorable:lookup core:unicode:lookup\ncore:cmp:PartialEq core:prelude:v1:PartialEq\ncore:iter:adapters:flatten:FlatMap core:iter:FlatMap\ncore:ops:bit:BitXorAssign core:ops:BitXorAssign\ncore:macros:builtin:cfg core:prelude:v1:cfg\ncore:iter:adapters:zip:Zip core:iter:Zip\ncore:num:nonzero:NonZeroI32 core:num:NonZeroI32\ncore:convert:Into core:prelude:v1:Into\ncore:macros:builtin:concat core:prelude:v1:concat\ncore:mem:align_of core:prelude:v1:align_of\ncore:str:lossy:Utf8Chunks core:str:Utf8Chunks\ncore:macros:builtin:column core:prelude:v1:column\ncore:hash:macros:Hash core:hash:Hash\ncore:net:socket_addr:SocketAddrV6 core:net:SocketAddrV6\ncore:macros:builtin:derive core:prelude:v1:derive\ncore:num:flt2dec:decoder:decode core:num:flt2dec:decode\ncore:macros:builtin:contracts_requires core:contracts:contracts_requires\ncore:slice:raw:from_mut_ptr_range core:slice:from_mut_ptr_range\ncore:task:wake:ContextBuilder core:task:ContextBuilder\ncore:num:nonzero:NonZero core:num:NonZero\ncore:ffi:c_str:FromBytesWithNulError core:ffi:FromBytesWithNulError\ncore:macros:builtin:log_syntax core:prelude:v1:log_syntax\ncore:ops:deref:DerefPure core:ops:DerefPure\ncore:cmp:PartialOrd core:prelude:v1:PartialOrd\ncore:iter:adapters:inspect:Inspect 
core:iter:Inspect\ncore:macros:builtin:module_path core:prelude:v1:module_path\ncore:iter:adapters:map:Map core:iter:Map\ncore:slice:iter:RSplitN core:slice:RSplitN\ncore:iter:adapters:take:Take core:iter:Take\ncore:macros:builtin:include_bytes core:prelude:v1:include_bytes\ncore:unicode:unicode_data:cased:lookup core:unicode:lookup\n"
  },
  {
    "path": "rust-engine/src/backends/rust/renamings.rs",
    "content": "use std::{collections::HashMap, fmt::Debug, hash::Hash, sync::LazyLock};\n\nuse crate::backends::prelude::Rendered;\n\n#[derive(Debug)]\nstruct Graph<K, T> {\n    node: Option<T>,\n    subtree: HashMap<K, Box<Graph<K, T>>>,\n}\n\nimpl<K, T> Default for Graph<K, T> {\n    fn default() -> Self {\n        Self {\n            node: Default::default(),\n            subtree: Default::default(),\n        }\n    }\n}\n\nimpl<K: 'static + Eq + Hash + Clone + Debug, T: Debug> Graph<K, T> {\n    fn create_path(&mut self, path: &[K]) -> &mut Graph<K, T> {\n        let mut current = self;\n        for chunk in path {\n            current = current.subtree.entry(chunk.clone()).or_default();\n        }\n        current\n    }\n    fn get_longest(&self, path: impl Iterator<Item = K>) -> Option<(Vec<K>, &T)> {\n        let mut current = self;\n        let mut subpath = vec![];\n        let mut results = vec![];\n\n        for chunk in path {\n            if let Some(sub) = current.subtree.get(&chunk) {\n                current = sub;\n                subpath.push(chunk.clone());\n                if let Some(node) = &current.node {\n                    results.push((subpath.clone(), node));\n                }\n            } else {\n                break;\n            }\n        }\n\n        results.pop()\n    }\n    fn from_iter(it: impl Iterator<Item = (Vec<K>, T)>) -> Self {\n        let mut root = Self::default();\n        for (path, value) in it {\n            root.create_path(&path).node = Some(value);\n        }\n        root\n    }\n}\n\nstatic RENAMINGS: LazyLock<Graph<String, Vec<&'static str>>> = LazyLock::new(|| {\n    let str = include_str!(\"renamings\");\n\n    Graph::from_iter(str.lines().map(|line| {\n        let (l, r) = line.split_once(\" \").unwrap();\n        (\n            l.split(\":\").map(|s| s.to_string()).collect(),\n            r.split(\":\").collect(),\n        )\n    }))\n});\n\n/// Rename a `Rendered` name according, so that we refer to 
public names of core, not private names.\npub(super) fn rename_rendered(rendered: &mut Rendered) {\n    let chunks = rendered\n        .module\n        .clone()\n        .into_iter()\n        .chain(rendered.path.clone());\n    if let Some((chunks_slice, rename)) = RENAMINGS.get_longest(chunks) {\n        let rename: Vec<String> = rename.iter().map(|s| s.to_string()).collect();\n        if chunks_slice.len() >= rendered.module.len() {\n            let remainings = chunks_slice.len() - rendered.module.len();\n            let (mod_part, path_part) = rename.split_at((rename.len() - remainings).max(1));\n            rendered.module = mod_part.to_vec();\n            rendered.path.splice(0..remainings, path_part.to_vec());\n        } else {\n            rendered.module.splice(0..chunks_slice.len(), rename);\n        }\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/backends/rust.rs",
    "content": "//! A Rust backend (and printer) for hax.\n\nuse super::prelude::*;\nuse crate::ast::identifiers::global_id::view::{PathSegment, View};\nuse std::cell::RefCell;\n\nmod renamings;\n\n/// The Rust printer.\n#[setup_printer_struct]\n#[derive(Default, Clone)]\npub struct RustPrinter {\n    current_namespace: RefCell<Option<Vec<String>>>,\n}\n\nimpl Printer for RustPrinter {\n    const NAME: &str = \"Rust\";\n}\n\nimpl RenderView for RustPrinter {\n    fn render_path_segment(&self, seg: &PathSegment) -> Vec<String> {\n        if let AnyKind::Constructor(constructor_kind) = seg.kind() {\n            match constructor_kind {\n                global_id::view::ConstructorKind::Constructor { ty } => {\n                    if let global_id::view::TypeDefKind::Struct = ty.kind() {\n                        return vec![\n                            self.render_path_segment_payload(ty.lift().payload())\n                                .to_string(),\n                        ];\n                    }\n                }\n            }\n        };\n        default::render_path_segment(self, seg)\n    }\n    fn render(&self, view: &View) -> Rendered {\n        let (module_path, relative_path) = view.split_at_module();\n        let path_segment = |seg| self.render_path_segment(seg);\n        let mut rendered = Rendered {\n            module: module_path.iter().flat_map(path_segment).collect(),\n            path: relative_path.iter().flat_map(path_segment).collect(),\n        };\n        renamings::rename_rendered(&mut rendered);\n        rendered\n    }\n}\n\n/// The Rust backend.\npub struct RustBackend;\n\nimpl Backend for RustBackend {\n    type Printer = RustPrinter;\n\n    fn resugaring_phases() -> Vec<Box<dyn Resugaring>> {\n        vec![Box::new(FunctionsToConstants), Box::new(Tuples)]\n    }\n\n    fn module_path(&self, module: &Module) -> camino::Utf8PathBuf {\n        let printer = RustPrinter::default();\n        let path = <RustPrinter as 
RenderView>::module(&printer, &module.ident.view());\n        camino::Utf8PathBuf::from_iter(path).with_extension(\"rs\")\n    }\n}\n\nconst INDENT: isize = 4;\n\n#[prepend_associated_functions_with(install_pretty_helpers!(self: Self))]\n// Note: the `const` wrapping makes my IDE and LSP happy. Otherwise, I don't get\n// autocompletion of methods in the impl block below.\nconst _: () = {\n    macro_rules! todo {\n        ($($tt:tt)*) => {\n            disambiguated_todo!($($tt)*)\n        };\n    }\n    macro_rules! line {\n        ($($tt:tt)*) => {\n            disambiguated_line!($($tt)*)\n        };\n    }\n    macro_rules! concat {\n        ($($tt:tt)*) => {\n            disambiguated_concat!($($tt)*)\n        };\n    }\n\n    macro_rules! sep {\n        ($l:expr, $it:expr, $r:expr, $sep:expr$(,)?) => {\n            docs![\n                intersperse!($it, docs![$sep, line!()]),\n                docs![\",\"].flat_alt(nil!())\n            ]\n            .enclose(line_!(), line_!())\n            .nest(INDENT)\n            .enclose($l, $r)\n            .group()\n        };\n        ($l:expr, $it:expr, $r:expr$(,)?) => {\n            sep!($l, $it, $r, \",\")\n        };\n    }\n\n    macro_rules! print_tuple {\n        ($into_docs:ident) => {{\n            let mut docs: Vec<_> = $into_docs.iter().map(|typ| docs![typ]).collect();\n            if docs.len() == 1 {\n                docs.push(nil![])\n            }\n            sep!(\"(\", docs, \")\")\n        }};\n    }\n\n    macro_rules! sep_opt {\n        (@$l:expr, $it:expr, $($rest:tt)*) => {\n            {\n                let mut it = $it.into_iter().peekable();\n                if it.peek().is_some() {\n                    sep!($l, it, $($rest)*)\n                } else {\n                    nil!()\n                }\n            }\n        };\n        ($l:expr, $it:expr, $($rest:tt)*) => {\n            sep_opt!(@$l, $it, $($rest)*)\n        };\n    }\n\n    macro_rules! 
block {\n        ($body:expr) => {\n            docs![line!(), $body, line!()].group().nest(INDENT).braces()\n        };\n    }\n\n    impl<'a, 'b> RustPrinter {\n        fn generic_params<A: Clone>(&'a self, generic_params: &'b [GenericParam]) -> DocBuilder<A> {\n            let generic_params = generic_params\n                .iter()\n                .filter(|p| !matches!(&p.kind, GenericParamKind::Lifetime if p.ident.0.to_string() == \"_\"))\n                .collect::<Vec<_>>();\n            sep_opt!(\"<\", generic_params, \">\")\n        }\n        fn where_clause<A: Clone>(&'a self, constraints: &'b [GenericConstraint]) -> DocBuilder<A> {\n            if constraints.is_empty() {\n                return nil!();\n            }\n            docs![\n                line!(),\n                \"where\",\n                line!(),\n                intersperse!(constraints, docs![\",\", line!()])\n                    .nest(INDENT)\n                    .group(),\n                line!(),\n            ]\n            .nest(INDENT)\n            .group()\n        }\n        fn attributes<A: Clone>(&'a self, attrs: &'b [Attribute]) -> DocBuilder<A> {\n            concat!(\n                attrs\n                    .iter()\n                    .filter(|attr| match &attr.kind {\n                        AttributeKind::Tool { .. } | AttributeKind::Hax(_) => false,\n                        AttributeKind::DocComment { .. 
} => true,\n                    })\n                    .map(|attr| docs![attr, hardline!()])\n            )\n        }\n\n        fn id_name<A: Clone>(&'a self, id: GlobalId) -> DocBuilder<A> {\n            let view = id.view();\n            let path = <RustPrinter as RenderView>::render_strings(self, &view);\n            let name = path.last().unwrap().clone();\n            docs![if name == \"_\" {\n                \"___empty_name\".into()\n            } else {\n                name\n            }]\n        }\n    }\n\n    impl<A: Clone + 'static> PrettyAst<A> for RustPrinter {\n        const NAME: &'static str = \"Rust\";\n\n        fn module(&self, module: &Module) -> DocBuilder<A> {\n            let previous = self.current_namespace.borrow().clone();\n            let view = module.ident.view();\n            let module_path = <Self as RenderView>::module(self, &view);\n            *self.current_namespace.borrow_mut() = Some(module_path);\n            let doc = intersperse!(&module.items, docs![hardline!(), hardline!()]);\n            *self.current_namespace.borrow_mut() = previous;\n            doc\n        }\n\n        fn safety_kind(&self, safety_kind: &SafetyKind) -> DocBuilder<A> {\n            match safety_kind {\n                SafetyKind::Safe => nil!(),\n                SafetyKind::Unsafe => docs![text!(\"unsafe\"), space!()],\n            }\n        }\n        fn param(&self, param: &Param) -> DocBuilder<A> {\n            docs![&param.pat, \":\", space!(), &param.ty]\n        }\n        fn binding_mode(&self, binding_mode: &BindingMode) -> DocBuilder<A> {\n            match binding_mode {\n                BindingMode::ByRef(BorrowKind::Mut) => docs![\"ref mut\", space!()],\n                BindingMode::ByRef(_) => docs![\"ref\", space!()],\n                _ => nil!(),\n            }\n        }\n        fn pat(&self, pat: &Pat) -> DocBuilder<A> {\n            match &*pat.kind {\n                PatKind::Wild => docs![\"_\"],\n                
PatKind::Ascription { pat, ty } => docs![pat, \":\", space!(), ty],\n                PatKind::Or { sub_pats } => {\n                    intersperse!(sub_pats, docs![line!(), \"|\", line!()])\n                }\n                PatKind::Array { args } => sep!(\"[\", args, \"]\", \"|\"),\n                PatKind::Deref { sub_pat } => docs![\"&\", sub_pat],\n                PatKind::Constant { lit } => docs![lit],\n                PatKind::Binding {\n                    mutable,\n                    var,\n                    mode,\n                    sub_pat,\n                } => {\n                    docs![\n                        if *mutable {\n                            docs![\"mut\", space!()]\n                        } else {\n                            nil!()\n                        },\n                        mode,\n                        var,\n                        sub_pat.as_ref().map(|pat| docs![\"@\", docs![pat]]),\n                    ]\n                }\n                PatKind::Construct { .. 
} => todo!(\"resugaring\"),\n                PatKind::Resugared(resugared_pat_kind) => docs![resugared_pat_kind],\n                PatKind::Error(_) => todo!(\"resugaring\"),\n            }\n        }\n        fn primitive_ty(&self, primitive_ty: &PrimitiveTy) -> DocBuilder<A> {\n            match primitive_ty {\n                PrimitiveTy::Bool => docs![\"bool\"],\n                PrimitiveTy::Int(int_kind) => docs![int_kind],\n                PrimitiveTy::Float(float_kind) => docs![float_kind],\n                PrimitiveTy::Char => docs![\"char\"],\n                PrimitiveTy::Str => docs![\"str\"],\n            }\n        }\n        fn int_kind(&self, int_kind: &IntKind) -> DocBuilder<A> {\n            docs![match (&int_kind.signedness, &int_kind.size) {\n                (Signedness::Signed, IntSize::S8) => \"i8\",\n                (Signedness::Signed, IntSize::S16) => \"i16\",\n                (Signedness::Signed, IntSize::S32) => \"i32\",\n                (Signedness::Signed, IntSize::S64) => \"i64\",\n                (Signedness::Signed, IntSize::S128) => \"i128\",\n                (Signedness::Signed, IntSize::SSize) => \"isize\",\n                (Signedness::Unsigned, IntSize::S8) => \"u8\",\n                (Signedness::Unsigned, IntSize::S16) => \"u16\",\n                (Signedness::Unsigned, IntSize::S32) => \"u32\",\n                (Signedness::Unsigned, IntSize::S64) => \"u64\",\n                (Signedness::Unsigned, IntSize::S128) => \"u128\",\n                (Signedness::Unsigned, IntSize::SSize) => \"usize\",\n            }]\n        }\n        fn generic_param(&self, generic_param: &GenericParam) -> DocBuilder<A> {\n            docs![\n                match &generic_param.kind {\n                    GenericParamKind::Const { .. 
} => docs![\"const\", space!()],\n                    _ => nil!(),\n                },\n                &generic_param.ident,\n                match &generic_param.kind {\n                    GenericParamKind::Const { ty } => docs![\":\", space!(), ty],\n                    _ => nil!(),\n                }\n            ]\n        }\n        fn generic_constraint(&self, generic_constraint: &GenericConstraint) -> DocBuilder<A> {\n            match generic_constraint {\n                GenericConstraint::Lifetime(s) => docs![s.clone()],\n                GenericConstraint::TypeClass(impl_ident) => docs![impl_ident],\n                GenericConstraint::Equality(projection_predicate) => docs![projection_predicate],\n            }\n        }\n        fn impl_ident(&self, impl_ident: &ImplIdent) -> DocBuilder<A> {\n            let trait_goal = &impl_ident.goal;\n            let [self_ty, args @ ..] = &trait_goal.args[..] else {\n                panic!()\n            };\n            docs![\n                self_ty,\n                space!(),\n                \":\",\n                space!(),\n                &trait_goal.trait_,\n                sep_opt!(\"<\", args, \">\"),\n            ]\n        }\n\n        fn ty(&self, ty: &Ty) -> DocBuilder<A> {\n            match ty.kind() {\n                TyKind::Primitive(primitive_ty) => docs![primitive_ty],\n                // TyKind::Tuple(items) => intersperse!(items, docs![\",\", line!()])\n                //     .nest(INDENT)\n                //     .group(),\n                TyKind::App { head, args } => docs![head, sep_opt!(\"<\", args, \">\")],\n                TyKind::Arrow { inputs, output } => {\n                    docs![\"fn\", sep!(\"(\", inputs, \")\"), reflow!(\" -> \"), output]\n                }\n                TyKind::Ref {\n                    inner,\n                    mutable,\n                    region: _,\n                } => docs![\n                    \"&\",\n                    if *mutable {\n        
                docs![\"mut\", space!()]\n                    } else {\n                        nil!()\n                    },\n                    inner\n                ],\n                TyKind::Param(local_id) => docs![local_id],\n                TyKind::Slice(ty) => docs![ty].brackets(),\n                TyKind::Array { ty, length } => {\n                    docs![ty, \";\", space!(), length.as_ref()].brackets()\n                }\n                TyKind::RawPointer => todo!(),\n                TyKind::AssociatedType { impl_, item } => docs![impl_, \"::\", item],\n                TyKind::Opaque(global_id) => docs![global_id],\n                TyKind::Dyn(dyn_trait_goals) => docs![\n                    \"dyn\",\n                    docs![\n                        line!(),\n                        intersperse!(dyn_trait_goals, docs![line!(), \"+\", space!()])\n                    ]\n                    .group()\n                    .hang(0)\n                ],\n                TyKind::Resugared(resugared_ty_kind) => docs![resugared_ty_kind],\n                TyKind::Error(_) => todo!(\"resugaring\"),\n            }\n        }\n        fn resugared_ty_kind(&self, resugared_ty_kind: &ResugaredTyKind) -> DocBuilder<A> {\n            match resugared_ty_kind {\n                ResugaredTyKind::Tuple(types) => print_tuple!(types),\n            }\n        }\n        fn literal(&self, literal: &Literal) -> DocBuilder<A> {\n            match literal {\n                Literal::String(symbol) => docs![symbol],\n                Literal::Char(ch) => text!(format!(\"{}\", ch)),\n                Literal::Bool(b) => text!(format!(\"{}\", b)),\n                Literal::Int {\n                    value,\n                    negative,\n                    kind,\n                } => docs![if *negative { docs![\"-\"] } else { nil!() }, value, kind],\n                Literal::Float {\n                    value,\n                    negative,\n                    kind,\n            
    } => docs![if *negative { docs![\"-\"] } else { nil!() }, value, kind],\n            }\n        }\n        fn trait_goal(&self, trait_goal: &TraitGoal) -> DocBuilder<A> {\n            let [self_ty, args @ ..] = &trait_goal.args[..] else {\n                panic!()\n            };\n            docs![\n                self_ty,\n                space!(),\n                \"as\",\n                space!(),\n                &trait_goal.trait_,\n                sep_opt!(\"<\", args, \">\"),\n            ]\n            .enclose(\"<\", \">\")\n        }\n        fn generic_value(&self, generic_value: &GenericValue) -> DocBuilder<A> {\n            match generic_value {\n                GenericValue::Ty(ty) => docs![ty],\n                GenericValue::Expr(expr) => docs![expr],\n                GenericValue::Lifetime => docs![\"'_\"],\n            }\n        }\n        fn arm(&self, arm: &Arm) -> DocBuilder<A> {\n            docs![\n                &arm.pat,\n                arm.guard.as_ref().map(|guard| docs![\"if\", space!(), guard]),\n                reflow!(\" => \"),\n                block![&arm.body],\n            ]\n        }\n        fn expr(&self, expr: &Expr) -> DocBuilder<A> {\n            match &*expr.kind {\n                ExprKind::If {\n                    condition,\n                    then,\n                    else_,\n                } => docs![\n                    \"if\",\n                    space!(),\n                    docs![condition].parens(),\n                    space!(),\n                    block![then],\n                    else_\n                        .as_ref()\n                        .map(|doc| docs![reflow!(\" else \"), block![doc]])\n                        .unwrap_or(nil!())\n                ],\n                ExprKind::App {\n                    head,\n                    args,\n                    generic_args,\n                    bounds_impls: _, // this is implicit in Rust\n                    trait_,\n                } => 
{\n                    mod names {\n                        pub use crate::names::rust_primitives::hax::{\n                            cast_op, deref_op, logical_op_and, logical_op_or,\n                        };\n                    }\n                    use ExprKind::GlobalId;\n                    match (&*head.kind, &args[..]) {\n                        (GlobalId(names::deref_op), [reference]) => {\n                            Some(docs![\"*\", docs![reference].parens()])\n                        }\n                        (GlobalId(names::cast_op), [value]) => {\n                            Some(docs![docs![value].parens(), reflow!(\" as \"), &expr.ty])\n                        }\n                        (GlobalId(names::logical_op_and), [lhs, rhs]) => Some(docs![\n                            docs![lhs].parens(),\n                            reflow!(\" && \"),\n                            docs![rhs].parens()\n                        ]),\n                        (GlobalId(names::logical_op_or), [lhs, rhs]) => Some(docs![\n                            docs![lhs].parens(),\n                            reflow!(\" || \"),\n                            docs![rhs].parens()\n                        ]),\n                        _ => None,\n                    }\n                    .unwrap_or_else(|| match (trait_, &*head.kind) {\n                        (Some((trait_impl_expr, _trait_args)), GlobalId(head)) => {\n                            docs![\n                                &trait_impl_expr.goal,\n                                \"::\",\n                                self.id_name(*head),\n                                sep_opt!(\"::<\", generic_args, \">\"),\n                                sep!(\"(\", args, \")\")\n                            ]\n                        }\n                        _ => docs![\n                            head,\n                            sep_opt!(\"::<\", generic_args, \">\"),\n                            sep!(\"(\", args, 
\")\")\n                        ],\n                    })\n                }\n                ExprKind::Literal(literal) => docs![literal],\n                ExprKind::Array(exprs) => sep!(\"[\", exprs, \"]\"),\n                ExprKind::Construct {\n                    constructor,\n                    is_record,\n                    fields,\n                    // TODO: complete constructors with base\n                    ..\n                } => {\n                    let payload = fields.iter().map(|(id, value)| {\n                        docs![\n                            if *is_record {\n                                docs![id, \":\", space!()]\n                            } else {\n                                nil!()\n                            },\n                            value\n                        ]\n                    });\n                    docs![\n                        constructor,\n                        if *is_record {\n                            sep!(\"{\", payload, \"}\")\n                        } else {\n                            sep!(\"(\", payload, \")\")\n                        }\n                    ]\n                }\n                ExprKind::Match { scrutinee, arms } => {\n                    docs![\n                        \"match\",\n                        space!(),\n                        scrutinee,\n                        space!(),\n                        block!(intersperse!(arms, hardline!())),\n                    ]\n                }\n                ExprKind::Borrow { mutable, inner } => {\n                    docs![\"&\", if *mutable { reflow![\"mut \"] } else { nil!() }, inner]\n                }\n                ExprKind::AddressOf { mutable, inner } => docs![\n                    inner,\n                    reflow!(\" as *\"),\n                    if *mutable { reflow![\"mut \"] } else { nil!() },\n                    docs![&expr.ty]\n                ]\n                .parens(),\n                
ExprKind::Let { lhs, rhs, body } => docs![\n                    \"let\",\n                    space!(),\n                    lhs,\n                    space!(),\n                    \"=\",\n                    docs![line!(), rhs].group().nest(INDENT),\n                    \";\",\n                    hardline!(),\n                    body\n                ],\n                ExprKind::GlobalId(global_id) => docs![global_id],\n                ExprKind::LocalId(local_id) => docs![local_id],\n                ExprKind::Ascription { e, ty } => docs![e, \":\", space!(), ty].parens(),\n                ExprKind::Assign { lhs, value } => docs![lhs, space!(), \"=\", space!(), value],\n                ExprKind::Loop {\n                    body,\n                    kind,\n                    state: None,\n                    control_flow: None,\n                    label: None,\n                } => match &**kind {\n                    LoopKind::UnconditionalLoop => docs![\"loop\", space!(), block![body]],\n                    LoopKind::WhileLoop { condition } => {\n                        docs![\"while\", space!(), condition, space!(), block![body]]\n                    }\n                    LoopKind::ForLoop { pat, iterator } => {\n                        docs![\n                            \"for\",\n                            space!(),\n                            pat,\n                            reflow!(\" in \"),\n                            iterator,\n                            space!(),\n                            block![body]\n                        ]\n                    }\n                    LoopKind::ForIndexLoop {\n                        start,\n                        end,\n                        var,\n                        var_ty: _,\n                    } => docs![\n                        \"for\",\n                        space!(),\n                        var,\n                        reflow!(\" in \"),\n                        start,\n              
          \"..\",\n                        end,\n                        space!(),\n                        block![body]\n                    ],\n                },\n                ExprKind::Loop { .. } => {\n                    todo!(\"loop with explicit state or with a label\")\n                }\n                ExprKind::Break {\n                    value, label: None, ..\n                } => docs![\"break\", space!(), value],\n                ExprKind::Break { .. } => todo!(\"break with a label\"),\n                ExprKind::Return { value } => docs![\"return\", space!(), value],\n                ExprKind::Continue { label: None, .. } => docs![\"continue\"],\n                ExprKind::Continue { .. } => todo!(\"continue with a label\"),\n                ExprKind::Closure {\n                    params,\n                    body,\n                    captures: _,\n                } => docs![\n                    intersperse!(params, docs![\",\", space!()]).enclose(\"|\", \"|\"),\n                    body\n                ],\n                ExprKind::Block { body, safety_mode } => {\n                    docs![safety_mode, block![body]]\n                }\n                ExprKind::Quote { contents } => docs![contents],\n                ExprKind::Resugared(resugared_expr_kind) => docs![resugared_expr_kind],\n                ExprKind::Error { .. } => todo!(\"resugaring\"),\n            }\n        }\n        fn resugared_expr_kind(&self, resugared_expr_kind: &ResugaredExprKind) -> DocBuilder<A> {\n            match resugared_expr_kind {\n                ResugaredExprKind::Tuple(values) => print_tuple!(values),\n                ResugaredExprKind::LetPure { .. } => unreachable!(\"LetPure resugaring not active\"),\n            }\n        }\n\n        fn lhs(&self, lhs: &Lhs) -> DocBuilder<A> {\n            match lhs {\n                Lhs::LocalVar { var, ty: _ } => docs![var],\n                Lhs::VecRef { e, .. 
} => docs![e],\n                Lhs::ArbitraryExpr(expr) => docs![std::ops::Deref::deref(expr)],\n                Lhs::FieldAccessor { e, ty: _, field } => {\n                    docs![std::ops::Deref::deref(e), \".\", field]\n                }\n                Lhs::ArrayAccessor { e, ty: _, index } => {\n                    docs![std::ops::Deref::deref(e), docs!(index).brackets()]\n                }\n            }\n        }\n        fn global_id(&self, global_id: &GlobalId) -> DocBuilder<A> {\n            let view = global_id.view();\n            let module = <Self as RenderView>::module(self, &view);\n            if Some(module) == *self.current_namespace.borrow() {\n                let rendered = self.render(&view);\n                docs![rendered.path.join(\"::\")]\n            } else {\n                docs![self.render_string(&view)]\n            }\n        }\n        fn variant(&self, variant: &Variant) -> DocBuilder<A> {\n            let payload = variant.arguments.iter().map(|(id, ty, attrs)| {\n                docs![\n                    self.attributes(attrs),\n                    if variant.is_record {\n                        docs![id, \":\", space!()]\n                    } else {\n                        nil!()\n                    },\n                    ty\n                ]\n            });\n\n            if variant.is_record {\n                sep!(\"{\", payload, \"}\")\n            } else {\n                sep!(\"(\", payload, \")\")\n            }\n        }\n        fn item(&self, item: &Item) -> DocBuilder<A> {\n            docs![&item.meta, item.kind()]\n        }\n        fn resugared_item_kind(&self, resugared_item_kind: &ResugaredItemKind) -> DocBuilder<A> {\n            match resugared_item_kind {\n                ResugaredItemKind::Constant { name, body, .. 
} => {\n                    docs![\n                        \"const\",\n                        space!(),\n                        self.id_name(*name),\n                        \":\",\n                        space!(),\n                        &body.ty,\n                        reflow!(\" = \"),\n                        docs![body].braces(),\n                        \";\"\n                    ]\n                }\n                ResugaredItemKind::RecursiveFn { .. } => {\n                    unreachable!(\"The Rust backend does not use the RecursiveFn resugaring\")\n                }\n            }\n        }\n        fn item_kind(&self, item_kind: &ItemKind) -> DocBuilder<A> {\n            match item_kind {\n                ItemKind::Fn {\n                    name,\n                    generics,\n                    body,\n                    params,\n                    safety,\n                } => {\n                    docs![\n                        safety,\n                        text!(\"fn\"),\n                        space!(),\n                        self.id_name(*name),\n                        self.generic_params(&generics.params),\n                        sep!(\"(\", params, \")\"),\n                        reflow!(\" -> \"),\n                        &body.ty,\n                        space!(),\n                        self.where_clause(&generics.constraints),\n                        block![body]\n                    ]\n                }\n                ItemKind::TyAlias {\n                    name,\n                    generics: _,\n                    ty,\n                } => docs![\"type\", space!(), name, space!(), \"=\", space!(), ty, \";\"],\n                ItemKind::Type {\n                    name,\n                    generics,\n                    variants,\n                    is_struct,\n                } => match &variants[..] 
{\n                    [variant] if *is_struct => {\n                        docs![\n                            \"struct\",\n                            space!(),\n                            self.id_name(*name),\n                            self.generic_params(&generics.params),\n                            variant,\n                            if variant.is_record {\n                                nil!()\n                            } else {\n                                docs![\";\"]\n                            }\n                        ]\n                    }\n                    _ => {\n                        docs![\n                            \"enum\",\n                            space!(),\n                            self.id_name(*name),\n                            self.generic_params(&generics.params),\n                            sep!(\n                                \"{\",\n                                variants.iter().map(|variant| docs![\n                                    &variant.name,\n                                    space!(),\n                                    variant\n                                ]),\n                                \"}\",\n                            ),\n                            self.where_clause(&generics.constraints),\n                        ]\n                    }\n                },\n                ItemKind::Trait {\n                    name,\n                    generics,\n                    items,\n                    safety: _,\n                } => docs![\n                    \"trait\",\n                    space!(),\n                    self.id_name(*name),\n                    self.generic_params(&generics.params),\n                    self.where_clause(&generics.constraints),\n                    sep!(\"{\", items, \"}\", nil!()),\n                ],\n                ItemKind::Impl {\n                    generics,\n                    self_ty,\n                    of_trait: (trait_, 
trait_args),\n                    items,\n                    parent_bounds: _,\n                } => docs![\n                    \"impl\",\n                    self.generic_params(&generics.params),\n                    space!(),\n                    trait_,\n                    sep_opt!(\"<\", trait_args[1..], \">\"),\n                    space!(),\n                    \"for\",\n                    space!(),\n                    self_ty,\n                    self.where_clause(&generics.constraints),\n                    sep!(\"{\", items, \"}\", nil!()),\n                ],\n                ItemKind::Alias { name, item } => {\n                    docs![\"type\", self.id_name(*name), reflow!(\" = \"), item, \";\"]\n                }\n                ItemKind::RustModule | ItemKind::Use { .. } => nil!(),\n                ItemKind::Quote { quote, .. } => docs![quote],\n                ItemKind::Error { .. } => todo!(\"resugaring\"),\n                ItemKind::Resugared(resugared_item_kind) => docs![resugared_item_kind],\n                ItemKind::NotImplementedYet => docs![\"/* `NotImplementedYet` item */\"],\n            }\n        }\n        fn impl_item(&self, impl_item: &ImplItem) -> DocBuilder<A> {\n            match &impl_item.kind {\n                ImplItemKind::Type {\n                    ty,\n                    parent_bounds: _,\n                } => docs![\n                    &impl_item.meta,\n                    reflow!(\"type \"),\n                    self.id_name(impl_item.ident),\n                    reflow!(\" = \"),\n                    ty,\n                    \";\"\n                ],\n                ImplItemKind::Fn { body, params } => docs![\n                    &impl_item.meta,\n                    text!(\"fn\"),\n                    space!(),\n                    self.id_name(impl_item.ident),\n                    self.generic_params(&impl_item.generics.params),\n                    sep!(\"(\", params, \")\"),\n                    
reflow!(\" -> \"),\n                    &body.ty,\n                    space!(),\n                    self.where_clause(&impl_item.generics.constraints),\n                    docs![line_!(), body, line_!(),].nest(INDENT).braces()\n                ],\n                ImplItemKind::Resugared(_resugared_impl_item_kind) => todo!(),\n                ImplItemKind::Error(_) => todo!(),\n            }\n        }\n        fn metadata(&self, metadata: &Metadata) -> DocBuilder<A> {\n            self.attributes(&metadata.attributes)\n        }\n        fn attribute(&self, attribute: &Attribute) -> DocBuilder<A> {\n            match &attribute.kind {\n                AttributeKind::Tool { .. } | AttributeKind::Hax(_) => nil!(),\n                AttributeKind::DocComment { kind, body } => match kind {\n                    DocCommentKind::Line => {\n                        intersperse!(\n                            body.lines().map(|line| docs![format!(\"/// {line}\")]),\n                            hardline!()\n                        )\n                    }\n                    DocCommentKind::Block => {\n                        docs![\n                            \"/**\",\n                            intersperse!(body.lines().map(|line| line.to_string()), hardline!()),\n                            \"*/\"\n                        ]\n                    }\n                },\n            }\n        }\n    }\n};\n"
  },
  {
    "path": "rust-engine/src/backends.rs",
    "content": "//! Code generation backends.\n//!\n//! A backend is consititued of:\n//!  - a list of AST transformations to apply, those are called phases.\n//!  - and a printer.\n//!\n//! This top-level module is mostly an index of available backends and a\n//! small prelude to make backend modules concise.\n//!\n//! # Adding a new backend\n//! 1. Create a submodule under `src/backends/`, e.g. `foo.rs`.\n//! 2. Put your printer and backend there.\n//! 3. Re-export it here with `pub mod foo;`.\n//!\n//! See [`rust`] for an example implementation.\n\npub mod fstar;\npub mod lean;\npub mod rust;\n\nuse std::{collections::HashMap, rc::Rc};\n\nuse crate::{\n    ast::{Item, Metadata, Module, span::Span},\n    attributes::LinkedItemGraph,\n    phase::legacy::group_consecutive_ocaml_phases,\n    printer::{HasLinkedItemGraph, Print, Printer},\n};\nuse camino::Utf8PathBuf;\nuse hax_types::engine_api::File;\n\n/// A hax backend.\n///\n/// A backend is responsible for turning the hax AST into sources of a target language.\n/// It combines:\n/// - a sequence of AST transformation phases, and\n/// - a printer that generates textual output.\n///\n/// For example, we have F\\*, Coq, and Lean backends.\n/// Some are still in the old OCaml engine.\npub trait Backend {\n    /// The printer type used by this backend.\n    type Printer: Printer;\n\n    /// Construct a new printer instance.\n    ///\n    /// By default this calls `Default::default` on the printer type.\n    fn printer(&self, linked_item_graph: Rc<LinkedItemGraph>) -> Self::Printer {\n        Self::Printer::default().with_linked_item_graph(linked_item_graph)\n    }\n\n    /// A short name identifying the backend.\n    ///\n    /// By default, this is delegated to the associated printer's [`Printer::NAME`].\n    const NAME: &'static str = Self::Printer::NAME;\n\n    /// The AST phases to apply before printing.\n    ///\n    /// Backends can override this to add transformations.\n    /// The default is an empty list (no 
transformations).\n    fn phases(&self) -> Vec<crate::phase::PhaseKind> {\n        vec![]\n    }\n\n    /// A list of resugaring phases.\n    fn resugaring_phases() -> Vec<Box<dyn prelude::Resugaring>> {\n        vec![]\n    }\n\n    /// Group a flat list of items into modules.\n    fn items_to_module(&self, items: Vec<Item>) -> Vec<Module> {\n        let mut modules: HashMap<_, Vec<_>> = HashMap::new();\n        for item in items {\n            let module_ident = item.ident.mod_only_closest_parent();\n            modules.entry(module_ident).or_default().push(item);\n        }\n        modules\n            .into_iter()\n            .map(|(ident, items)| Module {\n                ident,\n                items,\n                meta: Metadata {\n                    span: Span::dummy(),\n                    attributes: vec![],\n                },\n            })\n            .collect()\n    }\n\n    /// Print a list of modules into files\n    fn modules_to_files(&self, modules: Vec<Module>, mut printer: Self::Printer) -> Vec<File> {\n        modules\n            .into_iter()\n            .map(|module: Module| {\n                let path = self.module_path(&module).into_string();\n                let (contents, _) = printer.print(module);\n                File {\n                    path,\n                    contents,\n                    sourcemap: None,\n                }\n            })\n            .collect()\n    }\n\n    /// Compute the relative filesystem path where a given module should be written.\n    fn module_path(&self, module: &Module) -> Utf8PathBuf;\n}\n\n/// A backend can be interpreted as a phase\nimpl<B: Backend> crate::phase::Phase for B {\n    fn apply(&self, items: &mut Vec<Item>) {\n        for phase in group_consecutive_ocaml_phases(self.phases()) {\n            phase.apply(items);\n        }\n    }\n}\n\n/// Apply a backend to a collection of AST items, producing output files.\n///\n/// This runs all of the backend's [`Backend::phases`], 
groups the items into\n/// modules via [`Backend::items_to_module`], and then uses the backend's printer\n/// to generate source files with paths determined by [`Backend::module_path`].\npub fn apply_backend<B: Backend + 'static>(backend: B, mut items: Vec<Item>) -> Vec<File> {\n    crate::phase::Phase::apply(&backend, &mut items);\n\n    for mut resugaring_phase in B::resugaring_phases() {\n        for item in &mut items {\n            resugaring_phase.visit(item)\n        }\n    }\n\n    let linked_items_graph = Rc::new(LinkedItemGraph::new(\n        &items,\n        prelude::diagnostics::Context::Printer(B::NAME.into()),\n    ));\n\n    /// Drop any item marked with a hax attribute whose payload deserializes to\n    /// `AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true })`.\n    ///\n    /// Items with such a \"late-skip\" attribute are typically generated by hax\n    /// attributes.\n    fn drop_skip_late_items(items: &mut Vec<Item>) {\n        items.retain_mut(|item| {\n            use hax_lib_macros_types::{AttrPayload, ItemStatus};\n            !item.meta.hax_attributes().any(|attr| {\n                matches!(\n                    attr,\n                    AttrPayload::ItemStatus(ItemStatus::Included { late_skip: true })\n                )\n            })\n        });\n    }\n\n    drop_skip_late_items(&mut items);\n\n    let modules = backend.items_to_module(items);\n    let printer = backend.printer(linked_items_graph.clone());\n    backend.modules_to_files(modules, printer)\n}\n\nmod prelude {\n    //! Small \"bring-into-scope\" set used by backend modules.\n    //!\n    //! Importing this prelude saves repetitive `use` lists in per-backend\n    //! 
modules without forcing these names on downstream users.\n    pub use super::Backend;\n    pub use crate::ast::{identifiers::global_id::view::AnyKind, literals::*, resugared::*, *};\n    pub use crate::printer::{\n        pretty_ast::{DocBuilder, PrettyAst, ToDocument, install_pretty_helpers},\n        render_view::*,\n        *,\n    };\n    pub use crate::resugarings::*;\n    pub use crate::symbol::Symbol;\n    pub use hax_rust_engine_macros::{prepend_associated_functions_with, setup_printer_struct};\n}\n"
  },
  {
    "path": "rust-engine/src/debugger.rs",
    "content": "//! An interactive debugger server for the rust engine.\n\nuse crate::ast::span::Span;\nuse crate::ast::*;\nuse crate::phase::Phase as _;\nuse crate::phase::PhaseKind;\nuse crate::printer::SourceMap;\n\nmacro_rules! declare_printers {\n    {$($name:ident = $printer:expr),*$(,)?} => {\n        /// Enumeration of all declared printers.\n        #[derive(Clone, Debug, Copy, serde::Serialize, serde::Deserialize)]\n        pub enum Printer {\n            $($name,)*\n            /// The printer of a backend\n            Backend(Backend),\n        }\n\n        impl Printer {\n            fn print_items(self, items: Vec<Item>) -> (String, SourceMap) {\n                let module = Module {\n                    ident: crate::names::rust_primitives::hax,\n                    items,\n                    meta: Metadata {\n                        span: Span::dummy(),\n                        attributes: vec![],\n                    },\n                };\n                match self {\n                    $(Self::$name => {\n                        $printer.print(module)\n                    }),*\n                    Self::Backend(backend) => backend.print_module(module),\n                }\n            }\n        }\n    };\n}\nmacro_rules! 
declare_backends {\n    {$($name:ident = $backend:expr),*$(,)?} => {\n        /// Enumeration of all declared backends.\n        #[derive(Clone, Debug, Copy, serde::Serialize, serde::Deserialize)]\n        pub enum Backend {\n            $(\n                #[doc = concat!(\"The \", stringify!($name), \" backend.\")]\n                $name,\n            )*\n        }\n\n        impl Backend {\n            fn phases(self) -> Vec<PhaseKind> {\n                use crate::backends::Backend;\n                match self {\n                    $(\n                        Self::$name => $backend.phases(),\n                    )*\n                }\n            }\n            fn print_module(self, module: Module) -> (String, SourceMap) {\n                use crate::backends::Backend;\n                use crate::printer::Print;\n                let item_graph = crate::attributes::LinkedItemGraph::new(&module.items, crate::ast::diagnostics::Context::Debugger) ;\n                match self {\n                    $(\n                        Self::$name => $backend.printer(std::rc::Rc::new(item_graph)).print(module),\n                    )*\n                }\n            }\n        }\n    };\n}\n\ndeclare_backends! {\n    Lean = crate::backends::lean::LeanBackend,\n}\n\ndeclare_printers! {}\n\n/// A request to send to the debugger.\n#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]\npub enum Request {\n    /// Apply a given phase to the current items.\n    ApplyPhase(PhaseKind),\n    /// List the phases applied by a backend.\n    ListPhases(Backend),\n    /// Print the items with a given printer.\n    Print(Printer),\n    /// Dump the AST of the current items.\n    DumpAst(DumpAstOptions),\n}\n\n/// Options one can set when dumping ASTs.\n#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]\npub struct DumpAstOptions {\n    /// Sort the items via their global id. 
The order is not alphabetical, it is just deterministic.\n    pub sort_items_by_global_id: bool,\n    /// Drop `Use` items.\n    pub drop_use_items: bool,\n    /// Drop `RustModule` items.\n    pub drop_rust_modules_items: bool,\n    /// Drop `NotImplementedYet` items.\n    pub drop_not_implemented_yet_items: bool,\n    /// Drop all attributes in the AST.\n    pub drop_attributes: bool,\n    /// Erases spans, replacing them by `\"erased\"`.\n    /// Setting this to true will return untyped JSON.\n    pub erase_spans: bool,\n    /// Erases indices (e.g. local variable indices).\n    /// Setting this to true will return untyped JSON.\n    pub erase_indices: bool,\n}\n\nimpl Default for DumpAstOptions {\n    fn default() -> Self {\n        Self {\n            sort_items_by_global_id: true,\n            drop_use_items: true,\n            drop_rust_modules_items: true,\n            drop_not_implemented_yet_items: true,\n            drop_attributes: true,\n            erase_spans: true,\n            erase_indices: true,\n        }\n    }\n}\n\n/// Response given by the debugger.\n#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]\npub enum Response {\n    /// Response for `Request::ApplyPhase`: a phase has been applied.\n    PhaseApplied(PhaseKind),\n    /// Response for `Request::ListPhases`: list the phases for a backend.\n    ListedPhases(Vec<PhaseKind>),\n    /// Response for `Request::Print`: items have been printed.\n    Printed {\n        /// The rendered printed items.\n        rendered: String,\n        /// The sourcemap.\n        source_map: SourceMap,\n    },\n    /// One of the possible responses for `Request::DumpAst`. An AST was dumped as a typed JSON.\n    TypedDumpedAst(Vec<Item>),\n    /// One of the possible responses for `Request::DumpAst`. 
An AST was dumped as an untyped JSON.\n    DumpedAst(serde_json::Value),\n    /// An error occurred.\n    Error(String),\n}\n\n/// The state against which the debugger is working.\npub struct State {\n    /// An immutable vector of items.\n    pub initial_items: Vec<Item>,\n    /// A sequence of requests.\n    pub requests: Vec<Request>,\n}\n\nimpl State {\n    /// Compute the items at the current state\n    fn items(&self) -> Vec<Item> {\n        let mut items = self.initial_items.clone();\n        let phases = self.requests.iter().flat_map(|msg| match msg {\n            Request::ApplyPhase(phase) => Some(*phase),\n            _ => None,\n        });\n        for phase in phases {\n            phase.apply(&mut items);\n        }\n        items\n    }\n\n    /// Apply the request on a state.\n    pub fn apply(&mut self, req: Request) -> Response {\n        let mut items = self.items();\n        match req {\n            Request::ApplyPhase(phase) => {\n                phase.apply(&mut items);\n                Response::PhaseApplied(phase)\n            }\n            Request::Print(printer) => {\n                let (rendered, source_map) = printer.print_items(items);\n                Response::Printed {\n                    rendered,\n                    source_map,\n                }\n            }\n            Request::DumpAst(options) => {\n                let mut items: Vec<_> = items\n                    .into_iter()\n                    .filter(|it| {\n                        let drop = match &it.kind {\n                            ItemKind::Use { .. 
} => options.drop_use_items,\n                            ItemKind::RustModule => options.drop_rust_modules_items,\n                            ItemKind::NotImplementedYet => options.drop_not_implemented_yet_items,\n                            _ => false,\n                        };\n                        !drop\n                    })\n                    .collect();\n                if options.sort_items_by_global_id {\n                    items.sort_by_key(|item| serde_json::to_string_pretty(&item.ident).ok());\n                }\n                if options.drop_attributes {\n                    struct DropAttributes;\n                    use crate::ast::visitors::AstVisitorMut;\n                    impl AstVisitorMut for DropAttributes {\n                        fn visit_metadata(&mut self, x: &mut Metadata) {\n                            x.attributes = vec![];\n                        }\n                        fn visit_param(&mut self, x: &mut Param) {\n                            x.attributes = vec![];\n                        }\n                        fn visit_variant(&mut self, x: &mut Variant) {\n                            x.attributes = vec![];\n                        }\n                    }\n                    DropAttributes.visit(&mut items);\n                }\n                if options.erase_indices || options.erase_spans {\n                    let mut items = match serde_json::to_value(items) {\n                        Ok(value) => value,\n                        Err(err) => return Response::Error(err.to_string()),\n                    };\n                    use serde_json::Value;\n\n                    fn visit_json<F>(value: &mut Value, f: &F)\n                    where\n                        F: Fn(&mut Value),\n                    {\n                        f(value);\n\n                        match value {\n                            Value::Array(arr) => {\n                                for v in arr {\n                               
     visit_json(v, f);\n                                }\n                            }\n                            Value::Object(map) => {\n                                for (_k, v) in map {\n                                    visit_json(v, f);\n                                }\n                            }\n                            Value::Null | Value::Bool(_) | Value::Number(_) | Value::String(_) => {}\n                        }\n                    }\n\n                    let erased = || Value::String(\"<erased>\".to_string());\n                    visit_json(&mut items, &|value| {\n                        let Value::Object(map) = value else { return };\n                        if options.erase_indices {\n                            map.iter_mut()\n                                .filter(|(k, _)| matches!(k.as_str(), \"id\" | \"index\"))\n                                .for_each(|(_, value)| {\n                                    *value = erased();\n                                });\n                        }\n                        if options.erase_spans\n                            && map.contains_key(\"data\")\n                            && map.contains_key(\"owner_hint\")\n                            && map.contains_key(\"id\")\n                        {\n                            *value = erased();\n                        }\n                    });\n\n                    Response::DumpedAst(items)\n                } else {\n                    Response::TypedDumpedAst(items)\n                }\n            }\n            Request::ListPhases(backend) => Response::ListedPhases(backend.phases()),\n        }\n    }\n}\n\n/// Entrypoint for the interactive HTTP debugger.\npub fn http_interactive_debugger(items: Vec<Item>) {\n    use axum::{Json, Router, extract, routing::post};\n    use std::sync::Arc;\n\n    async fn process(\n        extract::State(items): extract::State<Arc<Vec<Item>>>,\n        Json((messages, message)): 
Json<(Vec<Request>, Request)>,\n    ) -> Json<Response> {\n        let mut state = State {\n            initial_items: items.to_vec(),\n            requests: messages,\n        };\n\n        Json(state.apply(message))\n    }\n\n    async fn serve(items: Vec<Item>) {\n        let state = Arc::new(items);\n\n        let app = Router::new()\n            .route(\"/process\", post(process))\n            .with_state(state);\n        let listener = tokio::net::TcpListener::bind(\"127.0.0.1:0\").await.unwrap();\n        let addr: std::net::SocketAddr = listener.local_addr().unwrap();\n        eprintln!(\"Listening on http://{addr}\");\n        axum::serve(listener, app).await.unwrap();\n    }\n\n    let rt = tokio::runtime::Builder::new_current_thread()\n        .enable_all()\n        .build()\n        .unwrap();\n\n    rt.block_on(serve(items));\n}\n"
  },
  {
    "path": "rust-engine/src/hax_io.rs",
    "content": "//! This module helps communicating with `cargo-hax`.\n\nuse hax_types::engine_api::protocol::FromEngine;\nuse serde::Deserialize;\nuse serde::de::DeserializeOwned;\nuse std::io::{BufRead, BufReader, Stdin, stdin, stdout};\nuse std::sync::{LazyLock, Mutex};\n\nuse hax_frontend_exporter::id_table::WithTable;\nuse hax_types::engine_api::{EngineOptions, protocol::ToEngine};\n\nstatic STDIN: LazyLock<Mutex<BufReader<Stdin>>> =\n    LazyLock::new(|| Mutex::new(BufReader::new(stdin())));\n\n/// Reads a message of any type from stdin\nfn read<T: DeserializeOwned>() -> T {\n    let mut stdin = STDIN.lock().unwrap();\n    let mut slice = Vec::new();\n    stdin\n        .read_until(b'\\n', &mut slice)\n        .expect(\"No message left! Did the engine crash?\");\n    let mut de = serde_json::Deserializer::from_slice(&slice);\n    de.disable_recursion_limit();\n    T::deserialize(serde_stacker::Deserializer::new(&mut de)).unwrap_or_else(|err| {\n        panic!(\n            \"Could not parse as a `{}` message! Error: {err}\",\n            std::any::type_name::<T>()\n        )\n    })\n}\n\n/// Reads a `ToEngine` message from the engine\npub fn read_to_engine_message() -> ToEngine {\n    read()\n}\n\n/// Reads the engine input JSON payload.\npub fn read_engine_input_message() -> WithTable<EngineOptions> {\n    read()\n}\n\n/// Reads a table of `EngineOptions`\npub fn read_query() -> WithTable<EngineOptions> {\n    let mut stdin = STDIN.lock().unwrap();\n    let mut slice = Vec::new();\n    stdin\n        .read_until(b'\\n', &mut slice)\n        .expect(\"No message left! 
Did the engine crash?\");\n    let mut de = serde_json::Deserializer::from_slice(&slice);\n    de.disable_recursion_limit();\n    WithTable::deserialize(serde_stacker::Deserializer::new(&mut de))\n        .expect(\"Could not parse as a table of EngineOptions!\")\n}\n\n/// Writes a `FromEngine` message\npub fn write(message: &FromEngine) {\n    use std::io::Write;\n\n    let mut stdout = stdout();\n    serde_json::to_writer(&mut stdout, message).unwrap();\n    stdout.write_all(b\"\\n\").unwrap();\n    stdout.flush().unwrap();\n}\n"
  },
  {
    "path": "rust-engine/src/import_thir.rs",
    "content": "//! This module allows importing the THIR AST produced by the frontend, and converting it to the engine's internal AST\n\nuse crate::ast;\nuse crate::ast::HasKind as _;\nuse crate::ast::identifiers::global_id::ReservedSuffix;\nuse crate::ast::identifiers::global_id::TupleId;\nuse crate::symbol::Symbol;\nuse hax_frontend_exporter as frontend;\n\nfn unsupported(msg: &str, issue_id: u32, span: ast::span::Span) -> ast::ErrorNode {\n    let fragment = ast::fragment::Fragment::Unknown(msg.to_owned());\n    let diagnostic = ast::diagnostics::Diagnostic::new(\n        fragment.clone(),\n        ast::diagnostics::DiagnosticInfo {\n            context: ast::diagnostics::Context::Import,\n            span,\n            kind: hax_types::diagnostics::Kind::Unimplemented {\n                issue_id: Some(issue_id),\n                details: Some(msg.to_owned()),\n            },\n        },\n    );\n    ast::ErrorNode {\n        fragment: Box::new(fragment),\n        diagnostics: vec![diagnostic],\n    }\n}\nfn assertion_failure(msg: &str, span: ast::span::Span) -> ast::ErrorNode {\n    let fragment = ast::fragment::Fragment::Unknown(msg.to_owned());\n    let diagnostic = ast::diagnostics::Diagnostic::new(\n        fragment.clone(),\n        ast::diagnostics::DiagnosticInfo {\n            context: ast::diagnostics::Context::Import,\n            span,\n            kind: hax_types::diagnostics::Kind::AssertionFailure {\n                details: msg.to_owned(),\n            },\n        },\n    );\n    ast::ErrorNode {\n        fragment: Box::new(fragment),\n        diagnostics: vec![diagnostic],\n    }\n}\n\nstruct Context {\n    owner_hint: Option<hax_frontend_exporter::DefId>,\n}\n\nfn is_self_type_constraint(gc: &ast::GenericConstraint) -> bool {\n    match gc {\n        ast::GenericConstraint::TypeClass(ast::ImplIdent { goal, .. 
}) => goal\n            .args\n            .first()\n            .and_then(ast::GenericValue::expect_ty)\n            .map(|ty| matches!(ty.0.as_ref(), ast::TyKind::Param(local) if local.0 == Symbol::new(\"Self\")))\n            .unwrap_or(false),\n        _ => false,\n    }\n}\n\nfn is_constraint_on_ty(gc: &ast::GenericConstraint, ty: &ast::Ty) -> bool {\n    match gc {\n        ast::GenericConstraint::TypeClass(ast::ImplIdent { goal, .. }) => goal\n            .args\n            .first()\n            .and_then(ast::GenericValue::expect_ty)\n            .map(|arg| arg == ty)\n            .unwrap_or(false),\n        _ => false,\n    }\n}\n\nfn resugar_index_mut(expr: &ast::Expr) -> Option<(&ast::Expr, &ast::Expr)> {\n    if let ast::ExprKind::App { head, args, .. } = expr.kind()\n        && let ast::ExprKind::GlobalId(method) = head.kind()\n        && let [lhs, index] = args.as_slice()\n    {\n        use crate::names::core::ops::index::*;\n        match (*method, lhs.kind()) {\n            (IndexMut::index_mut, ast::ExprKind::Borrow { inner: lhs, .. }) => Some((lhs, index)),\n            (Index::index, _) => Some((lhs, index)),\n            _ => None,\n        }\n    } else {\n        None\n    }\n}\n\nfn lhs_from_expr(expr: &ast::Expr) -> ast::Lhs {\n    if let ast::ExprKind::LocalId(var) = expr.kind() {\n        return ast::Lhs::LocalVar {\n            var: var.clone(),\n            ty: expr.ty.clone(),\n        };\n    }\n    let expr = expr.unbox_underef();\n    if let Some((e, index)) = resugar_index_mut(expr) {\n        ast::Lhs::ArrayAccessor {\n            e: Box::new(lhs_from_expr(e)),\n            ty: expr.ty.clone(),\n            index: index.clone(),\n        }\n    } else if let ast::ExprKind::App { head, args, .. 
} = expr.kind()\n        && let [arg] = args.as_slice()\n        && let ast::ExprKind::GlobalId(field) = head.kind()\n        && field.is_projector()\n    {\n        ast::Lhs::FieldAccessor {\n            e: Box::new(lhs_from_expr(arg)),\n            ty: expr.ty.clone(),\n            field: *field,\n        }\n    } else {\n        ast::Lhs::ArbitraryExpr(Box::new(expr.clone()))\n    }\n}\n\ntrait SpannedImport<Out> {\n    fn spanned_import(&self, context: &Context, span: ast::span::Span) -> Out;\n}\n\ntrait Import<Out> {\n    fn import(&self, context: &Context) -> Out;\n}\n\nimpl<T: Import<Out>, Out> Import<Vec<Out>> for Vec<T> {\n    fn import(&self, context: &Context) -> Vec<Out> {\n        self.iter()\n            .map(|value| Import::import(value, context))\n            .collect()\n    }\n}\n\nimpl<T: SpannedImport<Out>, Out> SpannedImport<Vec<Out>> for Vec<T> {\n    fn spanned_import(&self, context: &Context, span: ast::span::Span) -> Vec<Out> {\n        self.iter()\n            .map(|value| value.spanned_import(context, span))\n            .collect()\n    }\n}\n\ntrait DefIdImportHelpers {\n    fn import_as_value(&self) -> ast::GlobalId;\n    fn import_as_nonvalue(&self) -> ast::GlobalId;\n}\n\nimpl DefIdImportHelpers for frontend::DefId {\n    fn import_as_value(&self) -> ast::GlobalId {\n        ast::GlobalId::from_frontend(self.clone(), true)\n    }\n\n    fn import_as_nonvalue(&self) -> ast::GlobalId {\n        ast::GlobalId::from_frontend(self.clone(), false)\n    }\n}\n\nimpl Import<ast::span::Span> for frontend::Span {\n    fn import(&self, context: &Context) -> ast::span::Span {\n        ast::span::Span::from_exporter(self.clone(), context.owner_hint.as_ref())\n    }\n}\n\nfn import_attributes(context: &Context, attrs: &[frontend::Attribute]) -> ast::Attributes {\n    attrs.iter().flat_map(|attr| attr.import(context)).collect()\n}\n\nfn has_automatically_derived(attrs: &ast::Attributes) -> bool {\n    attrs.iter().any(|attr| {\n        matches!(\n    
        attr.kind,\n            ast::AttributeKind::Tool { ref path, .. } if path == \"automatically_derived\"\n        )\n    })\n}\n\nimpl Import<Option<ast::Attribute>> for frontend::Attribute {\n    fn import(&self, context: &Context) -> Option<ast::Attribute> {\n        match self {\n            frontend::Attribute::Parsed(frontend::AttributeKind::DocComment {\n                kind,\n                span,\n                comment,\n                ..\n            }) => {\n                let kind = match kind {\n                    frontend::CommentKind::Block => ast::DocCommentKind::Block,\n                    frontend::CommentKind::Line => ast::DocCommentKind::Line,\n                };\n                Some(ast::Attribute {\n                    kind: ast::AttributeKind::DocComment {\n                        kind,\n                        body: comment.clone(),\n                    },\n                    span: span.import(context),\n                })\n            }\n            frontend::Attribute::Parsed(frontend::AttributeKind::AutomaticallyDerived(span)) => {\n                let kind = ast::AttributeKind::Tool {\n                    path: \"automatically_derived\".to_owned(),\n                    tokens: String::new(),\n                };\n                Some(ast::Attribute {\n                    kind,\n                    span: span.import(context),\n                })\n            }\n            frontend::Attribute::Unparsed(frontend::AttrItem {\n                path,\n                args:\n                    frontend::AttrArgs::Eq {\n                        expr: frontend::MetaItemLit { symbol, .. 
},\n                        ..\n                    },\n                span,\n            }) if path == \"doc\" => {\n                let kind = ast::AttributeKind::DocComment {\n                    kind: ast::DocCommentKind::Line,\n                    body: symbol.clone(),\n                };\n                Some(ast::Attribute {\n                    kind,\n                    span: span.import(context),\n                })\n            }\n            frontend::Attribute::Unparsed(frontend::AttrItem { path, args, span }) => {\n                let tokens =\n                    if let frontend::AttrArgs::Delimited(frontend::DelimArgs { tokens, .. }) = args\n                    {\n                        tokens.clone()\n                    } else {\n                        String::new()\n                    };\n                Some(ast::Attribute {\n                    kind: ast::AttributeKind::Tool {\n                        path: path.clone(),\n                        tokens,\n                    },\n                    span: span.import(context),\n                })\n            }\n            _ => None,\n        }\n    }\n}\n\nimpl Import<ast::Attributes> for Vec<frontend::Attribute> {\n    fn import(&self, context: &Context) -> ast::Attributes {\n        self.iter()\n            .filter_map(|value| value.import(context))\n            .collect()\n    }\n}\n\nimpl Import<ast::GenericParam> for frontend::GenericParamDef {\n    fn import(&self, context: &Context) -> ast::GenericParam {\n        let span = self.span.import(context);\n        let frontend::GenericParamDef { name, kind, .. } = self;\n        let kind = match kind {\n            frontend::GenericParamDefKind::Lifetime => ast::GenericParamKind::Lifetime,\n            frontend::GenericParamDefKind::Type { .. } => ast::GenericParamKind::Type,\n            frontend::GenericParamDefKind::Const { ty, .. 
} => ast::GenericParamKind::Const {\n                ty: ty.spanned_import(context, span),\n            },\n        };\n        ast::GenericParam {\n            ident: ast::LocalId(Symbol::new(name.clone())),\n            meta: ast::Metadata {\n                span,\n                attributes: self.attributes.import(context),\n            },\n            kind,\n        }\n    }\n}\n\nimpl SpannedImport<ast::GenericValue> for frontend::GenericArg {\n    fn spanned_import(&self, context: &Context, span: ast::span::Span) -> ast::GenericValue {\n        match self {\n            frontend::GenericArg::Lifetime(_) => ast::GenericValue::Lifetime,\n            frontend::GenericArg::Type(ty) => {\n                ast::GenericValue::Ty(ty.spanned_import(context, span))\n            }\n            frontend::GenericArg::Const(decorated) => {\n                ast::GenericValue::Expr(frontend::Expr::from(decorated.clone()).import(context))\n            }\n        }\n    }\n}\n\nimpl Import<Vec<ast::GenericParam>> for frontend::TyGenerics {\n    fn import(&self, context: &Context) -> Vec<ast::GenericParam> {\n        self.params\n            .iter()\n            .map(|value| value.import(context))\n            .collect()\n    }\n}\n\nimpl SpannedImport<Vec<(ast::ImplExpr, ast::ImplIdent)>> for Vec<frontend::ImplExpr> {\n    fn spanned_import(\n        &self,\n        context: &Context,\n        span: ast::span::Span,\n    ) -> Vec<(ast::ImplExpr, ast::ImplIdent)> {\n        let impl_exprs: Vec<ast::ImplExpr> = self.spanned_import(context, span);\n        impl_exprs\n            .into_iter()\n            .enumerate()\n            .map(|(i, ie)| {\n                let impl_ident = ast::ImplIdent {\n                    goal: ie.goal.clone(),\n                    name: impl_expr_name(i as u64),\n                };\n                (ie, impl_ident)\n            })\n            .collect()\n    }\n}\n\nimpl SpannedImport<Option<ast::GenericConstraint>> for frontend::Clause {\n    fn 
spanned_import(\n        &self,\n        context: &Context,\n        span: ast::span::Span,\n    ) -> Option<ast::GenericConstraint> {\n        match &self.kind.value {\n            frontend::ClauseKind::Trait(trait_predicate) => {\n                let args = trait_predicate\n                    .trait_ref\n                    .generic_args\n                    .spanned_import(context, span);\n                let trait_ = trait_predicate.trait_ref.def_id.import_as_nonvalue();\n                let goal = ast::TraitGoal { trait_, args };\n\n                Some(ast::GenericConstraint::TypeClass(ast::ImplIdent {\n                    goal,\n                    name: impl_expr_name(self.id.0),\n                }))\n            }\n            frontend::ClauseKind::Projection(frontend::ProjectionPredicate {\n                impl_expr,\n                assoc_item,\n                ty,\n            }) => {\n                let impl_ = impl_expr.spanned_import(context, span);\n                let assoc_item = assoc_item.def_id.import_as_nonvalue();\n                let ty = ty.spanned_import(context, span);\n                Some(ast::GenericConstraint::Equality(ast::ProjectionPredicate {\n                    impl_,\n                    assoc_item,\n                    ty,\n                }))\n            }\n            _ => None,\n        }\n    }\n}\n\nimpl Import<Vec<ast::GenericConstraint>> for frontend::GenericPredicates {\n    fn import(&self, context: &Context) -> Vec<ast::GenericConstraint> {\n        let mut type_idx: u64 = 0;\n        self.predicates\n            .iter()\n            .filter_map(|(clause, span)| {\n                let span = span.import(context);\n                let mut gc = clause.spanned_import(context, span)?;\n                if let ast::GenericConstraint::TypeClass(impl_ident) = &mut gc {\n                    impl_ident.name = impl_expr_name(type_idx);\n                    type_idx += 1;\n                }\n                Some(gc)\n          
  })\n            .collect()\n    }\n}\n\nimpl Import<ast::Generics> for frontend::ParamEnv {\n    fn import(&self, context: &Context) -> ast::Generics {\n        ast::Generics {\n            params: self.generics.import(context),\n            constraints: self.predicates.import(context),\n        }\n    }\n}\n\nimpl Import<ast::SafetyKind> for frontend::Safety {\n    fn import(&self, _context: &Context) -> ast::SafetyKind {\n        match self {\n            frontend::Safety::Unsafe => ast::SafetyKind::Unsafe,\n            frontend::Safety::Safe => ast::SafetyKind::Safe,\n        }\n    }\n}\n\nfn import_fn_sig(\n    context: &Context,\n    fn_sig: &frontend::TyFnSig,\n    span: ast::span::Span,\n) -> ast::TyKind {\n    let inputs = if fn_sig.inputs.is_empty() {\n        vec![ast::TyKind::unit().promote()]\n    } else {\n        fn_sig\n            .inputs\n            .iter()\n            .map(|t| t.spanned_import(context, span))\n            .collect()\n    };\n    ast::TyKind::Arrow {\n        inputs,\n        output: fn_sig.output.spanned_import(context, span),\n    }\n}\n\nimpl SpannedImport<ast::Ty> for frontend::Ty {\n    fn spanned_import(&self, context: &Context, span: ast::span::Span) -> ast::Ty {\n        let kind = match self.kind() {\n            frontend::TyKind::Bool => ast::TyKind::Primitive(ast::PrimitiveTy::Bool),\n            frontend::TyKind::Char => ast::TyKind::Primitive(ast::PrimitiveTy::Char),\n            frontend::TyKind::Int(int_ty) => {\n                ast::TyKind::Primitive(ast::PrimitiveTy::Int(int_ty.into()))\n            }\n            frontend::TyKind::Uint(uint_ty) => {\n                ast::TyKind::Primitive(ast::PrimitiveTy::Int(uint_ty.into()))\n            }\n            frontend::TyKind::Float(float_ty) => {\n                ast::TyKind::Primitive(ast::PrimitiveTy::Float(float_ty.into()))\n            }\n            frontend::TyKind::FnDef { fn_sig, .. 
} | frontend::TyKind::Arrow(fn_sig) => {\n                import_fn_sig(context, &fn_sig.as_ref().value, span)\n            }\n            frontend::TyKind::Closure(frontend::ClosureArgs { fn_sig, .. }) => {\n                import_fn_sig(context, &fn_sig.value, span)\n            }\n            frontend::TyKind::Adt(item_ref) => {\n                let head = item_ref.def_id.import_as_nonvalue();\n                let args = item_ref.generic_args.spanned_import(context, span);\n                ast::TyKind::App { head, args }\n            }\n            frontend::TyKind::Foreign(..) => {\n                ast::TyKind::Error(unsupported(\"Foreign type\", 928, span))\n            }\n            frontend::TyKind::Str => ast::TyKind::Primitive(ast::PrimitiveTy::Str),\n            frontend::TyKind::Array(item_ref) => {\n                if let [\n                    frontend::GenericArg::Type(ty),\n                    frontend::GenericArg::Const(length),\n                ] = &item_ref.generic_args[..]\n                {\n                    ast::TyKind::Array {\n                        ty: ty.spanned_import(context, span),\n                        length: Box::new(length.import(context)),\n                    }\n                } else {\n                    ast::TyKind::Error(assertion_failure(\n                        \"Wrong generics for array: expected a type and a constant. See synthetic_items in hax frontend.\",\n                        span,\n                    ))\n                }\n            }\n            frontend::TyKind::Slice(ty) => {\n                if let [frontend::GenericArg::Type(ty)] = &ty.generic_args[..] {\n                    ast::TyKind::Slice(ty.spanned_import(context, span))\n                } else {\n                    ast::TyKind::Error(assertion_failure(\n                        \"Wrong generics for slice: expected a type. 
See synthetic_items in hax frontend.\",\n                        span,\n                    ))\n                }\n            }\n            frontend::TyKind::RawPtr(..) => ast::TyKind::RawPointer,\n            frontend::TyKind::Ref(_region, ty, mutable) => ast::TyKind::Ref {\n                inner: ty.as_ref().spanned_import(context, span),\n                mutable: *mutable,\n                region: ast::Region,\n            },\n            frontend::TyKind::Dynamic(_, generic_predicates, _region) => {\n                let goals = generic_predicates\n                    .predicates\n                    .iter()\n                    .map(|(clause, _span)| match &clause.kind.value {\n                        frontend::ClauseKind::Trait(frontend::TraitPredicate {\n                            trait_ref, ..\n                        }) => Ok(ast::DynTraitGoal {\n                            trait_: trait_ref.def_id.import_as_nonvalue(),\n                            non_self_args: trait_ref.generic_args.spanned_import(context, span)\n                                [1..]\n                                .to_vec(),\n                        }),\n                        _ => Err(assertion_failure(\"type Dyn with non trait predicate\", span)),\n                    })\n                    .collect::<Result<Vec<_>, _>>();\n                match goals {\n                    Ok(goals) => ast::TyKind::Dyn(goals),\n                    Err(e) => ast::TyKind::Error(e),\n                }\n            }\n            frontend::TyKind::Coroutine(_) => {\n                ast::TyKind::Error(unsupported(\"Coroutine type\", 924, span))\n            }\n            frontend::TyKind::Never => ast::TyKind::App {\n                head: crate::names::rust_primitives::hax::Never,\n                args: Vec::new(),\n            },\n            frontend::TyKind::Tuple(items) => {\n                let args = items.generic_args.spanned_import(context, span);\n                ast::TyKind::tuple(args)\n 
           }\n            frontend::TyKind::Alias(frontend::Alias {\n                kind: frontend::AliasKind::Projection { impl_expr, .. },\n                def_id,\n                ..\n            }) => ast::TyKind::AssociatedType {\n                impl_: impl_expr.spanned_import(context, span),\n                item: def_id.import_as_nonvalue(),\n            },\n            frontend::TyKind::Alias(frontend::Alias {\n                kind: frontend::AliasKind::Opaque { .. },\n                def_id,\n                ..\n            }) => ast::TyKind::Opaque(def_id.import_as_nonvalue()),\n            frontend::TyKind::Alias(frontend::Alias {\n                kind: frontend::AliasKind::Inherent,\n                ..\n            }) => ast::TyKind::Error(assertion_failure(\n                \"Ty::Alias with AliasTyKind::Inherent\",\n                span,\n            )),\n            frontend::TyKind::Alias(frontend::Alias {\n                kind: frontend::AliasKind::Free,\n                ..\n            }) => ast::TyKind::Error(assertion_failure(\"Ty::Alias with AliasTyKind::Free\", span)),\n            frontend::TyKind::Param(frontend::ParamTy { name, .. }) => {\n                ast::TyKind::Param(ast::LocalId(Symbol::new(name.clone())))\n            }\n            frontend::TyKind::Bound(..) => ast::TyKind::Error(assertion_failure(\n                \"type Bound: should be gone after typechecking\",\n                span,\n            )),\n            frontend::TyKind::Placeholder(..) => ast::TyKind::Error(assertion_failure(\n                \"type Placeholder: should be gone after typechecking\",\n                span,\n            )),\n            frontend::TyKind::Infer(..) 
=> ast::TyKind::Error(assertion_failure(\n                \"type Infer: should be gone after typechecking\",\n                span,\n            )),\n            frontend::TyKind::Error => ast::TyKind::Error(assertion_failure(\n                \"got type `Error`: Rust compilation probably failed.\",\n                span,\n            )),\n            frontend::TyKind::Todo(_) => ast::TyKind::Error(assertion_failure(\"type Todo\", span)),\n        };\n        kind.promote()\n    }\n}\n\nimpl SpannedImport<ast::literals::Literal> for frontend::ConstantLiteral {\n    fn spanned_import(&self, _context: &Context, _span: ast::span::Span) -> ast::literals::Literal {\n        match self {\n            frontend::ConstantLiteral::Bool(b) => ast::literals::Literal::Bool(*b),\n            frontend::ConstantLiteral::Char(c) => ast::literals::Literal::Char(*c),\n            frontend::ConstantLiteral::Float(f, float_ty) => match f.strip_prefix(\"-\") {\n                Some(f) => ast::literals::Literal::Float {\n                    value: Symbol::new(f),\n                    negative: true,\n                    kind: float_ty.into(),\n                },\n                None => ast::literals::Literal::Float {\n                    value: Symbol::new(f),\n                    negative: false,\n                    kind: float_ty.into(),\n                },\n            },\n            frontend::ConstantLiteral::Int(frontend::ConstantInt::Int(v, ty)) => {\n                ast::literals::Literal::Int {\n                    value: Symbol::new(v.abs().to_string()),\n                    negative: *v < 0,\n                    kind: ty.into(),\n                }\n            }\n            frontend::ConstantLiteral::Int(frontend::ConstantInt::Uint(v, ty)) => {\n                ast::literals::Literal::Int {\n                    value: Symbol::new(v.to_string()),\n                    negative: false,\n                    kind: ty.into(),\n                }\n            }\n            
frontend::ConstantLiteral::PtrNoProvenance(_) => {\n                panic!(\"constant literal: PtrNoProvenance\")\n            }\n            frontend::ConstantLiteral::Str(s) => ast::literals::Literal::String(Symbol::new(s)),\n            frontend::ConstantLiteral::ByteStr(items) => {\n                // Represent a byte string as an array of u8 literals, like the OCaml importer.\n                let s = String::from_utf8_lossy(items).to_string();\n                ast::literals::Literal::String(Symbol::new(&s))\n            }\n        }\n    }\n}\n\nimpl Import<ast::Expr> for frontend::ConstantExpr {\n    fn import(&self, context: &Context) -> ast::Expr {\n        let Self {\n            ty,\n            span,\n            contents,\n            attributes,\n            ..\n        } = self;\n        let span = span.import(context);\n        let kind = match contents.as_ref() {\n            frontend::ConstantExprKind::Literal(constant_literal) => match constant_literal {\n                frontend::ConstantLiteral::ByteStr(items) => {\n                    let elems: Vec<ast::Expr> = items\n                        .iter()\n                        .map(|b| {\n                            let ty = ast::TyKind::Primitive(ast::PrimitiveTy::Int(\n                                (&frontend::UintTy::U8).into(),\n                            ));\n                            ast::ExprKind::Literal(ast::literals::Literal::Int {\n                                value: Symbol::new(b.to_string()),\n                                negative: false,\n                                kind: (&frontend::UintTy::U8).into(),\n                            })\n                            .promote(ty.promote(), span)\n                        })\n                        .collect();\n                    ast::ExprKind::Array(elems)\n                }\n                _ => ast::ExprKind::Literal(constant_literal.spanned_import(context, span)),\n            },\n            
frontend::ConstantExprKind::Adt { info, fields } => {\n                let (is_struct, is_record) = match info.kind {\n                    frontend::VariantKind::Struct { named } => (true, named),\n                    frontend::VariantKind::Enum { named, .. } => (false, named),\n                    frontend::VariantKind::Union => (false, false),\n                };\n                let constructor = info.variant.import_as_value();\n                let fields = fields\n                    .iter()\n                    .map(|f| (f.field.import_as_value(), f.value.import(context)))\n                    .collect();\n                ast::ExprKind::Construct {\n                    constructor,\n                    is_record,\n                    is_struct,\n                    fields,\n                    base: None,\n                }\n            }\n            frontend::ConstantExprKind::Array { fields } => {\n                ast::ExprKind::Array(fields.import(context))\n            }\n            frontend::ConstantExprKind::Tuple { fields } => {\n                let length = fields.len();\n                let constructor: ast::GlobalId = TupleId::Constructor { length }.into();\n                let fields = fields\n                    .iter()\n                    .enumerate()\n                    .map(|(idx, value)| {\n                        let field: ast::GlobalId = TupleId::Field { length, field: idx }.into();\n                        (field, value.import(context))\n                    })\n                    .collect();\n                ast::ExprKind::Construct {\n                    constructor,\n                    is_record: false,\n                    is_struct: true,\n                    fields,\n                    base: None,\n                }\n            }\n            frontend::ConstantExprKind::GlobalName(item_ref) => {\n                ast::ExprKind::GlobalId(item_ref.contents().def_id.import_as_value())\n            }\n            
frontend::ConstantExprKind::Borrow(inner) => ast::ExprKind::Borrow {\n                mutable: false,\n                inner: inner.import(context),\n            },\n            frontend::ConstantExprKind::ConstRef { id } => {\n                ast::ExprKind::LocalId(ast::LocalId(Symbol::new(id.name.clone())))\n            }\n            frontend::ConstantExprKind::TraitConst { .. }\n            | frontend::ConstantExprKind::RawBorrow { .. }\n            | frontend::ConstantExprKind::Cast { .. }\n            | frontend::ConstantExprKind::FnPtr(_)\n            | frontend::ConstantExprKind::Memory(_)\n            | frontend::ConstantExprKind::Todo(_) => ast::ExprKind::Error(assertion_failure(\n                \"constant_lit_to_lit: TraitConst | FnPtr | RawBorrow | Cast | Memory\",\n                span,\n            )),\n        };\n        ast::Expr {\n            kind: Box::new(kind),\n            ty: ty.spanned_import(context, span),\n            meta: ast::Metadata {\n                span,\n                attributes: import_attributes(context, attributes),\n            },\n        }\n    }\n}\n\nfn import_block_expr(\n    context: &Context,\n    block: &frontend::Block,\n    ty: &frontend::Ty,\n    full_span: ast::span::Span,\n    attributes: Vec<ast::Attribute>,\n) -> ast::Expr {\n    let typ = ty.spanned_import(context, full_span);\n    let safety_mode = match block.safety_mode {\n        frontend::BlockSafety::Safe => ast::SafetyKind::Safe,\n        frontend::BlockSafety::BuiltinUnsafe | frontend::BlockSafety::ExplicitUnsafe => {\n            ast::SafetyKind::Unsafe\n        }\n    };\n    let mut stmts = block.stmts.clone();\n    let mut tail_expr: Option<frontend::Expr> = block.expr.clone();\n\n    if tail_expr.is_none()\n        && matches!(ty.kind(), frontend::TyKind::Never)\n        && let Some(frontend::Stmt {\n            kind: frontend::StmtKind::Expr { expr, .. 
},\n        }) = stmts.pop()\n    {\n        tail_expr = Some(expr);\n    }\n\n    let mut acc = if let Some(expr) = tail_expr {\n        let body = expr.import(context);\n        ast::ExprKind::Block { body, safety_mode }.promote(typ.clone(), full_span)\n    } else {\n        ast::Expr::unit(full_span)\n    };\n\n    for stmt in stmts.into_iter().rev() {\n        match stmt.kind {\n            frontend::StmtKind::Expr { expr, .. } => {\n                let rhs = expr.import(context);\n                let lhs = ast::PatKind::Wild.promote(rhs.ty.clone(), rhs.meta.span);\n                acc = ast::ExprKind::Let {\n                    lhs,\n                    rhs,\n                    body: acc,\n                }\n                .promote(typ.clone(), full_span);\n            }\n            frontend::StmtKind::Let {\n                pattern,\n                initializer,\n                else_block,\n                ..\n            } => {\n                let Some(init) = initializer else {\n                    return ast::Expr {\n                        kind: Box::new(ast::ExprKind::Error(unsupported(\n                            \"Sorry, Hax does not support declare-first let bindings (see https://doc.rust-lang.org/rust-by-example/variable_bindings/declare.html) for now.\",\n                            156,\n                            full_span,\n                        ))),\n                        ty: typ,\n                        meta: ast::Metadata {\n                            span: full_span,\n                            attributes,\n                        },\n                    };\n                };\n                let lhs = pattern.import(context);\n                let rhs = init.import(context);\n                let body = acc;\n                if let Some(else_block) = else_block {\n                    let else_span = else_block.span.import(context);\n                    let mut else_expr =\n                        import_block_expr(context, 
&else_block, ty, else_span, Vec::new());\n                    else_expr.ty = body.ty.clone();\n                    let arm_then = ast::Arm {\n                        pat: lhs,\n                        body,\n                        guard: None,\n                        meta: ast::Metadata {\n                            span: full_span,\n                            attributes: Vec::new(),\n                        },\n                    };\n                    let arm_else = ast::Arm {\n                        pat: ast::PatKind::Wild.promote(arm_then.pat.ty.clone(), else_span),\n                        body: else_expr,\n                        guard: None,\n                        meta: ast::Metadata {\n                            span: full_span,\n                            attributes: Vec::new(),\n                        },\n                    };\n                    acc = ast::ExprKind::Match {\n                        scrutinee: rhs,\n                        arms: vec![arm_then, arm_else],\n                    }\n                    .promote(typ.clone(), full_span)\n                } else {\n                    acc = ast::ExprKind::Let { lhs, rhs, body }.promote(typ.clone(), full_span)\n                }\n            }\n        }\n    }\n\n    ast::Expr {\n        ty: typ,\n        meta: ast::Metadata {\n            span: full_span,\n            attributes,\n        },\n        ..acc\n    }\n}\n\nimpl Import<ast::Expr> for frontend::Expr {\n    fn import(&self, context: &Context) -> ast::Expr {\n        let Self {\n            ty,\n            span,\n            contents,\n            attributes,\n            ..\n        } = self;\n        let span = span.import(context);\n        let raw_attributes: Vec<Option<ast::Attribute>> = attributes.import(context);\n        let attributes: Vec<ast::Attribute> = raw_attributes.into_iter().flatten().collect();\n        let binop_id = |op| {\n            use crate::names::core::cmp::*;\n            use 
crate::names::core::ops::{arith::*, bit::*};\n            use crate::names::rust_primitives::hax::machine_int as hax_machine_int;\n            use frontend::BinOp as Op;\n            match op {\n                Op::Add | Op::AddUnchecked => Add::add,\n                Op::Sub | Op::SubUnchecked => Sub::sub,\n                Op::Mul | Op::MulUnchecked => Mul::mul,\n                Op::Div => Div::div,\n                Op::Rem => Rem::rem,\n                Op::BitXor => BitXor::bitxor,\n                Op::BitAnd => BitAnd::bitand,\n                Op::BitOr => BitOr::bitor,\n                Op::Shl | Op::ShlUnchecked => Shl::shl,\n                Op::Shr | Op::ShrUnchecked => Shr::shr,\n                Op::Lt => PartialOrd::lt,\n                Op::Le => PartialOrd::le,\n                Op::Ne => PartialEq::ne,\n                Op::Ge => PartialOrd::ge,\n                Op::Gt => PartialOrd::gt,\n                Op::Eq => PartialEq::eq,\n                Op::Offset => crate::names::core::ptr::const_ptr::Impl::offset,\n                Op::Cmp => hax_machine_int::cmp,\n                Op::AddWithOverflow => hax_machine_int::add_with_overflow,\n                Op::SubWithOverflow => hax_machine_int::sub_with_overflow,\n                Op::MulWithOverflow => hax_machine_int::mul_with_overflow,\n            }\n        };\n        let binop_call = |op, x, y, out_type| -> _ {\n            use frontend::BinOp as Op;\n            let needs_borrow = matches!(op, Op::Lt | Op::Le | Op::Ne | Op::Ge | Op::Gt | Op::Eq);\n            let borrow_if_needed = if needs_borrow {\n                |e: ast::Expr| {\n                    use crate::ast::traits::HasKind;\n                    if matches!(e.ty.kind(), ast::TyKind::Ref { .. 
}) {\n                        e\n                    } else {\n                        let meta = e.meta.clone();\n                        let ty = ast::TyKind::Ref {\n                            inner: e.ty.clone(),\n                            mutable: false,\n                            region: ast::Region,\n                        };\n                        let kind = ast::ExprKind::Borrow {\n                            mutable: false,\n                            inner: e,\n                        };\n                        ast::Expr {\n                            kind: Box::new(kind),\n                            ty: ty.promote(),\n                            meta,\n                        }\n                    }\n                }\n            } else {\n                |e: ast::Expr| e\n            };\n            let (bounds_impls, trait_, generic_args) = {\n                // TODO: we pretend the call is a standalone funtion call.\n                // This is not true, here we're calling methods.\n                // This should be fixed.\n                (vec![], None, vec![])\n            };\n            ast::ExprKind::fn_app(\n                binop_id(op),\n                generic_args,\n                vec![borrow_if_needed(x), borrow_if_needed(y)],\n                out_type,\n                bounds_impls,\n                trait_,\n                span,\n            )\n        };\n        let assign_binop = |op: frontend::AssignOp| match op {\n            frontend::AssignOp::AddAssign => frontend::BinOp::Add,\n            frontend::AssignOp::SubAssign => frontend::BinOp::Sub,\n            frontend::AssignOp::MulAssign => frontend::BinOp::Mul,\n            frontend::AssignOp::DivAssign => frontend::BinOp::Div,\n            frontend::AssignOp::RemAssign => frontend::BinOp::Rem,\n            frontend::AssignOp::BitXorAssign => frontend::BinOp::BitXor,\n            frontend::AssignOp::BitAndAssign => frontend::BinOp::BitAnd,\n            
frontend::AssignOp::BitOrAssign => frontend::BinOp::BitOr,\n            frontend::AssignOp::ShlAssign => frontend::BinOp::Shl,\n            frontend::AssignOp::ShrAssign => frontend::BinOp::Shr,\n        };\n        let kind = match contents.as_ref() {\n            frontend::ExprKind::Box { value } => {\n                let value = value.import(context);\n                let ty = ty.spanned_import(context, span);\n                let id = crate::names::rust_primitives::hax::box_new;\n                ast::ExprKind::standalone_fn_app(id, vec![], vec![value], ty, span)\n            }\n            frontend::ExprKind::If {\n                if_then_scope: _,\n                cond,\n                then,\n                else_opt,\n            } => {\n                if let frontend::ExprKind::Let { expr, pat } = cond.contents.as_ref() {\n                    let scrutinee = expr.import(context);\n                    let pat = pat.import(context);\n                    let then_expr = then.import(context);\n                    let else_expr = else_opt\n                        .as_ref()\n                        .map(|value| value.import(context))\n                        .unwrap_or_else(|| ast::Expr::unit(span));\n                    let arm_then = ast::Arm {\n                        pat,\n                        body: then_expr,\n                        guard: None,\n                        meta: ast::Metadata {\n                            span,\n                            attributes: Vec::new(),\n                        },\n                    };\n                    let wildcard_pat = ast::Pat {\n                        kind: Box::new(ast::PatKind::Wild),\n                        ..arm_then.pat.clone()\n                    };\n                    let arm_else = ast::Arm {\n                        pat: wildcard_pat,\n                        body: else_expr,\n                        guard: None,\n                        meta: ast::Metadata {\n                            
span,\n                            attributes: Vec::new(),\n                        },\n                    };\n                    ast::ExprKind::Match {\n                        scrutinee,\n                        arms: vec![arm_then, arm_else],\n                    }\n                } else {\n                    ast::ExprKind::If {\n                        condition: cond.import(context),\n                        then: then.import(context),\n                        else_: else_opt.as_ref().map(|value| value.import(context)),\n                    }\n                }\n            }\n            frontend::ExprKind::Call {\n                ty: _,\n                fun,\n                args,\n                from_hir_call: _,\n                fn_span: _,\n            } => {\n                let mut args = args.import(context);\n                if args.is_empty() {\n                    args.push(ast::Expr::unit(span));\n                }\n                if let frontend::ExprKind::GlobalName { item, .. 
} = fun.contents.as_ref() {\n                    let mut head = fun.import(context);\n                    *head.kind = ast::ExprKind::GlobalId(item.contents().def_id.import_as_value());\n                    let generic_args = item.contents().generic_args.spanned_import(context, span);\n                    let bounds_impls = item\n                        .contents()\n                        .impl_exprs\n                        .iter()\n                        .map(|ie| ie.spanned_import(context, span))\n                        .collect();\n                    let trait_ = item.contents().in_trait.as_ref().map(|ie| {\n                        let impl_expr = ie.spanned_import(context, span);\n                        let args = impl_expr.goal.args.clone();\n                        (impl_expr, args)\n                    });\n                    ast::ExprKind::App {\n                        head,\n                        args,\n                        generic_args,\n                        bounds_impls,\n                        trait_,\n                    }\n                } else {\n                    let head = fun.import(context);\n                    ast::ExprKind::App {\n                        head,\n                        args,\n                        generic_args: Vec::new(),\n                        bounds_impls: Vec::new(),\n                        trait_: None,\n                    }\n                }\n            }\n            frontend::ExprKind::Deref { arg } => {\n                let result_ty = ty.spanned_import(context, span);\n                ast::ExprKind::standalone_fn_app(\n                    crate::names::rust_primitives::hax::deref_op,\n                    vec![],\n                    vec![arg.import(context)],\n                    result_ty,\n                    span,\n                )\n            }\n            frontend::ExprKind::Binary { op, lhs, rhs } => {\n                let result_ty = ty.spanned_import(context, span);\n             
   binop_call(*op, lhs.import(context), rhs.import(context), result_ty)\n            }\n            frontend::ExprKind::LogicalOp { op, lhs, rhs } => {\n                let result_ty = ty.spanned_import(context, span);\n                let id = match op {\n                    frontend::LogicalOp::And => crate::names::rust_primitives::hax::logical_op_and,\n                    frontend::LogicalOp::Or => crate::names::rust_primitives::hax::logical_op_or,\n                };\n                ast::ExprKind::standalone_fn_app(\n                    id,\n                    vec![],\n                    vec![lhs.import(context), rhs.import(context)],\n                    result_ty,\n                    span,\n                )\n            }\n            frontend::ExprKind::Unary { op, arg } => {\n                let result_ty = ty.spanned_import(context, span);\n                let id = match op {\n                    frontend::UnOp::Not => crate::names::core::ops::bit::Not::not,\n                    frontend::UnOp::Neg => crate::names::core::ops::arith::Neg::neg,\n                    frontend::UnOp::PtrMetadata => crate::names::rust_primitives::hax::cast_op,\n                };\n                ast::ExprKind::standalone_fn_app(\n                    id,\n                    vec![],\n                    vec![arg.import(context)],\n                    result_ty,\n                    span,\n                )\n            }\n            frontend::ExprKind::Cast { source } => {\n                let source_ty = source.ty.spanned_import(context, span);\n                let result_ty = ty.spanned_import(context, span);\n                let cast_id = if let ast::TyKind::App { head, .. 
} = source_ty.0.as_ref() {\n                    if head.expect_tuple().is_none() {\n                        Some(head.with_suffix(ReservedSuffix::Cast))\n                    } else {\n                        None\n                    }\n                } else {\n                    None\n                };\n                let id = cast_id.unwrap_or(crate::names::rust_primitives::hax::cast_op);\n                ast::ExprKind::standalone_fn_app(\n                    id,\n                    vec![],\n                    vec![source.import(context)],\n                    result_ty,\n                    span,\n                )\n            }\n            frontend::ExprKind::Use { source } => return source.import(context),\n            frontend::ExprKind::NeverToAny { source } => ast::ExprKind::standalone_fn_app(\n                crate::names::rust_primitives::hax::never_to_any,\n                vec![],\n                vec![source.import(context)],\n                ty.spanned_import(context, span),\n                span,\n            ),\n            frontend::ExprKind::PointerCoercion { cast, source } => {\n                let result_ty = ty.spanned_import(context, span);\n                match cast {\n                    frontend::PointerCoercion::ClosureFnPointer(frontend::Safety::Safe)\n                    | frontend::PointerCoercion::ReifyFnPointer => return source.import(context),\n                    frontend::PointerCoercion::Unsize(_) => ast::ExprKind::standalone_fn_app(\n                        crate::names::rust_primitives::unsize,\n                        vec![],\n                        vec![source.import(context)],\n                        result_ty,\n                        span,\n                    ),\n                    _ => ast::ExprKind::Error(assertion_failure(\n                        &format!(\"Pointer, with [cast] being {:?}\", cast),\n                        span,\n                    )),\n                }\n            }\n            
frontend::ExprKind::Loop { body } => ast::ExprKind::Loop {\n                body: body.import(context),\n                kind: Box::new(ast::LoopKind::UnconditionalLoop),\n                state: None,\n                control_flow: None,\n                label: None,\n            },\n            frontend::ExprKind::Match { scrutinee, arms } => ast::ExprKind::Match {\n                scrutinee: scrutinee.import(context),\n                arms: arms\n                    .iter()\n                    .map(|arm| ast::Arm {\n                        pat: arm.pattern.import(context),\n                        body: arm.body.import(context),\n                        guard: arm.guard.as_ref().map(|g| ast::Guard {\n                            kind: match g.contents.as_ref() {\n                                frontend::ExprKind::Let { expr, pat } => ast::GuardKind::IfLet {\n                                    lhs: pat.import(context),\n                                    rhs: expr.import(context),\n                                },\n                                _ => ast::GuardKind::IfLet {\n                                    lhs: ast::Pat {\n                                        kind: Box::new(ast::PatKind::Constant {\n                                            lit: ast::literals::Literal::Bool(true),\n                                        }),\n                                        ty: ast::TyKind::Primitive(ast::PrimitiveTy::Bool)\n                                            .promote(),\n                                        meta: ast::Metadata {\n                                            span,\n                                            attributes: Vec::new(),\n                                        },\n                                    },\n                                    rhs: g.import(context),\n                                },\n                            },\n                            meta: ast::Metadata {\n                                span: 
g.span.import(context),\n                                attributes: g.attributes.import(context),\n                            },\n                        }),\n                        meta: ast::Metadata {\n                            span: arm.span.import(context),\n                            attributes: Vec::new(),\n                        },\n                    })\n                    .collect(),\n            },\n            frontend::ExprKind::Let { expr: _, pat: _ } => ast::ExprKind::Error(unsupported(\n                \"Let-chains (e.g. `if let .. && let ..`) are not supported.\",\n                2018,\n                span,\n            )),\n            frontend::ExprKind::Block { block } => {\n                return import_block_expr(context, block, ty, span, attributes.clone());\n            }\n            frontend::ExprKind::Assign { lhs, rhs } => {\n                let lhs = lhs.import(context);\n                let rhs = rhs.import(context);\n                ast::ExprKind::Assign {\n                    lhs: lhs_from_expr(&lhs),\n                    value: rhs,\n                }\n            }\n            frontend::ExprKind::AssignOp { op, lhs, rhs } => {\n                let bin_op = assign_binop(*op);\n                let lhs = lhs.import(context);\n                let rhs = rhs.import(context);\n                let result_ty = lhs.ty.clone();\n                let op_expr = binop_call(bin_op, lhs.clone(), rhs, result_ty.clone())\n                    .promote(result_ty, span);\n                ast::ExprKind::Assign {\n                    lhs: lhs_from_expr(&lhs),\n                    value: op_expr,\n                }\n            }\n            frontend::ExprKind::Field { field, lhs } => ast::ExprKind::standalone_fn_app(\n                field.import_as_value(),\n                vec![],\n                vec![lhs.import(context)],\n                ty.spanned_import(context, span),\n                span,\n            ),\n            
frontend::ExprKind::TupleField { field, lhs } => {\n                let length = match lhs.ty.kind() {\n                    frontend::TyKind::Tuple(item_ref) => item_ref.generic_args.len(),\n                    _ => panic!(\"TupleField on non-tuple type\"),\n                };\n                let projector: ast::GlobalId = TupleId::Field {\n                    length,\n                    field: *field,\n                }\n                .into();\n\n                ast::ExprKind::standalone_fn_app(\n                    projector,\n                    vec![],\n                    vec![lhs.import(context)],\n                    ty.spanned_import(context, span),\n                    span,\n                )\n            }\n            frontend::ExprKind::Index { lhs, index } => {\n                let result_ty = ty.spanned_import(context, span);\n                let id = crate::names::core::ops::index::Index::index;\n                ast::ExprKind::standalone_fn_app(\n                    id,\n                    vec![],\n                    vec![lhs.import(context), index.import(context)],\n                    result_ty,\n                    span,\n                )\n            }\n            frontend::ExprKind::VarRef { id } => ast::ExprKind::LocalId(ast::LocalId::from(id)),\n            frontend::ExprKind::ConstRef { id } => {\n                ast::ExprKind::LocalId(ast::LocalId(Symbol::new(id.name.clone())))\n            }\n            frontend::ExprKind::GlobalName {\n                item,\n                constructor: _,\n            } => {\n                let ident = item.contents().def_id.import_as_value();\n                ast::ExprKind::GlobalId(ident)\n            }\n            frontend::ExprKind::UpvarRef {\n                closure_def_id: _,\n                var_hir_id,\n            } => ast::ExprKind::LocalId(ast::LocalId::from(var_hir_id)),\n            frontend::ExprKind::Borrow { borrow_kind, arg } => {\n                let inner = 
arg.import(context);\n                let mutable = matches!(borrow_kind, frontend::BorrowKind::Mut { .. });\n                ast::ExprKind::Borrow { mutable, inner }\n            }\n            frontend::ExprKind::RawBorrow { mutability, arg } => ast::ExprKind::AddressOf {\n                mutable: *mutability,\n                inner: arg.import(context),\n            },\n            frontend::ExprKind::Break { label: _, value } => {\n                let value = value\n                    .as_ref()\n                    .map(|value| value.import(context))\n                    .unwrap_or_else(|| ast::Expr::unit(span));\n                ast::ExprKind::Break {\n                    value,\n                    label: None, // TODO: honour the label (issue #1800)\n                    state: None,\n                }\n            }\n            frontend::ExprKind::Continue { label: _ } => ast::ExprKind::Continue {\n                label: None, // TODO: honour the label (issue #1800)\n                state: None,\n            },\n            frontend::ExprKind::Return { value } => {\n                let value = value\n                    .as_ref()\n                    .map(|value| value.import(context))\n                    .unwrap_or_else(|| ast::Expr::unit(span));\n                ast::ExprKind::Return { value }\n            }\n            frontend::ExprKind::ConstBlock(_item_ref) => {\n                ast::ExprKind::Error(unsupported(\"ConstBlock\", 923, span))\n            }\n            frontend::ExprKind::Repeat { value, count } => {\n                let value_expr: ast::Expr = value.import(context);\n                let count_expr = count.import(context);\n                let repeated = ast::Expr::standalone_fn_app(\n                    crate::names::rust_primitives::hax::repeat,\n                    vec![],\n                    vec![value_expr, count_expr],\n                    ty.spanned_import(context, span),\n                    span,\n                );\n        
        ast::ExprKind::standalone_fn_app(\n                    crate::names::alloc::boxed::Impl::new,\n                    vec![],\n                    vec![repeated],\n                    ty.spanned_import(context, span),\n                    span,\n                )\n            }\n            frontend::ExprKind::Array { fields } => ast::ExprKind::Array(fields.import(context)),\n            frontend::ExprKind::Tuple { fields } => {\n                let length = fields.len();\n                let constructor: ast::GlobalId = TupleId::Constructor { length }.into();\n                let fields = fields\n                    .iter()\n                    .enumerate()\n                    .map(|(idx, value)| {\n                        let field: ast::GlobalId = TupleId::Field { length, field: idx }.into();\n                        (field, value.import(context))\n                    })\n                    .collect();\n                ast::ExprKind::Construct {\n                    constructor,\n                    is_record: false,\n                    is_struct: true,\n                    fields,\n                    base: None,\n                }\n            }\n            frontend::ExprKind::Adt(adt_expr) => {\n                let (is_struct, is_record) = match adt_expr.info.kind {\n                    frontend::VariantKind::Struct { named } => (true, named),\n                    frontend::VariantKind::Enum { named, .. 
} => (false, named),\n                    frontend::VariantKind::Union => (false, false),\n                };\n                let constructor = adt_expr.info.variant.import_as_value();\n                let base = match &adt_expr.base {\n                    frontend::AdtExprBase::None => None,\n                    frontend::AdtExprBase::Base(info) => Some(info.base.import(context)),\n                    frontend::AdtExprBase::DefaultFields(_) => {\n                        return ast::ExprKind::Error(unsupported(\n                            \"Default field values: not supported\",\n                            1386,\n                            span,\n                        ))\n                        .promote(ty.spanned_import(context, span), span);\n                    }\n                };\n                let fields = adt_expr\n                    .fields\n                    .iter()\n                    .map(|f| (f.field.import_as_value(), f.value.import(context)))\n                    .collect();\n                ast::ExprKind::Construct {\n                    constructor,\n                    is_record,\n                    is_struct,\n                    fields,\n                    base,\n                }\n            }\n            frontend::ExprKind::PlaceTypeAscription { source: _, .. } => {\n                ast::ExprKind::Error(assertion_failure(\n                    \"Got a unexpected node `PlaceTypeAscription`. Please report, we were not able to figure out an expression yielding that node: a bug report would be very valuable here!\",\n                    span,\n                ))\n            }\n            frontend::ExprKind::ValueTypeAscription { source, .. 
} => {\n                return source.import(context);\n            }\n            frontend::ExprKind::Closure {\n                params,\n                body,\n                upvars,\n                ..\n            } => {\n                let mut params: Vec<ast::Pat> = params\n                    .iter()\n                    .filter_map(|param| param.pat.as_ref().map(|pat| pat.import(context)))\n                    .collect();\n                if params.is_empty() {\n                    let ty = ast::TyKind::unit().promote();\n                    params.push(ast::PatKind::Wild.promote(ty, span));\n                }\n                ast::ExprKind::Closure {\n                    params,\n                    body: body.import(context),\n                    captures: upvars.import(context),\n                }\n            }\n            frontend::ExprKind::Literal { lit, neg } => {\n                let mut literal = match &lit.node {\n                    frontend::LitKind::Bool(b) => ast::literals::Literal::Bool(*b),\n                    frontend::LitKind::Char(c) => ast::literals::Literal::Char(*c),\n                    frontend::LitKind::Byte(b) => ast::literals::Literal::Int {\n                        value: Symbol::new(b.to_string()),\n                        negative: false,\n                        kind: (&frontend::UintTy::U8).into(),\n                    },\n                    frontend::LitKind::Str(s, _) => ast::literals::Literal::String(Symbol::new(s)),\n                    frontend::LitKind::Int(value, kind) => {\n                        use frontend::LitIntType::*;\n                        let kind = match (kind, ty.kind()) {\n                            (Signed(int_ty), _) => ast::literals::IntKind::from(int_ty),\n                            (Unsigned(uint_ty), _) => ast::literals::IntKind::from(uint_ty),\n                            (Unsuffixed, frontend::TyKind::Int(int_ty)) => {\n                                
ast::literals::IntKind::from(int_ty)\n                            }\n                            (Unsuffixed, frontend::TyKind::Uint(uint_ty)) => {\n                                ast::literals::IntKind::from(uint_ty)\n                            }\n                            _ => panic!(\"Unsuffixed int literal without int/uint type\"),\n                        };\n                        ast::literals::Literal::Int {\n                            value: Symbol::new(value.to_string()),\n                            negative: false,\n                            kind,\n                        }\n                    }\n                    frontend::LitKind::Float(value, float_ty) => ast::literals::Literal::Float {\n                        value: Symbol::new(value),\n                        negative: false,\n                        kind: match (float_ty, ty.kind()) {\n                            (frontend::LitFloatType::Suffixed(k), _) => {\n                                ast::literals::FloatKind::from(k)\n                            }\n                            (frontend::LitFloatType::Unsuffixed, frontend::TyKind::Float(k)) => {\n                                ast::literals::FloatKind::from(k)\n                            }\n                            _ => panic!(\"Unsuffixed float literal without float type\"),\n                        },\n                    },\n                    frontend::LitKind::CStr(bytes, _) | frontend::LitKind::ByteStr(bytes, _) => {\n                        let elems: Vec<ast::Expr> = bytes\n                            .iter()\n                            .map(|b| {\n                                ast::ExprKind::Literal(ast::literals::Literal::Int {\n                                    value: Symbol::new(b.to_string()),\n                                    negative: false,\n                                    kind: (&frontend::UintTy::U8).into(),\n                                })\n                                .promote(\n        
                            ast::TyKind::Primitive(ast::PrimitiveTy::Int(\n                                        (&frontend::UintTy::U8).into(),\n                                    ))\n                                    .promote(),\n                                    span,\n                                )\n                            })\n                            .collect();\n                        return ast::ExprKind::Array(elems)\n                            .promote(ty.spanned_import(context, span), span);\n                    }\n                    frontend::LitKind::Err(_) => {\n                        return ast::ExprKind::Error(assertion_failure(\n                                \"[import_thir:literal] got an error literal: this means the Rust compiler or Hax's frontend probably reported errors above.\",\n                                span,\n                            )).promote(ty.spanned_import(context, span), span);\n                    }\n                };\n                if *neg {\n                    match &mut literal {\n                        ast::literals::Literal::Int { negative, .. }\n                        | ast::literals::Literal::Float { negative, .. } => {\n                            *negative = true;\n                        }\n                        _ => {\n                            return ast::ExprKind::Error(assertion_failure(\n                                \"Unexpected negation on non-numeric literal\",\n                                span,\n                            ))\n                            .promote(ty.spanned_import(context, span), span);\n                        }\n                    }\n                }\n                ast::ExprKind::Literal(literal)\n            }\n            frontend::ExprKind::ZstLiteral { .. 
} => ast::ExprKind::Error(assertion_failure(\n                \"`ZstLiteral` are expected to be handled before-hand\",\n                span,\n            )),\n            frontend::ExprKind::NamedConst { item, user_ty: _ } => {\n                let generic_args: Vec<ast::GenericValue> =\n                    item.contents().generic_args.spanned_import(context, span);\n                let const_args: Vec<ast::Expr> = generic_args\n                    .iter()\n                    .filter_map(|gv| match gv {\n                        ast::GenericValue::Expr(e) => Some(e.clone()),\n                        _ => None,\n                    })\n                    .collect();\n                let def_id = item.contents().def_id.import_as_value();\n                if const_args.is_empty() && item.contents().in_trait.is_none() {\n                    ast::ExprKind::GlobalId(def_id)\n                } else {\n                    ast::ExprKind::fn_app(\n                        def_id,\n                        vec![],\n                        const_args,\n                        ty.spanned_import(context, span),\n                        vec![],\n                        item.contents().in_trait.as_ref().map(|impl_expr| {\n                            (\n                                impl_expr.spanned_import(context, span),\n                                generic_args.clone(),\n                            )\n                        }),\n                        span,\n                    )\n                }\n            }\n            frontend::ExprKind::ConstParam { param, def_id: _ } => {\n                ast::ExprKind::LocalId(ast::LocalId(Symbol::new(param.name.clone())))\n            }\n            frontend::ExprKind::StaticRef { def_id, .. 
} => {\n                ast::ExprKind::GlobalId(def_id.import_as_value())\n            }\n            frontend::ExprKind::Yield { value: _ } => ast::ExprKind::Error(unsupported(\n                \"Got expression `Yield`: coroutines are not supported by hax\",\n                924,\n                span,\n            )),\n            frontend::ExprKind::Todo(payload) => ast::ExprKind::Error(assertion_failure(\n                &format!(\"expression Todo\\n{}\", payload),\n                span,\n            )),\n        };\n        ast::Expr {\n            kind: Box::new(kind),\n            ty: ty.spanned_import(context, span),\n            meta: ast::Metadata { span, attributes },\n        }\n    }\n}\n\nimpl Import<(ast::GlobalId, ast::Ty, Vec<ast::Attribute>)> for frontend::FieldDef {\n    fn import(&self, context: &Context) -> (ast::GlobalId, ast::Ty, Vec<ast::Attribute>) {\n        (\n            self.did.import_as_value(),\n            self.ty.spanned_import(context, self.span.import(context)),\n            self.attributes.import(context),\n        )\n    }\n}\n\nimpl Import<ast::Expr> for frontend::ThirBody {\n    fn import(&self, context: &Context) -> ast::Expr {\n        self.expr.import(context)\n    }\n}\n\nimpl Import<ast::PatKind> for frontend::PatKind {\n    fn import(&self, context: &Context) -> ast::PatKind {\n        match self {\n            frontend::PatKind::Wild | frontend::PatKind::Missing => ast::PatKind::Wild,\n            frontend::PatKind::AscribeUserType { subpattern, .. 
} => ast::PatKind::Ascription {\n                pat: subpattern.import(context),\n                ty: ast::SpannedTy {\n                    span: ast::span::Span::dummy(),\n                    ty: subpattern\n                        .ty\n                        .spanned_import(context, ast::span::Span::dummy()),\n                },\n            },\n            frontend::PatKind::Binding {\n                mode,\n                var,\n                subpattern,\n                ..\n            } => {\n                let mutable = mode.mutability;\n                let mode = match mode.by_ref {\n                    frontend::ByRef::Yes(_, mutability) => ast::BindingMode::ByRef(if mutability {\n                        ast::BorrowKind::Mut\n                    } else {\n                        ast::BorrowKind::Shared\n                    }),\n                    frontend::ByRef::No => ast::BindingMode::ByValue,\n                };\n                ast::PatKind::Binding {\n                    mutable,\n                    var: ast::LocalId::from(var),\n                    mode,\n                    sub_pat: subpattern.as_ref().map(|value| value.import(context)),\n                }\n            }\n            frontend::PatKind::Variant {\n                info, subpatterns, ..\n            } => {\n                let (is_struct, is_record) = match info.kind {\n                    frontend::VariantKind::Struct { named } => (true, named),\n                    frontend::VariantKind::Enum { named, .. 
} => (false, named),\n                    frontend::VariantKind::Union => (false, false),\n                };\n                let constructor = info.variant.import_as_value();\n                let fields = subpatterns\n                    .iter()\n                    .map(|f| (f.field.import_as_value(), f.pattern.import(context)))\n                    .collect();\n                ast::PatKind::Construct {\n                    constructor,\n                    is_record,\n                    is_struct,\n                    fields,\n                }\n            }\n            frontend::PatKind::Tuple { subpatterns } => {\n                let length = subpatterns.len();\n                let constructor: ast::GlobalId = TupleId::Constructor { length }.into();\n                let fields = subpatterns\n                    .iter()\n                    .enumerate()\n                    .map(|(idx, pat)| {\n                        let field: ast::GlobalId = TupleId::Field { length, field: idx }.into();\n                        (field, pat.import(context))\n                    })\n                    .collect();\n                ast::PatKind::Construct {\n                    constructor,\n                    is_record: false,\n                    is_struct: true,\n                    fields,\n                }\n            }\n            frontend::PatKind::Deref { subpattern } => ast::PatKind::Deref {\n                sub_pat: subpattern.import(context),\n            },\n            frontend::PatKind::DerefPattern { .. 
} => ast::PatKind::Error(unsupported(\n                \"pat DerefPattern\",\n                926,\n                ast::span::Span::dummy(),\n            )),\n            frontend::PatKind::Constant { value } => {\n                use ast::*;\n                fn expr_to_pat(expr: Expr) -> Pat {\n                    let Expr { kind, ty, meta } = expr;\n                    let kind = match *kind {\n                        ExprKind::Literal(lit) => PatKind::Constant { lit },\n                        ExprKind::Array(args) => PatKind::Array {\n                            args: args.into_iter().map(expr_to_pat).collect(),\n                        },\n                        ExprKind::Borrow { mutable: _, inner } => PatKind::Deref {\n                            sub_pat: expr_to_pat(inner),\n                        },\n                        kind => PatKind::Error(assertion_failure(\n                            &format!(\n                                \"expr_to_pat: the given expression could not be interpreted as a pattern. kind={kind:#?}\"\n                            ),\n                            meta.span,\n                        )),\n                    };\n                    let kind = Box::new(kind);\n                    Pat { kind, ty, meta }\n                }\n                *expr_to_pat(value.import(context)).kind\n            }\n            frontend::PatKind::ExpandedConstant { subpattern, .. } => {\n                *subpattern.import(context).kind\n            }\n            frontend::PatKind::Range(_) => {\n                ast::PatKind::Error(unsupported(\"pat Range\", 925, ast::span::Span::dummy()))\n            }\n            frontend::PatKind::Slice { .. } | frontend::PatKind::Array { .. 
} => {\n                ast::PatKind::Error(unsupported(\n                    \"Pat:Array or Pat:Slice\",\n                    804,\n                    ast::span::Span::dummy(),\n                ))\n            }\n            frontend::PatKind::Or { pats } => ast::PatKind::Or {\n                sub_pats: pats.import(context),\n            },\n            frontend::PatKind::Never => {\n                ast::PatKind::Error(unsupported(\"pat Never\", 927, ast::span::Span::dummy()))\n            }\n            frontend::PatKind::Error(_) => ast::PatKind::Error(assertion_failure(\n                \"`Error` node: Rust compilation failed. If Rust compilation was fine, please file an issue.\",\n                ast::span::Span::dummy(),\n            )),\n        }\n    }\n}\nimpl Import<ast::Pat> for frontend::Pat {\n    fn import(&self, context: &Context) -> ast::Pat {\n        let Self {\n            ty,\n            span,\n            contents,\n            hir_id: _,\n            attributes,\n        } = self;\n        let span = span.import(context);\n        let kind = match contents.as_ref() {\n            frontend::PatKind::AscribeUserType {\n                ascription: _,\n                subpattern,\n            } => ast::PatKind::Ascription {\n                pat: subpattern.import(context),\n                ty: ast::SpannedTy {\n                    span,\n                    ty: ty.spanned_import(context, span),\n                },\n            },\n            other => other.import(context),\n        };\n        ast::Pat {\n            kind: Box::new(kind),\n            ty: ty.spanned_import(context, span),\n            meta: ast::Metadata {\n                span,\n                attributes: attributes.import(context),\n            },\n        }\n    }\n}\n\nfn import_params(\n    context: &Context,\n    params: &Vec<frontend::Param>,\n    span: ast::span::Span,\n) -> Vec<ast::Param> {\n    let params: Vec<ast::Param> = params.spanned_import(context, span);\n   
 if params.is_empty() {\n        let ty = ast::TyKind::unit().promote();\n        vec![ast::Param {\n            pat: ast::PatKind::Wild.promote(ty.clone(), span),\n            ty,\n            ty_span: None,\n            attributes: vec![],\n        }]\n    } else {\n        params\n    }\n}\n\nimpl SpannedImport<ast::Param> for frontend::Param {\n    fn spanned_import(&self, context: &Context, span: ast::span::Span) -> ast::Param {\n        let frontend::Param {\n            pat,\n            ty,\n            ty_span,\n            attributes,\n            ..\n        } = self;\n        let ty_span = ty_span.as_ref().map(|value| value.import(context));\n        let ty = ty.spanned_import(context, ty_span.unwrap_or(span));\n        ast::Param {\n            pat: pat\n                .as_ref()\n                .map(|value| value.import(context))\n                .unwrap_or_else(|| ast::PatKind::Wild.promote(ty.clone(), span)),\n            ty,\n            ty_span,\n            attributes: attributes.import(context),\n        }\n    }\n}\n\nimpl Import<ast::Variant> for frontend::VariantDef {\n    fn import(&self, context: &Context) -> ast::Variant {\n        ast::Variant {\n            name: self.def_id.import_as_value(),\n            arguments: self.fields.import(context),\n            is_record: self.fields.raw.first().is_some_and(|fd| fd.name.is_some()),\n            attributes: self.attributes.import(context),\n        }\n    }\n}\n\nimpl<I, A: Import<B>, B> Import<Vec<B>> for frontend::IndexVec<I, A> {\n    fn import(&self, context: &Context) -> Vec<B> {\n        self.raw.iter().map(|value| value.import(context)).collect()\n    }\n}\n\nfn import_trait_item(\n    context: &Context,\n    item: &frontend::FullDef<frontend::ThirBody>,\n) -> ast::TraitItem {\n    let span = item.span.import(context);\n    let attributes = item.attributes.import(context);\n    let meta = ast::Metadata { span, attributes };\n    let (frontend::FullDefKind::AssocConst { param_env, .. 
}\n    | frontend::FullDefKind::AssocFn { param_env, .. }\n    | frontend::FullDefKind::AssocTy { param_env, .. }) = &item.kind\n    else {\n        unreachable!(\"Found associated item of an unknown kind.\")\n    };\n    let mut generics = param_env.import(context);\n    let mut imported_constraints: Vec<ast::GenericConstraint> = Vec::new();\n    let mut is_assoc_ty = false;\n    let kind = match &item.kind {\n        frontend::FullDefKind::AssocConst {\n            body: Some(default),\n            ..\n        } => ast::TraitItemKind::Default {\n            params: Vec::new(),\n            body: default.import(context),\n        },\n        frontend::FullDefKind::AssocConst { ty, .. } => {\n            ast::TraitItemKind::Fn(ty.spanned_import(context, span))\n        }\n        frontend::FullDefKind::AssocFn {\n            body: Some(default),\n            sig,\n            param_env,\n            ..\n        } => {\n            generics = import_generics(context, &sig.bound_vars, param_env);\n            ast::TraitItemKind::Default {\n                params: import_params(context, &default.params, span),\n                body: default.import(context),\n            }\n        }\n        frontend::FullDefKind::AssocFn { sig, param_env, .. 
} => {\n            generics = import_generics(context, &sig.bound_vars, param_env);\n            let inputs = sig\n                .value\n                .inputs\n                .iter()\n                .map(|ty| ty.spanned_import(context, span))\n                .collect();\n            let output = sig.value.output.spanned_import(context, span);\n            ast::TraitItemKind::Fn(ast::TyKind::Arrow { inputs, output }.promote())\n        }\n        frontend::FullDefKind::AssocTy {\n            value: Some(..), ..\n        } => ast::TraitItemKind::Error(assertion_failure(\n            \"Associate types defaults are not supported by hax yet (it is a nightly feature)\",\n            span,\n        )),\n        frontend::FullDefKind::AssocTy {\n            implied_predicates, ..\n        } => {\n            is_assoc_ty = true;\n            imported_constraints = implied_predicates.import(context);\n            let type_constraints = imported_constraints\n                .iter()\n                .filter_map(|gc| match gc {\n                    ast::GenericConstraint::TypeClass(t) => Some(t.clone()),\n                    _ => None,\n                })\n                .collect();\n            ast::TraitItemKind::Type(type_constraints)\n        }\n        _ => ast::TraitItemKind::Error(assertion_failure(\n            \"Found associated item of an unknown kind.\",\n            span,\n        )),\n    };\n    if is_assoc_ty {\n        generics.constraints = imported_constraints;\n    }\n    generics\n        .constraints\n        .retain(|gc| !is_self_type_constraint(gc));\n    for (idx, gc) in generics.constraints.iter_mut().enumerate() {\n        if let ast::GenericConstraint::TypeClass(impl_ident) = gc {\n            impl_ident.name = impl_expr_name(idx as u64);\n        }\n    }\n    ast::TraitItem {\n        meta,\n        kind,\n        generics,\n        ident: item.def_id().import_as_nonvalue(),\n    }\n}\n\nimpl SpannedImport<ast::TraitGoal> for 
frontend::TraitRef {\n    fn spanned_import(&self, context: &Context, span: ast::span::Span) -> ast::TraitGoal {\n        let trait_ = self.def_id.import_as_nonvalue();\n        let args = self.generic_args.spanned_import(context, span);\n        ast::TraitGoal { trait_, args }\n    }\n}\n\nfn impl_expr_name(index: u64) -> Symbol {\n    Symbol::new(format!(\"i{}\", index))\n}\n\nfn browse_path(\n    context: &Context,\n    item_kind: ast::ImplExprKind,\n    chunk: &frontend::ImplExprPathChunk,\n    span: ast::span::Span,\n    idx: usize,\n) -> ast::ImplExprKind {\n    match chunk {\n        frontend::ImplExprPathChunk::AssocItem {\n            item,\n            predicate:\n                frontend::Binder {\n                    value: frontend::TraitPredicate { trait_ref, .. },\n                    ..\n                },\n            ..\n        } => {\n            let ident = ast::ImplIdent {\n                goal: trait_ref.spanned_import(context, span),\n                name: impl_expr_name(idx as u64),\n            };\n            let item = item.contents().def_id.import_as_nonvalue();\n            ast::ImplExprKind::Projection {\n                impl_: ast::ImplExpr {\n                    kind: Box::new(item_kind),\n                    goal: trait_ref.spanned_import(context, span),\n                },\n                item,\n                ident,\n            }\n        }\n        frontend::ImplExprPathChunk::Parent {\n            predicate:\n                frontend::Binder {\n                    value: frontend::TraitPredicate { trait_ref, .. 
},\n                    ..\n                },\n            ..\n        } => {\n            let ident = ast::ImplIdent {\n                goal: trait_ref.spanned_import(context, span),\n                name: impl_expr_name(idx as u64),\n            };\n            ast::ImplExprKind::Parent {\n                impl_: ast::ImplExpr {\n                    kind: Box::new(item_kind),\n                    goal: trait_ref.spanned_import(context, span),\n                },\n                ident,\n            }\n        }\n    }\n}\n\nfn import_impl_expr_atom(\n    context: &Context,\n    ie: &frontend::ImplExprAtom,\n    span: ast::span::Span,\n    goal: ast::TraitGoal,\n) -> ast::ImplExprKind {\n    match ie {\n        frontend::ImplExprAtom::Concrete(item_ref) => {\n            ast::ImplExprKind::Concrete(item_ref.spanned_import(context, span))\n        }\n        frontend::ImplExprAtom::LocalBound { index, path, .. } => {\n            let mut kind = ast::ImplExprKind::LocalBound {\n                id: impl_expr_name(*index as u64),\n            };\n            for (i, chunk) in path.iter().enumerate() {\n                kind = browse_path(context, kind, chunk, span, i)\n            }\n            kind\n        }\n        frontend::ImplExprAtom::SelfImpl { path, .. } => {\n            let mut kind = ast::ImplExprKind::Self_;\n            for (i, chunk) in path.iter().enumerate() {\n                kind = browse_path(context, kind, chunk, span, i)\n            }\n            kind\n        }\n        frontend::ImplExprAtom::Dyn => ast::ImplExprKind::Dyn,\n        frontend::ImplExprAtom::Builtin { .. 
} => ast::ImplExprKind::Builtin(goal),\n        frontend::ImplExprAtom::Error(msg) => ast::ImplExprKind::Error(unsupported(msg, 707, span)),\n    }\n}\n\nimpl SpannedImport<ast::ImplExpr> for frontend::ImplExpr {\n    fn spanned_import(&self, context: &Context, span: ast::span::Span) -> ast::ImplExpr {\n        let goal = self.r#trait.value.spanned_import(context, span);\n        let impl_ = ast::ImplExpr {\n            kind: Box::new(import_impl_expr_atom(\n                context,\n                &self.r#impl,\n                span,\n                goal.clone(),\n            )),\n            goal: goal.clone(),\n        };\n        match &self.r#impl {\n            frontend::ImplExprAtom::Concrete(item_ref) if !item_ref.impl_exprs.is_empty() => {\n                let args = item_ref\n                    .impl_exprs\n                    .iter()\n                    .map(|ie| ie.spanned_import(context, span))\n                    .collect();\n                ast::ImplExpr {\n                    kind: Box::new(ast::ImplExprKind::ImplApp { impl_, args }),\n                    goal,\n                }\n            }\n            _ => impl_,\n        }\n    }\n}\n\nfn generic_param_to_value(p: &ast::GenericParam) -> ast::GenericValue {\n    match &p.kind {\n        ast::GenericParamKind::Lifetime => ast::GenericValue::Lifetime,\n        ast::GenericParamKind::Type => {\n            ast::GenericValue::Ty(ast::TyKind::Param(p.ident.clone()).promote())\n        }\n        ast::GenericParamKind::Const { ty } => ast::GenericValue::Expr(\n            ast::ExprKind::LocalId(p.ident.clone()).promote(ty.clone(), p.meta.span),\n        ),\n    }\n}\n\nfn import_generics(\n    context: &Context,\n    bound_var_kinds: &[frontend::BoundVariableKind],\n    param_env: &frontend::ParamEnv,\n) -> ast::Generics {\n    let mut generics: ast::Generics = param_env.import(context);\n    bound_var_kinds\n        .iter()\n        .flat_map(|var| match var {\n            
frontend::BoundVariableKind::Region(frontend::BoundRegionKind::Named {\n                def_id: _,\n                name,\n                span,\n                attributes,\n            }) => {\n                let name = name.strip_prefix(\"'\").unwrap_or(name);\n                Some(ast::GenericParam {\n                    ident: ast::identifiers::LocalId(Symbol::new(name)),\n                    meta: ast::Metadata {\n                        span: span.import(context),\n                        attributes: import_attributes(context, attributes),\n                    },\n                    kind: ast::GenericParamKind::Lifetime,\n                })\n            }\n            _ => None,\n        })\n        .for_each(|var| generics.params.push(var));\n    generics\n}\n\nfn cast_of_enum(\n    context: &Context,\n    type_id: ast::GlobalId,\n    generics: &ast::Generics,\n    ty: ast::Ty,\n    span: ast::span::Span,\n    variants: impl Iterator<Item = (ast::Variant, frontend::VariantDef)>,\n) -> ast::Item {\n    let name = ast::GlobalId::with_suffix(type_id, ReservedSuffix::Cast);\n    let arms = {\n        let ast::TyKind::Primitive(ast::PrimitiveTy::Int(int_kind)) = &*ty.0 else {\n            return ast::ItemKind::Error(assertion_failure(\n                &format!(\"cast_of_enum: expected int type, got {:?}\", ty),\n                span,\n            ))\n            .promote(name, span);\n        };\n        let mut previous_explicit_determinator: Option<ast::Expr> = None;\n        variants\n            .map(|(variant, variant_def)| {\n                // Each variant comes with a `rustc_middle::ty::VariantDiscr`. Some variant have `Explicit` discr (i.e. 
an expression)\n                // while other have `Relative` discr (the distance to the previous last explicit discr).\n                let body = match &variant_def.discr_def {\n                    frontend::DiscriminantDefinition::Relative(m) => {\n                        let relative = ast::ExprKind::Literal(ast::literals::Literal::Int {\n                            value: Symbol::new(m.to_string()),\n                            negative: false,\n                            kind: int_kind.clone(),\n                        })\n                        .promote(ty.clone(), span);\n                        if let Some(base) = &previous_explicit_determinator {\n                            ast::ExprKind::fn_app(\n                                crate::names::core::ops::arith::Add::add,\n                                vec![],\n                                vec![base.clone(), relative],\n                                ty.clone(),\n                                vec![],\n                                None,\n                                span,\n                            )\n                            .promote(ty.clone(), span)\n                        } else {\n                            relative\n                        }\n                    }\n                    frontend::DiscriminantDefinition::Explicit { def_id, span } => {\n                        let e = ast::ExprKind::GlobalId(def_id.import_as_value())\n                            .promote(ty.clone(), span.import(context));\n                        previous_explicit_determinator = Some(e.clone());\n                        e\n                    }\n                };\n                let pat = ast::PatKind::Construct {\n                    constructor: variant.name,\n                    is_record: variant.is_record,\n                    is_struct: false,\n                    fields: variant\n                        .arguments\n                        .iter()\n                        .map(|(cid, ty, 
_)| (*cid, ast::PatKind::Wild.promote(ty.clone(), span)))\n                        .collect(),\n                }\n                .promote(ty.clone(), span);\n                ast::Arm::non_guarded(pat, body, span)\n            })\n            .collect()\n    };\n    let type_ref = ast::TyKind::App {\n        head: type_id,\n        args: generics.params.iter().map(generic_param_to_value).collect(),\n    }\n    .promote();\n    let scrutinee_var = ast::LocalId(Symbol::new(\"x\"));\n    let params = vec![ast::Param {\n        pat: ast::PatKind::var_pat(scrutinee_var.clone()).promote(type_ref.clone(), span),\n        ty: type_ref.clone(),\n        ty_span: None,\n        attributes: Vec::new(),\n    }];\n    let scrutinee = ast::ExprKind::LocalId(scrutinee_var).promote(type_ref.clone(), span);\n    ast::ItemKind::Fn {\n        name,\n        generics: generics.clone(),\n        body: ast::ExprKind::Match { scrutinee, arms }.promote(ty, span),\n        params,\n        safety: ast::SafetyKind::Safe,\n    }\n    .promote(name, span)\n}\n\nfn expect_body<'a, Body>(\n    optional: &'a Option<Body>,\n    span: ast::span::Span,\n    label: &str,\n) -> Result<&'a Body, ast::ErrorNode> {\n    optional\n        .as_ref()\n        .ok_or_else(|| assertion_failure(&format!(\"Expected body at {label}\"), span))\n}\n\nfn missing_associated_item() -> core::convert::Infallible {\n    panic!(\"All assoc items should be included in the list of items produced by the frontend.\")\n}\n\nuse std::collections::HashMap;\n\n/// Import a `FullDef` item produced by the frontend, and produce the corresponding item\n/// (or items for inherent impls)\npub fn import_item(\n    item: &frontend::FullDef<frontend::ThirBody>,\n    all_items: &HashMap<frontend::DefId, &frontend::FullDef<frontend::ThirBody>>,\n) -> Vec<ast::Item> {\n    let frontend::FullDef {\n        this,\n        span,\n        attributes,\n        kind,\n        ..\n    } = item;\n    let context = &Context {\n        owner_hint: 
Some(this.contents().def_id.clone()),\n    };\n    let ident = this.contents().def_id.clone().import_as_nonvalue();\n    let span = span.import(context);\n    let attributes = attributes.import(context);\n    let has_auto = has_automatically_derived(&attributes);\n    let mut items = Vec::new();\n    let kind = match kind {\n        frontend::FullDefKind::Adt {\n            param_env,\n            adt_kind,\n            variants: frontend_variants,\n            repr,\n            ..\n        } => {\n            let generics = param_env.import(context);\n            let frontend_variants = || frontend_variants.clone().into_iter();\n            let variants: Vec<ast::Variant> =\n                frontend_variants().map(|v| v.import(context)).collect();\n            use frontend::{AdtKind, DiscriminantDefinition};\n            let adt_item_kind = {\n                let make_type = |is_struct| ast::ItemKind::Type {\n                    name: ident,\n                    generics: generics.clone(),\n                    variants: variants.clone(),\n                    is_struct,\n                };\n                match adt_kind {\n                    AdtKind::Enum => make_type(false),\n                    AdtKind::Struct => make_type(true),\n                    AdtKind::Union => ast::ItemKind::Error(unsupported(\"Union type\", 998, span)),\n                    AdtKind::Array | AdtKind::Slice | AdtKind::Tuple => {\n                        ast::ItemKind::Error(assertion_failure(\n                            &format!(\n                                \"While translating a item, we got an ADT of kind {adt_kind:#?}. 
This is not supposed to be ever produced.\"\n                            ),\n                            span,\n                        ))\n                    }\n                }\n            };\n\n            // For enums that are fieldless (see https://doc.rust-lang.org/reference/items/enumerations.html#casting),\n            // we produce a cast function.\n            if matches!(adt_kind, AdtKind::Enum) && variants.iter().all(ast::Variant::is_fieldless)\n            {\n                // Each variant might introduce a anonymous constant defining its discriminant integer\n                let discriminant_const_items = frontend_variants().filter_map(|v| {\n                    let DiscriminantDefinition::Explicit { def_id, span } = &v.discr_def else {\n                        return None;\n                    };\n\n                    let span = span.import(context);\n                    let name = def_id.import_as_value();\n                    let value = v.discr_val.val;\n                    let (value, kind) = match v.discr_val.ty.kind() {\n                        frontend::TyKind::Int(int_ty) => (value.to_string(), int_ty.into()),\n                        frontend::TyKind::Uint(int_ty) => {\n                            ((value as i128).to_string(), int_ty.into())\n                        }\n                        _ => {\n                            return Some(\n                                ast::ItemKind::Error(assertion_failure(\"\", span))\n                                    .promote(name, span),\n                            );\n                        }\n                    };\n                    Some(\n                        ast::ItemKind::Fn {\n                            name,\n                            generics: ast::Generics::empty(),\n                            body: ast::ExprKind::Literal(ast::literals::Literal::Int {\n                                value: Symbol::new(value),\n                                negative: false,\n            
                    kind,\n                            })\n                            .promote(v.discr_val.ty.spanned_import(context, span), span),\n                            params: Vec::new(),\n                            safety: ast::SafetyKind::Safe,\n                        }\n                        .promote(name, span),\n                    )\n                });\n\n                let cast_item = cast_of_enum(\n                    context,\n                    ident,\n                    &generics,\n                    repr.typ.spanned_import(context, span),\n                    span,\n                    variants.into_iter().zip(frontend_variants()),\n                );\n                return std::iter::once(adt_item_kind.promote(ident, span))\n                    .chain(discriminant_const_items)\n                    .chain(std::iter::once(cast_item))\n                    .collect();\n            } else {\n                adt_item_kind\n            }\n        }\n        frontend::FullDefKind::TyAlias { param_env, ty } => ast::ItemKind::TyAlias {\n            name: ident,\n            generics: param_env.import(context),\n            ty: ty.spanned_import(context, span),\n        },\n        frontend::FullDefKind::ForeignTy => {\n            ast::ItemKind::Error(unsupported(\"Foreign type\", 928, span))\n        }\n        frontend::FullDefKind::OpaqueTy => ast::ItemKind::Error(assertion_failure(\n            \"OpaqueTy should be replaced by Alias in the frontend\",\n            span,\n        )),\n        frontend::FullDefKind::Trait {\n            param_env,\n            implied_predicates,\n            items,\n            safety,\n            ..\n        } => {\n            let mut generics = param_env.import(context);\n            generics.constraints = implied_predicates.import(context);\n            ast::ItemKind::Trait {\n                name: ident,\n                generics,\n                items: items\n                    .iter()\n           
         .map(|assoc_item| {\n                        let item = all_items\n                            .get(&assoc_item.def_id)\n                            .expect(\"Could not find definition for associated item\");\n                        import_trait_item(context, item)\n                    })\n                    .collect(),\n                safety: safety.import(context),\n            }\n        }\n\n        frontend::FullDefKind::TraitAlias { .. } => {\n            ast::ItemKind::Error(assertion_failure(\"Trait Alias\", span))\n        }\n        frontend::FullDefKind::TraitImpl {\n            param_env,\n            trait_pred,\n            implied_impl_exprs,\n            items,\n            ..\n        } => {\n            let mut generics = param_env.import(context);\n            let trait_ref = trait_pred.trait_ref.contents();\n            let of_trait: (ast::GlobalId, Vec<ast::GenericValue>) = (\n                trait_ref.def_id.import_as_nonvalue(),\n                trait_ref\n                    .generic_args\n                    .iter()\n                    .map(|ga| ga.spanned_import(context, span))\n                    .collect(),\n            );\n\n            let mut parent_bounds: Vec<(ast::ImplExpr, ast::ImplIdent)> =\n                implied_impl_exprs.spanned_import(context, span);\n            let items: Vec<ast::ImplItem> = if has_auto {\n                Vec::new()\n            } else {\n                items\n                    .iter()\n                    .flat_map(|assoc_item| {\n                        // The DefId for this very specific impl associated item.\n                        // The DefId of the original associated item on the trait is\n                        // `assoc_item.decl_def_id`, here we discard it, but it may\n                        // be useful in the future (for e.g. 
for\n                        // https://github.com/cryspen/hax-evit/issues/24).\n                        let method_def_id_impl = match &assoc_item.value {\n                            hax_frontend_exporter::ImplAssocItemValue::Provided {\n                                def_id, ..\n                            } => def_id,\n                            _ => {\n                                // TODO: Here, we skip defaulted associated items.\n                                return None;\n                            }\n                        };\n                        let ident = method_def_id_impl.import_as_nonvalue();\n                        let assoc_item_def = all_items.get(method_def_id_impl).unwrap_or_else(\n                            #[allow(unreachable_code)]\n                            || match missing_associated_item() {},\n                        );\n                        let span = assoc_item_def.span.import(context);\n                        let attributes = assoc_item_def.attributes.import(context);\n                        let (generics, kind) = match assoc_item_def.kind() {\n                            frontend::FullDefKind::AssocTy {\n                                param_env, value, ..\n                            } => (\n                                param_env.import(context),\n                                match expect_body(value, span, \"import_item/TraitImpl/AssocTy\") {\n                                    Ok(body) => ast::ImplItemKind::Type {\n                                        ty: body.spanned_import(context, span),\n                                        parent_bounds: assoc_item\n                                            .required_impl_exprs\n                                            .spanned_import(context, span),\n                                    },\n                                    Err(error) => ast::ImplItemKind::Error(error),\n                                },\n                            ),\n                
            frontend::FullDefKind::AssocFn {\n                                param_env,\n                                body,\n                                sig,\n                                ..\n                            } => (\n                                import_generics(context, &sig.bound_vars, param_env),\n                                match expect_body(body, span, \"import_item/TraitImpl/AssocFn\") {\n                                    Ok(body) => ast::ImplItemKind::Fn {\n                                        body: body.import(context),\n                                        params: import_params(context, &body.params, span),\n                                    },\n                                    Err(error) => ast::ImplItemKind::Error(error),\n                                },\n                            ),\n                            frontend::FullDefKind::AssocConst {\n                                param_env, body, ..\n                            } => (\n                                param_env.import(context),\n                                match expect_body(body, span, \"import_item/TraitImpl/AssocConst\") {\n                                    Ok(body) => ast::ImplItemKind::Fn {\n                                        body: body.import(context),\n                                        params: Vec::new(),\n                                    },\n                                    Err(error) => ast::ImplItemKind::Error(error),\n                                },\n                            ),\n                            #[allow(unreachable_code)]\n                            _ => match missing_associated_item() {},\n                        };\n                        Some(ast::ImplItem {\n                            meta: ast::Metadata { span, attributes },\n                            generics,\n                            kind,\n                            ident,\n                        })\n                    })\n  
                  .collect()\n            };\n\n            if let [ast::GenericValue::Ty(self_ty), ..] = &of_trait.1[..] {\n                parent_bounds.retain(|(impl_expr, _)| {\n                    matches!(impl_expr.goal.args.first(), Some(ast::GenericValue::Ty(arg_ty)) if arg_ty == self_ty)\n                });\n                generics\n                    .constraints\n                    .retain(|gc| !is_constraint_on_ty(gc, self_ty));\n                if generics.constraints.len() > 1 {\n                    generics.constraints.truncate(1);\n                }\n                ast::ItemKind::Impl {\n                    generics,\n                    self_ty: self_ty.clone(),\n                    of_trait,\n                    items,\n                    parent_bounds,\n                }\n            } else {\n                ast::ItemKind::Error(assertion_failure(\n                    \"Self should always be the first generic argument of a trait application.\",\n                    span,\n                ))\n            }\n        }\n        frontend::FullDefKind::InherentImpl {\n            param_env, items, ..\n        } => {\n            if has_auto {\n                return Vec::new();\n            }\n            return items\n                .iter()\n                .map(|assoc_item| {\n                    let ident = assoc_item.def_id.import_as_nonvalue();\n                    let assoc_item = all_items.get(&assoc_item.def_id).unwrap_or_else(\n                        #[allow(unused)]\n                        || match missing_associated_item() {},\n                    );\n                    let span = assoc_item.span.import(context);\n                    let attributes = assoc_item.attributes.import(context);\n                    let impl_generics = param_env.import(context);\n                    let kind = match assoc_item.kind() {\n                        frontend::FullDefKind::AssocTy {\n                            param_env, value, ..\n           
             } => {\n                            let generics = impl_generics.clone().concat(param_env.import(context));\n                            match expect_body(value, span, \"import_item/InherentImpl/AssocTy\") {\n                                Ok(body) => ast::ItemKind::TyAlias {\n                                    name: ident,\n                                    generics,\n                                    ty: body.spanned_import(context, span),\n                                },\n                                Err(err) => ast::ItemKind::Error(err),\n                            }\n                        }\n                        frontend::FullDefKind::AssocFn {\n                            param_env,\n                            sig,\n                            body,\n                            ..\n                        } => {\n                            let generics = impl_generics.clone().concat(import_generics(\n                                context,\n                                &sig.bound_vars,\n                                param_env,\n                            ));\n                            match expect_body(body, span, \"import_item/InherentImpl/AssocFn\") {\n                                Ok(body) => ast::ItemKind::Fn {\n                                    name: ident,\n                                    generics,\n                                    body: body.import(context),\n                                    params: import_params(context, &body.params, span),\n                                    safety: sig.value.safety.import(context),\n                                },\n                                Err(err) => ast::ItemKind::Error(err),\n                            }\n                        }\n                        frontend::FullDefKind::AssocConst {\n                            param_env, body, ..\n                        } => {\n                            let generics = 
impl_generics.clone().concat(param_env.import(context));\n                            match expect_body(body, span, \"import_item/InherentImpl/AssocConst\") {\n                                Ok(body) => ast::ItemKind::Fn {\n                                    name: ident,\n                                    generics,\n                                    body: body.import(context),\n                                    params: Vec::new(),\n                                    safety: ast::SafetyKind::Safe,\n                                },\n                                Err(err) => ast::ItemKind::Error(err),\n                            }\n                        }\n                        _ =>\n                        {\n                            #[allow(unused)]\n                            match missing_associated_item() {}\n                        }\n                    };\n                    ast::Item {\n                        ident,\n                        kind,\n                        meta: ast::Metadata { span, attributes },\n                    }\n                })\n                .collect();\n        }\n        frontend::FullDefKind::Fn {\n            param_env,\n            sig,\n            body,\n            ..\n        } => match expect_body(body, span, \"import_item/Fn\") {\n            Ok(body) => ast::ItemKind::Fn {\n                name: ident,\n                generics: import_generics(context, &sig.bound_vars, param_env),\n                body: body.import(context),\n                params: import_params(context, &body.params, span),\n                safety: sig.value.safety.import(context),\n            },\n            Err(err) => ast::ItemKind::Error(err),\n        },\n        frontend::FullDefKind::Closure { .. 
} => {\n            ast::ItemKind::Error(assertion_failure(\"Closure item\", span))\n        }\n        frontend::FullDefKind::Const {\n            param_env, body, ..\n        } => match expect_body(body, span, \"import_item/Const\") {\n            Ok(body) => ast::ItemKind::Fn {\n                name: ident,\n                generics: param_env.import(context),\n                body: body.import(context),\n                params: Vec::new(),\n                safety: ast::SafetyKind::Safe,\n            },\n            Err(err) => ast::ItemKind::Error(err),\n        },\n        frontend::FullDefKind::Static {\n            mutability: true, ..\n        } => ast::ItemKind::Error(unsupported(\"Mutable static item\", 1343, span)),\n        frontend::FullDefKind::Static {\n            mutability: false,\n            body,\n            ..\n        } => match expect_body(body, span, \"import_item/Static\") {\n            Ok(body) => ast::ItemKind::Fn {\n                name: ident,\n                generics: ast::Generics {\n                    params: Vec::new(),\n                    constraints: Vec::new(),\n                },\n                body: body.import(context),\n                params: Vec::new(),\n                safety: ast::SafetyKind::Safe,\n            },\n            Err(err) => ast::ItemKind::Error(err),\n        },\n        frontend::FullDefKind::Use(Some((\n            frontend::UsePath {\n                res,\n                segments,\n                rename,\n                ..\n            },\n            _,\n        ))) => ast::ItemKind::Use {\n            path: segments\n                .iter()\n                .map(|segment| &segment.ident.0)\n                .cloned()\n                .collect(),\n            is_external: res\n                .iter()\n                .any(|x| matches!(x, None | Some(frontend::Res::Err))),\n            rename: rename.clone(),\n        },\n        frontend::FullDefKind::Mod { .. 
} => ast::ItemKind::RustModule,\n        frontend::FullDefKind::ExternCrate\n        | frontend::FullDefKind::Use { .. }\n        | frontend::FullDefKind::TyParam\n        | frontend::FullDefKind::ConstParam\n        | frontend::FullDefKind::LifetimeParam\n        | frontend::FullDefKind::Variant\n        | frontend::FullDefKind::Ctor { .. }\n        | frontend::FullDefKind::Field\n        | frontend::FullDefKind::Macro(_)\n        | frontend::FullDefKind::ForeignMod { .. }\n        | frontend::FullDefKind::SyntheticCoroutineBody => return Vec::new(),\n        frontend::FullDefKind::GlobalAsm => {\n            ast::ItemKind::Error(unsupported(\"Inline assembly item\", 1344, span))\n        }\n        frontend::FullDefKind::AssocConst { .. }\n        | frontend::FullDefKind::AssocFn { .. }\n        | frontend::FullDefKind::AssocTy { .. } => return Vec::new(), // These item kinds are handled by the case of Impl\n    };\n    items.push(ast::Item {\n        ident,\n        kind,\n        meta: ast::Metadata { span, attributes },\n    });\n    items\n}\n"
  },
  {
    "path": "rust-engine/src/interning.rs",
    "content": "//! # Interning System\n//!\n//! This module provides a minimal system for **global interning** of values in\n//! Rust. Interning allows you to deduplicate equal values and replace them with\n//! cheap, copyable handles (`Interned<T>`) that support **O(1) equality**,\n//! hashing, and compact storage.\n//!\n//! ## Core Concepts\n//!\n//! - [`Interned<T>`]: A compact, copyable handle to a deduplicated value.\n//! - [`InterningTable<T>`]: Stores interned values and manages uniqueness.\n//! - [`Internable`]: A trait for types that can be interned.\n//!\n//! ## Safety Note\n//!\n//! The `.get()` method on `Interned<T>` returns a `&'static T` using an\n//! internal `transmute`, assuming the backing storage (interning table) never\n//! remove items from its table. This is guaranteed by the implementation of\n//! `InterningTable`.\n\nuse std::{\n    collections::{HashMap, HashSet},\n    fmt::Debug,\n    hash::Hash,\n    marker::PhantomData,\n    ops::Deref,\n    sync::{LazyLock, Mutex},\n};\n\nuse schemars::JsonSchema;\nuse serde::{Deserialize, Serialize};\n\n/// An interning table storing unique values of `T` and assigning them stable indices.\n///\n/// This type is primarily an implementation detail behind [`Interned<T>`] and\n/// the [`Internable`] trait. 
You typically won't use it directly unless you're\n/// wiring up a new globally‑interned type.\npub struct InterningTable<T> {\n    /// The raw items: item at index `n` will be an `Interned { index: n }`.\n    /// Fast lookup.\n    items: Vec<T>,\n    /// A map from `T`s to indexes, for fast interning of existing values.\n    ids: HashMap<T, Interned<T>>,\n}\n\nimpl<T> Default for InterningTable<T> {\n    fn default() -> Self {\n        Self {\n            items: Default::default(),\n            ids: Default::default(),\n        }\n    }\n}\n\n/// A statically interned value of type `T`.\n///\n/// An `Interned<T>` is a compact, copyable handle that deduplicates equal values\n/// and compares in **O(1)** using its index. It behaves like `&'static T` via\n/// [`Deref`], and can be obtained with [`InternExtTrait::intern`] or\n/// [`Interned::intern`].\n// Note: `Interned<T>` has `PartialEq` only if `T` has `PartialEq`. If we\n// implement `PartialEq` manually, we loose the ability to pattern match on\n// constant of this type. 
This is because of structural equality (see\n// https://doc.rust-lang.org/stable/std/marker/trait.StructuralPartialEq.html).\n#[derive(Hash, Eq, PartialEq)]\npub struct Interned<T> {\n    phantom: PhantomData<T>,\n    index: u32,\n}\n\nimpl<T: Eq> PartialOrd for Interned<T> {\n    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n        Some(self.cmp(other))\n    }\n}\nimpl<T: Eq> Ord for Interned<T> {\n    fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n        self.index.cmp(&other.index)\n    }\n}\n\nimpl<T: Serialize + Internable> Serialize for Interned<T> {\n    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n    where\n        S: serde::Serializer,\n    {\n        self.get().serialize(serializer)\n    }\n}\n\nimpl<T: Internable> AsRef<T> for Interned<T> {\n    fn as_ref(&self) -> &T {\n        (*self).get()\n    }\n}\n\nimpl<'a, T: Deserialize<'a> + Internable> Deserialize<'a> for Interned<T> {\n    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n    where\n        D: serde::Deserializer<'a>,\n    {\n        Ok(Interned::intern(&T::deserialize(deserializer)?))\n    }\n}\n\nimpl<T: JsonSchema> JsonSchema for Interned<T> {\n    fn schema_name() -> String {\n        T::schema_name()\n    }\n\n    fn json_schema(generator: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {\n        T::json_schema(generator)\n    }\n}\n\nimpl<T: Internable + Debug> Debug for Interned<T> {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        f.debug_struct(\"Interned\")\n            .field(\"index\", &self.index)\n            .field(\"value\", self.get())\n            .finish()\n    }\n}\n\nimpl<T> Clone for Interned<T> {\n    fn clone(&self) -> Self {\n        *self\n    }\n}\nimpl<T> Copy for Interned<T> {}\n\n/// A tiny, `FnOnce`-compatible wrapper used to initialize a `LazyLock` with a\n/// captured value.\n///\n/// This is a utility to build `LazyLock<T>` where the 
initializer needs to own\n/// some value prepared in a `const` context.\n///\n/// This is required since we need an explicit concrete type for the\n/// initializataion function given to `LazyLock::new`.\n///\n/// You usually don't need this directly unless you're calling\n/// [`InterningTable::new_with_values`].\npub struct ExplicitClosure<T, R>(T, fn(T) -> R);\nimpl<T, R> FnOnce<()> for ExplicitClosure<T, R> {\n    type Output = R;\n\n    extern \"rust-call\" fn call_once(self, _: ()) -> Self::Output {\n        let Self(input, function) = self;\n        function(input)\n    }\n}\n\nimpl<T: Hash + Eq + Clone + Send> InterningTable<T> {\n    fn try_intern(&mut self, value: &T) -> Option<Interned<T>> {\n        Some(if let Some(interned) = self.ids.get(value) {\n            *interned\n        } else {\n            let index = self.items.len();\n            self.items.push(value.clone());\n            let handle = Interned {\n                phantom: PhantomData,\n                index: index.try_into().ok()?,\n            };\n            self.ids.insert(value.clone(), handle);\n            handle\n        })\n    }\n    fn get(&self, interned: Interned<T>) -> &T {\n        &self.items[interned.index as usize]\n    }\n\n    /// Creates a global `LazyLock` interning table prepopulated with `values`,\n    /// and returns both the lock and the corresponding `Interned<T>` handles.\n    ///\n    /// # Panics\n    ///\n    /// Panics if `values` contains duplicates (by `Eq`) or if `N` is greater\n    /// than `u32::MAX`.\n    pub const fn new_with_values<const N: usize>(\n        values: fn() -> [T; N],\n    ) -> (LazyLockNewWithValue<T, N>, [Interned<T>; N]) {\n        assert!(N < u32::MAX as usize);\n        let mut i = 0;\n        let mut interned_values: [Interned<T>; N] = [Interned {\n            phantom: PhantomData,\n            index: 0,\n        }; N];\n        while i < N {\n            interned_values[i].index = i as u32;\n            i += 1;\n        }\n        
let lazy_lock = LazyLock::new(ExplicitClosure(values, |values| {\n            let values = values();\n            {\n                // Ensure `value` has no duplicate.\n                let set: HashSet<_> = values.iter().collect();\n                if set.len() != values.len() {\n                    panic!(\"new_with_values: the input has duplicates\");\n                }\n            }\n\n            let mut table = InterningTable::default();\n            for value in values {\n                if table.try_intern(&value).is_none() {\n                    unreachable!(\n                        \"we asserted `N < u32::MAX`, the length of the internal vector `table` should be less than `u32::MAX`\"\n                    )\n                }\n            }\n            Mutex::new(table)\n        }));\n        (lazy_lock, interned_values)\n    }\n}\n\n/// A type alias representing a lazily initialized `Mutex<InterningTable<T>>`\n/// backed by a fixed-size array initializer.\n///\n/// This is the return type of [`InterningTable::new_with_values`].\npub type LazyLockNewWithValue<T, const N: usize> =\n    LazyLock<Mutex<InterningTable<T>>, ExplicitClosure<fn() -> [T; N], Mutex<InterningTable<T>>>>;\n\n/// Types that have a single, process‑global interning table.\n///\n/// Implement this for your type to opt in to interning:\n/// provide a `static` (usually a `LazyLock<Mutex<InterningTable<Self>>>`)\n/// and return a reference to it.\npub trait Internable: Sized + Hash + Eq + Clone + Send + 'static {\n    /// Returns the global interning table for `Self`.\n    fn interning_table() -> &'static Mutex<InterningTable<Self>>;\n\n    /// Interns a `value` and returns its compact handle.\n    ///\n    /// If an equal value has been interned before, this returns the existing\n    /// handle; otherwise it inserts the value into the global table.\n    fn intern(&self) -> Interned<Self> {\n        Interned::intern(self)\n    }\n}\n\nimpl<T: Internable> Interned<T> {\n    /// Interns a 
`value` and returns its compact handle.\n    ///\n    /// If an equal value has been interned before, this returns the existing\n    /// handle; otherwise it inserts the value into the global table.\n    pub fn intern(value: &T) -> Self {\n        {\n            // Invariant: the interning mutex is only locked here, and InterningTable::try_intern\n            // is panic-free (and does not invoke user code that may panic). Therefore, no\n            // panic can occur while the mutex is held, so the mutex cannot be poisoned.\n            // If this ever panics, our invariant was broken elsewhere.\n            let mut table = T::interning_table()\n                .lock()\n                .expect(\"interning table mutex poisoned\");\n            table.try_intern(value)\n        }\n        .unwrap_or_else(|| {\n            panic!(\n                \"more than `u32::MAX` values have been interned for type `{}`\",\n                std::any::type_name::<T>()\n            )\n        })\n    }\n\n    /// Returns a `&'static T` for this handle.\n    ///\n    /// # Safety & Lifetimes\n    ///\n    /// This method relies on the fact that the backing storage lives for the\n    /// entire program (it is kept in a `static` global table). The `'static`\n    /// reference is sound as long as values are never removed from that table.\n    /// This implementation uses `transmute` internally for that reason.\n    pub fn get(self) -> &'static T {\n        let table = T::interning_table().lock().unwrap();\n        let local_reference = table.get(self);\n        let static_reference: &'static T = unsafe { std::mem::transmute(local_reference) };\n        static_reference\n    }\n}\n\nimpl<T: Internable> Deref for Interned<T> {\n    type Target = T;\n\n    /// Dereferences to the underlying value (`&'static T`).\n    ///\n    /// Equivalent to calling [`Interned::get`].\n    fn deref(&self) -> &Self::Target {\n        self.get()\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/lib.rs",
    "content": "//! The Rust engine of hax.\n\n#![feature(rustc_private)]\n#![feature(fn_traits, unboxed_closures)]\n#![warn(\n    rustdoc::broken_intra_doc_links,\n    missing_docs,\n    unused_qualifications,\n    unused_crate_dependencies\n)]\n\npub mod ast;\npub mod attributes;\npub mod backends;\npub mod debugger;\npub mod hax_io;\npub mod import_thir;\npub mod interning;\npub mod names;\npub mod ocaml_engine;\npub mod phase;\npub mod printer;\npub mod resugarings;\npub mod symbol;\n"
  },
  {
    "path": "rust-engine/src/main.rs",
    "content": "use hax_rust_engine::{\n    backends,\n    ocaml_engine::{self, Response},\n};\nuse hax_types::{cli_options::Backend, engine_api::File};\nuse std::collections::HashMap;\n\nfn main() {\n    let (value, table) = hax_rust_engine::hax_io::read_engine_input_message().destruct();\n\n    ocaml_engine::initialize(ocaml_engine::Meta {\n        hax_version: value.hax_version,\n        impl_infos: value.impl_infos,\n        debug_bind_phase: value.backend.debug_engine.is_some(),\n        profiling: value.backend.profile,\n    });\n\n    let items = match value.input {\n        hax_types::driver_api::Items::Legacy(input) => {\n            let query = hax_rust_engine::ocaml_engine::QueryKind::ImportThir {\n                input,\n                translation_options: value.backend.translation_options,\n            };\n\n            let Some(Response::ImportThir { output }) = query.execute(Some(table)) else {\n                panic!()\n            };\n            output\n        }\n        hax_types::driver_api::Items::FullDef(items) => {\n            let items: Vec<_> = items\n                .into_iter()\n                .filter(|item| {\n                    !matches!(\n                        item.kind,\n                        hax_frontend_exporter::FullDefKind::Use(_)\n                            | hax_frontend_exporter::FullDefKind::ExternCrate\n                    )\n                })\n                .collect();\n            let items_by_def_id = HashMap::from_iter(\n                items\n                    .iter()\n                    .map(|item| (item.this.contents().def_id.clone(), item)),\n            );\n            items\n                .iter()\n                .flat_map(|item| hax_rust_engine::import_thir::import_item(item, &items_by_def_id))\n                .collect()\n        }\n    };\n\n    let files = match &value.backend.backend {\n        Backend::Coq | Backend::Ssprove | Backend::Easycrypt | Backend::ProVerif { .. 
} => panic!(\n            \"The Rust engine cannot be called with backend {}.\",\n            value.backend.backend\n        ),\n        Backend::Fstar(_) => {\n            let mut items = items;\n            hax_rust_engine::phase::Phase::apply(&backends::fstar::FStarBackend, &mut items);\n\n            let query = hax_rust_engine::ocaml_engine::QueryKind::Print {\n                printer: value.backend.backend,\n                input: items,\n            };\n\n            let Some(Response::PrintOk) = query.execute(None) else {\n                panic!()\n            };\n            return;\n        }\n        Backend::Lean => backends::apply_backend(backends::lean::LeanBackend, items),\n        Backend::Rust => backends::apply_backend(backends::rust::RustBackend, items),\n        Backend::Debugger { interactive } => {\n            use hax_rust_engine::debugger::*;\n\n            if *interactive {\n                http_interactive_debugger(items);\n                vec![]\n            } else {\n                let mut state = State {\n                    initial_items: items,\n                    requests: vec![],\n                };\n\n                let contents = match state.apply(Request::DumpAst(DumpAstOptions::default())) {\n                    Response::TypedDumpedAst(items) => {\n                        serde_json::to_string_pretty(&items).unwrap()\n                    }\n                    Response::DumpedAst(value) => serde_json::to_string_pretty(&value).unwrap(),\n                    _ => todo!(),\n                };\n\n                vec![File {\n                    path: \"ast.json\".into(),\n                    contents,\n                    sourcemap: None,\n                }]\n            }\n        }\n        Backend::GenerateRustEngineNames => vec![File {\n            path: \"generated.rs\".into(),\n            contents: hax_rust_engine::names::codegen::export_def_ids_to_mod(items),\n            sourcemap: None,\n        }],\n    };\n    for 
file in files {\n        hax_rust_engine::hax_io::write(&hax_types::engine_api::protocol::FromEngine::File(file));\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/names.rs",
    "content": "//! This module provides a list of handy `DefId` for the engine.\n//! The list of `DefId`s comes from the crate `/engine/names`: any name mentionned\n//! in that crate will be provided here automatically.\n//!\n//! For example, to be able to resugar `std::ops::Add::add(x, y)` into `x + y`,\n//! we need to:\n//!  1. match on the expression `std::ops::Add::add(x, y)`, figure out it is the\n//!     application of the function denoted by the global identifier\n//!     `std::ops::Add::add` with arguments `x` and `y`.\n//!  2. check that global identifier `id: GlobalId` `std::ops::Add::add` is\n//!     indeed `std::ops::Add::add`.\n//!\n//! Point (2.) seems a bit tautological, but we need to write a comparison like\n//! `some_id == the_function_add`. This module basically provides such\n//! `the_function_add` symbols.\n//!\n//! As an example, the names `std::option::Option::Some` and `None` will be provided by this module as:\n//! ```rust,ignore\n//! mod std {\n//!     mod option {\n//!         mod Option {\n//!             fn Some() -> DefId { ... }\n//!             fn None() -> DefId { ... }\n//!         }\n//!     }\n//! }\n//! ```\n\npub use crate::ast::identifiers::global_id::generated_names::{codegen, root::*};\n"
  },
  {
    "path": "rust-engine/src/ocaml_engine.rs",
    "content": "//! This module implements an interface to the OCaml hax engine. Via this\n//! interface, the rust engine can communicate with the OCaml engine, and reuse\n//! some of its components.\n\nuse std::{io::BufRead, sync::OnceLock};\n\nuse hax_frontend_exporter::{\n    ThirBody,\n    id_table::{Table, WithTable},\n};\nuse hax_types::engine_api::protocol::{FromEngine, ToEngine};\nuse serde::Deserialize;\n\n/// A query for the OCaml engine\n#[derive(Debug, Clone, ::schemars::JsonSchema, ::serde::Deserialize, ::serde::Serialize)]\npub struct Query {\n    #[serde(flatten)]\n    meta: Meta,\n    /// The kind of query we want to send to the engine\n    kind: QueryKind,\n}\n\n/// The metadata required to perform a query.\n#[derive(Debug, Clone, ::schemars::JsonSchema, ::serde::Deserialize, ::serde::Serialize)]\npub struct Meta {\n    /// The version of hax currently used\n    pub hax_version: String,\n    /// Dictionary from `DefId`s to `impl_infos`\n    pub impl_infos: Vec<(\n        hax_frontend_exporter::DefId,\n        hax_frontend_exporter::ImplInfos,\n    )>,\n    /// Enable debugging of phases in the OCaml engine\n    pub debug_bind_phase: bool,\n    /// Enable profiling in the OCaml engine\n    pub profiling: bool,\n}\n\nstatic STATE: OnceLock<Meta> = OnceLock::new();\n\n/// Initialize query metadata.\npub fn initialize(meta: Meta) {\n    STATE\n        .set(meta)\n        .expect(\"`ocaml_engine::initialize` was called more than once\")\n}\n\n/// The payload of the query. 
[`Response`] below mirrors this enum to represent\n/// the response from the engine.\n#[derive(Debug, Clone, ::schemars::JsonSchema, ::serde::Deserialize, ::serde::Serialize)]\npub enum QueryKind {\n    /// Ask the OCaml engine to import the given THIR from the frontend\n    ImportThir {\n        /// The input THIR items\n        input: Vec<hax_frontend_exporter::Item<ThirBody>>,\n        /// Translation options which contains include clauses (items filtering)\n        translation_options: hax_types::cli_options::TranslationOptions,\n    },\n\n    /// Ask the OCaml engine to run given phases on given items\n    ApplyPhases {\n        /// The phases to run. See `untyped_phases.ml`.\n        phases: Vec<String>,\n        /// The items on which the phases will be applied.\n        input: Vec<crate::ast::Item>,\n    },\n\n    /// Ask the OCaml engine to call an OCaml printer\n    Print {\n        /// Which printer to use\n        printer: hax_types::cli_options::Backend<()>,\n        /// The items after applying the phases.\n        input: Vec<crate::ast::Item>,\n    },\n}\n/// A Response after a [`Query`]\n#[derive(Debug, Clone, ::schemars::JsonSchema, ::serde::Deserialize, ::serde::Serialize)]\npub enum Response {\n    /// Return imported THIR as an internal AST from Rust engine\n    ImportThir {\n        /// The output Rust AST items\n        output: Vec<crate::ast::Item>,\n    },\n    /// Return items after phase application\n    ApplyPhases {\n        /// The output Rust AST items after phases\n        output: Vec<crate::ast::Item>,\n    },\n    /// Printing was done successfully\n    PrintOk,\n}\n\n/// Extends the common `FromEngine` messages with one extra case: `Response`.\n#[derive(Debug, Clone, ::schemars::JsonSchema, ::serde::Deserialize, ::serde::Serialize)]\n#[serde(untagged)]\npub enum ExtendedFromEngine {\n    /// A standard `FromEngine` message\n    FromEngine(FromEngine),\n    /// A `Response`\n    Response(Response),\n}\n\nimpl QueryKind {\n    /// 
Execute the query synchronously.\n    pub fn execute(self, table: Option<Table>) -> Option<Response> {\n        let query = Query {\n            meta: STATE\n                .get()\n                .expect(\"`ocaml_engine::initialize` should be called first\")\n                .clone(),\n            kind: self,\n        };\n        use std::io::Write;\n        use std::process::Command;\n\n        macro_rules! send {\n            ($where: expr, $value:expr) => {\n                serde_json::to_writer(&mut $where, $value).unwrap();\n                $where.write_all(b\"\\n\").unwrap();\n                $where.flush().unwrap();\n            };\n        }\n\n        let mut engine_subprocess =\n            Command::new(std::env::var(\"HAX_ENGINE_BINARY\").unwrap_or(\"hax-engine\".into()))\n                .arg(\"driver_rust_engine\")\n                .stdin(std::process::Stdio::piped())\n                .stdout(std::process::Stdio::piped())\n                .spawn()\n                .unwrap();\n\n        let mut stdin = std::io::BufWriter::new(\n            engine_subprocess\n                .stdin\n                .as_mut()\n                .expect(\"Could not write on stdin\"),\n        );\n\n        if let Some(table) = table {\n            WithTable::run(table, query, |with_table| {\n                send!(stdin, with_table);\n            });\n        } else {\n            send!(stdin, &(vec![] as Vec<()>, query));\n        }\n\n        let mut response = None;\n        let stdout = std::io::BufReader::new(engine_subprocess.stdout.take().unwrap());\n        // TODO: this should be streaming (i.e. 
use a `LineAsEOF` reader wrapper that consumes a reader until `\\n` occurs)\n        //       See https://github.com/cryspen/hax/issues/1537.\n        for slice in stdout.split(b'\\n') {\n            let msg = (|| {\n                let slice = slice.ok()?;\n                let mut de = serde_json::Deserializer::from_slice(&slice);\n                de.disable_recursion_limit();\n                let de = serde_stacker::Deserializer::new(&mut de);\n                let msg = ExtendedFromEngine::deserialize(de);\n                msg.ok()\n            })()\n            .expect(\n                \"Hax engine sent an invalid json value. \\\n                                This might be caused by debug messages on stdout, \\\n                                which is reserved for JSON communication with cargo-hax\",\n            );\n\n            match msg {\n                ExtendedFromEngine::Response(res) => response = Some(res),\n                ExtendedFromEngine::FromEngine(FromEngine::Exit) => break,\n                // Proxy messages from the OCaml engine\n                ExtendedFromEngine::FromEngine(from_engine) => {\n                    crate::hax_io::write(&from_engine);\n                    if from_engine.requires_response() {\n                        let response: ToEngine = crate::hax_io::read_to_engine_message();\n                        send!(stdin, &response);\n                    }\n                }\n            }\n        }\n        drop(stdin);\n\n        let exit_status = engine_subprocess.wait().unwrap();\n        if !exit_status.success() {\n            panic!(\"ocaml engine crashed\");\n        }\n\n        response\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/phase/explicit_monadic.rs",
    "content": "use std::fmt::Debug;\n\nuse crate::ast::identifiers::GlobalId;\nuse crate::ast::*;\nuse crate::ast::{diagnostics::*, visitors::*};\nuse crate::phase::Phase;\n\nuse crate::names::rust_primitives::hax::explicit_monadic::*;\n\n/// Monadic Phase\n///\n/// This module defines a phase that makes the monadic encoding explicit by introducing calls to hax\n/// primitives (`pure` and `lift`) when necessary.\n///\n/// # Details\n///\n/// In backends with a monadic encoding (Lean for instance), rust computations that can *crash* are\n/// wrapped in an error Monad (say `RustM`): a function `fn f(x:u32) -> u32` will be extracted to\n/// something like `def f (x:u32) : RustM u32`. There are two challenges in this encoding :\n///\n/// 1. Some expressions cannot panic (literals, consts, constructors for enums, etc) and should be\n///    wrapped in the monad[^coe]. This phase inserts explicit calls to `pure` to that aim.\n///\n/// 2. Language constructs (if-then-else, `match`, etc.) and rust functions still expect rust values\n///    as input, not monadic ones. This phase inserts explicit calls to `lift` to materialize the\n///    sub-expressions that return a monadic result where a value is expected. The Lean backend turns\n///    them into explicit lifts `(← ..)`, which implicitly introduces a monadic bind\n///\n/// This phase expects all function and closure bodies to be monadic computations by default.\n///\n/// [^coe]: While implicit coercions can sometime be enough, they can also badly interact with\n/// inference, typically when dealing with branches (like if-then-else) where some branches are\n/// pure and some are not.\n#[derive(Default, Debug)]\npub struct ExplicitMonadic;\n\n/// Stateless visitor\n#[setup_error_handling_struct]\n#[derive(Default)]\nstruct ExplicitMonadicVisitor;\n\n/// Status of a rust expression. 
Computations are possibly panicking, while values are pure\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Ord, PartialOrd)]\nenum MonadicStatus {\n    Computation,\n    Value,\n}\n\nimpl Phase for ExplicitMonadic {\n    fn apply(&self, items: &mut Vec<Item>) {\n        ExplicitMonadicVisitor::default().visit(items)\n    }\n}\n\nimpl ExplicitMonadicVisitor {\n    /// Helper while waiting for a proper ast API. Wraps an expression in an application node, where\n    /// the head is a global id\n    fn wrap_app(expr: &Expr, head_id: GlobalId) -> Box<ExprKind> {\n        let expr = expr.clone();\n        Box::new(ExprKind::App {\n            head: Expr {\n                kind: Box::new(ExprKind::GlobalId(head_id)),\n                ty: Ty(Box::new(TyKind::Arrow {\n                    inputs: vec![expr.ty.clone()],\n                    output: expr.ty.clone(),\n                })),\n                meta: Metadata {\n                    span: expr.meta.span,\n                    attributes: vec![],\n                },\n            },\n            args: vec![expr],\n            generic_args: vec![],\n            bounds_impls: vec![],\n            trait_: None,\n        })\n    }\n\n    /// Helper to coerce a expression into a given status. 
`from` should be the status of `expr`\n    fn coerce(&mut self, expr: &mut Expr, from: MonadicStatus, to: MonadicStatus) {\n        // If the status is already correct, nothing to do.\n        if from == to {\n            return;\n        }\n        expr.kind = ExplicitMonadicVisitor::wrap_app(\n            expr,\n            match to {\n                // from = Value, to = Computation : we insert `pure`\n                MonadicStatus::Computation => pure,\n                // from = Computation, to = Value : we insert `lift`\n                MonadicStatus::Value => lift,\n            },\n        );\n    }\n}\n\nimpl VisitorWithContext for ExplicitMonadicVisitor {\n    fn context(&self) -> Context {\n        Context::Phase(stringify!(ExplicitMonadic).into())\n    }\n}\n\nimpl ExplicitMonadicVisitor {\n    fn visit_expr_coerce(&mut self, constraint: MonadicStatus, expr: &mut Expr) {\n        // Expression can force a status (returned as `Some(...)`), or be \"transparent\" (typically\n        // for control-flow) and just propagate the constraint.\n        let opt_status = match &mut *expr.kind {\n            // Control flow nodes\n            ExprKind::If {\n                condition,\n                then,\n                else_,\n            } => {\n                self.visit_expr_coerce(MonadicStatus::Value, condition);\n                [Some(then), else_.as_mut()]\n                    .into_iter()\n                    .flatten()\n                    .for_each(|branch| self.visit_expr_coerce(MonadicStatus::Computation, branch));\n                Some(MonadicStatus::Computation)\n            }\n            ExprKind::Match { scrutinee, arms } => {\n                self.visit_expr_coerce(MonadicStatus::Value, scrutinee);\n                arms.iter_mut().for_each(|arm| {\n                    if let Some(Guard {\n                        kind: GuardKind::IfLet { rhs, .. 
},\n                        ..\n                    }) = &mut arm.guard\n                    {\n                        self.visit_expr_coerce(MonadicStatus::Value, rhs);\n                    };\n                    self.visit_expr_coerce(MonadicStatus::Computation, &mut arm.body)\n                });\n                Some(MonadicStatus::Computation)\n            }\n            ExprKind::Block { body, .. } => {\n                self.visit_expr_coerce(constraint, body);\n                None\n            }\n            ExprKind::Break { .. }\n            | ExprKind::Return { .. }\n            | ExprKind::Continue { .. }\n            | ExprKind::Loop { .. } => {\n                unreachable_by_invariant!(Functionalize_loops)\n            }\n            // Opaque nodes\n            ExprKind::Let { lhs: _, rhs, body } => {\n                self.visit_expr_coerce(MonadicStatus::Computation, rhs);\n                self.visit_expr_coerce(MonadicStatus::Computation, body);\n                Some(MonadicStatus::Computation)\n            }\n            ExprKind::App { head, args, .. 
} => {\n                self.visit_expr_coerce(MonadicStatus::Value, head);\n                args.iter_mut()\n                    .for_each(|arg| self.visit_expr_coerce(MonadicStatus::Value, arg));\n                if let ExprKind::GlobalId(head) = &*head.kind\n                    && head.is_projector()\n                {\n                    // Constructors for structures and enums are values\n                    Some(MonadicStatus::Value)\n                } else if args.is_empty() {\n                    // Constants are values\n                    Some(MonadicStatus::Value)\n                } else {\n                    // Other function calls are computations\n                    Some(MonadicStatus::Computation)\n                }\n            }\n            ExprKind::Array(exprs) => {\n                exprs\n                    .iter_mut()\n                    .for_each(|expr| self.visit_expr_coerce(MonadicStatus::Value, expr));\n                Some(MonadicStatus::Value)\n            }\n            ExprKind::Construct { fields, base, .. } => {\n                fields\n                    .iter_mut()\n                    .map(|(_, e)| e)\n                    .chain(base.iter_mut())\n                    .for_each(|expr| self.visit_expr_coerce(MonadicStatus::Value, expr));\n                Some(MonadicStatus::Value)\n            }\n            ExprKind::Assign { value: inner, .. }\n            | ExprKind::Borrow { inner, .. }\n            | ExprKind::AddressOf { inner, .. 
} => {\n                self.visit_expr_coerce(MonadicStatus::Value, inner);\n                Some(MonadicStatus::Value)\n            }\n            ExprKind::Ascription { e, ty } => {\n                self.visit_expr_coerce(MonadicStatus::Value, e);\n                self.visit(ty);\n                Some(MonadicStatus::Value)\n            }\n            ExprKind::Closure {\n                params: _,\n                body,\n                captures,\n            } => {\n                captures\n                    .iter_mut()\n                    .for_each(|capture| self.visit_expr_coerce(MonadicStatus::Value, capture));\n                self.visit_expr_coerce(MonadicStatus::Computation, body);\n                Some(MonadicStatus::Value)\n            }\n            ExprKind::Literal(_)\n            | ExprKind::GlobalId(_)\n            | ExprKind::LocalId(_)\n            | ExprKind::Quote { .. }\n            | ExprKind::Error(_) => Some(MonadicStatus::Value),\n            ExprKind::Resugared(_) => {\n                unreachable!(\"Resugarings should happen after phases\")\n            }\n        };\n        if let Some(status) = opt_status {\n            self.coerce(expr, status, constraint)\n        }\n    }\n}\n\nimpl AstVisitorMut for ExplicitMonadicVisitor {\n    setup_error_handling_impl!();\n\n    fn visit_expr(&mut self, x: &mut Expr) {\n        // Entry points are functions (items and impl items), which start with a `do` block,\n        // therefore a monadic computation\n        self.visit_expr_coerce(MonadicStatus::Computation, x)\n    }\n\n    fn visit_ty(&mut self, x: &mut Ty) {\n        if let TyKind::Array { length, .. } = x.kind_mut() {\n            self.visit_expr_coerce(MonadicStatus::Value, length);\n        };\n    }\n\n    fn visit_generic_value(&mut self, x: &mut GenericValue) {\n        if let GenericValue::Expr(expr) = x {\n            self.visit_expr_coerce(MonadicStatus::Value, expr);\n        };\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/phase/filter_unprintable_items.rs",
    "content": "use crate::ast::*;\nuse crate::phase::Phase;\n\n/// Phase to filter unprintable items\n///\n/// This phase filters out items that are not printable (Error, NotImplementedYet, Use).\n#[derive(Default, Debug)]\npub struct FilterUnprintableItems;\n\nimpl Phase for FilterUnprintableItems {\n    fn apply(&self, items: &mut Vec<Item>) {\n        items.retain(|item| match &item.kind {\n            // Items to remove:\n            ItemKind::Error(_)\n            | ItemKind::NotImplementedYet\n            | ItemKind::Use { .. }\n            | ItemKind::RustModule => false,\n            // Items to keep:\n            ItemKind::Fn { .. }\n            | ItemKind::TyAlias { .. }\n            | ItemKind::Type { .. }\n            | ItemKind::Trait { .. }\n            | ItemKind::Impl { .. }\n            | ItemKind::Alias { .. }\n            | ItemKind::Resugared(_)\n            | ItemKind::Quote { .. } => true,\n        });\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/phase/legacy.rs",
    "content": "//! This module exposes the legacy phases written in OCaml in the OCaml engine.\n\nuse crate::{\n    ast::Item,\n    phase::{Phase, PhaseKind},\n};\n\n/// Group consecutive ocaml phases as one monolithic phase, so that we avoid extra roundtrips to the OCaml engine.\npub fn group_consecutive_ocaml_phases(phases: Vec<PhaseKind>) -> Vec<Box<dyn Phase>> {\n    let mut output: Vec<Box<dyn Phase>> = vec![];\n    let mut ocaml_phases = vec![];\n    let mut phases = phases.into_iter();\n\n    struct LegacyOCamlPhases {\n        phases: Vec<LegacyOCamlPhase>,\n    }\n\n    impl Phase for LegacyOCamlPhases {\n        fn apply(&self, items: &mut Vec<Item>) {\n            apply_legacy_phases(&self.phases, items);\n        }\n    }\n\n    loop {\n        let phase = phases.next();\n        if let Some(PhaseKind::Legacy(ocaml_phase)) = phase {\n            ocaml_phases.push(ocaml_phase)\n        } else {\n            if !ocaml_phases.is_empty() {\n                output.push(Box::new(LegacyOCamlPhases {\n                    phases: std::mem::take(&mut ocaml_phases),\n                }));\n            }\n            if let Some(phase) = phase {\n                output.push(Box::new(phase));\n            } else {\n                break;\n            }\n        }\n    }\n\n    output\n}\n\nfn apply_legacy_phases(phases: &[LegacyOCamlPhase], items: &mut Vec<Item>) {\n    use crate::ocaml_engine::Response;\n    let query = crate::ocaml_engine::QueryKind::ApplyPhases {\n        input: std::mem::take(items),\n        phases: phases.iter().map(ToString::to_string).collect(),\n    };\n    let Some(Response::ApplyPhases { output }) = query.execute(None) else {\n        panic!()\n    };\n    *items = output;\n}\n\nmacro_rules! 
make_ocaml_legacy_phase {\n    ($($name:ident),*) => {\n\n        pastey::paste!{\n            /// The list of exposed OCaml phases.\n            #[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]\n            pub enum LegacyOCamlPhase {\n                $(\n                    #[doc = concat!(\"The phase \", stringify!($name), \" from the OCaml engine.\")]\n                    [< $name:camel >]\n                ),*\n            }\n\n\n            impl std::fmt::Display for LegacyOCamlPhase {\n                fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n                    match self {\n                        $(Self::[< $name:camel >] => stringify!($name).fmt(f)),*\n                    }\n                }\n            }\n\n            impl Phase for LegacyOCamlPhase {\n                fn apply(&self, items: &mut Vec<Item>) {\n                    apply_legacy_phases(&[*self], items);\n                }\n            }\n        }\n    };\n}\n\nimpl From<LegacyOCamlPhase> for PhaseKind {\n    fn from(legacy_phase: LegacyOCamlPhase) -> Self {\n        Self::Legacy(legacy_phase)\n    }\n}\n\nmake_ocaml_legacy_phase!(\n    and_mut_defsite,\n    bundle_cycles,\n    cf_into_monads,\n    direct_and_mut,\n    drop_blocks,\n    drop_match_guards,\n    drop_references,\n    drop_return_break_continue,\n    drop_sized_trait,\n    explicit_conversions,\n    functionalize_loops,\n    hoist_disjunctive_patterns,\n    local_mutation,\n    newtype_as_refinement,\n    reconstruct_asserts,\n    reconstruct_for_index_loops,\n    reconstruct_for_loops,\n    reconstruct_question_marks,\n    reconstruct_while_loops,\n    reorder_fields,\n    rewrite_control_flow,\n    rewrite_local_self,\n    simplify_hoisting,\n    simplify_match_return,\n    simplify_question_marks,\n    sort_items,\n    specialize,\n    traits_specs,\n    transform_hax_lib_inline,\n    trivialize_assign_lhs,\n    reject_arbitrary_lhs,\n    reject_continue,\n    
reject_question_mark,\n    reject_raw_or_mut_pointer,\n    reject_early_exit,\n    reject_as_pattern,\n    reject_dyn,\n    reject_trait_item_default,\n    reject_unsafe,\n    reject_impl_type_method,\n    hoist_side_effects\n);\n"
  },
  {
    "path": "rust-engine/src/phase/reject_not_do_lean_dsl.rs",
    "content": "use crate::ast::*;\nuse crate::ast::{diagnostics::*, visitors::*};\nuse crate::phase::Phase;\n\n/// Rejection Phase for patterns unsupported by Lean's do-notation DSL\n///\n/// This phase rejects unsupported interleavings of expressions and statements.\n/// It is built as a visitor.\n#[derive(Default)]\npub struct RejectNotDoLeanDSL;\n\n/// Expressions are either do-statements or do-expressions. The former can be downgraded into the\n/// latter.\n#[derive(Clone, Copy, Debug)]\nenum DoDSLExprKind {\n    Statement,\n    Expression,\n}\n\n/// Gives the \"kind\" of an expression in the do-notation DSL\nfn dsl_expr_kind(expr_kind: &ExprKind) -> DoDSLExprKind {\n    match expr_kind {\n        ExprKind::If { .. } | ExprKind::Match { .. } | ExprKind::Let { .. } => {\n            DoDSLExprKind::Statement\n        }\n        _ => DoDSLExprKind::Expression,\n    }\n}\n\n/// The default value for entry points of expression (function items, function impl items)\nimpl Default for DoDSLExprKind {\n    fn default() -> Self {\n        Self::Statement\n    }\n}\n\n/// Visitor internal state\n#[setup_error_handling_struct]\n#[derive(Default)]\npub struct RejectNotDoLeanDSLVisitor {\n    /// Expected kind for the visited expression. 
Used by `visit_expr`, ignored by other methods\n    dsl_expr_kind: DoDSLExprKind,\n}\n\nimpl VisitorWithContext for RejectNotDoLeanDSLVisitor {\n    fn context(&self) -> Context {\n        Context::Phase(stringify!(RejectNotDoLeanDSL).to_string())\n    }\n}\n\nimpl AstVisitorMut for RejectNotDoLeanDSLVisitor {\n    setup_error_handling_impl!();\n\n    fn visit_expr(&mut self, expr: &mut Expr) {\n        use DoDSLExprKind::*;\n        let parent_dsl_expr_kind = self.dsl_expr_kind;\n        self.dsl_expr_kind = match (self.dsl_expr_kind, dsl_expr_kind(&expr.kind)) {\n            // A do-expression cannot be upgraded to a do-statement, we throw an error\n            (Expression, Statement) => {\n                self.error(\n                    expr.clone(),\n                    DiagnosticInfoKind::ExplicitRejection {\n                        reason: \"This interleaving of expression and statements does not fit in Lean's do-notation DSL.\\\n                                 \\nYou may try hoisting out let-bindings and control-flow.\".to_string(),\n                        issue_id: Some(1741),\n                    },\n                );\n                Statement\n            }\n            // Closure bodies are do-statements, as a `do` keyword is introduced\n            (_, _) if matches!(&*expr.kind, ExprKind::Closure { .. }) => Statement,\n            // In other cases, we keep the computed kind\n            (_, kind) => kind,\n        };\n        self.visit_inner(expr);\n        self.dsl_expr_kind = parent_dsl_expr_kind;\n    }\n\n    /// Visitor for types. Array lengths can be any (const) expression, so they are checked for dsl\n    /// patterns (as DoDSL-expressions)\n    fn visit_ty(&mut self, ty: &mut Ty) {\n        if let TyKind::Array { length, .. } = ty.kind_mut() {\n            // The Lean Backend does not support computation in array lengths yet.  It should be\n            // possible to have do-blocks, and treat them like constants. 
See\n            // https://github.com/cryspen/hax/issues/1713\n            let parent_dsl_expr_kind = self.dsl_expr_kind;\n            self.dsl_expr_kind = DoDSLExprKind::Expression;\n            self.visit_inner(&mut *length);\n            self.dsl_expr_kind = parent_dsl_expr_kind;\n        }\n    }\n}\n\nimpl Phase for RejectNotDoLeanDSL {\n    fn apply(&self, items: &mut Vec<Item>) {\n        // Entry points are statements\n        RejectNotDoLeanDSLVisitor::default().visit(items)\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/phase.rs",
    "content": "//! A phase rewrites the AST.\n\nuse crate::ast::Item;\n\n// Special kind of unreachability that should be prevented by a phase\nmacro_rules! unreachable_by_invariant {\n    ($phase:ident) => {\n        unreachable!(\n            \"The phase {} should make this unreachable\",\n            stringify!($phase)\n        )\n    };\n}\npub(crate) use unreachable_by_invariant;\n\n/// A Rust phase that operates on the AST.\npub trait Phase {\n    /// Apply the phase on items.\n    /// A phase may transform an item into zero, one or more items.\n    fn apply(&self, items: &mut Vec<Item>);\n}\n\npub mod legacy;\n\nmod explicit_monadic;\nmod filter_unprintable_items;\nmod reject_not_do_lean_dsl;\n\nmacro_rules! declare_phase_kind {\n    {$($name:ident = $phase:expr),*$(,)?} => {\n        /// Enumeration of the available phases.\n        #[derive(Clone, Debug, Copy, serde::Serialize, serde::Deserialize)]\n        pub enum PhaseKind {\n            $(\n                #[doc = concat!(\"The phase [`\", stringify!($phase), \"].\")]\n                $name,\n            )*\n            /// A legacy (OCaml) phase.\n            Legacy(crate::phase::legacy::LegacyOCamlPhase),\n        }\n\n        impl crate::phase::Phase for PhaseKind {\n            fn apply(&self, items: &mut Vec<Item>) {\n                match *self {\n                    $(Self::$name => $phase.apply(items),)*\n                    Self::Legacy(phase) => phase.apply(items),\n                }\n            }\n        }\n    };\n}\n\ndeclare_phase_kind! {\n    ExplicitMonadic = explicit_monadic::ExplicitMonadic,\n    RejectNotDoLeanDSL = reject_not_do_lean_dsl::RejectNotDoLeanDSL,\n    FilterUnprintableItems = filter_unprintable_items::FilterUnprintableItems,\n}\n"
  },
  {
    "path": "rust-engine/src/printer/pretty_ast/debug_json.rs",
    "content": "use std::fmt::{Debug, Display};\n\nuse crate::printer::pretty_ast::ToDocument;\n\n/// This type is primarily useful inside printer implementations when you want a\n/// low-friction way to inspect an AST fragment.\n///\n/// # What it does\n/// - Appends a JSON representation of the wrapped value to\n///   `\"/tmp/hax-ast-debug.json\"` (one JSON document per line).\n/// - Implements [`std::fmt::Display`] to print a `just` invocation you can paste in a shell\n///   to re-open that same JSON by line number:\n///   `just debug-json <line-id>`\n///\n/// # Example\n/// ```rust\n/// # use hax_rust_engine::printer::pretty_ast::DebugJSON;\n/// # #[derive(serde::Serialize)]\n/// # struct Small { x: u32 }\n/// let s = Small { x: 42 };\n/// // Prints something like: `just debug-json 17`.\n/// println!(\"{}\", DebugJSON(&s));\n/// // Running `just debug-json 17` will print `{\"x\":42}`\n/// ```\n///\n/// # Notes\n/// - This is a **debugging convenience** and intentionally has a side-effect (file write).\n///   Avoid keeping it in user-facing output paths.\n/// - The file grows over time; occasionally delete it if you no longer need historical entries.\npub struct DebugJSON<T: serde::Serialize>(pub T);\n\nimpl<T: serde::Serialize> Display for DebugJSON<T> {\n    #[cfg(not(unix))]\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        write!(f, \"<unknown, DebugJSON supported on unix platforms only>\")\n    }\n    #[cfg(unix)]\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n        const PATH: &str = \"/tmp/hax-ast-debug.json\";\n        /// Write a new JSON as a line at the end of `PATH`\n        fn append_line_json(value: &serde_json::Value) -> std::io::Result<usize> {\n            use std::io::{BufRead, BufReader, Write};\n            cleanup();\n            let file = std::fs::OpenOptions::new()\n                .read(true)\n                .append(true)\n                .create(true)\n                
.open(PATH)?;\n            let count = BufReader::new(&file).lines().count();\n            writeln!(&file, \"{value}\")?;\n            Ok(count)\n        }\n\n        /// Drop the file at `PATH` when we first write\n        fn cleanup() {\n            static DID_RUN: AtomicBool = AtomicBool::new(false);\n            use std::sync::atomic::{AtomicBool, Ordering};\n            if DID_RUN\n                .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)\n                .is_ok()\n            {\n                let _ignored = std::fs::remove_file(PATH);\n            }\n        }\n\n        if let Ok(id) = append_line_json(&serde_json::to_value(&self.0).unwrap()) {\n            write!(f, \"`just debug-json {id}`\")\n        } else {\n            write!(f, \"<DebugJSON failed>\")\n        }\n    }\n}\n\nimpl<A: 'static + Clone, P, T: serde::Serialize + Debug> ToDocument<P, A> for DebugJSON<T> {\n    fn to_document(&self, _: &P) -> super::DocBuilder<A> {\n        pretty::DocAllocator::as_string(\n            &pretty::BoxAllocator,\n            serde_json::to_string_pretty(&self.0).unwrap_or_else(|_| format!(\"{:#?}\", &self.0)),\n        )\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/printer/pretty_ast/to_document.rs",
    "content": "use pretty::{BoxAllocator, DocAllocator};\nuse std::ops::Deref as _;\n\n/// A convenience alias tying the document builder to the global\n/// [`pretty::BoxAllocator`].\npub type DocBuilder<A> = pretty::DocBuilder<'static, BoxAllocator, A>;\n\n/// Convert a value into a document by-value.\n///\n/// Implementations typically delegate to [`ToDocument`] after adjusting the\n/// input ownership (e.g., cloning or borrowing). It allows helpers to accept\n/// either borrowed or owned values transparently.\npub trait ToDocumentOwned<P, A> {\n    /// Produce a document using the provided printer.\n    fn to_document_owned(self, printer: &P) -> DocBuilder<A>;\n}\n\nimpl<P, A, T: ToDocument<P, A>> ToDocumentOwned<P, A> for &T {\n    fn to_document_owned(self, printer: &P) -> DocBuilder<A> {\n        self.to_document(printer)\n    }\n}\n\nimpl<P, A> ToDocumentOwned<P, A> for DocBuilder<A> {\n    fn to_document_owned(self, _printer: &P) -> DocBuilder<A> {\n        self\n    }\n}\nimpl<P, A> ToDocumentOwned<P, A> for &str {\n    fn to_document_owned(self, _printer: &P) -> DocBuilder<A> {\n        DocAllocator::as_string(&BoxAllocator, self)\n    }\n}\nimpl<P, A> ToDocumentOwned<P, A> for String {\n    fn to_document_owned(self, _printer: &P) -> DocBuilder<A> {\n        DocAllocator::as_string(&BoxAllocator, self)\n    }\n}\nimpl<P, A> ToDocumentOwned<P, A> for Option<&str> {\n    fn to_document_owned(self, printer: &P) -> DocBuilder<A> {\n        self.map(|s| s.to_document_owned(printer))\n            .unwrap_or_else(|| DocAllocator::nil(&BoxAllocator))\n    }\n}\n\n/// Convert a value into a document using the supplied printer.\n///\n/// This is the primary trait invoked throughout the pretty-printing pipeline;\n/// it mirrors [`pretty::Pretty::pretty`] while giving access to printer-specific\n/// context.\npub trait ToDocument<P: ?Sized, A> {\n    /// Produce a document using the provided printer reference.\n    fn to_document(&self, printer: &P) -> 
DocBuilder<A>;\n}\n\nimpl<A, P, T: ToDocument<P, A>> ToDocument<P, A> for Box<T> {\n    fn to_document(&self, printer: &P) -> DocBuilder<A> {\n        self.deref().to_document(printer)\n    }\n}\nimpl<A, P, T: ToDocument<P, A>> ToDocument<P, A> for Option<T> {\n    fn to_document(&self, printer: &P) -> DocBuilder<A> {\n        self.as_ref()\n            .map(|value| value.to_document(printer))\n            .unwrap_or_else(|| DocAllocator::nil(&BoxAllocator))\n    }\n}\nimpl<A, P> ToDocument<P, A> for String {\n    fn to_document(&self, _printer: &P) -> DocBuilder<A> {\n        DocAllocator::as_string(&BoxAllocator, self)\n    }\n}\nimpl<A: Clone, P> ToDocument<P, A> for DocBuilder<A> {\n    #[inline(always)]\n    fn to_document(&self, _printer: &P) -> DocBuilder<A> {\n        self.clone()\n    }\n}\nimpl<A: Clone, P, T> ToDocument<P, A> for &T\nwhere\n    T: ToDocument<P, A>,\n{\n    #[inline(always)]\n    fn to_document(&self, printer: &P) -> DocBuilder<A> {\n        (*self).to_document(printer)\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/printer/pretty_ast.rs",
    "content": "//! Pretty-printing support for the hax AST.\n//!\n//! This module defines the trait [`PrettyAst`], which is the **primary trait a printer should\n//! implement**.\n//!\n//! # Quickstart\n//! In most printers you:\n//! 1. Implement [`Printer`] for your printer type,\n//! 2. Implement [`PrettyAst`] for that printer type,\n//! 3. Call `ast_value.to_document(&print)` on AST values.\n//!\n//! See [`crate::backends`] for backend and printer examples.\n\nuse std::{borrow::Cow, fmt::Display};\n\nuse super::*;\nuse crate::ast::*;\nuse pretty::BoxAllocator;\n\nuse crate::symbol::Symbol;\nuse literals::*;\nuse resugared::*;\n\nmod debug_json;\nmod to_document;\npub use debug_json::*;\npub use to_document::*;\n\n#[macro_export]\n/// Similar to [`std::todo`], but returns a document instead of panicking with a message.\n/// In addition, `todo_document!` accepts a prefix to point to a specific issue number.\n///\n/// ## Examples:\n/// - `todo_document!(allocator)`\n/// - `todo_document!(allocator, \"This is a todo\")`\n/// - `todo_document!(allocator, issue 42)`\n/// - `todo_document!(allocator, issue 42, \"This is a todo\")`\nmacro_rules! 
todo_document {\n    ($allocator:ident, issue $issue:literal) => {\n        {return $allocator.todo_document(&format!(\"TODO_LINE_{}\", std::line!()), Some($issue));}\n    };\n    ($allocator:ident, issue $issue:literal, $($tt:tt)*) => {\n        {\n            let message = format!($($tt)*);\n            return $allocator.todo_document(&message, Some($issue));\n        }\n    };\n    ($allocator:ident,) => {\n        {return $allocator.todo_document(&format!(\"TODO_LINE_{}\", std::line!()), None);}\n    };\n    ($allocator:ident, $($tt:tt)*) => {\n        {\n            let message = format!($($tt)*);\n            return $allocator.todo_document(&message, None);\n        }\n    };\n}\npub use todo_document;\n\n/// Expand a list of values into documents and concatenate them in order.\n///\n/// This helper mirrors [`pretty::docs!`] but automatically calls\n/// [`ToDocumentOwned::to_document_owned`] on each argument before appending it\n/// to the accumulator that starts as [`PrettyAstExt::nil`].\n#[macro_export]\nmacro_rules! pretty_ast_docs {\n    ($printer: expr, $docs:expr) => {{\n        use $crate::printer::pretty_ast::{ToDocumentOwned};\n        $docs.to_document_owned($printer)\n    }};\n    ($printer: expr, $($docs:expr),*$(,)?) => {{\n        use $crate::printer::pretty_ast::{ToDocumentOwned};\n        nil!()\n        $(.append($docs.to_document_owned($printer)))*\n    }};\n}\npub use pretty_ast_docs;\n\n/// Convert a collection of values into documents separated by another\n/// document.\n///\n/// It forwards to [`PrettyAstExt::intersperse`] after materialising the\n/// separator. The macro exists so call sites can stay concise while still\n/// benefiting from the allocator captured by [`install_pretty_helpers!`].\n#[macro_export]\nmacro_rules! pretty_ast_intersperse {\n    ($printer: expr, $docs:expr, $sep: expr$(,)?) 
=> {{\n        let docs = $docs;\n        let sep = $sep;\n        $crate::printer::pretty_ast::PrettyAstExt::intersperse($printer, docs, sep)\n    }};\n}\npub use pretty_ast_intersperse;\n\n#[macro_export]\n/// Install pretty-printing helpers partially applied with a given local\n/// allocator.\n///\n/// This macro declares a set of small, local macros that proxy to the\n/// underlying [`pretty::DocAllocator`] methods and macro while capturing your\n/// allocator value. It keeps printing code concise and avoids passing the\n/// allocator around explicitly.\n///\n/// # Syntax\n/// ```rust,ignore\n/// install_pretty_helpers!(alloc_ident: AllocatorType)\n/// ```\n///\n/// - `alloc_ident`: the in-scope variable that implements both\n///   [`pretty::DocAllocator`] and [`Printer`].\n/// - `AllocatorType`: the concrete type of that variable.\n///\n/// # What gets installed\n/// - macro shorthands for common allocator methods:\n///   [`PrettyAstExt::nil`], [`PrettyAstExt::fail`],\n///   [`PrettyAstExt::hardline`], [`PrettyAstExt::space`],\n///   [`PrettyAstExt::line`], [`PrettyAstExt::line_`],\n///   [`PrettyAstExt::softline`], [`PrettyAstExt::softline_`],\n///   [`PrettyAstExt::as_string`], [`PrettyAstExt::text`],\n///   [`PrettyAstExt::concat`], [`PrettyAstExt::intersperse`],\n///   [`PrettyAstExt::column`], [`PrettyAstExt::nesting`],\n///   [`PrettyAstExt::reflow`].\n/// - a partially applied version of [`pretty::docs!`].\n/// - [`todo_document!`]: produce a placeholder document (that does not panic).\nmacro_rules! 
install_pretty_helpers {\n    ($allocator:ident : $allocator_type:ty) => {\n        $crate::printer::pretty_ast::install_pretty_helpers!(\n            @$allocator,\n            #[doc = ::std::concat!(\"Proxy macro for [`\", stringify!($crate), \"::printer::pretty_ast::todo_document`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            #[doc = ::std::concat!(r#\"Example: `disambiguated_todo!(\"Error message\")` or `disambiguated_todo!(issue #123, \"Error message with issue attached\")`.\"#)]\n            disambiguated_todo{$crate::printer::pretty_ast::todo_document!},\n            #[doc = ::std::concat!(\"Proxy macro for [`pretty::docs`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            docs{$crate::printer::pretty_ast::pretty_ast_docs!},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::nil`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            nil{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::nil},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::fail`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            fail{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::fail},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::hardline`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            hardline{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::hardline},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::space`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            space{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::space},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::line`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            
disambiguated_line{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::line},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::line_`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            line_{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::line_},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::softline`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            softline{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::softline},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::softline_`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            softline_{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::softline_},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::as_string`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            as_string{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::as_string},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::text`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            text{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::text},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::concat`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            disambiguated_concat{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::concat},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::intersperse`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            intersperse{$crate::printer::pretty_ast::pretty_ast_intersperse!},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::column`] that automatically uses `\", stringify!($allocator),\"` as 
allocator.\")]\n            column{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::column},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::nesting`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            nesting{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::nesting},\n            #[doc = ::std::concat!(\"Proxy macro for [`PrettyAstExt::reflow`] that automatically uses `\", stringify!($allocator),\"` as allocator.\")]\n            reflow{<$allocator_type as $crate::printer::pretty_ast::PrettyAstExt<_>>::reflow}\n        );\n    };\n    (@$allocator:ident, $($(#[$($attrs:tt)*])*$name:ident{$($callable:tt)*}),*) => {\n        $(\n            #[hax_rust_engine_macros::partial_apply($($callable)*, $allocator,)]\n            #[allow(unused)]\n            $(#[$($attrs)*])*\n            macro_rules! $name {}\n        )*\n    };\n}\npub use install_pretty_helpers;\n\n/// `PrettyAstExt` exposes `DocAllocator`-style constructors for printers.\n///\n/// Every method simply forwards to the global [`pretty::BoxAllocator`] so that printers\n/// implementing [`PrettyAst`] can build documents without juggling allocator plumbing.\npub trait PrettyAstExt<A: 'static>: Sized {\n    /// Returns an empty document.\n    /// Mirrors [`pretty::DocAllocator::nil`].\n    fn nil(&self) -> DocBuilder<A> {\n        pretty::DocAllocator::nil(&BoxAllocator)\n    }\n\n    /// Produces a document that fails rendering immediately.\n    /// Mirrors [`pretty::DocAllocator::fail`].\n    ///\n    /// This is typically used to abort rendering inside the left side of a [`pretty::Doc::Union`].\n    fn fail(&self) -> DocBuilder<A> {\n        pretty::DocAllocator::fail(&BoxAllocator)\n    }\n\n    /// Inserts a mandatory line break.\n    /// Mirrors [`pretty::DocAllocator::hardline`].\n    fn hardline(&self) -> DocBuilder<A> {\n        pretty::DocAllocator::hardline(&BoxAllocator)\n    }\n\n    /// Inserts a single 
space that disappears when groups flatten.\n    /// Mirrors [`pretty::DocAllocator::space`].\n    fn space(&self) -> DocBuilder<A> {\n        pretty::DocAllocator::space(&BoxAllocator)\n    }\n\n    /// Acts like a `\\n` but behaves like `space` once grouped onto a single line.\n    /// Mirrors [`pretty::DocAllocator::line`].\n    fn line(&self) -> DocBuilder<A> {\n        pretty::DocAllocator::line(&BoxAllocator)\n    }\n\n    /// Acts like `line` but collapses to `nil` if grouped on a single line.\n    /// Mirrors [`pretty::DocAllocator::line_`].\n    fn line_(&self) -> DocBuilder<A> {\n        pretty::DocAllocator::line_(&BoxAllocator)\n    }\n\n    /// Acts like `space` when the document fits the page, otherwise behaves like `line`.\n    /// Mirrors [`pretty::DocAllocator::softline`].\n    fn softline(&self) -> DocBuilder<A> {\n        pretty::DocAllocator::softline(&BoxAllocator)\n    }\n\n    /// Acts like `nil` when the document fits the page, otherwise behaves like `line_`.\n    /// Mirrors [`pretty::DocAllocator::softline_`].\n    fn softline_(&self) -> DocBuilder<A> {\n        pretty::DocAllocator::softline_(&BoxAllocator)\n    }\n\n    /// Renders `data` via its [`Display`] implementation.\n    /// Mirrors [`pretty::DocAllocator::as_string`].\n    ///\n    /// The resulting document must not contain explicit line breaks.\n    fn as_string<U: Display>(&self, data: U) -> DocBuilder<A> {\n        pretty::DocAllocator::as_string(&BoxAllocator, data)\n    }\n\n    /// Renders the provided text verbatim.\n    /// Mirrors [`pretty::DocAllocator::text`].\n    ///\n    /// The supplied string must not contain line breaks.\n    fn text<'a>(&self, data: impl Into<Cow<'a, str>>) -> DocBuilder<A> {\n        self.as_string(data.into())\n    }\n\n    /// Concatenates the given values after turning each into a document.\n    /// Mirrors [`pretty::DocAllocator::concat`].\n    fn concat<I>(&self, docs: I) -> DocBuilder<A>\n    where\n        I::Item: ToDocumentOwned<Self, 
A>,\n        I: IntoIterator,\n    {\n        pretty::DocAllocator::concat(\n            &BoxAllocator,\n            docs.into_iter().map(|doc| doc.to_document_owned(self)),\n        )\n    }\n\n    /// Concatenates documents while interspersing `separator` between every pair.\n    /// Mirrors [`pretty::DocAllocator::intersperse`].\n    ///\n    /// `separator` may need to be cloned; consider cheap pointer documents like `RefDoc` or `RcDoc`.\n    fn intersperse<I, S>(&self, docs: I, separator: S) -> DocBuilder<A>\n    where\n        I::Item: ToDocumentOwned<Self, A>,\n        I: IntoIterator,\n        S: ToDocumentOwned<Self, A> + Clone,\n        A: Clone,\n    {\n        let separator = separator.to_document_owned(self);\n        pretty::DocAllocator::intersperse(\n            &BoxAllocator,\n            docs.into_iter().map(|doc| doc.to_document_owned(self)),\n            separator,\n        )\n    }\n\n    /// Reflows `text`, inserting `softline` wherever whitespace appears.\n    /// Mirrors [`pretty::DocAllocator::reflow`].\n    fn reflow(&self, text: &'static str) -> DocBuilder<A>\n    where\n        A: Clone,\n    {\n        pretty::DocAllocator::reflow(&BoxAllocator, text)\n    }\n}\n\nimpl<A: 'static + Clone, P: PrettyAst<A>> PrettyAstExt<A> for P {}\n\n/// Generate a dispatcher macro that forwards a token to specialised macros.\nmacro_rules! make_cases_macro {\n    (\n        $macro_name:ident,\n        $(\n            $($idents:ident)|* => $target:ident,\n        )*\n        _ => $fallback:ident $(,)?\n    ) => {\n        macro_rules! $macro_name {\n            $(\n                $(\n                    ($idents $tt:tt) => { $target!($tt); };\n                )*\n            )*\n            ($anything:ident $tt:tt) => { $fallback!($tt); };\n        }\n    };\n}\n\n/// Helper macro used to ignore a matched arm in `make_cases_macro!`.\nmacro_rules! 
skip {\n    ($tt:tt) => {};\n}\n/// Helper macro used to keep the body for specific matches in\n/// `make_cases_macro!`.\nmacro_rules! keep {\n    ({$($tt:tt)*}) => { $($tt)* };\n}\n\nmake_cases_macro!(method_deny_list,\n    ExprKind | PatKind | TyKind | GuardKind | ImplExprKind | ImplItemKind | TraitItemKind | AttributeKind | DocCommentKind => skip,\n    Signedness  | IntSize => skip,\n    ItemQuoteOrigin | ItemQuoteOriginKind | ItemQuoteOriginPosition => skip,\n    ControlFlowKind | LoopState | LoopKind => skip,\n    _ => keep\n);\n\nmake_cases_macro!(span_handling,\n    Item | Expr | Pat | Guard | Arm | ImplItem | TraitItem | GenericParam | Attribute | Attribute => keep,\n    _ => skip\n);\n\n/// A trait that provides an optional contextual span for printers: during a\n/// pretty printing job, spans will be inserted so that errors are always tagged\n/// with precise location information.\n///\n/// This should not be implemented by hand, instead, use\n/// [`hax_rust_engine_macros::setup_printer_struct`].\npub trait HasContextualSpan: Clone {\n    /// Clone the printer, adding a span hint. Useful for errors.\n    fn with_span(&self, _span: Span) -> Self;\n\n    /// Returns the span currently associated with the printer, if any.\n    fn span(&self) -> Option<Span>;\n}\n\n/// Declare the `PrettyAst` trait and wiring for deriving `ToDocument` for AST\n/// nodes.\nmacro_rules! mk {\n    ($($ty:ident),*) => {\n        pastey::paste! {\n            /// A trait that defines a print method per type in the AST.\n            ///\n            /// This is the main trait a printer should implement.\n            ///\n            /// You then implement the actual formatting logic in the generated\n            /// per-type methods. 
These methods are intentionally marked\n            /// `#[deprecated]` to discourage calling them directly; instead,\n            /// call `node.to_document(self)` from the [`ToDocument`] trait to\n            /// ensure annotations and spans are applied correctly.\n            ///\n            /// Note that using `install_pretty_helpers!` will produce macros\n            /// that implicitly use `self` as allocator. Take a look at a\n            /// printer in the [`backends`] module for an example.\n            pub trait PrettyAst<A: 'static + Clone>: Sized + HasContextualSpan {\n                /// A name for this instance of `PrettyAst`.\n                /// Useful for diagnostics and debugging.\n                const NAME: &'static str;\n\n                /// Emit a diagnostic with proper context and span.\n                fn emit_diagnostic(&self, kind: hax_types::diagnostics::Kind) {\n                    let span = self.span().unwrap_or_else(|| Span::dummy());\n                    use crate::ast::diagnostics::{DiagnosticInfo, Context};\n                    (DiagnosticInfo {\n                        context: Context::Printer(Self::NAME.to_string()),\n                        span,\n                        kind\n                    }).emit()\n                }\n\n                /// Produce a non-panicking placeholder document. 
In general, prefer the use of the helper macro [`todo_document!`].\n                fn todo_document(&self, message: &str, issue_id: Option<u32>) -> DocBuilder<A> {\n                    self.emit_diagnostic(hax_types::diagnostics::Kind::Unimplemented {\n                        issue_id,\n                        details: Some(message.into()),\n                    });\n                    self.as_string(message)\n                }\n\n                /// Produce a structured error document for an unimplemented\n                /// method.\n                ///\n                /// Printers may override this for nicer diagnostics (e.g.,\n                /// colored \"unimplemented\" banners or links back to source\n                /// locations). The default produces a small, debuggable piece\n                /// of text that includes the method name and a JSON handle for\n                /// the AST fragment (via [`DebugJSON`]).\n                fn unimplemented_method(&self, method: &str, ast: ast::fragment::FragmentRef<'_>) -> DocBuilder<A> {\n                    let debug_json = DebugJSON(ast).to_string();\n                    self.emit_diagnostic(hax_types::diagnostics::Kind::Unimplemented {\n                        issue_id: None,\n                        details: Some(format!(\"The method `{method}` is not implemented in the backend {}. To show the AST fragment that could not be printed, run {debug_json}.\", Self::NAME)),\n                    });\n                    self.text(format!(\"`{method}` unimpl, {debug_json}\", )).parens()\n                }\n\n                $(\n                    method_deny_list!($ty{\n                        #[doc = \"Define how the printer formats a value of this AST type.\"]\n                        #[doc = \"Do not call this method directly. Use [`ToDocument::to_document`] instead, so annotations/spans are preserved correctly.\"]\n                        #[deprecated = \"Do not call this method directly. 
Use [`ToDocument::to_document`] instead, so annotations/spans are preserved correctly.\"]\n                        fn [<$ty:snake>](&self, [<$ty:snake>]: &$ty) -> DocBuilder<A> {\n                            mk!(@method_body $ty [<$ty:snake>] self [<$ty:snake>])\n                        }\n                    });\n                )*\n            }\n\n            $(\n                method_deny_list!($ty{\n                    impl<A: 'static + Clone, P: PrettyAst<A>> ToDocument<P, A> for $ty {\n                        fn to_document(&self, printer: &P) -> DocBuilder<A> {\n                            span_handling!($ty{\n                                let printer = &(printer.with_span(self.span()));\n                            });\n                            // Note about deprecation:\n                            //   Here is the only place where calling the deprecated methods from the trait `PrettyAst` is fine.\n                            //   Here is the place we (will) take care of spans, etc.\n                            #[allow(deprecated)]\n                            let print = <P as PrettyAst<A>>::[<$ty:snake>];\n                            print(printer, self)\n                        }\n                    }\n                });\n            )*\n        }\n    };\n\n    // Special default implementation for specific types\n    (@method_body Symbol $meth:ident $self:ident $value:ident) => {\n        $self.as_string($value.to_string())\n    };\n    (@method_body LocalId $meth:ident $self:ident $value:ident) => {\n        $value.0.to_document($self)\n    };\n    (@method_body SpannedTy $meth:ident $self:ident $value:ident) => {\n        $value.ty.to_document($self)\n    };\n    (@method_body $ty:ident $meth:ident $self:ident $value:ident) => {\n        $self.unimplemented_method(stringify!($meth), ast::fragment::FragmentRef::from($meth))\n    };\n}\n\n#[hax_rust_engine_macros::replace(AstNodes => include(VisitableAstNodes))]\nmk!(GlobalId, AstNodes);\n"
  },
  {
    "path": "rust-engine/src/printer/render_view.rs",
    "content": "//! Tools for rendering Rust paths into strings.\n//!\n//! This module takes a typed [`View`] (a list of [`PathSegment`]s) and turns it\n//! into either:\n//! - a structured [`Rendered`] (with `module` vs. `path` parts), or\n//! - a single flat `String`.\n//!\n//! The [`RenderView`] trait allows for customization.\n\nuse crate::{\n    ast::identifiers::global_id::{\n        ReservedSuffix,\n        view::{PathSegment, PathSegmentPayload, UnnamedPathSegmentPayload, View},\n    },\n    symbol::Symbol,\n};\n\nuse std::collections::HashSet;\nuse std::sync::OnceLock;\n\n/// A helper trait to render a [`View`] (a typed list of path segments) into\n/// strings.\n///\n/// Rendering is split into two parts:\n/// - module path: the crate + module prefix,\n/// - relative path: the remaining (non-module) segments, and both may contain\n///   hierarchical sub-segments (e.g. `Foo::MyVariant::field`).\n///\n/// Implementors can:\n/// - override how unnamed segments (e.g. `impl`, `anon const`) are displayed,\n/// - override how each segment is rendered,\n/// - customize the separator (defaults to `\"::\"`),\n/// - render to either a structured [`Rendered`] or a single flat `String`.\n///\n/// # Terminology\n///\n/// A path segment can be:\n/// - named: carries a `Symbol` that can be printed as-is,\n/// - unnamed: carries an [`UnnamedPathSegmentPayload`] (like `Impl`, `Closure`,\n///   …), which must be turned into a `Symbol` first (see\n///   [`RenderView::render_unnamed_path_segment_payload`]).\n///\n/// # Hierarchical segments\n///\n/// Some segments are actually small trees (e.g., field → constructor → type).\n/// [`RenderView::render_path_segment`] returns all display atoms for such a\n/// segment, so callers can flatten or join as needed.\npub trait RenderView: Sized {\n    /// List of reserved keywords that will be escaped when rendering\n    fn reserved_keywords() -> &'static HashSet<String> {\n        static SET: OnceLock<HashSet<String>> = 
OnceLock::new();\n        SET.get_or_init(|| [].into_iter().collect())\n    }\n\n    /// Check if a string is a reserved keyword that needs escaping\n    fn is_reserved_keyword(id: &str) -> bool {\n        let reserved = Self::reserved_keywords();\n        reserved.contains(id)\n    }\n\n    /// Check if a string needs escaping\n    fn should_escape(id: &str) -> bool {\n        Self::is_reserved_keyword(id)\n    }\n\n    /// Escape a string if it needs escaping according to `Self::should_escape`\n    fn escape(id: &str) -> String {\n        // See https://github.com/cryspen/hax/issues/1866\n        let id = id.replace([' ', '<', '>'], \"_\");\n        if id.is_empty() {\n            \"_ERROR_EMPTY_ID_\".to_string()\n        } else if Self::should_escape(id.trim_start_matches(\"_\")) {\n            format!(\"_{id}\")\n        } else {\n            id\n        }\n    }\n\n    /// Converts an unnamed path segment payload into a printable [`Symbol`].\n    ///\n    /// Unnamed segments include `impl`, `anon const`, `inline const`, `foreign mod`,\n    /// `global_asm`, `use`, `opaque`, and `closure`. By default, these map to\n    /// their capitalized identifier (e.g., `Impl`, `AnonConst`, …).\n    ///\n    /// Override this method to customize how unnamed items appear in output.\n    fn render_unnamed_path_segment_payload(&self, unnamed: UnnamedPathSegmentPayload) -> Symbol {\n        default::render_unnamed_path_segment_payload(self, unnamed)\n    }\n\n    /// Converts a full [`PathSegmentPayload`] (named or unnamed) into a printable [`Symbol`].\n    ///\n    /// Named payloads return their `Symbol` unchanged. 
Unnamed payloads are delegated to\n    /// [`render_unnamed_path_segment_payload`].\n    fn render_path_segment_payload(&self, payload: PathSegmentPayload) -> Symbol {\n        match payload {\n            PathSegmentPayload::Named(symbol) => symbol,\n            PathSegmentPayload::Unnamed(unnamed) => {\n                self.render_unnamed_path_segment_payload(unnamed)\n            }\n        }\n    }\n\n    /// Renders a single [`PathSegment`] into a vector of display atoms.\n    ///\n    /// Most segments render to a single atom (e.g., `\"Foo\"`). Hierarchical segments\n    /// (like a field) render to multiple atoms representing their parent chain\n    /// (e.g., `[\"Foo\", \"MyVariant\", \"my_field\"]`). Disambiguators (see\n    /// [`PathSegment::disambiguator`]) are suffixed as `_N` when `N > 0`.\n    ///\n    /// The resulting atoms are suitable for joining with [`separator`](Self::separator),\n    /// or for further grouping into module vs. relative path.\n    fn render_path_segment(&self, seg: &PathSegment) -> Vec<String> {\n        default::render_path_segment(self, seg)\n    }\n\n    /// Renders the optional suffix\n    fn render_suffix(&self, suffix: &ReservedSuffix) -> String {\n        default::render_suffix(suffix)\n    }\n\n    /// Renders just the module path (crate + modules) of a [`View`], as a list of atoms.\n    ///\n    /// This is a convenience wrapper around [`render`](Self::render) that returns only\n    /// the `module` component.\n    fn module(&self, view: &View) -> Vec<String> {\n        self.render(view).module\n    }\n\n    /// Allows backends to adjust a module path before rendering, e.g., to shorten it according\n    /// to currently open namespaces.\n    fn relativize_module_path<'a>(&self, module_path: &'a [PathSegment]) -> &'a [PathSegment] {\n        module_path\n    }\n\n    /// Renders a [`View`] into a structured [`Rendered`] value,\n    /// splitting output into `module` and `path` parts.\n    ///\n    /// Internally, this 
uses [`View::split_at_module`] to separate module segments\n    /// from the remaining non-module segments, rendering each with\n    /// [`render_path_segment`].\n    fn render(&self, view: &View) -> Rendered {\n        let (module_path, relative_path) = view.split_at_module();\n        let module_path = self.relativize_module_path(module_path);\n        let path_segment = |seg| self.render_path_segment(seg);\n        let mut path: Vec<String> = relative_path.iter().flat_map(path_segment).collect();\n        if let Some(last) = path.last_mut()\n            && let Some(suffix) = view.suffix()\n        {\n            last.push_str(&self.render_suffix(suffix));\n        }\n        Rendered {\n            module: module_path.iter().flat_map(path_segment).collect(),\n            path,\n        }\n    }\n\n    /// Returns the string used to join rendered atoms (defaults to `\"::\"`).\n    ///\n    /// Override to customize separators (e.g., `\".\"`).\n    fn separator(&self) -> &str {\n        \"::\"\n    }\n\n    /// Lazy render a view as an iterator of strings.\n    ///\n    /// This chains `rendered.module` and `rendered.path` in order.\n    fn rendered_to_strings(&self, rendered: Rendered) -> impl Iterator<Item = String> {\n        rendered.module.into_iter().chain(rendered.path)\n    }\n\n    /// Joins the atoms contained in a [`Rendered`] into a single string using\n    /// [`separator`](Self::separator).\n    ///\n    /// This concatenates `rendered.module` and `rendered.path` in order, inserting\n    /// the separator between atoms.\n    fn rendered_to_string(&self, rendered: Rendered) -> String {\n        self.rendered_to_strings(rendered)\n            .collect::<Vec<_>>()\n            .join(self.separator())\n    }\n\n    /// Convenience: renders a [`View`] straight to a single `String`.\n    fn render_string(&self, view: &View) -> String {\n        self.rendered_to_string(self.render(view))\n    }\n\n    /// Convenience: renders a [`View`] straight to an 
iterator of `String`s.\n    fn render_strings(&self, view: &View) -> impl Iterator<Item = String> {\n        self.rendered_to_strings(self.render(view))\n    }\n}\n\n/// Default rendering helpers used by [`RenderView`]'s blanket implementations.\n///\n/// You can call these directly when composing your own renderer, or override the\n/// trait methods to change behavior selectively.\npub mod default {\n    use super::*;\n\n    /// Default mapping of unnamed payloads to printable symbols.\n    pub fn render_unnamed_path_segment_payload<V: RenderView + Sized>(\n        _render_view: &V,\n        unnamed: UnnamedPathSegmentPayload,\n    ) -> Symbol {\n        Symbol::new(match unnamed {\n            UnnamedPathSegmentPayload::Impl => \"Impl\",\n            UnnamedPathSegmentPayload::AnonConst => \"AnonConst\",\n            UnnamedPathSegmentPayload::InlineConst => \"InlineConst\",\n            UnnamedPathSegmentPayload::Foreign => \"Foreign\",\n            UnnamedPathSegmentPayload::GlobalAsm => \"GlobalAsm\",\n            UnnamedPathSegmentPayload::Use => \"Use\",\n            UnnamedPathSegmentPayload::Opaque => \"Opaque\",\n            UnnamedPathSegmentPayload::Closure => \"Closure\",\n        })\n    }\n\n    /// Default rendering of a single [`PathSegment`] into display atoms.\n    ///\n    /// This walks the segment's parent chain (see [`PathSegment::parents`]) and\n    /// produces an atom for each level using\n    /// [`RenderView::render_path_segment_payload`]. 
If a level has a disambiguator\n    /// `> 0`, it is appended as `_<n>` (e.g., `Foo_2`).\n    pub fn render_path_segment<V: RenderView + Sized>(\n        render_view: &V,\n        seg: &PathSegment,\n    ) -> Vec<String> {\n        let mut strings: Vec<String> = seg\n            .parents()\n            .map(|seg| {\n                let id = render_view.render_path_segment_payload(seg.payload());\n                let d = seg.disambiguator();\n                if d > 0 {\n                    format!(\"{id}_{d}\")\n                } else {\n                    format!(\"{id}\")\n                }\n            })\n            .map(|str| V::escape(&str))\n            .collect();\n        strings.reverse();\n        strings\n    }\n\n    /// Default suffix rendering\n    pub fn render_suffix(suffix: &ReservedSuffix) -> String {\n        match suffix {\n            ReservedSuffix::Pre => \"_pre\",\n            ReservedSuffix::Post => \"_post\",\n            ReservedSuffix::Cast => \"_cast_to_repr\",\n        }\n        .to_owned()\n    }\n}\n\n/// The structured result of rendering a [`View`].\n///\n/// - `module`: atoms for the crate + modules prefix (may be empty for local/anonymous contexts),\n/// - `path`:   atoms for the remaining segments (item, constructors, fields, etc.).\n///\n/// Join with [`RenderView::rendered_to_string`] to obtain a single string.\npub struct Rendered {\n    /// Crate + module atoms (e.g., `[\"my_crate\", \"a\", \"b\"]`).\n    pub module: Vec<String>,\n    /// Non-module atoms (e.g., `[\"Foo::f\", \"MyEnum::MyVariant::my_field\"]`).\n    pub path: Vec<String>,\n}\n"
  },
  {
    "path": "rust-engine/src/printer.rs",
    "content": "//! Printer infrastructure: allocators, traits, and the printing pipeline.\n//!\n//! This module contains the common plumbing that backends and printers rely on\n//! to turn AST values into formatted text:\n//! - [`Allocator`]: a thin wrapper around the `pretty` crate's allocator,\n//!   parameterized by the backend, used to produce [`pretty::Doc`] nodes.\n//! - [`PrettyAst`]: the trait that printers implement to provide per-type\n//!   formatting of Hax AST nodes (re-exported from [`pretty_ast`]).\n//! - The resugaring pipeline: a sequence of local AST rewrites that make\n//!   emitted code idiomatic for the target language before pretty-printing.\n\nuse std::ops::Deref;\n\nuse crate::{\n    ast::{self, span::Span},\n    attributes::LinkedItemGraph,\n    printer::pretty_ast::ToDocument,\n};\nuse ast::visitors::dyn_compatible;\n\npub mod pretty_ast;\npub use pretty_ast::PrettyAst;\n\npub mod render_view;\n\n/// A resugaring is an erased mapper visitor with a name.\n/// A resugaring is a *local* transformation on the AST that produces exclusively `ast::resugared` nodes.\n/// Any involved or non-local transformation should be a phase, not a resugaring.\n///\n/// Backends may provide **multiple resugaring phases** to incrementally refine\n/// the tree into something idiomatic for the target language (e.g., desugaring\n/// pattern sugar into a more uniform core, then resugaring back into target\n/// idioms). 
Each phase mutates the AST in place and should be small, focused,\n/// and easy to test.\n///\n/// If you add a new phase, make sure it appears in the backend’s\n/// `resugaring_phases()` list in the correct order.\npub trait Resugaring: for<'a> dyn_compatible::AstVisitorMut<'a> {\n    /// Get the name of the resugar.\n    fn name(&self) -> String;\n}\n\n/// A printer defines a list of resugaring phases.\npub trait Printer: Sized + PrettyAst<Span> + Default + HasLinkedItemGraph {\n    /// The name of the printer\n    const NAME: &'static str = <Self as PrettyAst<Span>>::NAME;\n}\n\n/// Getter and setter for `LinkedItemGraph`, useful for printers.\npub trait HasLinkedItemGraph {\n    /// Get a reference of the `LinkedItemGraph`.\n    fn linked_item_graph(&self) -> &LinkedItemGraph;\n    /// Set a `LinkedItemGraph`.\n    fn with_linked_item_graph(self, graph: std::rc::Rc<LinkedItemGraph>) -> Self;\n}\n\n#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)]\n/// Placeholder type for sourcemaps.\npub struct SourceMap;\n\n/// Helper trait to print AST fragments.\npub trait Print<T>\nwhere\n    for<'a> dyn Resugaring: dyn_compatible::AstVisitableMut<'a, T>,\n{\n    /// Print a single AST fragment using this backend.\n    fn print_returning_fragment(&mut self, fragment: T) -> (String, SourceMap, T)\n    where\n        T: ToDocument<Self, Span>;\n\n    /// Print a single AST fragment using this backend.\n    fn print(&mut self, fragment: T) -> (String, SourceMap)\n    where\n        T: ToDocument<Self, Span>;\n}\n\nimpl<P: Printer, T> Print<T> for P\nwhere\n    for<'a> dyn Resugaring: dyn_compatible::AstVisitableMut<'a, T>,\n{\n    fn print_returning_fragment(&mut self, fragment: T) -> (String, SourceMap, T)\n    where\n        T: ToDocument<Self, Span>,\n    {\n        let doc_builder = fragment.to_document(self).into_doc();\n        (\n            doc_builder.deref().pretty(80).to_string(),\n            SourceMap,\n            fragment,\n        )\n    }\n\n    
fn print(&mut self, fragment: T) -> (String, SourceMap)\n    where\n        T: ToDocument<Self, Span>,\n    {\n        let (rendered, sourcemap, _) = <Self as Print<_>>::print_returning_fragment(self, fragment);\n        (rendered, sourcemap)\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/resugarings.rs",
    "content": "//! The \"resugaring\" phases used by printers.\n\n//! This module defines resugarings instances (see\n//! [`hax_rust_engine::ast::Resugaring`] for the definition of a\n//! resugaring). Each backend defines its own set of resugaring phases.\n\nuse crate::ast::identifiers::GlobalId;\nuse crate::ast::resugared::*;\nuse crate::ast::visitors::*;\nuse crate::ast::*;\nuse crate::printer::*;\n\n/// Transforms [`ItemKind::Fn`] of arity zero into [`ResugaredItemKind::Constant`].\n/// Rust `const` items are encoded by the `ImportThir` phase of the hax engine as function of arity zero.\n/// Functions of arity zero themselves are encoded as functions operating on one argument of type `()`.\n#[derive(Copy, Clone, Default)]\npub struct FunctionsToConstants;\n\nimpl AstVisitorMut for FunctionsToConstants {\n    fn enter_item_kind(&mut self, item_kind: &mut ItemKind) {\n        let ItemKind::Fn {\n            name,\n            generics,\n            body,\n            params,\n            safety: SafetyKind::Safe,\n        } = item_kind\n        else {\n            return;\n        };\n        if !params.is_empty() {\n            return;\n        }\n        *item_kind = ItemKind::Resugared(ResugaredItemKind::Constant {\n            name: *name,\n            body: body.clone(),\n            generics: generics.clone(),\n        });\n    }\n    fn enter_impl_item_kind(&mut self, item_kind: &mut ImplItemKind) {\n        if let ImplItemKind::Fn { body, params } = item_kind\n            && params.is_empty()\n        {\n            *item_kind =\n                ImplItemKind::Resugared(ResugaredImplItemKind::Constant { body: body.clone() })\n        }\n    }\n}\n\nimpl Resugaring for FunctionsToConstants {\n    fn name(&self) -> String {\n        \"functions-to-constants\".to_string()\n    }\n}\n\n/// Tuples resugaring. 
Resugars tuple constructors to the dedicated expression variant [`ResugaredExprKind::Tuple`],\n/// and tuple types to the dedicated type variant [`ResugaredTyKind::Tuple`].\npub struct Tuples;\n\nimpl AstVisitorMut for Tuples {\n    fn enter_expr_kind(&mut self, x: &mut ExprKind) {\n        let (constructor, fields) = match x {\n            ExprKind::Construct {\n                constructor,\n                is_record: false,\n                is_struct: true,\n                base: None,\n                fields,\n            } => (constructor, &fields[..]),\n            ExprKind::GlobalId(constructor) => (constructor, &[][..]),\n            _ => return,\n        };\n        if constructor.expect_tuple().is_some() {\n            let args = fields.iter().map(|(_, e)| e).cloned().collect();\n            *x = ExprKind::Resugared(ResugaredExprKind::Tuple(args))\n        }\n    }\n    fn enter_ty_kind(&mut self, x: &mut TyKind) {\n        let TyKind::App { head, args } = x else {\n            return;\n        };\n        if head.expect_tuple().is_some() {\n            let Some(args) = args\n                .iter()\n                .map(GenericValue::expect_ty)\n                .collect::<Option<Vec<_>>>()\n            else {\n                return;\n            };\n            *x = TyKind::Resugared(ResugaredTyKind::Tuple(args.into_iter().cloned().collect()))\n        }\n    }\n}\n\nimpl Resugaring for Tuples {\n    fn name(&self) -> String {\n        \"tuples\".to_string()\n    }\n}\n\n/// Let-pure resugaring. 
Use to identify expressions of the form `let x ← pure ..`, where the arrow\n/// can be turned into a normal assignment `:=`\npub struct LetPure;\n\nimpl AstVisitorMut for LetPure {\n    fn enter_expr_kind(&mut self, expr: &mut ExprKind) {\n        const PURE: GlobalId = crate::names::rust_primitives::hax::explicit_monadic::pure;\n        if let ExprKind::Let { lhs, rhs, body } = expr\n            && let ExprKind::App {\n                head,\n                args,\n                generic_args,\n                bounds_impls,\n                trait_: None,\n            } = rhs.kind()\n            && *head.kind() == ExprKind::GlobalId(PURE)\n            && let ([pure_rhs], [], []) = (&args[..], &generic_args[..], &bounds_impls[..])\n        {\n            *expr = ExprKind::Resugared(ResugaredExprKind::LetPure {\n                lhs: lhs.clone(),\n                rhs: pure_rhs.clone(),\n                body: body.clone(),\n            })\n        }\n    }\n}\n\nimpl Resugaring for LetPure {\n    fn name(&self) -> String {\n        \"let_pure\".to_string()\n    }\n}\n\n/// Recursive function detection. 
Identifies functions whose body contains a\n/// reference to their own name and resugars them to [`ResugaredItemKind::RecursiveFn`].\n#[derive(Copy, Clone, Default)]\npub struct RecursiveFunctions;\n\n/// Helper visitor that checks whether an expression tree contains a reference\n/// to a specific [`GlobalId`].\nstruct SelfReferenceChecker {\n    target: GlobalId,\n    found: bool,\n}\n\nimpl AstVisitor for SelfReferenceChecker {\n    fn enter_expr_kind(&mut self, kind: &ExprKind) {\n        if let ExprKind::GlobalId(id) = kind\n            && *id == self.target\n        {\n            self.found = true;\n        }\n    }\n}\n\nimpl AstVisitorMut for RecursiveFunctions {\n    fn visit_item_kind(&mut self, item_kind: &mut ItemKind) {\n        if let ItemKind::Fn {\n            name,\n            generics,\n            body,\n            params,\n            safety,\n        } = &*item_kind\n        {\n            let mut checker = SelfReferenceChecker {\n                target: *name,\n                found: false,\n            };\n            checker.visit_expr(body);\n            if checker.found {\n                *item_kind = ItemKind::Resugared(ResugaredItemKind::RecursiveFn {\n                    name: *name,\n                    generics: generics.clone(),\n                    body: body.clone(),\n                    params: params.clone(),\n                    safety: safety.clone(),\n                });\n            }\n        }\n    }\n}\n\nimpl Resugaring for RecursiveFunctions {\n    fn name(&self) -> String {\n        \"recursive-functions\".to_string()\n    }\n}\n\n/// Record ellipsis resugaring. 
Identifies record-like `Construct` patterns where\n/// some fields are wildcards and resugars them into `ConstructWithEllipsis`,\n/// dropping the wildcard fields so the printer can emit `..`.\npub struct RecordEllipsis;\n\nimpl AstVisitorMut for RecordEllipsis {\n    fn enter_pat_kind(&mut self, x: &mut PatKind) {\n        let PatKind::Construct {\n            constructor,\n            is_record: true,\n            is_struct,\n            fields,\n        } = x\n        else {\n            return;\n        };\n        let non_wild: Vec<_> = fields\n            .iter()\n            .filter(|(_, pat)| !matches!(&*pat.kind, PatKind::Wild))\n            .cloned()\n            .collect();\n        if non_wild.len() < fields.len() {\n            *x = ResugaredPatKind::ConstructWithEllipsis {\n                constructor: *constructor,\n                is_struct: *is_struct,\n                fields: non_wild,\n            }\n            .into();\n        }\n    }\n}\n\nimpl Resugaring for RecordEllipsis {\n    fn name(&self) -> String {\n        \"record-ellipsis\".to_string()\n    }\n}\n"
  },
  {
    "path": "rust-engine/src/symbol.rs",
    "content": "//! Interned string identifiers used throughout the AST.\n//!\n//! Symbols are lightweight wrappers around `String` for use in identifiers.\n//! Eventually, this could be backed by a real interner or arena.\n\nuse std::ops::Deref;\n\nuse hax_rust_engine_macros::*;\n\n/// Interned string identifier for the AST\n#[derive_group_for_ast]\npub struct Symbol(String);\n\nimpl Symbol {\n    /// Create a new symbol\n    pub fn new(s: impl AsRef<str>) -> Self {\n        Self(s.as_ref().to_string())\n    }\n}\n\nimpl Deref for Symbol {\n    type Target = str;\n\n    fn deref(&self) -> &Self::Target {\n        &self.0\n    }\n}\n\nimpl AsRef<str> for Symbol {\n    fn as_ref(&self) -> &str {\n        &self.0\n    }\n}\n\nimpl std::fmt::Display for Symbol {\n    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {\n        write!(f, \"{}\", self.0)\n    }\n}\n"
  },
  {
    "path": "rust-toolchain.toml",
    "content": "[toolchain]\nchannel = \"nightly-2025-11-08\"\ncomponents = [ \"rustc-dev\", \"llvm-tools-preview\" , \"rust-analysis\" , \"rust-src\" , \"rustfmt\" ]\n"
  },
  {
    "path": "rustc-coverage-tests/Cargo.toml",
    "content": "[package]\nname = \"coverage\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../hax-lib\" }\n\n[features]\nfstar = []\nfstar-lax = []\ncoq = []\njson = []\nlean = []\n\n"
  },
  {
    "path": "rustc-coverage-tests/README.md",
    "content": "This crate contains the [rust files](https://github.com/rust-lang/rust/tree/master/tests/coverage) from rustc [coverage tests](https://rustc-dev-guide.rust-lang.org/tests/compiletest.html#coverage-tests). \n\nThe following test targets are available:\n- `json` to test AST extraction as json using hax frontend\n- `fstar` to test that extraction to F* succeeds\n- `fstar-lax` to test that extraction to F* and lax-checking succeed\n- `coq` to test that extraction to coq succeeds\n- `lean` to test that extraction to Lean succeeds\n- `lean-tc` to test that extraction to Lean succeeds and type checks\n\n## Running\n\nA script is available to run the tests using `python rustc-coverage-tests.py <target>` where `<target>` is either one of the targets or `all`.\n\n### Checking negative results\n\nIf you run `python run-coverage-tests.py --with-negative <target>` you also check that tests that are not expected to succeed actually fail.\n\nIt will produce a summary of the results file by file.\n\nThis feature has some python dependencies, so you may need to run `pip install tabulate pyyaml` first.\n\n### Checking stability\n\nIf you run `python run-coverage-tests.py --with-negative --check-stability <target>` you check that the generated files correspond to the stored snapshots. To update these snapshots, run `python run-coverage-tests.py --with-negative --check-stability --update-snapshots <target>`\n\n\n## Modifying\n\n### Updating sources\n\nRun ./update-test-sources.sh to update the test with the latest versions used by rustc.\n\n### Adding a new test target\n\nTo add a new test target:\n- Add a corresponding feature to the `Cargo.toml`\n- Activate the wanted tests for this feature by enabling them under the feature. 
This is done using the `cfg` attributes in the `lib.rs`/`mod.rs` files (see next section).\n- Activate the same tests for the feature in `test_config.yaml`.\n- Modify the script to add the new target\n\n### Activating a test file for a given target\n\nTo activate a test for a target, you can add the feature corresponding to the target to the `cfg` attribute of this test in `lib.rs` (or `mod.rs` for tests contained in submodules). For example: \n```rust\n#[cfg(any(feature = \"json\", feature = \"fstar\"))]\nmod abort;\n```\nThis means that the test in `abort.rs` runs only for features `json` and `fstar`. If you want to also run it under a new feature you can modify this to `#[cfg(any(feature = \"json\", feature = \"fstar\", feature = \"<my_new_feature>\"))]`.\n\nSome tests are currently not activated for any feature. The corresponding module declarations are commented out (for example `// mod async_block;`). To add these tests to a target, uncomment the corresponding line and add the adequate `cfg` attribute.\n\nThere is a separate configuration in `test_config.yaml` that should match what is defined with the `cfg` flags. This one is also used to check that tests that are not activated do fail.\n"
  },
  {
    "path": "rustc-coverage-tests/proofs/fstar/extraction/Makefile",
    "content": "# This is a generically useful Makefile for F* that is self-contained\n#\n# We expect:\n#  1. `fstar.exe` to be in PATH (alternatively, you can also set\n#     $FSTAR_HOME to be set to your F* repo/install directory)\n#\n#  2. `cargo`, `rustup`, `hax` and `jq` to be installed and in PATH.\n#\n#  3. the extracted Cargo crate to have \"hax-lib\" as a dependency:\n#     `hax-lib = { version = \"0.1.0-pre.1\", git = \"https://github.com/hacspec/hax\"}`\n#\n# Optionally, you can set `HACL_HOME`.\n#\n# ROOTS contains all the top-level F* files you wish to verify\n# The default target `verify` verified ROOTS and its dependencies\n# To lax-check instead, set `OTHERFLAGS=\"--lax\"` on the command-line\n#\n# To make F* emacs mode use the settings in this file, you need to\n# add the following lines to your .emacs\n#\n# (setq-default fstar-executable \"<YOUR_FSTAR_HOME>/bin/fstar.exe\")\n# (setq-default fstar-smt-executable \"<YOUR_Z3_HOME>/bin/z3\")\n#\n# (defun my-fstar-compute-prover-args-using-make ()\n#   \"Construct arguments to pass to F* by calling make.\"\n#   (with-demoted-errors \"Error when constructing arg string: %S\"\n#     (let* ((fname (file-name-nondirectory buffer-file-name))\n# \t   (target (concat fname \"-in\"))\n# \t   (argstr (car (process-lines \"make\" \"--quiet\" target))))\n#       (split-string argstr))))\n# (setq fstar-subp-prover-args #'my-fstar-compute-prover-args-using-make)\n#\n\nPATH_TO_CHILD_MAKEFILE := \"$(abspath $(firstword $(MAKEFILE_LIST)))\"\nPATH_TO_TEMPLATE_MAKEFILE := \"$(abspath $(lastword $(MAKEFILE_LIST)))\"\n\nHACL_HOME      ?= $(HOME)/.hax/hacl_home\n# Expand variable FSTAR_BIN_DETECT now, so that we don't run this over and over\n\nFSTAR_BIN_DETECT := $(if $(shell command -v fstar.exe), fstar.exe, $(FSTAR_HOME)/bin/fstar.exe)\nFSTAR_BIN      ?= $(FSTAR_BIN_DETECT)\n\nGIT_ROOT_DIR   := $(shell git rev-parse --show-toplevel)/\nCACHE_DIR      ?= ${GIT_ROOT_DIR}.fstar-cache/checked\nHINT_DIR       ?= 
${GIT_ROOT_DIR}.fstar-cache/hints\n\n# Makes command quiet by default\nQ ?= @\n\n# Verify the required executable are in PATH\nEXECUTABLES = cargo cargo-hax jq\nK := $(foreach exec,$(EXECUTABLES),\\\n        $(if $(shell which $(exec)),some string,$(error \"No $(exec) in PATH\")))\n\nexport ANSI_COLOR_BLUE=\\033[34m\nexport ANSI_COLOR_RED=\\033[31m\nexport ANSI_COLOR_BBLUE=\\033[1;34m\nexport ANSI_COLOR_GRAY=\\033[90m\nexport ANSI_COLOR_TONE=\\033[35m\nexport ANSI_COLOR_RESET=\\033[0m\n\nifdef NO_COLOR\nexport ANSI_COLOR_BLUE=\nexport ANSI_COLOR_RED=\nexport ANSI_COLOR_BBLUE=\nexport ANSI_COLOR_GRAY=\nexport ANSI_COLOR_TONE=\nexport ANSI_COLOR_RESET=\nendif\n\n# The following is a bash script that discovers F* libraries.\n# Due to incompatibilities with make 4.3, I had to make a \"oneliner\" bash script...\ndefine FINDLIBS\n    : \"Prints a path if and only if it exists. Takes one argument: the path.\"; \\\n    function print_if_exists() { \\\n        if [ -d \"$$1\" ]; then \\\n            echo \"$$1\"; \\\n        fi; \\\n    } ; \\\n    : \"Asks Cargo all the dependencies for the current crate or workspace,\"; \\\n    : \"and extract all \"root\" directories for each. Takes zero argument.\"; \\\n    function dependencies() { \\\n        cargo metadata --format-version 1 | \\\n            jq -r \".packages | .[] | .manifest_path | split(\\\"/\\\") | .[:-1] | join(\\\"/\\\")\"; \\\n    } ; \\\n    : \"Find hax libraries *around* a given path. Takes one argument: the\"; \\\n    : \"path.\"; \\\n    function find_hax_libraries_at_path() { \\\n        path=\"$$1\" ; \\\n        : \"if there is a [proofs/fstar/extraction] subfolder, then that s a F* library\" ; \\\n        print_if_exists \"$$path/proofs/fstar/extraction\" ; \\\n        : \"Maybe the [proof-libs] folder of hax is around?\" ; \\\n        MAYBE_PROOF_LIBS=$$(realpath -q \"$$path/../proof-libs/fstar\") ; \\\n        if [ $$? 
-eq 0 ]; then \\\n            print_if_exists \"$$MAYBE_PROOF_LIBS/core\" ; \\\n            print_if_exists \"$$MAYBE_PROOF_LIBS/rust_primitives\" ; \\\n        fi ; \\\n    } ; \\\n    { while IFS= read path; do \\\n          find_hax_libraries_at_path \"$$path\"; \\\n      done < <(dependencies) ; } | sort -u\nendef\nexport FINDLIBS\n\nFSTAR_INCLUDE_DIRS_EXTRA ?=\nFINDLIBS_OUTPUT := $(shell bash -c '${FINDLIBS}')\nFSTAR_INCLUDE_DIRS = $(HACL_HOME)/lib $(FSTAR_INCLUDE_DIRS_EXTRA) $(FINDLIBS_OUTPUT) ../models\n\n# Make sure FSTAR_INCLUDE_DIRS has the `proof-libs`, print hints and\n# an error message otherwise\nifneq (,$(findstring proof-libs/fstar,$(FSTAR_INCLUDE_DIRS)))\nelse\n\tK += $(info )\n\tERROR := $(shell printf '${ANSI_COLOR_RED}Error: could not detect `proof-libs`!${ANSI_COLOR_RESET}')\n\tK += $(info ${ERROR})\n\tERROR := $(shell printf '  > Do you have `${ANSI_COLOR_BLUE}hax-lib${ANSI_COLOR_RESET}` in your `${ANSI_COLOR_BLUE}Cargo.toml${ANSI_COLOR_RESET}` as a ${ANSI_COLOR_BLUE}git${ANSI_COLOR_RESET} or ${ANSI_COLOR_BLUE}path${ANSI_COLOR_RESET} dependency?')\n\tK += $(info ${ERROR})\n\tERROR := $(shell printf '  ${ANSI_COLOR_BLUE}> Tip: you may want to run `cargo add --git https://github.com/hacspec/hax hax-lib`${ANSI_COLOR_RESET}')\n\tK += $(info ${ERROR})\n\tK += $(info )\n\tK += $(error Fatal error: `proof-libs` is required.)\nendif\n\n.PHONY: all verify clean\n\nall:\n\t$(Q)rm -f .depend\n\t$(Q)$(MAKE) -f $(PATH_TO_CHILD_MAKEFILE) .depend hax.fst.config.json verify\n\nall-keep-going:\n\t$(Q)rm -f .depend\n\t$(Q)$(MAKE) -f $(PATH_TO_CHILD_MAKEFILE) --keep-going .depend hax.fst.config.json verify\n\n# If $HACL_HOME doesn't exist, clone it\n${HACL_HOME}:\n\t$(Q)mkdir -p \"${HACL_HOME}\"\n\t$(info Cloning Hacl* in ${HACL_HOME}...)\n\tgit clone --depth 1 https://github.com/hacl-star/hacl-star.git \"${HACL_HOME}\"\n\t$(info Cloning Hacl* in ${HACL_HOME}... 
done!)\n\n# If no any F* file is detected, we run hax\nifeq \"$(wildcard *.fst *fsti)\" \"\"\n$(shell cargo hax into fstar)\nendif\n\n# By default, we process all the files in the current directory\nROOTS ?= $(wildcard *.fst *fsti)\nADMIT_MODULES ?=\n\nADMIT_MODULE_FLAGS ?= --admit_smt_queries true\n\n# Can be useful for debugging purposes\nFINDLIBS.sh:\n\t$(Q)echo '${FINDLIBS}' > FINDLIBS.sh\ninclude-dirs:\n\t$(Q)bash -c '${FINDLIBS}'\n\nFSTAR_FLAGS = \\\n  --warn_error -321-331-241-274-239-271 \\\n  --cache_checked_modules --cache_dir $(CACHE_DIR) \\\n  --already_cached \"+Prims+FStar+LowStar+C+Spec.Loops+TestLib\" \\\n  $(addprefix --include ,$(FSTAR_INCLUDE_DIRS))\n\nFSTAR := $(FSTAR_BIN) $(FSTAR_FLAGS)\n\n.depend: $(HINT_DIR) $(CACHE_DIR) $(ROOTS) $(HACL_HOME)\n\t@$(FSTAR) --dep full $(ROOTS) --extract '* -Prims -LowStar -FStar' > $@\n\ninclude .depend\n\n$(HINT_DIR) $(CACHE_DIR):\n\t$(Q)mkdir -p $@\n\ndefine HELPMESSAGE\necho \"hax' default Makefile for F*\"\necho \"\"\necho \"The available targets are:\"\necho \"\"\nfunction target() {\n  printf '  ${ANSI_COLOR_BLUE}%-20b${ANSI_COLOR_RESET} %s\\n' \"$$1\" \"$$2\"\n}\ntarget \"all\" \"Verify every F* files (stops whenever an F* fails first)\"\ntarget \"all-keep-going\" \"Verify every F* files (tries as many F* module as possible)\"\ntarget \"\" \"\"\ntarget \"run/${ANSI_COLOR_TONE}<MyModule.fst>  \" 'Runs F* on `MyModule.fst` only'\ntarget \"\" \"\"\ntarget \"vscode\" 'Generates a `hax.fst.config.json` file'\ntarget \"${ANSI_COLOR_TONE}<MyModule.fst>${ANSI_COLOR_BLUE}-in   \" 'Useful for Emacs, outputs the F* prefix command to be used'\ntarget \"\" \"\"\ntarget \"clean\" 'Cleanup the target'\ntarget \"include-dirs\" 'List the F* include directories'\ntarget \"\" \"\"\ntarget \"describe\" 'List the F* root modules, and describe the environment.'\necho \"\"\necho \"Variables:\"\ntarget \"NO_COLOR\" \"Set to anything to disable colors\"\ntarget \"ADMIT_MODULES\" \"List of modules where F* will assume every SMT 
query\"\ntarget \"FSTAR_INCLUDE_DIRS_EXTRA\" \"List of extra include F* dirs\"\nendef\nexport HELPMESSAGE\n\ndescribe:\n\t@printf '${ANSI_COLOR_BBLUE}F* roots:${ANSI_COLOR_RESET}\\n'\n\t@for root in ${ROOTS}; do \\\n\t  filename=$$(basename -- \"$$root\") ;\\\n\t  ext=\"$${filename##*.}\" ;\\\n\t  noext=\"$${filename%.*}\" ;\\\n\t  printf \"${ANSI_COLOR_GRAY}$$(dirname -- \"$$root\")/${ANSI_COLOR_RESET}%s${ANSI_COLOR_GRAY}.${ANSI_COLOR_TONE}%s${ANSI_COLOR_RESET}%b\\n\" \"$$noext\" \"$$ext\" $$([[ \"${ADMIT_MODULES}\" =~ (^| )$$root($$| ) ]] && echo '${ANSI_COLOR_RED}\\t[ADMITTED]${ANSI_COLOR_RESET}'); \\\n\tdone\n\t@printf '\\n${ANSI_COLOR_BBLUE}Environment:${ANSI_COLOR_RESET}\\n'\n\t@printf ' - ${ANSI_COLOR_BLUE}HACL_HOME${ANSI_COLOR_RESET} = %s\\n' '${HACL_HOME}'\n\t@printf ' - ${ANSI_COLOR_BLUE}FSTAR_BIN${ANSI_COLOR_RESET} = %s\\n' '${FSTAR_BIN}'\n\t@printf ' - ${ANSI_COLOR_BLUE}GIT_ROOT_DIR${ANSI_COLOR_RESET} = %s\\n' '${GIT_ROOT_DIR}'\n\t@printf ' - ${ANSI_COLOR_BLUE}CACHE_DIR${ANSI_COLOR_RESET} = %s\\n' '${CACHE_DIR}'\n\t@printf ' - ${ANSI_COLOR_BLUE}HINT_DIR${ANSI_COLOR_RESET} = %s\\n' '${HINT_DIR}'\n\t@printf ' - ${ANSI_COLOR_BLUE}ADMIT_MODULE_FLAGS${ANSI_COLOR_RESET} = %s\\n' '${ADMIT_MODULE_FLAGS}'\n\t@printf ' - ${ANSI_COLOR_BLUE}FSTAR_INCLUDE_DIRS_EXTRA${ANSI_COLOR_RESET} = %s\\n' '${FSTAR_INCLUDE_DIRS_EXTRA}'\n\nhelp: ;@bash -c \"$$HELPMESSAGE\"\nh: ;@bash -c \"$$HELPMESSAGE\"\n\nHEADER = $(Q)printf '${ANSI_COLOR_BBLUE}[CHECK] %s ${ANSI_COLOR_RESET}\\n' \"$(basename $(notdir $@))\"\n\nrun/%: | .depend $(HINT_DIR) $(CACHE_DIR) $(HACL_HOME)\n\t${HEADER}\n\t$(Q)$(FSTAR) $(OTHERFLAGS) $(@:run/%=%)\n\nVERIFIED_CHECKED = $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ROOTS)))\nADMIT_CHECKED = $(addsuffix .checked, $(addprefix $(CACHE_DIR)/,$(ADMIT_MODULES)))\n\n$(ADMIT_CHECKED):\n\t$(Q)printf '${ANSI_COLOR_BBLUE}[${ANSI_COLOR_TONE}ADMIT${ANSI_COLOR_BBLUE}] %s ${ANSI_COLOR_RESET}\\n' \"$(basename $(notdir $@))\"\n\t$(Q)$(FSTAR) $(OTHERFLAGS) 
$(ADMIT_MODULE_FLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints || { \\\n\t  echo \"\" ; \\\n\t  exit 1 ; \\\n\t}\n\t$(Q)printf \"\\n\\n\"\n\n$(CACHE_DIR)/%.checked: | .depend $(HINT_DIR) $(CACHE_DIR) $(HACL_HOME)\n\t${HEADER}\n\t$(Q)$(FSTAR) $(OTHERFLAGS) $< $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(notdir $*).hints || { \\\n\t  echo \"\" ; \\\n\t  exit 1 ; \\\n\t}\n\ttouch $@\n\t$(Q)printf \"\\n\\n\"\n\nverify: $(VERIFIED_CHECKED) $(ADMIT_CHECKED)\n\n# Targets for Emacs\n%.fst-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fst.hints)\n%.fsti-in:\n\t$(info $(FSTAR_FLAGS) \\\n\t  $(ENABLE_HINTS) --hint_file $(HINT_DIR)/$(basename $@).fsti.hints)\n\n# Targets for VSCode\nhax.fst.config.json: .depend\n\t$(Q)echo \"$(FSTAR_INCLUDE_DIRS)\" | jq --arg fstar \"$(FSTAR_BIN)\" -R 'split(\" \") | {fstar_exe: $$fstar | gsub(\"^\\\\s+|\\\\s+$$\";\"\"), include_dirs: .}' > $@\nvscode:\n\t$(Q)rm -f .depend\n\t$(Q)$(MAKE) -f $(PATH_TO_CHILD_MAKEFILE) hax.fst.config.json\n\nSHELL=bash\n\n# Clean target\nclean:\n\trm -rf $(CACHE_DIR)/*\n\trm *.fst\n"
  },
  {
    "path": "rustc-coverage-tests/proofs/lean/extraction/lakefile.toml",
    "content": "name = \"Coverage\"\nversion = \"0.1.0\"\ndefaultTargets = [\"Coverage\"]\n\n[[lean_lib]]\nname = \"Coverage\"\n\n[[require]]\nname = \"Hax\"\npath = \"../../../../hax-lib/proof-libs/lean\""
  },
  {
    "path": "rustc-coverage-tests/requirements.txt",
    "content": "pyyaml\ntabulate\n"
  },
  {
    "path": "rustc-coverage-tests/run-coverage-tests.py",
    "content": "#!/usr/bin/env python3\n\nimport argparse\nimport subprocess\nimport os\nimport sys\nfrom pathlib import Path\nimport shutil\nimport filecmp\nimport difflib\n\nCONFIG_FILE = \"test_config.yaml\"\n\ndef compare_and_store_outputs(target, base_dir=\"proofs\", store_dir=\"snapshots\", update_snapshots = False):\n    actual_dir = Path(base_dir) / target / \"extraction\"\n    expected_dir = Path(store_dir) / target\n\n    if not actual_dir.exists():\n        print(f\"[WARN] Output dir not found: {actual_dir}\")\n        return True  # No outputs to check\n\n    unstable = False\n    \n    # Only consider .v and .fst files\n    valid_extensions = {\".fst\", \".v\"}\n    files_to_check = [f for f in actual_dir.rglob(\"*\") if f.is_file() and f.suffix in valid_extensions]\n\n\n    for file in files_to_check:\n        if file.is_file():\n            rel_path = file.relative_to(actual_dir)\n            expected_file = expected_dir / rel_path\n\n            if expected_file.exists():\n                if not filecmp.cmp(file, expected_file, shallow=False):\n                    if update_snapshots:\n                        shutil.copy(file, expected_file)\n                        print(f\"✅ Stored new reference for file: {expected_file}\")\n                    else:\n                        print(f\"❌ File mismatch: {rel_path}\")\n                        show_file_diff(expected_file, file)\n                        unstable = True\n            else:\n                # First time: store it\n                expected_file.parent.mkdir(parents=True, exist_ok=True)\n                shutil.copy(file, expected_file)\n                print(f\"✅ Stored new reference file: {expected_file}\")\n\n    return not unstable\n\ndef cleanup_extraction(base_dir=\"proofs\"):\n    actual_dir = Path(base_dir)\n\n    if not actual_dir.exists():\n        print(f\"[WARN] Output dir not found: {actual_dir}\")\n        return True  # No outputs to check\n    \n    # Only consider .v and 
.fst files\n    valid_extensions = {\".v\", \".fst\"}\n    files_to_delete = [f for f in actual_dir.rglob(\"*\") if f.is_file() and f.suffix in valid_extensions]\n\n\n    for file in files_to_delete:\n        if file.is_file():\n            os.remove(file)\n\ndef show_file_diff(file1, file2):\n    with open(file1, \"r\") as f1, open(file2, \"r\") as f2:\n        lines1 = f1.readlines()\n        lines2 = f2.readlines()\n        diff = list(difflib.unified_diff(lines1, lines2, fromfile=str(file1), tofile=str(file2)))\n        if diff:\n            print(\"\".join(diff))\n\ndef load_config(file_path):\n    import yaml\n    with open(file_path, \"r\") as f:\n        return yaml.safe_load(f)\n\ndef run_command(cmd):\n    result = subprocess.run(\n        cmd, \n        stdout=subprocess.PIPE, \n        stderr=subprocess.STDOUT,  # Redirect stderr to stdout\n        shell = True,\n        text=True)\n    return result\n\ndef cargo_cmd(test_name, target, feature):\n    feature_flag = f\"--features {feature}\"\n    target_filter = f\"-i '-** +coverage::{test_name}::**'\" if test_name else \"\"\n    print(f\"cargo hax -C {feature_flag} \\\\; into {target_filter} {target}\")\n    return f\"cargo hax -C {feature_flag} \\\\; into {target_filter} {target}\"\n\ndef run_fstar_lax(test_name, include_negative):\n    cmd = cargo_cmd(test_name, \"fstar\", \"json\" if include_negative else \"fstar-lax\")\n    extraction = run_command(cmd)\n    if extraction.returncode != 0:\n        return extraction\n    return run_command(\"OTHERFLAGS='--admit_smt_queries true' make -C proofs/fstar/extraction\")\n\ndef run_lean_tc(test_name, include_negative):\n    cmd = cargo_cmd(test_name, \"lean\", \"json\" if include_negative else \"lean-tc\")\n    extraction = run_command(cmd)\n    if extraction.returncode != 0:\n        return extraction\n    return run_command(\"lake --dir proofs/lean/extraction build\")\n\ndef run_json_target():\n    return run_command(\"cargo hax -C --features json \\\\; 
json\")\n\ndef write_summary(results, stability):\n    from tabulate import tabulate\n    headers = [\"Test\", \"Target\", \"Expected\", \"Actual\"]\n    if stability:\n        headers.append(\"Stability\")\n    headers.append(\"Result\")\n\n    rows = []\n    for r in results:\n        row = [r['test'], r['target'], r['expected'], r['actual']]\n        if stability:\n            row.append(r.get(\"stability\", \"N/A\"))\n        row.append(r['result'])\n        rows.append(row)\n\n    table = tabulate(rows, headers=headers, tablefmt=\"github\")\n    summary = \"## 🧪 Test Summary\\n\\n\" + table + \"\\n\"\n    path = os.getenv(\"GITHUB_STEP_SUMMARY\")\n    if path:\n        with open(path, \"a\") as f:\n            f.write(summary)\n    else:\n        print(summary)\n\ndef run_tests(config, target, include_negative, check_stability, update_snapshots):\n    results = []\n    all_targets = [\"coq\", \"fstar\", \"fstar-lax\", \"lean\", \"lean-tc\", \"json\"]\n\n    applicable_targets = [target] if target != \"all\" else all_targets\n    \n    if \"json\" in applicable_targets:\n        json_result = run_json_target()\n        rc = json_result.returncode\n        if rc != 0:\n            print(json_result.stdout)   \n            results.append({\n                \"test\": \"cargo-hax-json\",\n                \"target\": \"json\",\n                \"expected\": \"✅ Pass\",\n                \"actual\": \"✅ Pass\" if rc == 0 else \"❌ Fail\",\n                \"result\": \"✅\" if rc == 0 else \"❌\"\n            })\n            return results\n\n    \n    if target == \"json\":\n        return results\n\n    for test_name, targets in config[\"tests\"].items():\n        for t in applicable_targets:\n            is_expected_to_run = t in targets\n            should_run = is_expected_to_run or include_negative\n\n            if not should_run:\n                continue\n\n            cleanup_extraction()\n\n            if t == \"fstar-lax\":\n                command_result = 
run_fstar_lax(test_name, include_negative)\n            elif t == \"lean-tc\":\n                command_result = run_lean_tc(test_name, include_negative)\n            elif t == \"json\":\n                command_result = json_result\n            else:\n                cmd = cargo_cmd(test_name, t, \"json\" if include_negative else t)\n                command_result = run_command(cmd)\n\n            rc = min(command_result.returncode, 1)\n\n            expected_code = 0 if is_expected_to_run else 1\n            passed = (rc == expected_code)\n\n            result = {\n                \"test\": test_name,\n                \"target\": t,\n                \"expected\": \"✅ Pass\" if is_expected_to_run else \"❌ Fail\",\n                \"actual\": \"✅ Pass\" if rc == 0 else \"❌ Fail\",\n                \"result\": \"✅\" if passed else \"❌\"\n            }\n\n            if check_stability and t in [\"fstar\"]:\n                is_stable = compare_and_store_outputs(t, update_snapshots = update_snapshots)\n                if not is_stable:\n                    # optionally mark test as failed\n                    result[\"stability\"] = \"❌\"\n                    result[\"result\"] = \"❌\"\n                else:\n                    result[\"stability\"] = \"✅\"\n\n            print(result)\n            \n            if not passed:\n                print(command_result.stdout)\n\n            results.append(result)\n\n    return results\n\ndef main():\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"target\", choices=[\"coq\", \"fstar\", \"fstar-lax\", \"lean\", \"lean-tc\", \"json\", \"all\"], help=\"Test target\")\n    parser.add_argument(\"--config\", help=\"Path to YAML config file\")\n    parser.add_argument(\"--with-negative\", action=\"store_true\", help=\"Also run non-enabled tests and expect them to fail\")\n    parser.add_argument(\"--check-stability\", action=\"store_true\", help=\"Compare output files to reference versions, applicable only in 
conjunction with with-negative\")\n    parser.add_argument(\"--update-snapshots\", action=\"store_true\", help=\"Store new reference versions of generated files, applicable only in conjunction with with-negative and check-stability\")\n\n    args = parser.parse_args()\n\n    os.environ[\"RUSTFLAGS\"] = \"-C instrument-coverage\"\n\n    stability = args.check_stability and args.with_negative\n\n    config = load_config(args.config) if args.config else load_config(CONFIG_FILE) if args.with_negative else {\"tests\" : {\"\": [\"coq\", \"fstar\", \"fstar-lax\", \"lean\", \"lean-tc\"]}}\n    results = run_tests(config, args.target, args.with_negative, stability, args.update_snapshots)\n    if args.with_negative:\n        write_summary(results, stability)\n    else:\n        print(results)\n    # Exit with non-zero if any result failed (actual != expected)\n    if any(r[\"result\"] == \"❌\" for r in results):\n        sys.exit(1)\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Abort.fst",
    "content": "module Coverage.Abort\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet might_abort (should_abort: bool) : Prims.unit =\n  if should_abort\n  then\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"aborting...\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic_fmt (Core_models.Fmt.Rt.impl_1__new_const\n              (mk_usize 1)\n              (let list = [\"panics and aborts\"] in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                Rust_primitives.Hax.array_of_list 1 list)\n            <:\n            Core_models.Fmt.t_Arguments)\n        <:\n        Rust_primitives.Hax.t_Never)\n  else\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"Don't Panic\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    ()\n\nlet main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 =\n  let countdown:i32 = mk_i32 10 in\n  let countdown:i32 =\n    Rust_primitives.Hax.while_loop (fun countdown ->\n          let countdown:i32 = countdown in\n          true)\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          countdown >. 
mk_i32 0 <: bool)\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n      countdown\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          let _:Prims.unit =\n            if countdown <. mk_i32 5\n            then\n              let _:Prims.unit = might_abort false in\n              ()\n          in\n          let _:Prims.unit =\n            if countdown <. mk_i32 5\n            then\n              let _:Prims.unit = might_abort false in\n              ()\n          in\n          let _:Prims.unit =\n            if countdown <. mk_i32 5\n            then\n              let _:Prims.unit = might_abort false in\n              ()\n          in\n          let countdown:i32 = countdown -! mk_i32 1 in\n          countdown)\n  in\n  Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Assert.fst",
    "content": "module Coverage.Assert\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet might_fail_assert (one_plus_one: u32) : Prims.unit =\n  let args:u32 = one_plus_one <: u32 in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_display #u32 args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"does 1 + 1 = \"; \"?\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  let _:Prims.unit =\n    match mk_u32 1 +! mk_u32 1, one_plus_one <: (u32 & u32) with\n    | left_val, right_val -> Hax_lib.v_assert (left_val =. right_val <: bool)\n  in\n  ()\n\nlet main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 =\n  let countdown:i32 = mk_i32 10 in\n  let countdown:i32 =\n    Rust_primitives.Hax.while_loop (fun countdown ->\n          let countdown:i32 = countdown in\n          true)\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          countdown >. mk_i32 0 <: bool)\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n      countdown\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          let _:Prims.unit =\n            if countdown =. mk_i32 1\n            then\n              let _:Prims.unit = might_fail_assert (mk_u32 3) in\n              ()\n            else\n              if countdown <. 
mk_i32 5\n              then\n                let _:Prims.unit = might_fail_assert (mk_u32 2) in\n                ()\n          in\n          let countdown:i32 = countdown -! mk_i32 1 in\n          countdown)\n  in\n  Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Assert_ne.fst",
    "content": "module Coverage.Assert_ne\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Foo = | Foo : u32 -> t_Foo\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl': Core_models.Fmt.t_Debug t_Foo\n\nunfold\nlet impl = impl'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_1': Core_models.Marker.t_StructuralPartialEq t_Foo\n\nunfold\nlet impl_1 = impl_1'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_2': Core_models.Cmp.t_PartialEq t_Foo t_Foo\n\nunfold\nlet impl_2 = impl_2'\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    match\n      Core_models.Hint.black_box #t_Foo (Foo (mk_u32 5) <: t_Foo),\n      (if Core_models.Hint.black_box #bool false\n        then Foo (mk_u32 0) <: t_Foo\n        else Foo (mk_u32 1) <: t_Foo)\n      <:\n      (t_Foo & t_Foo)\n    with\n    | left_val, right_val -> Hax_lib.v_assert (~.(left_val =. right_val <: bool) <: bool)\n  in\n  () <: Prims.unit\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Assert_not.fst",
    "content": "module Coverage.Assert_not\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = Hax_lib.v_assert true in\n  let _:Prims.unit = Hax_lib.v_assert (~.false <: bool) in\n  let _:Prims.unit = Hax_lib.v_assert (~.(~.true <: bool) <: bool) in\n  let _:Prims.unit = Hax_lib.v_assert (~.(~.(~.false <: bool) <: bool) <: bool) in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Impl_.fst",
    "content": "module Coverage.Attr.Impl_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_MyStruct = | MyStruct : t_MyStruct\n\nlet impl_MyStruct__off_inherit (_: Prims.unit) : Prims.unit = ()\n\nlet impl_MyStruct__off_on (_: Prims.unit) : Prims.unit = ()\n\nlet impl_MyStruct__off_off (_: Prims.unit) : Prims.unit = ()\n\nlet impl_MyStruct__on_inherit (_: Prims.unit) : Prims.unit = ()\n\nlet impl_MyStruct__on_on (_: Prims.unit) : Prims.unit = ()\n\nlet impl_MyStruct__on_off (_: Prims.unit) : Prims.unit = ()\n\nclass t_MyTrait (v_Self: Type0) = {\n  f_method_pre:Prims.unit -> Type0;\n  f_method_post:Prims.unit -> Prims.unit -> Type0;\n  f_method:x0: Prims.unit\n    -> Prims.Pure Prims.unit (f_method_pre x0) (fun result -> f_method_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_MyTrait_for_MyStruct: t_MyTrait t_MyStruct =\n  {\n    f_method_pre = (fun (_: Prims.unit) -> true);\n    f_method_post = (fun (_: Prims.unit) (out: Prims.unit) -> true);\n    f_method = fun (_: Prims.unit) -> ()\n  }\n\nlet main (_: Prims.unit) : Prims.unit = ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Module.Nested_a.Nested_b.fst",
    "content": "module Coverage.Attr.Module.Nested_a.Nested_b\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet inner (_: Prims.unit) : Prims.unit = ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Module.Off.fst",
    "content": "module Coverage.Attr.Module.Off\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet inherit (_: Prims.unit) : Prims.unit = ()\n\nlet on (_: Prims.unit) : Prims.unit = ()\n\nlet off (_: Prims.unit) : Prims.unit = ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Module.On.fst",
    "content": "module Coverage.Attr.Module.On\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet inherit (_: Prims.unit) : Prims.unit = ()\n\nlet on (_: Prims.unit) : Prims.unit = ()\n\nlet off (_: Prims.unit) : Prims.unit = ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Module.fst",
    "content": "module Coverage.Attr.Module\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main (_: Prims.unit) : Prims.unit = ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Off_on_sandwich.fst",
    "content": "module Coverage.Attr.Off_on_sandwich\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet do_stuff (_: Prims.unit) : Prims.unit = ()\n\nlet dense_a__dense_b__dense_c (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = do_stuff () in\n  ()\n\nlet dense_a__dense_b (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = dense_a__dense_b__dense_c () in\n  let _:Prims.unit = dense_a__dense_b__dense_c () in\n  ()\n\nlet dense_a (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = dense_a__dense_b () in\n  let _:Prims.unit = dense_a__dense_b () in\n  ()\n\nlet sparse_a__sparse_b__sparse_c__sparse_d__sparse_e (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = do_stuff () in\n  ()\n\nlet sparse_a__sparse_b__sparse_c__sparse_d (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = sparse_a__sparse_b__sparse_c__sparse_d__sparse_e () in\n  let _:Prims.unit = sparse_a__sparse_b__sparse_c__sparse_d__sparse_e () in\n  ()\n\nlet sparse_a__sparse_b__sparse_c (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = sparse_a__sparse_b__sparse_c__sparse_d () in\n  let _:Prims.unit = sparse_a__sparse_b__sparse_c__sparse_d () in\n  ()\n\nlet sparse_a__sparse_b (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = sparse_a__sparse_b__sparse_c () in\n  let _:Prims.unit = sparse_a__sparse_b__sparse_c () in\n  ()\n\nlet sparse_a (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = sparse_a__sparse_b () in\n  let _:Prims.unit = sparse_a__sparse_b () in\n  ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = dense_a () in\n  let _:Prims.unit = sparse_a () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Attr.Trait_impl_inherit.fst",
    "content": "module Coverage.Attr.Trait_impl_inherit\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\n(* item error backend: Explicit rejection by a phase in the Hax engine:\na node of kind [Trait_item_default] have been found in the AST\n\nNote: the error was labeled with context `reject_TraitItemDefault`.\n\nLast available AST for this item:\n\n#[<cfg_trace>(any(feature = \"json\"))]#[feature(coverage_attribute)]#[<cfg_trace>(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature =\n\"fstar-lax\", feature = \"coq\"))]#[feature(coverage_attribute)]#[allow(unused_attributes)]#[allow(dead_code)]#[allow(unreachable_code)]#[feature(register_tool)]#[register_tool(_hax)]trait t_T<Self_>{fn f_f((self: Self)) -> tuple0{{let _: tuple0 = {std::io::stdio::e_print(core_models::fmt::rt::impl_1__new_const::<generic_value!(todo)>([\"default\\n\"]))};{let _: tuple0 = {Tuple0};Tuple0}}}}\n\nLast AST:\n/** print_rust: pitem: not implemented  (item: { Concrete_ident.T.def_id =\n  { Explicit_def_id.T.is_constructor = false;\n    def_id =\n    { Types.index = (0, 0, None); is_local = true; kind = Types.Trait;\n      krate = \"coverage\";\n      parent =\n      (Some { Types.contents =\n              { Types.id = 0;\n                value =\n                { Types.index = (0, 0, None); is_local = true;\n                  kind = Types.Mod; krate = \"coverage\";\n                  parent =\n                  (Some { Types.contents =\n                          { Types.id = 0;\n                            value =\n                            { Types.index = (0, 0, None); is_local = true;\n                              kind = Types.Mod; krate = \"coverage\";\n                              parent =\n                              (Some { Types.contents =\n                                      { Types.id = 0;\n                                        value =\n                                        { Types.index = (0, 0, None);\n      
                                    is_local = true; kind = Types.Mod;\n                                          krate = \"coverage\"; parent = None;\n                                          path = [] }\n                                        }\n                                      });\n                              path =\n                              [{ Types.data = (Types.TypeNs \"attr\");\n                                 disambiguator = 0 }\n                                ]\n                              }\n                            }\n                          });\n                  path =\n                  [{ Types.data = (Types.TypeNs \"attr\"); disambiguator = 0 };\n                    { Types.data = (Types.TypeNs \"trait_impl_inherit\");\n                      disambiguator = 0 }\n                    ]\n                  }\n                }\n              });\n      path =\n      [{ Types.data = (Types.TypeNs \"attr\"); disambiguator = 0 };\n        { Types.data = (Types.TypeNs \"trait_impl_inherit\"); disambiguator = 0\n          };\n        { Types.data = (Types.TypeNs \"T\"); disambiguator = 0 }]\n      }\n    };\n  moved = None; suffix = None }) */\nconst _: () = ();\n *)\n\ntype t_S = | S : t_S\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: t_T t_S =\n  {\n    f_f_pre = (fun (self: t_S) -> true);\n    f_f_post = (fun (self: t_S) (out: Prims.unit) -> true);\n    f_f\n    =\n    fun (self: t_S) ->\n      let _:Prims.unit =\n        Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n              (let list = [\"impl S\\n\"] in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                Rust_primitives.Hax.array_of_list 1 list)\n            <:\n            Core_models.Fmt.t_Arguments)\n      in\n      let _:Prims.unit = () in\n      ()\n  }\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = f_f #t_S #FStar.Tactics.Typeclasses.solve (S <: t_S) in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Auxiliary.Discard_all_helper.fst",
    "content": "module Coverage.Auxiliary.Discard_all_helper\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet external_function (_: Prims.unit) : Prims.unit = ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Auxiliary.Used_crate.fst",
    "content": "module Coverage.Auxiliary.Used_crate\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet used_only_from_bin_crate_generic_function\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T)\n      (arg: v_T)\n    : Prims.unit =\n  let args:v_T = arg <: v_T in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"used_only_from_bin_crate_generic_function with \"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet used_only_from_this_lib_crate_generic_function\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T)\n      (arg: v_T)\n    : Prims.unit =\n  let args:v_T = arg <: v_T in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"used_only_from_this_lib_crate_generic_function with \"; \"\\n\"] in\n            FStar.Pervasives.assert_norm 
(Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet used_from_bin_crate_and_lib_crate_generic_function\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T)\n      (arg: v_T)\n    : Prims.unit =\n  let args:v_T = arg <: v_T in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"used_from_bin_crate_and_lib_crate_generic_function with \"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet used_with_same_type_from_bin_crate_and_lib_crate_generic_function\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T)\n      (arg: v_T)\n    : Prims.unit =\n  let args:v_T = arg <: v_T in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list =\n              [\"used_with_same_type_from_bin_crate_and_lib_crate_generic_function with \"; \"\\n\"]\n            in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n           
 Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet unused_generic_function\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T)\n      (arg: v_T)\n    : Prims.unit =\n  let args:v_T = arg <: v_T in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"unused_generic_function with \"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet unused_function (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 2 in\n  if ~.is_true\n  then\n    let countdown:i32 = mk_i32 20 in\n    ()\n\nlet unused_private_function (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 2 in\n  if ~.is_true\n  then\n    let countdown:i32 = mk_i32 20 in\n    ()\n\nlet uuse_this_lib_crate (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    used_from_bin_crate_and_lib_crate_generic_function #string \"used from library used_crate.rs\"\n  in\n  
let _:Prims.unit =\n    used_with_same_type_from_bin_crate_and_lib_crate_generic_function #string\n      \"used from library used_crate.rs\"\n  in\n  let some_vec:Alloc.Vec.t_Vec i32 Alloc.Alloc.t_Global =\n    Alloc.Slice.impl__into_vec #i32\n      #Alloc.Alloc.t_Global\n      (Rust_primitives.unsize (Rust_primitives.Hax.box_new (let list =\n                  [mk_i32 5; mk_i32 6; mk_i32 7; mk_i32 8]\n                in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 4);\n                Rust_primitives.Hax.array_of_list 4 list)\n            <:\n            Alloc.Boxed.t_Box (t_Array i32 (mk_usize 4)) Alloc.Alloc.t_Global)\n        <:\n        Alloc.Boxed.t_Box (t_Slice i32) Alloc.Alloc.t_Global)\n  in\n  let _:Prims.unit =\n    used_only_from_this_lib_crate_generic_function #(Alloc.Vec.t_Vec i32 Alloc.Alloc.t_Global)\n      some_vec\n  in\n  let _:Prims.unit =\n    used_only_from_this_lib_crate_generic_function #string \"used ONLY from library used_crate.rs\"\n  in\n  ()\n\nlet used_function (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 0 in\n  let countdown:i32 =\n    if is_true\n    then\n      let countdown:i32 = mk_i32 10 in\n      countdown\n    else countdown\n  in\n  let _:Prims.unit = uuse_this_lib_crate () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Auxiliary.Used_inline_crate.fst",
    "content": "module Coverage.Auxiliary.Used_inline_crate\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet used_only_from_bin_crate_generic_function\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T)\n      (arg: v_T)\n    : Prims.unit =\n  let args:v_T = arg <: v_T in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"used_only_from_bin_crate_generic_function with \"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet used_only_from_this_lib_crate_generic_function\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T)\n      (arg: v_T)\n    : Prims.unit =\n  let args:v_T = arg <: v_T in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"used_only_from_this_lib_crate_generic_function with \"; \"\\n\"] in\n            FStar.Pervasives.assert_norm 
(Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet used_from_bin_crate_and_lib_crate_generic_function\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T)\n      (arg: v_T)\n    : Prims.unit =\n  let args:v_T = arg <: v_T in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"used_from_bin_crate_and_lib_crate_generic_function with \"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet used_with_same_type_from_bin_crate_and_lib_crate_generic_function\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T)\n      (arg: v_T)\n    : Prims.unit =\n  let args:v_T = arg <: v_T in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list =\n              [\"used_with_same_type_from_bin_crate_and_lib_crate_generic_function with \"; \"\\n\"]\n            in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n           
 Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet unused_generic_function\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Debug v_T)\n      (arg: v_T)\n    : Prims.unit =\n  let args:v_T = arg <: v_T in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #v_T args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"unused_generic_function with \"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet unused_function (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 2 in\n  if ~.is_true\n  then\n    let countdown:i32 = mk_i32 20 in\n    ()\n\nlet unused_private_function (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 2 in\n  if ~.is_true\n  then\n    let countdown:i32 = mk_i32 20 in\n    ()\n\nlet uuse_this_lib_crate (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    used_from_bin_crate_and_lib_crate_generic_function #string \"used from library used_crate.rs\"\n  in\n  
let _:Prims.unit =\n    used_with_same_type_from_bin_crate_and_lib_crate_generic_function #string\n      \"used from library used_crate.rs\"\n  in\n  let some_vec:Alloc.Vec.t_Vec i32 Alloc.Alloc.t_Global =\n    Alloc.Slice.impl__into_vec #i32\n      #Alloc.Alloc.t_Global\n      (Rust_primitives.unsize (Rust_primitives.Hax.box_new (let list =\n                  [mk_i32 5; mk_i32 6; mk_i32 7; mk_i32 8]\n                in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 4);\n                Rust_primitives.Hax.array_of_list 4 list)\n            <:\n            Alloc.Boxed.t_Box (t_Array i32 (mk_usize 4)) Alloc.Alloc.t_Global)\n        <:\n        Alloc.Boxed.t_Box (t_Slice i32) Alloc.Alloc.t_Global)\n  in\n  let _:Prims.unit =\n    used_only_from_this_lib_crate_generic_function #(Alloc.Vec.t_Vec i32 Alloc.Alloc.t_Global)\n      some_vec\n  in\n  let _:Prims.unit =\n    used_only_from_this_lib_crate_generic_function #string \"used ONLY from library used_crate.rs\"\n  in\n  ()\n\nlet used_function (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 0 in\n  let countdown:i32 =\n    if is_true\n    then\n      let countdown:i32 = mk_i32 10 in\n      countdown\n    else countdown\n  in\n  let _:Prims.unit = uuse_this_lib_crate () in\n  ()\n\nlet used_inline_function (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 0 in\n  let countdown:i32 =\n    if is_true\n    then\n      let countdown:i32 = mk_i32 10 in\n      countdown\n    else countdown\n  in\n  let _:Prims.unit = 
uuse_this_lib_crate () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Closure_macro.fst",
    "content": "module Coverage.Closure_macro\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet load_configuration_files (_: Prims.unit)\n    : Core_models.Result.t_Result Alloc.String.t_String Alloc.String.t_String =\n  Core_models.Result.Result_Ok\n  (Core_models.Convert.f_from #Alloc.String.t_String\n      #string\n      #FStar.Tactics.Typeclasses.solve\n      \"config\")\n  <:\n  Core_models.Result.t_Result Alloc.String.t_String Alloc.String.t_String\n\nlet main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit Alloc.String.t_String =\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"Starting service\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  match\n    Core_models.Result.impl__or_else #Alloc.String.t_String\n      #Alloc.String.t_String\n      #Alloc.String.t_String\n      (load_configuration_files ()\n        <:\n        Core_models.Result.t_Result Alloc.String.t_String Alloc.String.t_String)\n      (fun e ->\n          let e:Alloc.String.t_String = e in\n          let args:Alloc.String.t_String = e <: Alloc.String.t_String in\n          let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n            let list = [Core_models.Fmt.Rt.impl__new_display #Alloc.String.t_String args] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list\n          in\n          let message:Alloc.String.t_String =\n            Core_models.Hint.must_use #Alloc.String.t_String\n              (Alloc.Fmt.format (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 1)\n                      (mk_usize 1)\n                      (let list = [\"Error loading configs: \"] in\n                        
FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                        Rust_primitives.Hax.array_of_list 1 list)\n                      args\n                    <:\n                    Core_models.Fmt.t_Arguments)\n                <:\n                Alloc.String.t_String)\n          in\n          if (Alloc.String.impl_String__len message <: usize) >. mk_usize 0\n          then\n            let args:Alloc.String.t_String = message <: Alloc.String.t_String in\n            let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n              let list = [Core_models.Fmt.Rt.impl__new_display #Alloc.String.t_String args] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list\n            in\n            let _:Prims.unit =\n              Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n                    (mk_usize 1)\n                    (let list = [\"\"; \"\\n\"] in\n                      FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n                      Rust_primitives.Hax.array_of_list 2 list)\n                    args\n                  <:\n                  Core_models.Fmt.t_Arguments)\n            in\n            let _:Prims.unit = () in\n            Core_models.Result.Result_Ok\n            (Core_models.Convert.f_from #Alloc.String.t_String\n                #string\n                #FStar.Tactics.Typeclasses.solve\n                \"ok\")\n            <:\n            Core_models.Result.t_Result Alloc.String.t_String Alloc.String.t_String\n          else\n            let _:Prims.unit =\n              if (Core_models.Str.impl_str__len \"error\" <: usize) >. 
mk_usize 0\n              then\n                let _:Prims.unit =\n                  Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                        (let list = [\"no msg\\n\"] in\n                          FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                          Rust_primitives.Hax.array_of_list 1 list)\n                      <:\n                      Core_models.Fmt.t_Arguments)\n                in\n                let _:Prims.unit = () in\n                ()\n              else\n                let _:Prims.unit =\n                  Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                        (let list = [\"error\\n\"] in\n                          FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                          Rust_primitives.Hax.array_of_list 1 list)\n                      <:\n                      Core_models.Fmt.t_Arguments)\n                in\n                let _:Prims.unit = () in\n                ()\n            in\n            Core_models.Result.Result_Err\n            (Core_models.Convert.f_from #Alloc.String.t_String\n                #string\n                #FStar.Tactics.Typeclasses.solve\n                \"error\")\n            <:\n            Core_models.Result.t_Result Alloc.String.t_String Alloc.String.t_String)\n    <:\n    Core_models.Result.t_Result Alloc.String.t_String Alloc.String.t_String\n  with\n  | Core_models.Result.Result_Ok config ->\n    let startup_delay_duration:Alloc.String.t_String =\n      Core_models.Convert.f_from #Alloc.String.t_String\n        #string\n        #FStar.Tactics.Typeclasses.solve\n        \"arg\"\n    in\n    let _:(Alloc.String.t_String & Alloc.String.t_String) =\n      config, startup_delay_duration <: (Alloc.String.t_String & Alloc.String.t_String)\n    in\n    Core_models.Result.Result_Ok (() <: Prims.unit)\n    <:\n    Core_models.Result.t_Result Prims.unit 
Alloc.String.t_String\n  | Core_models.Result.Result_Err err ->\n    Core_models.Result.Result_Err err\n    <:\n    Core_models.Result.t_Result Prims.unit Alloc.String.t_String\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Closure_unit_return.fst",
    "content": "module Coverage.Closure_unit_return\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet explicit_unit (_: Prims.unit) : Prims.unit =\n  let closure: Prims.unit -> Prims.unit =\n    fun temp_0_ ->\n      let _:Prims.unit = temp_0_ in\n      let _:Prims.unit = () <: Prims.unit in\n      ()\n  in\n  let _:Prims.unit = Core_models.Mem.drop closure in\n  () <: Prims.unit\n\nlet implicit_unit (_: Prims.unit) : Prims.unit =\n  let closure: Prims.unit -> Prims.unit =\n    fun temp_0_ ->\n      let _:Prims.unit = temp_0_ in\n      let _:Prims.unit = () <: Prims.unit in\n      ()\n  in\n  let _:Prims.unit = Core_models.Mem.drop closure in\n  ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = explicit_unit () in\n  let _:Prims.unit = implicit_unit () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Color.fst",
    "content": "module Coverage.Color\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main (_: Prims.unit) : (Prims.unit & Prims.unit) =\n  Rust_primitives.Hax.Folds.fold_range (mk_i32 0)\n    (mk_i32 0)\n    (fun temp_0_ temp_1_ ->\n        let _:Prims.unit = temp_0_ in\n        let _:i32 = temp_1_ in\n        true)\n    ()\n    (fun temp_0_ e_i ->\n        let _:Prims.unit = temp_0_ in\n        let e_i:i32 = e_i in\n        ()),\n  ()\n  <:\n  (Prims.unit & Prims.unit)\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Condition.Conditions.fst",
    "content": "module Coverage.Condition.Conditions\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet simple_assign (a: bool) : Prims.unit =\n  let x:bool = a in\n  let _:bool = Core_models.Hint.black_box #bool x in\n  ()\n\nlet assign_and (a b: bool) : Prims.unit =\n  let x:bool = a && b in\n  let _:bool = Core_models.Hint.black_box #bool x in\n  ()\n\nlet assign_or (a b: bool) : Prims.unit =\n  let x:bool = a || b in\n  let _:bool = Core_models.Hint.black_box #bool x in\n  ()\n\nlet assign_3_or_and (a b c: bool) : Prims.unit =\n  let x:bool = a || b && c in\n  let _:bool = Core_models.Hint.black_box #bool x in\n  ()\n\nlet assign_3_and_or (a b c: bool) : Prims.unit =\n  let x:bool = a && b || c in\n  let _:bool = Core_models.Hint.black_box #bool x in\n  ()\n\nlet foo (a: bool) : bool = Core_models.Hint.black_box #bool a\n\nlet func_call (a b: bool) : Prims.unit =\n  let _:bool = foo (a && b) in\n  ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = simple_assign true in\n  let _:Prims.unit = simple_assign false in\n  let _:Prims.unit = assign_and true false in\n  let _:Prims.unit = assign_and true true in\n  let _:Prims.unit = assign_and false false in\n  let _:Prims.unit = assign_or true false in\n  let _:Prims.unit = assign_or true true in\n  let _:Prims.unit = assign_or false false in\n  let _:Prims.unit = assign_3_or_and true false false in\n  let _:Prims.unit = assign_3_or_and true true false in\n  let _:Prims.unit = assign_3_or_and false false true in\n  let _:Prims.unit = assign_3_or_and false true true in\n  let _:Prims.unit = assign_3_and_or true false false in\n  let _:Prims.unit = assign_3_and_or true true false in\n  let _:Prims.unit = assign_3_and_or false false true in\n  let _:Prims.unit = assign_3_and_or false true true in\n  let _:Prims.unit = func_call true false in\n  let _:Prims.unit = func_call true true in\n  let _:Prims.unit = func_call false false in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Conditions.fst",
    "content": "module Coverage.Conditions\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main__v_B: u32 = mk_u32 100\n\nlet main (_: Prims.unit) : Prims.unit =\n  let countdown:u32 = mk_u32 0 in\n  let countdown:u32 =\n    if true\n    then\n      let countdown:u32 = mk_u32 10 in\n      countdown\n    else countdown\n  in\n  if countdown >. mk_u32 7\n  then\n    let countdown:u32 = countdown -! mk_u32 4 in\n    let (countdown: u32), (x: u32) = countdown, main__v_B <: (u32 & u32) in\n    let countdown:i32 = mk_i32 0 in\n    let countdown:i32 =\n      if true\n      then\n        let countdown:i32 = mk_i32 10 in\n        countdown\n      else countdown\n    in\n    if countdown >. mk_i32 7\n    then\n      let countdown:i32 = countdown -! mk_i32 4 in\n      if true\n      then\n        let countdown:i32 = mk_i32 0 in\n        let countdown:i32 =\n          if true\n          then\n            let countdown:i32 = mk_i32 10 in\n            countdown\n          else countdown\n        in\n        if countdown >. mk_i32 7\n        then\n          let countdown:i32 = countdown -! mk_i32 4 in\n          let countdown:i32 = mk_i32 0 in\n          let countdown:i32 =\n            if true\n            then\n              let countdown:i32 = mk_i32 1 in\n              countdown\n            else countdown\n          in\n          if countdown >. mk_i32 7\n          then\n            let countdown:i32 = countdown -! mk_i32 4 in\n            let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n            if countdown >. mk_i32 7\n            then\n              let countdown:i32 = countdown -! mk_i32 4 in\n              let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n              ()\n            else\n              if countdown >. mk_i32 2\n              then\n                let countdown:i32 =\n                  if countdown <. mk_i32 1 || countdown >. 
mk_i32 5 || countdown <>. mk_i32 9\n                  then\n                    let countdown:i32 = mk_i32 0 in\n                    countdown\n                  else countdown\n                in\n                let countdown:i32 = countdown -! mk_i32 5 in\n                let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                ()\n          else\n            if countdown >. mk_i32 2\n            then\n              let countdown:i32 =\n                if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                then\n                  let countdown:i32 = mk_i32 0 in\n                  countdown\n                else countdown\n              in\n              let countdown:i32 = countdown -! mk_i32 5 in\n              let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n              if countdown >. mk_i32 7\n              then\n                let countdown:i32 = countdown -! mk_i32 4 in\n                let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                ()\n              else\n                if countdown >. mk_i32 2\n                then\n                  let countdown:i32 =\n                    if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                    then\n                      let countdown:i32 = mk_i32 0 in\n                      countdown\n                    else countdown\n                  in\n                  let countdown:i32 = countdown -! 
mk_i32 5 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n            else\n              let should_be_reachable:i32 = countdown in\n              let _:Prims.unit =\n                Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                      (let list = [\"reached\\n\"] in\n                        FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                        Rust_primitives.Hax.array_of_list 1 list)\n                    <:\n                    Core_models.Fmt.t_Arguments)\n              in\n              let _:Prims.unit = () in\n              ()\n        else\n          if countdown >. mk_i32 2\n          then\n            let countdown:i32 =\n              if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n              then\n                let countdown:i32 = mk_i32 0 in\n                countdown\n              else countdown\n            in\n            let countdown:i32 = countdown -! mk_i32 5 in\n            let countdown:i32 = mk_i32 0 in\n            let countdown:i32 =\n              if true\n              then\n                let countdown:i32 = mk_i32 1 in\n                countdown\n              else countdown\n            in\n            if countdown >. mk_i32 7\n            then\n              let countdown:i32 = countdown -! mk_i32 4 in\n              let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n              if countdown >. mk_i32 7\n              then\n                let countdown:i32 = countdown -! mk_i32 4 in\n                let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                ()\n              else\n                if countdown >. mk_i32 2\n                then\n                  let countdown:i32 =\n                    if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. 
mk_i32 9\n                    then\n                      let countdown:i32 = mk_i32 0 in\n                      countdown\n                    else countdown\n                  in\n                  let countdown:i32 = countdown -! mk_i32 5 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n            else\n              if countdown >. mk_i32 2\n              then\n                let countdown:i32 =\n                  if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                  then\n                    let countdown:i32 = mk_i32 0 in\n                    countdown\n                  else countdown\n                in\n                let countdown:i32 = countdown -! mk_i32 5 in\n                let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                if countdown >. mk_i32 7\n                then\n                  let countdown:i32 = countdown -! mk_i32 4 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n                else\n                  if countdown >. mk_i32 2\n                  then\n                    let countdown:i32 =\n                      if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                      then\n                        let countdown:i32 = mk_i32 0 in\n                        countdown\n                      else countdown\n                    in\n                    let countdown:i32 = countdown -! 
mk_i32 5 in\n                    let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                    ()\n              else\n                let should_be_reachable:i32 = countdown in\n                let _:Prims.unit =\n                  Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                        (let list = [\"reached\\n\"] in\n                          FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                          Rust_primitives.Hax.array_of_list 1 list)\n                      <:\n                      Core_models.Fmt.t_Arguments)\n                in\n                let _:Prims.unit = () in\n                ()\n      else\n        let countdown:i32 = mk_i32 0 in\n        let countdown:i32 =\n          if true\n          then\n            let countdown:i32 = mk_i32 1 in\n            countdown\n          else countdown\n        in\n        if countdown >. mk_i32 7\n        then\n          let countdown:i32 = countdown -! mk_i32 4 in\n          let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n          if countdown >. mk_i32 7\n          then\n            let countdown:i32 = countdown -! mk_i32 4 in\n            let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n            ()\n          else\n            if countdown >. mk_i32 2\n            then\n              let countdown:i32 =\n                if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                then\n                  let countdown:i32 = mk_i32 0 in\n                  countdown\n                else countdown\n              in\n              let countdown:i32 = countdown -! mk_i32 5 in\n              let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n              ()\n        else\n          if countdown >. 
mk_i32 2\n          then\n            let countdown:i32 =\n              if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n              then\n                let countdown:i32 = mk_i32 0 in\n                countdown\n              else countdown\n            in\n            let countdown:i32 = countdown -! mk_i32 5 in\n            let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n            if countdown >. mk_i32 7\n            then\n              let countdown:i32 = countdown -! mk_i32 4 in\n              let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n              ()\n            else\n              if countdown >. mk_i32 2\n              then\n                let countdown:i32 =\n                  if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                  then\n                    let countdown:i32 = mk_i32 0 in\n                    countdown\n                  else countdown\n                in\n                let countdown:i32 = countdown -! mk_i32 5 in\n                let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                ()\n          else\n            let should_be_reachable:i32 = countdown in\n            let _:Prims.unit =\n              Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                    (let list = [\"reached\\n\"] in\n                      FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                      Rust_primitives.Hax.array_of_list 1 list)\n                  <:\n                  Core_models.Fmt.t_Arguments)\n            in\n            let _:Prims.unit = () in\n            ()\n    else\n      if countdown >. mk_i32 2\n      then\n        let countdown:i32 =\n          if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. 
mk_i32 9\n          then\n            let countdown:i32 = mk_i32 0 in\n            countdown\n          else countdown\n        in\n        let countdown:i32 = countdown -! mk_i32 5 in\n        if true\n        then\n          let countdown:i32 = mk_i32 0 in\n          let countdown:i32 =\n            if true\n            then\n              let countdown:i32 = mk_i32 10 in\n              countdown\n            else countdown\n          in\n          if countdown >. mk_i32 7\n          then\n            let countdown:i32 = countdown -! mk_i32 4 in\n            let countdown:i32 = mk_i32 0 in\n            let countdown:i32 =\n              if true\n              then\n                let countdown:i32 = mk_i32 1 in\n                countdown\n              else countdown\n            in\n            if countdown >. mk_i32 7\n            then\n              let countdown:i32 = countdown -! mk_i32 4 in\n              let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n              if countdown >. mk_i32 7\n              then\n                let countdown:i32 = countdown -! mk_i32 4 in\n                let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                ()\n              else\n                if countdown >. mk_i32 2\n                then\n                  let countdown:i32 =\n                    if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                    then\n                      let countdown:i32 = mk_i32 0 in\n                      countdown\n                    else countdown\n                  in\n                  let countdown:i32 = countdown -! mk_i32 5 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n            else\n              if countdown >. mk_i32 2\n              then\n                let countdown:i32 =\n                  if countdown <. mk_i32 1 || countdown >. 
mk_i32 5 || countdown <>. mk_i32 9\n                  then\n                    let countdown:i32 = mk_i32 0 in\n                    countdown\n                  else countdown\n                in\n                let countdown:i32 = countdown -! mk_i32 5 in\n                let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                if countdown >. mk_i32 7\n                then\n                  let countdown:i32 = countdown -! mk_i32 4 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n                else\n                  if countdown >. mk_i32 2\n                  then\n                    let countdown:i32 =\n                      if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                      then\n                        let countdown:i32 = mk_i32 0 in\n                        countdown\n                      else countdown\n                    in\n                    let countdown:i32 = countdown -! mk_i32 5 in\n                    let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                    ()\n              else\n                let should_be_reachable:i32 = countdown in\n                let _:Prims.unit =\n                  Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                        (let list = [\"reached\\n\"] in\n                          FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                          Rust_primitives.Hax.array_of_list 1 list)\n                      <:\n                      Core_models.Fmt.t_Arguments)\n                in\n                let _:Prims.unit = () in\n                ()\n          else\n            if countdown >. mk_i32 2\n            then\n              let countdown:i32 =\n                if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. 
mk_i32 9\n                then\n                  let countdown:i32 = mk_i32 0 in\n                  countdown\n                else countdown\n              in\n              let countdown:i32 = countdown -! mk_i32 5 in\n              let countdown:i32 = mk_i32 0 in\n              let countdown:i32 =\n                if true\n                then\n                  let countdown:i32 = mk_i32 1 in\n                  countdown\n                else countdown\n              in\n              if countdown >. mk_i32 7\n              then\n                let countdown:i32 = countdown -! mk_i32 4 in\n                let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                if countdown >. mk_i32 7\n                then\n                  let countdown:i32 = countdown -! mk_i32 4 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n                else\n                  if countdown >. mk_i32 2\n                  then\n                    let countdown:i32 =\n                      if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                      then\n                        let countdown:i32 = mk_i32 0 in\n                        countdown\n                      else countdown\n                    in\n                    let countdown:i32 = countdown -! mk_i32 5 in\n                    let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                    ()\n              else\n                if countdown >. mk_i32 2\n                then\n                  let countdown:i32 =\n                    if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                    then\n                      let countdown:i32 = mk_i32 0 in\n                      countdown\n                    else countdown\n                  in\n                  let countdown:i32 = countdown -! 
mk_i32 5 in\n                  let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  if countdown >. mk_i32 7\n                  then\n                    let countdown:i32 = countdown -! mk_i32 4 in\n                    let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                    ()\n                  else\n                    if countdown >. mk_i32 2\n                    then\n                      let countdown:i32 =\n                        if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                        then\n                          let countdown:i32 = mk_i32 0 in\n                          countdown\n                        else countdown\n                      in\n                      let countdown:i32 = countdown -! mk_i32 5 in\n                      let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                      ()\n                else\n                  let should_be_reachable:i32 = countdown in\n                  let _:Prims.unit =\n                    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                          (let list = [\"reached\\n\"] in\n                            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                            Rust_primitives.Hax.array_of_list 1 list)\n                        <:\n                        Core_models.Fmt.t_Arguments)\n                  in\n                  let _:Prims.unit = () in\n                  ()\n        else\n          let countdown:i32 = mk_i32 0 in\n          let countdown:i32 =\n            if true\n            then\n              let countdown:i32 = mk_i32 1 in\n              countdown\n            else countdown\n          in\n          if countdown >. mk_i32 7\n          then\n            let countdown:i32 = countdown -! 
mk_i32 4 in\n            let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n            if countdown >. mk_i32 7\n            then\n              let countdown:i32 = countdown -! mk_i32 4 in\n              let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n              ()\n            else\n              if countdown >. mk_i32 2\n              then\n                let countdown:i32 =\n                  if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                  then\n                    let countdown:i32 = mk_i32 0 in\n                    countdown\n                  else countdown\n                in\n                let countdown:i32 = countdown -! mk_i32 5 in\n                let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                ()\n          else\n            if countdown >. mk_i32 2\n            then\n              let countdown:i32 =\n                if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                then\n                  let countdown:i32 = mk_i32 0 in\n                  countdown\n                else countdown\n              in\n              let countdown:i32 = countdown -! mk_i32 5 in\n              let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n              if countdown >. mk_i32 7\n              then\n                let countdown:i32 = countdown -! mk_i32 4 in\n                let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                ()\n              else\n                if countdown >. mk_i32 2\n                then\n                  let countdown:i32 =\n                    if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. 
mk_i32 9\n                    then\n                      let countdown:i32 = mk_i32 0 in\n                      countdown\n                    else countdown\n                  in\n                  let countdown:i32 = countdown -! mk_i32 5 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n            else\n              let should_be_reachable:i32 = countdown in\n              let _:Prims.unit =\n                Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                      (let list = [\"reached\\n\"] in\n                        FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                        Rust_primitives.Hax.array_of_list 1 list)\n                    <:\n                    Core_models.Fmt.t_Arguments)\n              in\n              let _:Prims.unit = () in\n              ()\n  else\n    if countdown >. mk_u32 2\n    then\n      let countdown:u32 =\n        if countdown <. mk_u32 1 || countdown >. mk_u32 5 || countdown <>. mk_u32 9\n        then\n          let countdown:u32 = mk_u32 0 in\n          countdown\n        else countdown\n      in\n      let countdown:u32 = countdown -! mk_u32 5 in\n      let (countdown: u32), (x: u32) = countdown, countdown <: (u32 & u32) in\n      let countdown:i32 = mk_i32 0 in\n      let countdown:i32 =\n        if true\n        then\n          let countdown:i32 = mk_i32 10 in\n          countdown\n        else countdown\n      in\n      if countdown >. mk_i32 7\n      then\n        let countdown:i32 = countdown -! mk_i32 4 in\n        if true\n        then\n          let countdown:i32 = mk_i32 0 in\n          let countdown:i32 =\n            if true\n            then\n              let countdown:i32 = mk_i32 10 in\n              countdown\n            else countdown\n          in\n          if countdown >. mk_i32 7\n          then\n            let countdown:i32 = countdown -! 
mk_i32 4 in\n            let countdown:i32 = mk_i32 0 in\n            let countdown:i32 =\n              if true\n              then\n                let countdown:i32 = mk_i32 1 in\n                countdown\n              else countdown\n            in\n            if countdown >. mk_i32 7\n            then\n              let countdown:i32 = countdown -! mk_i32 4 in\n              let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n              if countdown >. mk_i32 7\n              then\n                let countdown:i32 = countdown -! mk_i32 4 in\n                let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                ()\n              else\n                if countdown >. mk_i32 2\n                then\n                  let countdown:i32 =\n                    if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                    then\n                      let countdown:i32 = mk_i32 0 in\n                      countdown\n                    else countdown\n                  in\n                  let countdown:i32 = countdown -! mk_i32 5 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n            else\n              if countdown >. mk_i32 2\n              then\n                let countdown:i32 =\n                  if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                  then\n                    let countdown:i32 = mk_i32 0 in\n                    countdown\n                  else countdown\n                in\n                let countdown:i32 = countdown -! mk_i32 5 in\n                let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                if countdown >. mk_i32 7\n                then\n                  let countdown:i32 = countdown -! 
mk_i32 4 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n                else\n                  if countdown >. mk_i32 2\n                  then\n                    let countdown:i32 =\n                      if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                      then\n                        let countdown:i32 = mk_i32 0 in\n                        countdown\n                      else countdown\n                    in\n                    let countdown:i32 = countdown -! mk_i32 5 in\n                    let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                    ()\n              else\n                let should_be_reachable:i32 = countdown in\n                let _:Prims.unit =\n                  Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                        (let list = [\"reached\\n\"] in\n                          FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                          Rust_primitives.Hax.array_of_list 1 list)\n                      <:\n                      Core_models.Fmt.t_Arguments)\n                in\n                let _:Prims.unit = () in\n                ()\n          else\n            if countdown >. mk_i32 2\n            then\n              let countdown:i32 =\n                if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                then\n                  let countdown:i32 = mk_i32 0 in\n                  countdown\n                else countdown\n              in\n              let countdown:i32 = countdown -! 
mk_i32 5 in\n              let countdown:i32 = mk_i32 0 in\n              let countdown:i32 =\n                if true\n                then\n                  let countdown:i32 = mk_i32 1 in\n                  countdown\n                else countdown\n              in\n              if countdown >. mk_i32 7\n              then\n                let countdown:i32 = countdown -! mk_i32 4 in\n                let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                if countdown >. mk_i32 7\n                then\n                  let countdown:i32 = countdown -! mk_i32 4 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n                else\n                  if countdown >. mk_i32 2\n                  then\n                    let countdown:i32 =\n                      if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                      then\n                        let countdown:i32 = mk_i32 0 in\n                        countdown\n                      else countdown\n                    in\n                    let countdown:i32 = countdown -! mk_i32 5 in\n                    let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                    ()\n              else\n                if countdown >. mk_i32 2\n                then\n                  let countdown:i32 =\n                    if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                    then\n                      let countdown:i32 = mk_i32 0 in\n                      countdown\n                    else countdown\n                  in\n                  let countdown:i32 = countdown -! mk_i32 5 in\n                  let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  if countdown >. 
mk_i32 7\n                  then\n                    let countdown:i32 = countdown -! mk_i32 4 in\n                    let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                    ()\n                  else\n                    if countdown >. mk_i32 2\n                    then\n                      let countdown:i32 =\n                        if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                        then\n                          let countdown:i32 = mk_i32 0 in\n                          countdown\n                        else countdown\n                      in\n                      let countdown:i32 = countdown -! mk_i32 5 in\n                      let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                      ()\n                else\n                  let should_be_reachable:i32 = countdown in\n                  let _:Prims.unit =\n                    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                          (let list = [\"reached\\n\"] in\n                            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                            Rust_primitives.Hax.array_of_list 1 list)\n                        <:\n                        Core_models.Fmt.t_Arguments)\n                  in\n                  let _:Prims.unit = () in\n                  ()\n        else\n          let countdown:i32 = mk_i32 0 in\n          let countdown:i32 =\n            if true\n            then\n              let countdown:i32 = mk_i32 1 in\n              countdown\n            else countdown\n          in\n          if countdown >. mk_i32 7\n          then\n            let countdown:i32 = countdown -! mk_i32 4 in\n            let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n            if countdown >. 
mk_i32 7\n            then\n              let countdown:i32 = countdown -! mk_i32 4 in\n              let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n              ()\n            else\n              if countdown >. mk_i32 2\n              then\n                let countdown:i32 =\n                  if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                  then\n                    let countdown:i32 = mk_i32 0 in\n                    countdown\n                  else countdown\n                in\n                let countdown:i32 = countdown -! mk_i32 5 in\n                let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                ()\n          else\n            if countdown >. mk_i32 2\n            then\n              let countdown:i32 =\n                if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                then\n                  let countdown:i32 = mk_i32 0 in\n                  countdown\n                else countdown\n              in\n              let countdown:i32 = countdown -! mk_i32 5 in\n              let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n              if countdown >. mk_i32 7\n              then\n                let countdown:i32 = countdown -! mk_i32 4 in\n                let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                ()\n              else\n                if countdown >. mk_i32 2\n                then\n                  let countdown:i32 =\n                    if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                    then\n                      let countdown:i32 = mk_i32 0 in\n                      countdown\n                    else countdown\n                  in\n                  let countdown:i32 = countdown -! 
mk_i32 5 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n            else\n              let should_be_reachable:i32 = countdown in\n              let _:Prims.unit =\n                Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                      (let list = [\"reached\\n\"] in\n                        FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                        Rust_primitives.Hax.array_of_list 1 list)\n                    <:\n                    Core_models.Fmt.t_Arguments)\n              in\n              let _:Prims.unit = () in\n              ()\n      else\n        if countdown >. mk_i32 2\n        then\n          let countdown:i32 =\n            if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n            then\n              let countdown:i32 = mk_i32 0 in\n              countdown\n            else countdown\n          in\n          let countdown:i32 = countdown -! mk_i32 5 in\n          if true\n          then\n            let countdown:i32 = mk_i32 0 in\n            let countdown:i32 =\n              if true\n              then\n                let countdown:i32 = mk_i32 10 in\n                countdown\n              else countdown\n            in\n            if countdown >. mk_i32 7\n            then\n              let countdown:i32 = countdown -! mk_i32 4 in\n              let countdown:i32 = mk_i32 0 in\n              let countdown:i32 =\n                if true\n                then\n                  let countdown:i32 = mk_i32 1 in\n                  countdown\n                else countdown\n              in\n              if countdown >. mk_i32 7\n              then\n                let countdown:i32 = countdown -! mk_i32 4 in\n                let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                if countdown >. 
mk_i32 7\n                then\n                  let countdown:i32 = countdown -! mk_i32 4 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n                else\n                  if countdown >. mk_i32 2\n                  then\n                    let countdown:i32 =\n                      if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                      then\n                        let countdown:i32 = mk_i32 0 in\n                        countdown\n                      else countdown\n                    in\n                    let countdown:i32 = countdown -! mk_i32 5 in\n                    let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                    ()\n              else\n                if countdown >. mk_i32 2\n                then\n                  let countdown:i32 =\n                    if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                    then\n                      let countdown:i32 = mk_i32 0 in\n                      countdown\n                    else countdown\n                  in\n                  let countdown:i32 = countdown -! mk_i32 5 in\n                  let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  if countdown >. mk_i32 7\n                  then\n                    let countdown:i32 = countdown -! mk_i32 4 in\n                    let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                    ()\n                  else\n                    if countdown >. mk_i32 2\n                    then\n                      let countdown:i32 =\n                        if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. 
mk_i32 9\n                        then\n                          let countdown:i32 = mk_i32 0 in\n                          countdown\n                        else countdown\n                      in\n                      let countdown:i32 = countdown -! mk_i32 5 in\n                      let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                      ()\n                else\n                  let should_be_reachable:i32 = countdown in\n                  let _:Prims.unit =\n                    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                          (let list = [\"reached\\n\"] in\n                            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                            Rust_primitives.Hax.array_of_list 1 list)\n                        <:\n                        Core_models.Fmt.t_Arguments)\n                  in\n                  let _:Prims.unit = () in\n                  ()\n            else\n              if countdown >. mk_i32 2\n              then\n                let countdown:i32 =\n                  if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                  then\n                    let countdown:i32 = mk_i32 0 in\n                    countdown\n                  else countdown\n                in\n                let countdown:i32 = countdown -! mk_i32 5 in\n                let countdown:i32 = mk_i32 0 in\n                let countdown:i32 =\n                  if true\n                  then\n                    let countdown:i32 = mk_i32 1 in\n                    countdown\n                  else countdown\n                in\n                if countdown >. mk_i32 7\n                then\n                  let countdown:i32 = countdown -! mk_i32 4 in\n                  let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  if countdown >. 
mk_i32 7\n                  then\n                    let countdown:i32 = countdown -! mk_i32 4 in\n                    let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                    ()\n                  else\n                    if countdown >. mk_i32 2\n                    then\n                      let countdown:i32 =\n                        if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                        then\n                          let countdown:i32 = mk_i32 0 in\n                          countdown\n                        else countdown\n                      in\n                      let countdown:i32 = countdown -! mk_i32 5 in\n                      let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                      ()\n                else\n                  if countdown >. mk_i32 2\n                  then\n                    let countdown:i32 =\n                      if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                      then\n                        let countdown:i32 = mk_i32 0 in\n                        countdown\n                      else countdown\n                    in\n                    let countdown:i32 = countdown -! mk_i32 5 in\n                    let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                    if countdown >. mk_i32 7\n                    then\n                      let countdown:i32 = countdown -! mk_i32 4 in\n                      let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                      ()\n                    else\n                      if countdown >. mk_i32 2\n                      then\n                        let countdown:i32 =\n                          if\n                            countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. 
mk_i32 9\n                          then\n                            let countdown:i32 = mk_i32 0 in\n                            countdown\n                          else countdown\n                        in\n                        let countdown:i32 = countdown -! mk_i32 5 in\n                        let (countdown: i32), (w: Prims.unit) =\n                          countdown, () <: (i32 & Prims.unit)\n                        in\n                        ()\n                  else\n                    let should_be_reachable:i32 = countdown in\n                    let _:Prims.unit =\n                      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                            (let list = [\"reached\\n\"] in\n                              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                              Rust_primitives.Hax.array_of_list 1 list)\n                          <:\n                          Core_models.Fmt.t_Arguments)\n                    in\n                    let _:Prims.unit = () in\n                    ()\n          else\n            let countdown:i32 = mk_i32 0 in\n            let countdown:i32 =\n              if true\n              then\n                let countdown:i32 = mk_i32 1 in\n                countdown\n              else countdown\n            in\n            if countdown >. mk_i32 7\n            then\n              let countdown:i32 = countdown -! mk_i32 4 in\n              let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n              if countdown >. mk_i32 7\n              then\n                let countdown:i32 = countdown -! mk_i32 4 in\n                let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                ()\n              else\n                if countdown >. mk_i32 2\n                then\n                  let countdown:i32 =\n                    if countdown <. mk_i32 1 || countdown >. 
mk_i32 5 || countdown <>. mk_i32 9\n                    then\n                      let countdown:i32 = mk_i32 0 in\n                      countdown\n                    else countdown\n                  in\n                  let countdown:i32 = countdown -! mk_i32 5 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n            else\n              if countdown >. mk_i32 2\n              then\n                let countdown:i32 =\n                  if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                  then\n                    let countdown:i32 = mk_i32 0 in\n                    countdown\n                  else countdown\n                in\n                let countdown:i32 = countdown -! mk_i32 5 in\n                let (countdown: i32), (z: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                if countdown >. mk_i32 7\n                then\n                  let countdown:i32 = countdown -! mk_i32 4 in\n                  let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                  ()\n                else\n                  if countdown >. mk_i32 2\n                  then\n                    let countdown:i32 =\n                      if countdown <. mk_i32 1 || countdown >. mk_i32 5 || countdown <>. mk_i32 9\n                      then\n                        let countdown:i32 = mk_i32 0 in\n                        countdown\n                      else countdown\n                    in\n                    let countdown:i32 = countdown -! 
mk_i32 5 in\n                    let (countdown: i32), (w: Prims.unit) = countdown, () <: (i32 & Prims.unit) in\n                    ()\n              else\n                let should_be_reachable:i32 = countdown in\n                let _:Prims.unit =\n                  Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                        (let list = [\"reached\\n\"] in\n                          FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                          Rust_primitives.Hax.array_of_list 1 list)\n                      <:\n                      Core_models.Fmt.t_Arguments)\n                in\n                let _:Prims.unit = () in\n                ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Continue_.fst",
    "content": "module Coverage.Continue_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let x:i32 = mk_i32 0 in\n  let x:i32 =\n    Rust_primitives.Hax.Folds.fold_range (mk_i32 0)\n      (mk_i32 10)\n      (fun x temp_1_ ->\n          let x:i32 = x in\n          let _:i32 = temp_1_ in\n          true)\n      x\n      (fun x temp_1_ ->\n          let x:i32 = x in\n          let _:i32 = temp_1_ in\n          match is_true <: bool with\n          | true -> x\n          | _ ->\n            let x:i32 = mk_i32 1 in\n            mk_i32 3)\n  in\n  let x:i32 =\n    Rust_primitives.Hax.Folds.fold_range (mk_i32 0)\n      (mk_i32 10)\n      (fun x temp_1_ ->\n          let x:i32 = x in\n          let _:i32 = temp_1_ in\n          true)\n      x\n      (fun x temp_1_ ->\n          let x:i32 = x in\n          let _:i32 = temp_1_ in\n          match is_true <: bool with\n          | false ->\n            let x:i32 = mk_i32 1 in\n            mk_i32 3\n          | _ -> x)\n  in\n  let x:i32 =\n    Rust_primitives.Hax.Folds.fold_range (mk_i32 0)\n      (mk_i32 10)\n      (fun x temp_1_ ->\n          let x:i32 = x in\n          let _:i32 = temp_1_ in\n          true)\n      x\n      (fun x temp_1_ ->\n          let x:i32 = x in\n          let _:i32 = temp_1_ in\n          match is_true <: bool with\n          | true ->\n            let x:i32 = mk_i32 1 in\n            mk_i32 3\n          | _ -> x)\n  in\n  let x:i32 =\n    Rust_primitives.Hax.Folds.fold_range (mk_i32 0)\n      (mk_i32 10)\n      (fun 
x temp_1_ ->\n          let x:i32 = x in\n          let _:i32 = temp_1_ in\n          true)\n      x\n      (fun x temp_1_ ->\n          let x:i32 = x in\n          let _:i32 = temp_1_ in\n          if is_true then x else mk_i32 3)\n  in\n  let x:i32 =\n    Rust_primitives.Hax.Folds.fold_range (mk_i32 0)\n      (mk_i32 10)\n      (fun x temp_1_ ->\n          let x:i32 = x in\n          let _:i32 = temp_1_ in\n          true)\n      x\n      (fun x temp_1_ ->\n          let x:i32 = x in\n          let _:i32 = temp_1_ in\n          let x:i32 =\n            match is_true <: bool with\n            | false ->\n              let x:i32 = mk_i32 1 in\n              x\n            | _ ->\n              let _:i32 = x in\n              x\n          in\n          let x:i32 = mk_i32 3 in\n          x)\n  in\n  let x:i32 =\n    Rust_primitives.Hax.Folds.fold_range_cf (mk_i32 0)\n      (mk_i32 10)\n      (fun x temp_1_ ->\n          let x:i32 = x in\n          let _:i32 = temp_1_ in\n          true)\n      x\n      (fun x temp_1_ ->\n          let x:i32 = x in\n          let _:i32 = temp_1_ in\n          match is_true <: bool with\n          | false ->\n            let x:i32 = mk_i32 1 in\n            Core_models.Ops.Control_flow.ControlFlow_Continue (mk_i32 3)\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32\n          | _ ->\n            Core_models.Ops.Control_flow.ControlFlow_Break ((), x <: (Prims.unit & i32))\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32)\n  in\n  let _:i32 = x in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Dead_code.fst",
    "content": "module Coverage.Dead_code\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet unused_pub_fn_not_in_library (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 0 in\n  if is_true\n  then\n    let countdown:i32 = mk_i32 10 in\n    ()\n\nlet unused_fn (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 0 in\n  if is_true\n  then\n    let countdown:i32 = mk_i32 10 in\n    ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 0 in\n  if is_true\n  then\n    let countdown:i32 = mk_i32 10 in\n    ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Drop_trait.fst",
    "content": "module Coverage.Drop_trait\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Firework = { f_strength:i32 }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: Core_models.Ops.Drop.t_Drop t_Firework =\n  {\n    f_drop_pre = (fun (self: t_Firework) -> true);\n    f_drop_post = (fun (self: t_Firework) (out: t_Firework) -> true);\n    f_drop\n    =\n    fun (self: t_Firework) ->\n      let args:i32 = self.f_strength <: i32 in\n      let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n        let list = [Core_models.Fmt.Rt.impl__new_display #i32 args] in\n        FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n        Rust_primitives.Hax.array_of_list 1 list\n      in\n      let _:Prims.unit =\n        Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n              (mk_usize 1)\n              (let list = [\"BOOM times \"; \"!!!\\n\"] in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n                Rust_primitives.Hax.array_of_list 2 list)\n              args\n            <:\n            Core_models.Fmt.t_Arguments)\n      in\n      let _:Prims.unit = () in\n      self\n  }\n\nlet main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 =\n  let e_firecracker:t_Firework = { f_strength = mk_i32 1 } <: t_Firework in\n  let e_tnt:t_Firework = { f_strength = mk_i32 100 } <: t_Firework in\n  if true\n  then\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"Exiting with error...\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    Core_models.Result.Result_Err (mk_u8 1) <: Core_models.Result.t_Result Prims.unit u8\n  else\n    let _:t_Firework = { 
f_strength = mk_i32 1000 } <: t_Firework in\n    Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Fn_sig_into_try.fst",
    "content": "module Coverage.Fn_sig_into_try\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet a (_: Prims.unit) : Core_models.Option.t_Option i32 =\n  let _:Core_models.Option.t_Option i32 =\n    Core_models.Option.Option_Some (mk_i32 7) <: Core_models.Option.t_Option i32\n  in\n  Core_models.Option.Option_Some (mk_i32 0) <: Core_models.Option.t_Option i32\n\nlet b (_: Prims.unit) : Core_models.Option.t_Option i32 =\n  match Core_models.Option.Option_Some (mk_i32 7) <: Core_models.Option.t_Option i32 with\n  | Core_models.Option.Option_Some _ ->\n    Core_models.Option.Option_Some (mk_i32 0) <: Core_models.Option.t_Option i32\n  | Core_models.Option.Option_None  ->\n    Core_models.Option.Option_None <: Core_models.Option.t_Option i32\n\nlet c (_: Prims.unit) : Core_models.Option.t_Option i32 =\n  match Core_models.Option.Option_Some (mk_i32 7) <: Core_models.Option.t_Option i32 with\n  | Core_models.Option.Option_Some _ ->\n    Core_models.Option.Option_Some (mk_i32 0) <: Core_models.Option.t_Option i32\n  | Core_models.Option.Option_None  ->\n    Core_models.Option.Option_None <: Core_models.Option.t_Option i32\n\nlet d (_: Prims.unit) : Core_models.Option.t_Option i32 =\n  let _:Prims.unit = () <: Prims.unit in\n  match Core_models.Option.Option_Some (mk_i32 7) <: Core_models.Option.t_Option i32 with\n  | Core_models.Option.Option_Some _ ->\n    Core_models.Option.Option_Some (mk_i32 0) <: Core_models.Option.t_Option i32\n  | Core_models.Option.Option_None  ->\n    Core_models.Option.Option_None <: Core_models.Option.t_Option i32\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Core_models.Option.t_Option i32 = a () in\n  let _:Core_models.Option.t_Option i32 = b () in\n  let _:Core_models.Option.t_Option i32 = c () in\n  let _:Core_models.Option.t_Option i32 = d () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Generics.fst",
    "content": "module Coverage.Generics\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Firework\n  (v_T: Type0) {| i0: Core_models.Marker.t_Copy v_T |} {| i1: Core_models.Fmt.t_Display v_T |}\n  = { f_strength:v_T }\n\nlet impl__set_strength\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Fmt.t_Display v_T)\n      (self: t_Firework v_T)\n      (new_strength: v_T)\n    : t_Firework v_T =\n  let self:t_Firework v_T = { self with f_strength = new_strength } <: t_Firework v_T in\n  self\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Fmt.t_Display v_T)\n    : Core_models.Ops.Drop.t_Drop (t_Firework v_T) =\n  {\n    f_drop_pre = (fun (self: t_Firework v_T) -> true);\n    f_drop_post = (fun (self: t_Firework v_T) (out: t_Firework v_T) -> true);\n    f_drop\n    =\n    fun (self: t_Firework v_T) ->\n      let args:v_T = self.f_strength <: v_T in\n      let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n        let list = [Core_models.Fmt.Rt.impl__new_display #v_T args] in\n        FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n        Rust_primitives.Hax.array_of_list 1 list\n      in\n      let _:Prims.unit =\n        Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n              (mk_usize 1)\n              (let list = [\"BOOM times \"; \"!!!\\n\"] in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n                Rust_primitives.Hax.array_of_list 2 list)\n              args\n            <:\n            Core_models.Fmt.t_Arguments)\n      in\n      let _:Prims.unit = () in\n      self\n  }\n\nlet main (_: Prims.unit) : 
Core_models.Result.t_Result Prims.unit u8 =\n  let firecracker:t_Firework i32 = { f_strength = mk_i32 1 } <: t_Firework i32 in\n  let firecracker:t_Firework i32 = impl__set_strength #i32 firecracker (mk_i32 2) in\n  let tnt:t_Firework float = { f_strength = mk_float \"100.1\" } <: t_Firework float in\n  let tnt:t_Firework float = impl__set_strength #float tnt (mk_float \"200.1\") in\n  let tnt:t_Firework float = impl__set_strength #float tnt (mk_float \"300.3\") in\n  if true\n  then\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"Exiting with error...\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    Core_models.Result.Result_Err (mk_u8 1) <: Core_models.Result.t_Result Prims.unit u8\n  else\n    let _:t_Firework i32 = { f_strength = mk_i32 1000 } <: t_Firework i32 in\n    Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.If_.fst",
    "content": "module Coverage.If_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 0 in\n  if is_true\n  then\n    let countdown:i32 = mk_i32 10 in\n    ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.If_else.fst",
    "content": "module Coverage.If_else\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 0 in\n  let countdown:i32 =\n    if is_true\n    then\n      let countdown:i32 = mk_i32 10 in\n      countdown\n    else mk_i32 100\n  in\n  if is_true\n  then\n    let countdown:i32 = mk_i32 10 in\n    ()\n  else\n    let countdown:i32 = mk_i32 100 in\n    ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.If_not.fst",
    "content": "module Coverage.If_not\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet if_not (cond: bool) : Prims.unit =\n  let _:Prims.unit =\n    if ~.cond\n    then\n      let _:Prims.unit =\n        Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n              (let list = [\"cond was false\\n\"] in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                Rust_primitives.Hax.array_of_list 1 list)\n            <:\n            Core_models.Fmt.t_Arguments)\n      in\n      let _:Prims.unit = () in\n      ()\n  in\n  let _:Prims.unit =\n    if ~.cond\n    then\n      let _:Prims.unit =\n        Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n              (let list = [\"cond was false\\n\"] in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                Rust_primitives.Hax.array_of_list 1 list)\n            <:\n            Core_models.Fmt.t_Arguments)\n      in\n      let _:Prims.unit = () in\n      ()\n  in\n  if ~.cond\n  then\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"cond was false\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    ()\n  else\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"cond was true\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    ()\n\nlet main (_: Prims.unit) : (Prims.unit & Prims.unit) =\n  let _:Prims.unit =\n   
 Rust_primitives.Hax.Folds.fold_range (mk_i32 0)\n      (mk_i32 8)\n      (fun temp_0_ temp_1_ ->\n          let _:Prims.unit = temp_0_ in\n          let _:i32 = temp_1_ in\n          true)\n      ()\n      (fun temp_0_ temp_1_ ->\n          let _:Prims.unit = temp_0_ in\n          let _:i32 = temp_1_ in\n          if_not (Core_models.Hint.black_box #bool true <: bool) <: Prims.unit)\n  in\n  Rust_primitives.Hax.Folds.fold_range (mk_i32 0)\n    (mk_i32 4)\n    (fun temp_0_ temp_1_ ->\n        let _:Prims.unit = temp_0_ in\n        let _:i32 = temp_1_ in\n        true)\n    ()\n    (fun temp_0_ temp_1_ ->\n        let _:Prims.unit = temp_0_ in\n        let _:i32 = temp_1_ in\n        if_not (Core_models.Hint.black_box #bool false <: bool) <: Prims.unit),\n  ()\n  <:\n  (Prims.unit & Prims.unit)\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Ignore_map.fst",
    "content": "module Coverage.Ignore_map\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main (_: Prims.unit) : Prims.unit = ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Ignore_run.fst",
    "content": "module Coverage.Ignore_run\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main (_: Prims.unit) : Prims.unit = ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Inline.fst",
    "content": "module Coverage.Inline\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet length (#v_T: Type0) (xs: t_Slice v_T) : usize = Core_models.Slice.impl__len #v_T xs\n\nlet swap\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T)\n      (xs: t_Slice v_T)\n      (i j: usize)\n    : t_Slice v_T =\n  let t:v_T = xs.[ i ] in\n  let xs:t_Slice v_T =\n    Rust_primitives.Hax.Monomorphized_update_at.update_at_usize xs i (xs.[ j ] <: v_T)\n  in\n  let xs:t_Slice v_T = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize xs j t in\n  xs\n\nlet display\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Fmt.t_Display v_T)\n      (xs: t_Slice v_T)\n    : Prims.unit =\n  let _:Prims.unit =\n    Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice\n            v_T)\n          #FStar.Tactics.Typeclasses.solve\n          xs\n        <:\n        Core_models.Slice.Iter.t_Iter v_T)\n      ()\n      (fun temp_0_ x ->\n          let _:Prims.unit = temp_0_ in\n          let x:v_T = x in\n          let args:v_T = x <: v_T in\n          let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n            let list = [Core_models.Fmt.Rt.impl__new_display #v_T args] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list\n          in\n          let _:Prims.unit =\n            Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 1)\n                  (mk_usize 1)\n                  (let list = [\"\"] in\n                    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                    Rust_primitives.Hax.array_of_list 1 list)\n                  args\n                <:\n                Core_models.Fmt.t_Arguments)\n          in\n          ())\n  in\n  let _:Prims.unit =\n    
Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet error (_: Prims.unit) : Prims.unit =\n  Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic_fmt (Core_models.Fmt.Rt.impl_1__new_const\n            (mk_usize 1)\n            (let list = [\"error\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n      <:\n      Rust_primitives.Hax.t_Never)\n\nlet rec permutate\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Fmt.t_Display v_T)\n      (xs: t_Slice v_T)\n      (k: usize)\n    : t_Slice v_T =\n  let n:usize = length #v_T xs in\n  let xs:t_Slice v_T =\n    if k =. n\n    then\n      let _:Prims.unit = display #v_T xs in\n      xs\n    else\n      if k <. n\n      then\n        Rust_primitives.Hax.Folds.fold_range k\n          n\n          (fun xs temp_1_ ->\n              let xs:t_Slice v_T = xs in\n              let _:usize = temp_1_ in\n              true)\n          xs\n          (fun xs i ->\n              let xs:t_Slice v_T = xs in\n              let i:usize = i in\n              let xs:t_Slice v_T = swap #v_T xs i k in\n              let xs:t_Slice v_T = permutate #v_T xs (k +! 
mk_usize 1 <: usize) in\n              let xs:t_Slice v_T = swap #v_T xs i k in\n              xs)\n      else\n        let _:Prims.unit = error () in\n        xs\n  in\n  xs\n\nlet permutations\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: Core_models.Fmt.t_Display v_T)\n      (xs: t_Slice v_T)\n    : Prims.unit =\n  let ys:Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global =\n    Alloc.Borrow.f_to_owned #(t_Slice v_T) #FStar.Tactics.Typeclasses.solve xs\n  in\n  let ys:Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global = permutate #v_T ys (mk_usize 0) in\n  ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    permutations #FStar.Char.char\n      ((let list = ['a'; 'b'; 'c'] in\n          FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3);\n          Rust_primitives.Hax.array_of_list 3 list)\n        <:\n        t_Slice FStar.Char.char)\n  in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Inline_dead.fst",
    "content": "module Coverage.Inline_dead\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet dead (_: Prims.unit) : u32 = mk_u32 42\n\nlet live (v_B: bool) (_: Prims.unit) : u32 = if v_B then dead () else mk_u32 0\n\nlet main (_: Prims.unit) : Prims.unit =\n  let args:u32 = live false () <: u32 in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_display #u32 args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"\"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  let f: bool -> Prims.unit =\n    fun x ->\n      let x:bool = x in\n      let _:Prims.unit =\n        if true\n        then\n          let _:Prims.unit = Hax_lib.v_assert x in\n          ()\n      in\n      ()\n  in\n  let _:Prims.unit =\n    Core_models.Ops.Function.f_call #bool #FStar.Tactics.Typeclasses.solve f (false <: bool)\n  in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Inner_items.fst",
    "content": "module Coverage.Inner_items\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet main__t_in_mod__v_IN_MOD_CONST: u32 = mk_u32 1000\n\nlet main__in_func (a: u32) : Prims.unit =\n  let b:u32 = mk_u32 1 in\n  let c:u32 = a +! b in\n  let args:u32 = c <: u32 in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_display #u32 args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"c = \"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  ()\n\ntype main__t_InStruct = { main__f_in_struct_field:u32 }\n\nlet main__v_IN_CONST: u32 = mk_u32 1234\n\n(* item error backend: Explicit rejection by a phase in the Hax engine:\na node of kind [Trait_item_default] have been found in the AST\n\nNote: the error was labeled with context `reject_TraitItemDefault`.\n\nLast available AST for this item:\n\n#[<cfg_trace>(any(feature = \"json\", feature = \"lean\"))]#[allow(unused_assignments, unused_variables, dead_code)]#[feature(coverage_attribute)]#[allow(unused_attributes)]#[allow(dead_code)]#[allow(unreachable_code)]#[feature(register_tool)]#[register_tool(_hax)]trait main__t_InTrait<Self_>{#[_hax::json(\"\\\"TraitMethodNoPrePost\\\"\")]fn main__f_trait_func_pre(_: Self,_: int) -> bool;\n#[_hax::json(\"\\\"TraitMethodNoPrePost\\\"\")]fn main__f_trait_func_post(_: Self,_: int,_: Self) -> bool;\nfn 
main__f_trait_func(_: Self,_: int) -> Self;\nfn main__f_default_trait_func((self: Self)) -> Self{{let _: tuple0 = {coverage::inner_items::main__in_func(coverage::inner_items::main__v_IN_CONST)};{let self: Self = {coverage::inner_items::main__f_trait_func(self,coverage::inner_items::main__v_IN_CONST)};self}}}}\n\nLast AST:\n/** print_rust: pitem: not implemented  (item: { Concrete_ident.T.def_id =\n  { Explicit_def_id.T.is_constructor = false;\n    def_id =\n    { Types.index = (0, 0, None); is_local = true; kind = Types.Trait;\n      krate = \"coverage\";\n      parent =\n      (Some { Types.contents =\n              { Types.id = 0;\n                value =\n                { Types.index = (0, 0, None); is_local = true;\n                  kind = Types.Fn; krate = \"coverage\";\n                  parent =\n                  (Some { Types.contents =\n                          { Types.id = 0;\n                            value =\n                            { Types.index = (0, 0, None); is_local = true;\n                              kind = Types.Mod; krate = \"coverage\";\n                              parent =\n                              (Some { Types.contents =\n                                      { Types.id = 0;\n                                        value =\n                                        { Types.index = (0, 0, None);\n                                          is_local = true; kind = Types.Mod;\n                                          krate = \"coverage\"; parent = None;\n                                          path = [] }\n                                        }\n                                      });\n                              path =\n                              [{ Types.data = (Types.TypeNs \"inner_items\");\n                                 disambiguator = 0 }\n                                ]\n                              }\n                            }\n                          });\n                  path =\n               
   [{ Types.data = (Types.TypeNs \"inner_items\");\n                     disambiguator = 0 };\n                    { Types.data = (Types.ValueNs \"main\"); disambiguator = 0\n                      }\n                    ]\n                  }\n                }\n              });\n      path =\n      [{ Types.data = (Types.TypeNs \"inner_items\"); disambiguator = 0 };\n        { Types.data = (Types.ValueNs \"main\"); disambiguator = 0 };\n        { Types.data = (Types.TypeNs \"InTrait\"); disambiguator = 0 }]\n      }\n    };\n  moved = None; suffix = None }) */\nconst _: () = ();\n *)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet main__impl: main__t_InTrait main__t_InStruct =\n  {\n    main__f_trait_func_pre = (fun (self: main__t_InStruct) (incr: u32) -> true);\n    main__f_trait_func_post\n    =\n    (fun (self: main__t_InStruct) (incr: u32) (out: main__t_InStruct) -> true);\n    main__f_trait_func\n    =\n    fun (self: main__t_InStruct) (incr: u32) ->\n      let self:main__t_InStruct =\n        { self with main__f_in_struct_field = self.main__f_in_struct_field +! 
incr }\n        <:\n        main__t_InStruct\n      in\n      let _:Prims.unit = main__in_func self.main__f_in_struct_field in\n      self\n  }\n\nlet main (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:u32 = mk_u32 0 in\n  let countdown:u32 =\n    if is_true\n    then\n      let countdown:u32 = mk_u32 10 in\n      countdown\n    else countdown\n  in\n  let _:Prims.unit =\n    if is_true\n    then\n      let _:Prims.unit = main__in_func countdown in\n      ()\n  in\n  let v_val:main__t_InStruct = { main__f_in_struct_field = mk_u32 101 } <: main__t_InStruct in\n  let v_val:main__t_InStruct =\n    main__f_default_trait_func #main__t_InStruct #FStar.Tactics.Typeclasses.solve v_val\n  in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Issue_83601_.fst",
    "content": "module Coverage.Issue_83601_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Foo = | Foo : u32 -> t_Foo\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl': Core_models.Fmt.t_Debug t_Foo\n\nunfold\nlet impl = impl'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_1': Core_models.Marker.t_StructuralPartialEq t_Foo\n\nunfold\nlet impl_1 = impl_1'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_2': Core_models.Cmp.t_PartialEq t_Foo t_Foo\n\nunfold\nlet impl_2 = impl_2'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_3': Core_models.Cmp.t_Eq t_Foo\n\nunfold\nlet impl_3 = impl_3'\n\nlet main (_: Prims.unit) : Prims.unit =\n  let bar:t_Foo = Foo (mk_u32 1) <: t_Foo in\n  let _:Prims.unit =\n    match bar, (Foo (mk_u32 1) <: t_Foo) <: (t_Foo & t_Foo) with\n    | left_val, right_val -> Hax_lib.v_assert (left_val =. right_val <: bool)\n  in\n  let baz:t_Foo = Foo (mk_u32 0) <: t_Foo in\n  let _:Prims.unit =\n    match baz, (Foo (mk_u32 1) <: t_Foo) <: (t_Foo & t_Foo) with\n    | left_val, right_val -> Hax_lib.v_assert (~.(left_val =. 
right_val <: bool) <: bool)\n  in\n  let args:t_Foo = (Foo (mk_u32 1) <: t_Foo) <: t_Foo in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #t_Foo args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"\"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  let args:t_Foo = bar <: t_Foo in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #t_Foo args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"\"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  let args:t_Foo = baz <: t_Foo in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #t_Foo args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"\"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            
Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Lazy_boolean.fst",
    "content": "module Coverage.Lazy_boolean\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let (a: i32), (b: i32), (c: i32) = mk_i32 0, mk_i32 0, mk_i32 0 <: (i32 & i32 & i32) in\n  let (a: i32), (b: i32), (c: i32) =\n    if is_true\n    then\n      let a:i32 = mk_i32 1 in\n      let b:i32 = mk_i32 10 in\n      let c:i32 = mk_i32 100 in\n      a, b, c <: (i32 & i32 & i32)\n    else a, b, c <: (i32 & i32 & i32)\n  in\n  let somebool:bool = a <. b || b <. c in\n  let somebool:bool = b <. a || b <. c in\n  let somebool:bool = a <. b && b <. c in\n  let somebool:bool = b <. a && b <. c in\n  let a:i32 =\n    if ~.is_true\n    then\n      let a:i32 = mk_i32 2 in\n      a\n    else a\n  in\n  let (b: i32), (c: i32) =\n    if is_true\n    then\n      let b:i32 = mk_i32 30 in\n      b, c <: (i32 & i32)\n    else\n      let c:i32 = mk_i32 400 in\n      b, c <: (i32 & i32)\n  in\n  let a:i32 =\n    if ~.is_true\n    then\n      let a:i32 = mk_i32 2 in\n      a\n    else a\n  in\n  if is_true\n  then\n    let b:i32 = mk_i32 30 in\n    ()\n  else\n    let c:i32 = mk_i32 400 in\n    ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Let_else_loop.fst",
    "content": "module Coverage.Let_else_loop\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet loopy (cond: bool) : Prims.unit =\n  match cond <: bool with\n  | true -> ()\n  | _ ->\n    Rust_primitives.Hax.failure \"something is not implemented yet.\\nUnhandled loop kind\\n\\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\\nPlease upvote or comment this issue if you see this error message.\\nNote: the error was labeled with context `FunctionalizeLoops`.\\n\"\n      \"{\\n loop {\\n Tuple0\\n }\\n }\",\n    ()\n    <:\n    (Prims.unit & Prims.unit)\n\nlet e_loop_either_way (cond: bool) : Prims.unit =\n  match cond <: bool with\n  | true ->\n    Rust_primitives.Hax.never_to_any ((Rust_primitives.Hax.failure \"something is not implemented yet.\\nUnhandled loop kind\\n\\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\\nPlease upvote or comment this issue if you see this error message.\\nNote: the error was labeled with context `FunctionalizeLoops`.\\n\"\n            \"{\\n loop {\\n Tuple0\\n }\\n }\"\n          <:\n          Prims.unit),\n        ()\n        <:\n        (Prims.unit & Prims.unit))\n  | _ ->\n    Rust_primitives.Hax.failure \"something is not implemented yet.\\nUnhandled loop kind\\n\\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\\nPlease upvote or comment this issue if you see this error message.\\nNote: the error was labeled with context `FunctionalizeLoops`.\\n\"\n      \"{\\n loop {\\n Tuple0\\n }\\n }\",\n    ()\n    <:\n    (Prims.unit & Prims.unit)\n\nlet e_if (cond: bool) : Prims.unit =\n  if cond\n  then\n    Rust_primitives.Hax.never_to_any ((Rust_primitives.Hax.failure \"something is not implemented yet.\\nUnhandled loop kind\\n\\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\\nPlease upvote or comment this issue if you see this error message.\\nNote: the error was labeled with context 
`FunctionalizeLoops`.\\n\"\n            \"{\\n loop {\\n Tuple0\\n }\\n }\"\n          <:\n          Prims.unit),\n        ()\n        <:\n        (Prims.unit & Prims.unit))\n  else\n    Rust_primitives.Hax.never_to_any ((Rust_primitives.Hax.failure \"something is not implemented yet.\\nUnhandled loop kind\\n\\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\\nPlease upvote or comment this issue if you see this error message.\\nNote: the error was labeled with context `FunctionalizeLoops`.\\n\"\n            \"{\\n loop {\\n Tuple0\\n }\\n }\"\n          <:\n          Prims.unit),\n        ()\n        <:\n        (Prims.unit & Prims.unit))\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = loopy true in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Long_and_wide.fst",
    "content": "module Coverage.Long_and_wide\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet wide_function (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = () <: Prims.unit in\n  ()\n\nlet long_function (_: Prims.unit) : Prims.unit = ()\n\nlet far_function (_: Prims.unit) : Prims.unit = ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = wide_function () in\n  let _:Prims.unit = long_function () in\n  let _:Prims.unit = far_function () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Loop_break.fst",
    "content": "module Coverage.Loop_break\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main (_: Prims.unit) : (Prims.unit & Prims.unit) =\n  Rust_primitives.Hax.failure \"something is not implemented yet.\\nUnhandled loop kind\\n\\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\\nPlease upvote or comment this issue if you see this error message.\\nNote: the error was labeled with context `FunctionalizeLoops`.\\n\"\n    \"{\\n loop {\\n (if core_models::hint::black_box::<bool>(true) {\\n core_models::ops::control_flow::ControlFlow_Break(\\n Tuple2(Tuple0, Tuple0()),\\n )\\n } else {\\n core_models::ops::control_flow::ControlFlow_Con...\"\n  ,\n  ()\n  <:\n  (Prims.unit & Prims.unit)\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Loop_break_value.fst",
    "content": "module Coverage.Loop_break_value\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main (_: Prims.unit) : Prims.unit =\n  let result:i32 =\n    Rust_primitives.Hax.failure \"something is not implemented yet.\\nUnhandled loop kind\\n\\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\\nPlease upvote or comment this issue if you see this error message.\\nNote: the error was labeled with context `FunctionalizeLoops`.\\n\"\n      \"{\\n loop {\\n core_models::ops::control_flow::ControlFlow_Break(Tuple2(10, Tuple0()))\\n }\\n }\",\n    ()\n    <:\n    (Prims.unit & Prims.unit)\n  in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Loops_branches.fst",
    "content": "module Coverage.Loops_branches\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_DebugTest = | DebugTest : t_DebugTest\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: Core_models.Fmt.t_Debug t_DebugTest =\n  {\n    f_fmt_pre = (fun (self: t_DebugTest) (f: Core_models.Fmt.t_Formatter) -> true);\n    f_fmt_post\n    =\n    (fun\n        (self: t_DebugTest)\n        (f: Core_models.Fmt.t_Formatter)\n        (out1:\n          (Core_models.Fmt.t_Formatter &\n            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error))\n        ->\n        true);\n    f_fmt\n    =\n    fun (self: t_DebugTest) (f: Core_models.Fmt.t_Formatter) ->\n      if true\n      then\n        let _:Prims.unit =\n          if false\n          then\n            Rust_primitives.Hax.while_loop (fun temp_0_ ->\n                  let _:Prims.unit = temp_0_ in\n                  true)\n              (fun temp_0_ ->\n                  let _:Prims.unit = temp_0_ in\n                  true)\n              (fun temp_0_ ->\n                  let _:Prims.unit = temp_0_ in\n                  Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n              ()\n              (fun temp_0_ ->\n                  let _:Prims.unit = temp_0_ in\n                  ())\n        in\n        let\n        (tmp0: Core_models.Fmt.t_Formatter),\n        (out: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) =\n          Core_models.Fmt.impl_11__write_fmt f\n            (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                (let list = [\"cool\"] in\n                  FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                  Rust_primitives.Hax.array_of_list 1 list)\n              <:\n              Core_models.Fmt.t_Arguments)\n        in\n        let f:Core_models.Fmt.t_Formatter = tmp0 in\n        match out <: Core_models.Result.t_Result Prims.unit 
Core_models.Fmt.t_Error with\n        | Core_models.Result.Result_Ok _ ->\n          (match\n              Rust_primitives.Hax.Folds.fold_range_return (mk_i32 0)\n                (mk_i32 10)\n                (fun f temp_1_ ->\n                    let f:Core_models.Fmt.t_Formatter = f in\n                    let _:i32 = temp_1_ in\n                    true)\n                f\n                (fun f i ->\n                    let f:Core_models.Fmt.t_Formatter = f in\n                    let i:i32 = i in\n                    if true\n                    then\n                      let _:Prims.unit =\n                        if false\n                        then\n                          Rust_primitives.Hax.while_loop (fun temp_0_ ->\n                                let _:Prims.unit = temp_0_ in\n                                true)\n                            (fun temp_0_ ->\n                                let _:Prims.unit = temp_0_ in\n                                true)\n                            (fun temp_0_ ->\n                                let _:Prims.unit = temp_0_ in\n                                Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int\n                            )\n                            ()\n                            (fun temp_0_ ->\n                                let _:Prims.unit = temp_0_ in\n                                ())\n                      in\n                      let\n                      (tmp0: Core_models.Fmt.t_Formatter),\n                      (out: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) =\n                        Core_models.Fmt.impl_11__write_fmt f\n                          (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                              (let list = [\"cool\"] in\n                                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                                Rust_primitives.Hax.array_of_list 1 list)\n          
                  <:\n                            Core_models.Fmt.t_Arguments)\n                      in\n                      let f:Core_models.Fmt.t_Formatter = tmp0 in\n                      match\n                        out <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error\n                      with\n                      | Core_models.Result.Result_Ok _ ->\n                        Core_models.Ops.Control_flow.ControlFlow_Continue f\n                        <:\n                        Core_models.Ops.Control_flow.t_ControlFlow\n                          (Core_models.Ops.Control_flow.t_ControlFlow\n                              (Core_models.Fmt.t_Formatter &\n                                Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                              (Prims.unit & Core_models.Fmt.t_Formatter))\n                          Core_models.Fmt.t_Formatter\n                      | Core_models.Result.Result_Err err ->\n                        Core_models.Ops.Control_flow.ControlFlow_Break\n                        (Core_models.Ops.Control_flow.ControlFlow_Break\n                          (f,\n                            (Core_models.Result.Result_Err err\n                              <:\n                              Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                            <:\n                            (Core_models.Fmt.t_Formatter &\n                              Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error))\n                          <:\n                          Core_models.Ops.Control_flow.t_ControlFlow\n                            (Core_models.Fmt.t_Formatter &\n                              Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                            (Prims.unit & Core_models.Fmt.t_Formatter))\n                        <:\n                        Core_models.Ops.Control_flow.t_ControlFlow\n                          
(Core_models.Ops.Control_flow.t_ControlFlow\n                              (Core_models.Fmt.t_Formatter &\n                                Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                              (Prims.unit & Core_models.Fmt.t_Formatter))\n                          Core_models.Fmt.t_Formatter\n                    else\n                      Core_models.Ops.Control_flow.ControlFlow_Continue f\n                      <:\n                      Core_models.Ops.Control_flow.t_ControlFlow\n                        (Core_models.Ops.Control_flow.t_ControlFlow\n                            (Core_models.Fmt.t_Formatter &\n                              Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                            (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter)\n              <:\n              Core_models.Ops.Control_flow.t_ControlFlow\n                (Core_models.Fmt.t_Formatter &\n                  Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                Core_models.Fmt.t_Formatter\n            with\n            | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret\n            | Core_models.Ops.Control_flow.ControlFlow_Continue f ->\n              let hax_temp_output:Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error =\n                Core_models.Result.Result_Ok (() <: Prims.unit)\n                <:\n                Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error\n              in\n              f, hax_temp_output\n              <:\n              (Core_models.Fmt.t_Formatter &\n                Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error))\n        | Core_models.Result.Result_Err err ->\n          f,\n          (Core_models.Result.Result_Err err\n            <:\n            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n          <:\n          (Core_models.Fmt.t_Formatter &\n            
Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n      else\n        match\n          Rust_primitives.Hax.Folds.fold_range_return (mk_i32 0)\n            (mk_i32 10)\n            (fun f temp_1_ ->\n                let f:Core_models.Fmt.t_Formatter = f in\n                let _:i32 = temp_1_ in\n                true)\n            f\n            (fun f i ->\n                let f:Core_models.Fmt.t_Formatter = f in\n                let i:i32 = i in\n                if true\n                then\n                  let _:Prims.unit =\n                    if false\n                    then\n                      Rust_primitives.Hax.while_loop (fun temp_0_ ->\n                            let _:Prims.unit = temp_0_ in\n                            true)\n                        (fun temp_0_ ->\n                            let _:Prims.unit = temp_0_ in\n                            true)\n                        (fun temp_0_ ->\n                            let _:Prims.unit = temp_0_ in\n                            Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n                        ()\n                        (fun temp_0_ ->\n                            let _:Prims.unit = temp_0_ in\n                            ())\n                  in\n                  let\n                  (tmp0: Core_models.Fmt.t_Formatter),\n                  (out: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) =\n                    Core_models.Fmt.impl_11__write_fmt f\n                      (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                          (let list = [\"cool\"] in\n                            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                            Rust_primitives.Hax.array_of_list 1 list)\n                        <:\n                        Core_models.Fmt.t_Arguments)\n                  in\n                  let f:Core_models.Fmt.t_Formatter = tmp0 in\n                  
match out <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error with\n                  | Core_models.Result.Result_Ok _ ->\n                    Core_models.Ops.Control_flow.ControlFlow_Continue f\n                    <:\n                    Core_models.Ops.Control_flow.t_ControlFlow\n                      (Core_models.Ops.Control_flow.t_ControlFlow\n                          (Core_models.Fmt.t_Formatter &\n                            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                          (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter\n                  | Core_models.Result.Result_Err err ->\n                    Core_models.Ops.Control_flow.ControlFlow_Break\n                    (Core_models.Ops.Control_flow.ControlFlow_Break\n                      (f,\n                        (Core_models.Result.Result_Err err\n                          <:\n                          Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                        <:\n                        (Core_models.Fmt.t_Formatter &\n                          Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error))\n                      <:\n                      Core_models.Ops.Control_flow.t_ControlFlow\n                        (Core_models.Fmt.t_Formatter &\n                          Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                        (Prims.unit & Core_models.Fmt.t_Formatter))\n                    <:\n                    Core_models.Ops.Control_flow.t_ControlFlow\n                      (Core_models.Ops.Control_flow.t_ControlFlow\n                          (Core_models.Fmt.t_Formatter &\n                            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                          (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter\n                else\n                  Core_models.Ops.Control_flow.ControlFlow_Continue 
f\n                  <:\n                  Core_models.Ops.Control_flow.t_ControlFlow\n                    (Core_models.Ops.Control_flow.t_ControlFlow\n                        (Core_models.Fmt.t_Formatter &\n                          Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                        (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter)\n          <:\n          Core_models.Ops.Control_flow.t_ControlFlow\n            (Core_models.Fmt.t_Formatter &\n              Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n            Core_models.Fmt.t_Formatter\n        with\n        | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret\n        | Core_models.Ops.Control_flow.ControlFlow_Continue f ->\n          let hax_temp_output:Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error =\n            Core_models.Result.Result_Ok (() <: Prims.unit)\n            <:\n            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error\n          in\n          f, hax_temp_output\n          <:\n          (Core_models.Fmt.t_Formatter &\n            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n  }\n\ntype t_DisplayTest = | DisplayTest : t_DisplayTest\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1: Core_models.Fmt.t_Display t_DisplayTest =\n  {\n    f_fmt_pre = (fun (self: t_DisplayTest) (f: Core_models.Fmt.t_Formatter) -> true);\n    f_fmt_post\n    =\n    (fun\n        (self: t_DisplayTest)\n        (f: Core_models.Fmt.t_Formatter)\n        (out1:\n          (Core_models.Fmt.t_Formatter &\n            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error))\n        ->\n        true);\n    f_fmt\n    =\n    fun (self: t_DisplayTest) (f: Core_models.Fmt.t_Formatter) ->\n      if false\n      then\n        match\n          Rust_primitives.Hax.Folds.fold_range_return (mk_i32 0)\n            (mk_i32 10)\n            (fun f temp_1_ ->\n                let 
f:Core_models.Fmt.t_Formatter = f in\n                let _:i32 = temp_1_ in\n                true)\n            f\n            (fun f i ->\n                let f:Core_models.Fmt.t_Formatter = f in\n                let i:i32 = i in\n                if false\n                then\n                  Core_models.Ops.Control_flow.ControlFlow_Continue f\n                  <:\n                  Core_models.Ops.Control_flow.t_ControlFlow\n                    (Core_models.Ops.Control_flow.t_ControlFlow\n                        (Core_models.Fmt.t_Formatter &\n                          Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                        (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter\n                else\n                  let _:Prims.unit =\n                    if false\n                    then\n                      Rust_primitives.Hax.while_loop (fun temp_0_ ->\n                            let _:Prims.unit = temp_0_ in\n                            true)\n                        (fun temp_0_ ->\n                            let _:Prims.unit = temp_0_ in\n                            true)\n                        (fun temp_0_ ->\n                            let _:Prims.unit = temp_0_ in\n                            Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n                        ()\n                        (fun temp_0_ ->\n                            let _:Prims.unit = temp_0_ in\n                            ())\n                  in\n                  let\n                  (tmp0: Core_models.Fmt.t_Formatter),\n                  (out: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) =\n                    Core_models.Fmt.impl_11__write_fmt f\n                      (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                          (let list = [\"cool\"] in\n                            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n       
                     Rust_primitives.Hax.array_of_list 1 list)\n                        <:\n                        Core_models.Fmt.t_Arguments)\n                  in\n                  let f:Core_models.Fmt.t_Formatter = tmp0 in\n                  match out <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error with\n                  | Core_models.Result.Result_Ok _ ->\n                    Core_models.Ops.Control_flow.ControlFlow_Continue f\n                    <:\n                    Core_models.Ops.Control_flow.t_ControlFlow\n                      (Core_models.Ops.Control_flow.t_ControlFlow\n                          (Core_models.Fmt.t_Formatter &\n                            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                          (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter\n                  | Core_models.Result.Result_Err err ->\n                    Core_models.Ops.Control_flow.ControlFlow_Break\n                    (Core_models.Ops.Control_flow.ControlFlow_Break\n                      (f,\n                        (Core_models.Result.Result_Err err\n                          <:\n                          Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                        <:\n                        (Core_models.Fmt.t_Formatter &\n                          Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error))\n                      <:\n                      Core_models.Ops.Control_flow.t_ControlFlow\n                        (Core_models.Fmt.t_Formatter &\n                          Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                        (Prims.unit & Core_models.Fmt.t_Formatter))\n                    <:\n                    Core_models.Ops.Control_flow.t_ControlFlow\n                      (Core_models.Ops.Control_flow.t_ControlFlow\n                          (Core_models.Fmt.t_Formatter &\n                            
Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                          (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter)\n          <:\n          Core_models.Ops.Control_flow.t_ControlFlow\n            (Core_models.Fmt.t_Formatter &\n              Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n            Core_models.Fmt.t_Formatter\n        with\n        | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret\n        | Core_models.Ops.Control_flow.ControlFlow_Continue f ->\n          let hax_temp_output:Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error =\n            Core_models.Result.Result_Ok (() <: Prims.unit)\n            <:\n            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error\n          in\n          f, hax_temp_output\n          <:\n          (Core_models.Fmt.t_Formatter &\n            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n      else\n        let _:Prims.unit =\n          if false\n          then\n            Rust_primitives.Hax.while_loop (fun temp_0_ ->\n                  let _:Prims.unit = temp_0_ in\n                  true)\n              (fun temp_0_ ->\n                  let _:Prims.unit = temp_0_ in\n                  true)\n              (fun temp_0_ ->\n                  let _:Prims.unit = temp_0_ in\n                  Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n              ()\n              (fun temp_0_ ->\n                  let _:Prims.unit = temp_0_ in\n                  ())\n        in\n        let\n        (tmp0: Core_models.Fmt.t_Formatter),\n        (out: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) =\n          Core_models.Fmt.impl_11__write_fmt f\n            (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                (let list = [\"cool\"] in\n                  FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                  
Rust_primitives.Hax.array_of_list 1 list)\n              <:\n              Core_models.Fmt.t_Arguments)\n        in\n        let f:Core_models.Fmt.t_Formatter = tmp0 in\n        match out <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error with\n        | Core_models.Result.Result_Ok _ ->\n          (match\n              Rust_primitives.Hax.Folds.fold_range_return (mk_i32 0)\n                (mk_i32 10)\n                (fun f temp_1_ ->\n                    let f:Core_models.Fmt.t_Formatter = f in\n                    let _:i32 = temp_1_ in\n                    true)\n                f\n                (fun f i ->\n                    let f:Core_models.Fmt.t_Formatter = f in\n                    let i:i32 = i in\n                    if false\n                    then\n                      Core_models.Ops.Control_flow.ControlFlow_Continue f\n                      <:\n                      Core_models.Ops.Control_flow.t_ControlFlow\n                        (Core_models.Ops.Control_flow.t_ControlFlow\n                            (Core_models.Fmt.t_Formatter &\n                              Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                            (Prims.unit & Core_models.Fmt.t_Formatter)) Core_models.Fmt.t_Formatter\n                    else\n                      let _:Prims.unit =\n                        if false\n                        then\n                          Rust_primitives.Hax.while_loop (fun temp_0_ ->\n                                let _:Prims.unit = temp_0_ in\n                                true)\n                            (fun temp_0_ ->\n                                let _:Prims.unit = temp_0_ in\n                                true)\n                            (fun temp_0_ ->\n                                let _:Prims.unit = temp_0_ in\n                                Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int\n                            )\n                  
          ()\n                            (fun temp_0_ ->\n                                let _:Prims.unit = temp_0_ in\n                                ())\n                      in\n                      let\n                      (tmp0: Core_models.Fmt.t_Formatter),\n                      (out: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error) =\n                        Core_models.Fmt.impl_11__write_fmt f\n                          (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n                              (let list = [\"cool\"] in\n                                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                                Rust_primitives.Hax.array_of_list 1 list)\n                            <:\n                            Core_models.Fmt.t_Arguments)\n                      in\n                      let f:Core_models.Fmt.t_Formatter = tmp0 in\n                      match\n                        out <: Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error\n                      with\n                      | Core_models.Result.Result_Ok _ ->\n                        Core_models.Ops.Control_flow.ControlFlow_Continue f\n                        <:\n                        Core_models.Ops.Control_flow.t_ControlFlow\n                          (Core_models.Ops.Control_flow.t_ControlFlow\n                              (Core_models.Fmt.t_Formatter &\n                                Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                              (Prims.unit & Core_models.Fmt.t_Formatter))\n                          Core_models.Fmt.t_Formatter\n                      | Core_models.Result.Result_Err err ->\n                        Core_models.Ops.Control_flow.ControlFlow_Break\n                        (Core_models.Ops.Control_flow.ControlFlow_Break\n                          (f,\n                            (Core_models.Result.Result_Err err\n                              
<:\n                              Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                            <:\n                            (Core_models.Fmt.t_Formatter &\n                              Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error))\n                          <:\n                          Core_models.Ops.Control_flow.t_ControlFlow\n                            (Core_models.Fmt.t_Formatter &\n                              Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                            (Prims.unit & Core_models.Fmt.t_Formatter))\n                        <:\n                        Core_models.Ops.Control_flow.t_ControlFlow\n                          (Core_models.Ops.Control_flow.t_ControlFlow\n                              (Core_models.Fmt.t_Formatter &\n                                Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                              (Prims.unit & Core_models.Fmt.t_Formatter))\n                          Core_models.Fmt.t_Formatter)\n              <:\n              Core_models.Ops.Control_flow.t_ControlFlow\n                (Core_models.Fmt.t_Formatter &\n                  Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n                Core_models.Fmt.t_Formatter\n            with\n            | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret\n            | Core_models.Ops.Control_flow.ControlFlow_Continue f ->\n              let hax_temp_output:Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error =\n                Core_models.Result.Result_Ok (() <: Prims.unit)\n                <:\n                Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error\n              in\n              f, hax_temp_output\n              <:\n              (Core_models.Fmt.t_Formatter &\n                Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error))\n        | Core_models.Result.Result_Err err ->\n    
      f,\n          (Core_models.Result.Result_Err err\n            <:\n            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n          <:\n          (Core_models.Fmt.t_Formatter &\n            Core_models.Result.t_Result Prims.unit Core_models.Fmt.t_Error)\n  }\n\nlet main (_: Prims.unit) : Prims.unit =\n  let debug_test:t_DebugTest = DebugTest <: t_DebugTest in\n  let args:t_DebugTest = debug_test <: t_DebugTest in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_debug #t_DebugTest args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"\"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  let display_test:t_DisplayTest = DisplayTest <: t_DisplayTest in\n  let args:t_DisplayTest = display_test <: t_DisplayTest in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_display #t_DisplayTest args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"\"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Macro_in_closure.fst",
    "content": "module Coverage.Macro_in_closure\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet v_NO_BLOCK:  Prims.unit -> Prims.unit =\n  fun temp_0_ ->\n    let _:Prims.unit = temp_0_ in\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"hello\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    ()\n\nlet v_WITH_BLOCK:  Prims.unit -> Prims.unit =\n  fun temp_0_ ->\n    let _:Prims.unit = temp_0_ in\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"hello\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = v_NO_BLOCK () in\n  let _:Prims.unit = v_WITH_BLOCK () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Match_or_pattern.fst",
    "content": "module Coverage.Match_or_pattern\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let (a: u8):u8 = mk_u8 0 in\n  let (b: u8):u8 = mk_u8 0 in\n  let (a: u8), (b: u8) =\n    if is_true\n    then\n      let a:u8 = mk_u8 2 in\n      let b:u8 = mk_u8 0 in\n      a, b <: (u8 & u8)\n    else a, b <: (u8 & u8)\n  in\n  let _:Prims.unit =\n    match a, b <: (u8 & u8) with\n    | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 2\n    | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 3\n    | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 2\n    | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 3 -> ()\n    | _ -> ()\n  in\n  let (a: u8), (b: u8) =\n    if is_true\n    then\n      let a:u8 = mk_u8 0 in\n      let b:u8 = mk_u8 0 in\n      a, b <: (u8 & u8)\n    else a, b <: (u8 & u8)\n  in\n  let _:Prims.unit =\n    match a, b <: (u8 & u8) with\n    | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 2\n    | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 3\n    | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 2\n    | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 3 -> ()\n    | _ -> ()\n  in\n  let (a: u8), (b: u8) =\n    if is_true\n    then\n      let a:u8 = mk_u8 2 in\n      let b:u8 = mk_u8 2 in\n      a, b <: (u8 & u8)\n    else a, b <: (u8 & u8)\n  in\n  let _:Prims.unit =\n    match a, b <: (u8 & u8) with\n    | 
Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 2\n    | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 3\n    | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 2\n    | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 3 -> ()\n    | _ -> ()\n  in\n  let (a: u8), (b: u8) =\n    if is_true\n    then\n      let a:u8 = mk_u8 0 in\n      let b:u8 = mk_u8 2 in\n      a, b <: (u8 & u8)\n    else a, b <: (u8 & u8)\n  in\n  match a, b <: (u8 & u8) with\n  | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 2\n  | Rust_primitives.Integers.MkInt 0, Rust_primitives.Integers.MkInt 3\n  | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 2\n  | Rust_primitives.Integers.MkInt 1, Rust_primitives.Integers.MkInt 3 -> ()\n  | _ -> ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Mcdc.Condition_limit.fst",
    "content": "module Coverage.Mcdc.Condition_limit\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet accept_7_conditions (bool_arr: t_Array bool (mk_usize 7)) : Prims.unit =\n  Rust_primitives.Hax.failure \"something is not implemented yet.\\nPat:Array\\n\\nThis is discussed in issue https://github.com/hacspec/hax/issues/804.\\nPlease upvote or comment this issue if you see this error message.\\nNote: the error was labeled with context `AST import`.\\n\"\n    \"\"\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    accept_7_conditions (Rust_primitives.Hax.repeat false (mk_usize 7) <: t_Array bool (mk_usize 7))\n  in\n  let _:Prims.unit =\n    accept_7_conditions (Rust_primitives.Hax.repeat true (mk_usize 7) <: t_Array bool (mk_usize 7))\n  in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Mcdc.If_.fst",
    "content": "module Coverage.Mcdc.If_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet say (message: string) : Prims.unit =\n  let _:string = Core_models.Hint.black_box #string message in\n  ()\n\nlet mcdc_check_neither (a b: bool) : Prims.unit =\n  if a && b\n  then\n    let _:Prims.unit = say \"a and b\" in\n    ()\n  else\n    let _:Prims.unit = say \"not both\" in\n    ()\n\nlet mcdc_check_a (a b: bool) : Prims.unit =\n  if a && b\n  then\n    let _:Prims.unit = say \"a and b\" in\n    ()\n  else\n    let _:Prims.unit = say \"not both\" in\n    ()\n\nlet mcdc_check_b (a b: bool) : Prims.unit =\n  if a && b\n  then\n    let _:Prims.unit = say \"a and b\" in\n    ()\n  else\n    let _:Prims.unit = say \"not both\" in\n    ()\n\nlet mcdc_check_both (a b: bool) : Prims.unit =\n  if a && b\n  then\n    let _:Prims.unit = say \"a and b\" in\n    ()\n  else\n    let _:Prims.unit = say \"not both\" in\n    ()\n\nlet mcdc_check_tree_decision (a b c: bool) : Prims.unit =\n  if a && (b || c)\n  then\n    let _:Prims.unit = say \"pass\" in\n    ()\n  else\n    let _:Prims.unit = say \"reject\" in\n    ()\n\nlet mcdc_check_not_tree_decision (a b c: bool) : Prims.unit =\n  if (a || b) && c\n  then\n    let _:Prims.unit = say \"pass\" in\n    ()\n  else\n    let _:Prims.unit = say \"reject\" in\n    ()\n\nlet mcdc_nested_if (a b c: bool) : Prims.unit =\n  if a || b\n  then\n    let _:Prims.unit = say \"a or b\" in\n    if b && c\n    then\n      let _:Prims.unit = say \"b and c\" in\n      ()\n  else\n    let _:Prims.unit = say \"neither a nor b\" in\n    ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = mcdc_check_neither false false in\n  let _:Prims.unit = mcdc_check_neither false true in\n  let _:Prims.unit = mcdc_check_a true true in\n  let _:Prims.unit = mcdc_check_a false true in\n  let _:Prims.unit = mcdc_check_b true true in\n  let _:Prims.unit = mcdc_check_b true false in\n  let _:Prims.unit = 
mcdc_check_both false true in\n  let _:Prims.unit = mcdc_check_both true true in\n  let _:Prims.unit = mcdc_check_both true false in\n  let _:Prims.unit = mcdc_check_tree_decision false true true in\n  let _:Prims.unit = mcdc_check_tree_decision true true false in\n  let _:Prims.unit = mcdc_check_tree_decision true false false in\n  let _:Prims.unit = mcdc_check_tree_decision true false true in\n  let _:Prims.unit = mcdc_check_not_tree_decision false true true in\n  let _:Prims.unit = mcdc_check_not_tree_decision true true false in\n  let _:Prims.unit = mcdc_check_not_tree_decision true false false in\n  let _:Prims.unit = mcdc_check_not_tree_decision true false true in\n  let _:Prims.unit = mcdc_nested_if true false true in\n  let _:Prims.unit = mcdc_nested_if true true true in\n  let _:Prims.unit = mcdc_nested_if true true false in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Mcdc.Inlined_expressions.fst",
    "content": "module Coverage.Mcdc.Inlined_expressions\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet inlined_instance (a b: bool) : bool = a && b\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:bool = inlined_instance true false in\n  let _:bool = inlined_instance false true in\n  let _:bool = inlined_instance true true in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Mcdc.Nested_if.fst",
    "content": "module Coverage.Mcdc.Nested_if\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet say (message: string) : Prims.unit =\n  let _:string = Core_models.Hint.black_box #string message in\n  ()\n\nlet nested_if_in_condition (a b c: bool) : Prims.unit =\n  if a && (if b || c then true else false)\n  then\n    let _:Prims.unit = say \"yes\" in\n    ()\n  else\n    let _:Prims.unit = say \"no\" in\n    ()\n\nlet doubly_nested_if_in_condition (a b c d: bool) : Prims.unit =\n  if a && (if b || (if c && d then true else false) then false else true)\n  then\n    let _:Prims.unit = say \"yes\" in\n    ()\n  else\n    let _:Prims.unit = say \"no\" in\n    ()\n\nlet nested_single_condition_decision (a b: bool) : Prims.unit =\n  if a && (if b then false else true)\n  then\n    let _:Prims.unit = say \"yes\" in\n    ()\n  else\n    let _:Prims.unit = say \"no\" in\n    ()\n\nlet nested_in_then_block_in_condition (a b c d e: bool) : Prims.unit =\n  if a && (if b || c then if d && e then true else false else false)\n  then\n    let _:Prims.unit = say \"yes\" in\n    ()\n  else\n    let _:Prims.unit = say \"no\" in\n    ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = nested_if_in_condition true false false in\n  let _:Prims.unit = nested_if_in_condition true true true in\n  let _:Prims.unit = nested_if_in_condition true false true in\n  let _:Prims.unit = nested_if_in_condition false true true in\n  let _:Prims.unit = doubly_nested_if_in_condition true false false true in\n  let _:Prims.unit = doubly_nested_if_in_condition true true true true in\n  let _:Prims.unit = doubly_nested_if_in_condition true false true true in\n  let _:Prims.unit = doubly_nested_if_in_condition false true true true in\n  let _:Prims.unit = nested_single_condition_decision true true in\n  let _:Prims.unit = nested_single_condition_decision true false in\n  let _:Prims.unit = nested_single_condition_decision false false in\n  let 
_:Prims.unit = nested_in_then_block_in_condition false false false false false in\n  let _:Prims.unit = nested_in_then_block_in_condition true false false false false in\n  let _:Prims.unit = nested_in_then_block_in_condition true true false false false in\n  let _:Prims.unit = nested_in_then_block_in_condition true false true false false in\n  let _:Prims.unit = nested_in_then_block_in_condition true false true true false in\n  let _:Prims.unit = nested_in_then_block_in_condition true false true false true in\n  let _:Prims.unit = nested_in_then_block_in_condition true false true true true in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Mcdc.Non_control_flow.fst",
    "content": "module Coverage.Mcdc.Non_control_flow\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet assign_and (a b: bool) : Prims.unit =\n  let x:bool = a && b in\n  let _:bool = Core_models.Hint.black_box #bool x in\n  ()\n\nlet assign_or (a b: bool) : Prims.unit =\n  let x:bool = a || b in\n  let _:bool = Core_models.Hint.black_box #bool x in\n  ()\n\nlet assign_3_ (a b c: bool) : Prims.unit =\n  let x:bool = a || b && c in\n  let _:bool = Core_models.Hint.black_box #bool x in\n  ()\n\nlet assign_3_bis (a b c: bool) : Prims.unit =\n  let x:bool = a && b || c in\n  let _:bool = Core_models.Hint.black_box #bool x in\n  ()\n\nlet right_comb_tree (a b c d e: bool) : Prims.unit =\n  let x:bool = a && (b && (c && (d && e))) in\n  let _:bool = Core_models.Hint.black_box #bool x in\n  ()\n\nlet foo (a: bool) : bool = Core_models.Hint.black_box #bool a\n\nlet func_call (a b: bool) : Prims.unit =\n  let _:bool = foo (a && b) in\n  ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = assign_and true false in\n  let _:Prims.unit = assign_and true true in\n  let _:Prims.unit = assign_and false false in\n  let _:Prims.unit = assign_or true false in\n  let _:Prims.unit = assign_or true true in\n  let _:Prims.unit = assign_or false false in\n  let _:Prims.unit = assign_3_ true false false in\n  let _:Prims.unit = assign_3_ true true false in\n  let _:Prims.unit = assign_3_ false false true in\n  let _:Prims.unit = assign_3_ false true true in\n  let _:Prims.unit = assign_3_bis true false false in\n  let _:Prims.unit = assign_3_bis true true false in\n  let _:Prims.unit = assign_3_bis false false true in\n  let _:Prims.unit = assign_3_bis false true true in\n  let _:Prims.unit = right_comb_tree false false false true true in\n  let _:Prims.unit = right_comb_tree true false false true true in\n  let _:Prims.unit = right_comb_tree true true true true true in\n  let _:Prims.unit = func_call true false in\n  let 
_:Prims.unit = func_call true true in\n  let _:Prims.unit = func_call false false in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Nested_loops.fst",
    "content": "module Coverage.Nested_loops\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet main (_: Prims.unit) : (i32 & Prims.unit) =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 10 in\n  Rust_primitives.Hax.while_loop (fun countdown ->\n        let countdown:i32 = countdown in\n        true)\n    (fun countdown ->\n        let countdown:i32 = countdown in\n        countdown >. mk_i32 0 <: bool)\n    (fun countdown ->\n        let countdown:i32 = countdown in\n        Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n    countdown\n    (fun countdown ->\n        let countdown:i32 = countdown in\n        let a:i32 = mk_i32 100 in\n        let b:i32 = mk_i32 100 in\n        let (a: i32), (b: i32) =\n          Rust_primitives.Hax.Folds.fold_range_cf (mk_i32 0)\n            (mk_i32 50)\n            (fun temp_0_ temp_1_ ->\n                let (a: i32), (b: i32) = temp_0_ in\n                let _:i32 = temp_1_ in\n                true)\n            (a, b <: (i32 & i32))\n            (fun temp_0_ temp_1_ ->\n                let (a: i32), (b: i32) = temp_0_ in\n                let _:i32 = temp_1_ in\n                if a <. mk_i32 30 <: bool\n                then\n                  Core_models.Ops.Control_flow.ControlFlow_Break\n                  ((), (a, b <: (i32 & i32)) <: (Prims.unit & (i32 & i32)))\n                  <:\n                  Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32)) (i32 & i32)\n                else\n                  let a:i32 = a -! 
mk_i32 5 in\n                  let b:i32 = b -! mk_i32 5 in\n                  if b <. mk_i32 90\n                  then\n                    let a:i32 = a -! mk_i32 10 in\n                    if is_true\n                    then\n                      Core_models.Ops.Control_flow.ControlFlow_Break\n                      ((), (a, b <: (i32 & i32)) <: (Prims.unit & (i32 & i32)))\n                      <:\n                      Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32))\n                        (i32 & i32)\n                    else\n                      let a:i32 = a -! mk_i32 2 in\n                      Core_models.Ops.Control_flow.ControlFlow_Continue (a, b <: (i32 & i32))\n                      <:\n                      Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32))\n                        (i32 & i32)\n                  else\n                    Core_models.Ops.Control_flow.ControlFlow_Continue (a, b <: (i32 & i32))\n                    <:\n                    Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32))\n                      (i32 & i32))\n        in\n        countdown -! mk_i32 1),\n  ()\n  <:\n  (i32 & Prims.unit)\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.No_cov_crate.Nested_fns.fst",
    "content": "module Coverage.No_cov_crate.Nested_fns\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet outer_not_covered__inner (is_true: bool) : Prims.unit =\n  if is_true\n  then\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"called and covered\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    ()\n  else\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"absolutely not covered\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    ()\n\nlet outer_not_covered (is_true: bool) : Prims.unit =\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"called but not covered\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  let _:Prims.unit = outer_not_covered__inner is_true in\n  ()\n\nlet outer__inner_not_covered (is_true: bool) : Prims.unit =\n  if is_true\n  then\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"called but not covered\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let 
_:Prims.unit = () in\n    ()\n  else\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"absolutely not covered\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    ()\n\nlet outer (is_true: bool) : Prims.unit =\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"called and covered\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  let _:Prims.unit = outer__inner_not_covered is_true in\n  ()\n\nlet outer_both_covered__inner (is_true: bool) : Prims.unit =\n  if is_true\n  then\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"called and covered\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    ()\n  else\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"absolutely not covered\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    ()\n\nlet outer_both_covered (is_true: bool) : Prims.unit =\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = 
[\"called and covered\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  let _:Prims.unit = outer_both_covered__inner is_true in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.No_cov_crate.fst",
    "content": "module Coverage.No_cov_crate\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet do_not_add_coverage_1_ (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"called but not covered\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet do_not_add_coverage_2_ (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"called but not covered\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet do_not_add_coverage_not_called (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"not called and not covered\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet add_coverage_1_ (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"called and covered\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n 
       <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet add_coverage_2_ (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"called and covered\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet add_coverage_not_called (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"not called but covered\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let _:Prims.unit = do_not_add_coverage_1_ () in\n  let _:Prims.unit = do_not_add_coverage_2_ () in\n  let _:Prims.unit = add_coverage_1_ () in\n  let _:Prims.unit = add_coverage_2_ () in\n  let _:Prims.unit = Coverage.No_cov_crate.Nested_fns.outer_not_covered is_true in\n  let _:Prims.unit = Coverage.No_cov_crate.Nested_fns.outer is_true in\n  let _:Prims.unit = Coverage.No_cov_crate.Nested_fns.outer_both_covered is_true in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.No_spans.fst",
    "content": "module Coverage.No_spans\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet affected_function (_: Prims.unit) :  Prims.unit -> Prims.unit =\n  fun temp_0_ ->\n    let _:Prims.unit = temp_0_ in\n    () <: Prims.unit\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    Core_models.Ops.Function.f_call #Prims.unit\n      #FStar.Tactics.Typeclasses.solve\n      (affected_function () <: Prims.unit -> Prims.unit)\n      (() <: Prims.unit)\n  in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.No_spans_if_not.fst",
    "content": "module Coverage.No_spans_if_not\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet affected_function (_: Prims.unit) : Prims.unit =\n  if ~.false then () <: Prims.unit else () <: Prims.unit\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = affected_function () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Overflow.fst",
    "content": "module Coverage.Overflow\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet might_overflow (to_add: u32) : u32 =\n  let _:Prims.unit =\n    if to_add >. mk_u32 5\n    then\n      let _:Prims.unit =\n        Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n              (let list = [\"this will probably overflow\\n\"] in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                Rust_primitives.Hax.array_of_list 1 list)\n            <:\n            Core_models.Fmt.t_Arguments)\n      in\n      let _:Prims.unit = () in\n      ()\n  in\n  let add_to:u32 = Core_models.Num.impl_u32__MAX -! mk_u32 5 in\n  let args:(u32 & u32) = add_to, to_add <: (u32 & u32) in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 2) =\n    let list =\n      [\n        Core_models.Fmt.Rt.impl__new_display #u32 args._1;\n        Core_models.Fmt.Rt.impl__new_display #u32 args._2\n      ]\n    in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n    Rust_primitives.Hax.array_of_list 2 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 3)\n          (mk_usize 2)\n          (let list = [\"does \"; \" + \"; \" overflow?\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3);\n            Rust_primitives.Hax.array_of_list 3 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  let result:u32 = to_add +! 
add_to in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"continuing after overflow check\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  result\n\nlet main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 =\n  let countdown:i32 = mk_i32 10 in\n  let countdown:i32 =\n    Rust_primitives.Hax.while_loop (fun countdown ->\n          let countdown:i32 = countdown in\n          true)\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          countdown >. mk_i32 0 <: bool)\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n      countdown\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          let _:Prims.unit =\n            if countdown =. 
mk_i32 1\n            then\n              let result:u32 = might_overflow (mk_u32 10) in\n              let args:u32 = result <: u32 in\n              let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n                let list = [Core_models.Fmt.Rt.impl__new_display #u32 args] in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                Rust_primitives.Hax.array_of_list 1 list\n              in\n              let _:Prims.unit =\n                Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n                      (mk_usize 1)\n                      (let list = [\"Result: \"; \"\\n\"] in\n                        FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n                        Rust_primitives.Hax.array_of_list 2 list)\n                      args\n                    <:\n                    Core_models.Fmt.t_Arguments)\n              in\n              let _:Prims.unit = () in\n              ()\n            else\n              if countdown <. 
mk_i32 5\n              then\n                let result:u32 = might_overflow (mk_u32 1) in\n                let args:u32 = result <: u32 in\n                let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n                  let list = [Core_models.Fmt.Rt.impl__new_display #u32 args] in\n                  FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                  Rust_primitives.Hax.array_of_list 1 list\n                in\n                let _:Prims.unit =\n                  Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n                        (mk_usize 1)\n                        (let list = [\"Result: \"; \"\\n\"] in\n                          FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n                          Rust_primitives.Hax.array_of_list 2 list)\n                        args\n                      <:\n                      Core_models.Fmt.t_Arguments)\n                in\n                let _:Prims.unit = () in\n                ()\n          in\n          let countdown:i32 = countdown -! mk_i32 1 in\n          countdown)\n  in\n  Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Panic_unwind.fst",
    "content": "module Coverage.Panic_unwind\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet might_panic (should_panic: bool) : Prims.unit =\n  if should_panic\n  then\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"panicking...\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic_fmt (Core_models.Fmt.Rt.impl_1__new_const\n              (mk_usize 1)\n              (let list = [\"panics\"] in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                Rust_primitives.Hax.array_of_list 1 list)\n            <:\n            Core_models.Fmt.t_Arguments)\n        <:\n        Rust_primitives.Hax.t_Never)\n  else\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n            (let list = [\"Don't Panic\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    ()\n\nlet main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 =\n  let countdown:i32 = mk_i32 10 in\n  let countdown:i32 =\n    Rust_primitives.Hax.while_loop (fun countdown ->\n          let countdown:i32 = countdown in\n          true)\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          countdown >. 
mk_i32 0 <: bool)\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n      countdown\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          let _:Prims.unit =\n            if countdown =. mk_i32 1\n            then\n              let _:Prims.unit = might_panic true in\n              ()\n            else\n              if countdown <. mk_i32 5\n              then\n                let _:Prims.unit = might_panic false in\n                ()\n          in\n          let countdown:i32 = countdown -! mk_i32 1 in\n          countdown)\n  in\n  Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Partial_eq.fst",
    "content": "module Coverage.Partial_eq\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Version = {\n  f_major:usize;\n  f_minor:usize;\n  f_patch:usize\n}\n\nlet impl_1: Core_models.Clone.t_Clone t_Version =\n  { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_2': Core_models.Fmt.t_Debug t_Version\n\nunfold\nlet impl_2 = impl_2'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_3': Core_models.Marker.t_StructuralPartialEq t_Version\n\nunfold\nlet impl_3 = impl_3'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_4': Core_models.Cmp.t_PartialEq t_Version t_Version\n\nunfold\nlet impl_4 = impl_4'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_5': Core_models.Cmp.t_Eq t_Version\n\nunfold\nlet impl_5 = impl_5'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_6': Core_models.Cmp.t_PartialOrd t_Version t_Version\n\nunfold\nlet impl_6 = impl_6'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_7': Core_models.Cmp.t_Ord t_Version\n\nunfold\nlet impl_7 = impl_7'\n\nlet impl_Version__new (major minor patch: usize) : t_Version =\n  { f_major = major; f_minor = minor; f_patch = patch } <: t_Version\n\nlet main (_: Prims.unit) : Prims.unit =\n  let version_3_2_1_:t_Version = impl_Version__new (mk_usize 3) (mk_usize 2) (mk_usize 1) in\n  let version_3_3_0_:t_Version = impl_Version__new (mk_usize 3) (mk_usize 3) (mk_usize 0) in\n  let args:(t_Version & t_Version & bool) =\n    version_3_2_1_,\n    version_3_3_0_,\n    Core_models.Cmp.f_lt #t_Version\n      #t_Version\n      #FStar.Tactics.Typeclasses.solve\n      version_3_2_1_\n      version_3_3_0_\n    <:\n    (t_Version & t_Version & bool)\n  in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 3) =\n    let list =\n      [\n        Core_models.Fmt.Rt.impl__new_debug #t_Version args._1;\n        
Core_models.Fmt.Rt.impl__new_debug #t_Version args._2;\n        Core_models.Fmt.Rt.impl__new_display #bool args._3\n      ]\n    in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3);\n    Rust_primitives.Hax.array_of_list 3 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 4)\n          (mk_usize 3)\n          (let list = [\"\"; \" < \"; \" = \"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 4);\n            Rust_primitives.Hax.array_of_list 4 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Simple_loop.fst",
    "content": "module Coverage.Simple_loop\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet main (_: Prims.unit) : (i32 & Prims.unit) =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 0 in\n  let countdown:i32 =\n    if is_true\n    then\n      let countdown:i32 = mk_i32 10 in\n      countdown\n    else countdown\n  in\n  Rust_primitives.Hax.failure \"something is not implemented yet.\\nUnhandled loop kind\\n\\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\\nPlease upvote or comment this issue if you see this error message.\\nNote: the error was labeled with context `FunctionalizeLoops`.\\n\"\n    \"{\\n (loop {\\n |countdown| {\\n (if rust_primitives::hax::machine_int::eq(countdown, 0) {\\n core_models::ops::control_flow::ControlFlow_Break(\\n Tuple2(Tuple0, countdown),\\n )\\n } else {\\n core_models::ops::con...\"\n  ,\n  ()\n  <:\n  (i32 & Prims.unit)\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Simple_match.fst",
    "content": "module Coverage.Simple_match\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet main (_: Prims.unit) : (Prims.unit & Prims.unit) =\n  let is_true:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) =.\n    mk_usize 1\n  in\n  let countdown:i32 = mk_i32 1 in\n  let countdown:i32 =\n    if is_true\n    then\n      let countdown:i32 = mk_i32 0 in\n      countdown\n    else countdown\n  in\n  Rust_primitives.Hax.Folds.fold_range (mk_i32 0)\n    (mk_i32 2)\n    (fun temp_0_ temp_1_ ->\n        let _:Prims.unit = temp_0_ in\n        let _:i32 = temp_1_ in\n        true)\n    ()\n    (fun temp_0_ temp_1_ ->\n        let _:Prims.unit = temp_0_ in\n        let _:i32 = temp_1_ in\n        Rust_primitives.Hax.failure \"something is not implemented yet.\\nSorry, Hax does not support declare-first let bindings (see https://doc.rust-lang.org/rust-by-example/variable_bindings/declare.html) for now.\\n\\nThis is discussed in issue https://github.com/hacspec/hax/issues/156.\\nPlease upvote or comment this issue if you see this error message.\\nNote: the error was labeled with context `AST import`.\\n\"\n          \"\"\n        <:\n        Prims.unit),\n  ()\n  <:\n  (Prims.unit & Prims.unit)\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Sort_groups.fst",
    "content": "module Coverage.Sort_groups\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Std.Env in\n  ()\n\nlet generic_fn (#v_T: Type0) (cond: bool) : Prims.unit =\n  if cond\n  then\n    let args:string = Core_models.Any.type_name #v_T () <: string in\n    let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n      let list = [Core_models.Fmt.Rt.impl__new_display #string args] in\n      FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n      Rust_primitives.Hax.array_of_list 1 list\n    in\n    let _:Prims.unit =\n      Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n            (mk_usize 1)\n            (let list = [\"\"; \"\\n\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n              Rust_primitives.Hax.array_of_list 2 list)\n            args\n          <:\n          Core_models.Fmt.t_Arguments)\n    in\n    let _:Prims.unit = () in\n    ()\n\nlet other_fn (_: Prims.unit) : Prims.unit = ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let cond:bool =\n    (Core_models.Iter.Traits.Exact_size.f_len #Std.Env.t_Args\n        #FStar.Tactics.Typeclasses.solve\n        (Std.Env.args () <: Std.Env.t_Args)\n      <:\n      usize) >.\n    mk_usize 1\n  in\n  let _:Prims.unit = generic_fn #Prims.unit cond in\n  let _:Prims.unit = generic_fn #string (~.cond <: bool) in\n  let _:Prims.unit =\n    if Core_models.Hint.black_box #bool false\n    then\n      let _:Prims.unit = generic_fn #FStar.Char.char cond in\n      ()\n  in\n  let _:Prims.unit = generic_fn #i32 cond in\n  let _:Prims.unit = other_fn () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Test_harness.fst",
    "content": "module Coverage.Test_harness\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet unused (_: Prims.unit) : Prims.unit = ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Tight_inf_loop.fst",
    "content": "module Coverage.Tight_inf_loop\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main (_: Prims.unit) : Prims.unit =\n  if false\n  then\n    Rust_primitives.Hax.never_to_any ((Rust_primitives.Hax.failure \"something is not implemented yet.\\nUnhandled loop kind\\n\\nThis is discussed in issue https://github.com/hacspec/hax/issues/933.\\nPlease upvote or comment this issue if you see this error message.\\nNote: the error was labeled with context `FunctionalizeLoops`.\\n\"\n            \"{\\n loop {\\n Tuple0\\n }\\n }\"\n          <:\n          Prims.unit),\n        ()\n        <:\n        (Prims.unit & Prims.unit))\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Trivial.fst",
    "content": "module Coverage.Trivial\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main (_: Prims.unit) : Prims.unit = ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Try_error_result.fst",
    "content": "module Coverage.Try_error_result\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet call (return_error: bool) : Core_models.Result.t_Result Prims.unit Prims.unit =\n  if return_error\n  then\n    Core_models.Result.Result_Err (() <: Prims.unit)\n    <:\n    Core_models.Result.t_Result Prims.unit Prims.unit\n  else\n    Core_models.Result.Result_Ok (() <: Prims.unit)\n    <:\n    Core_models.Result.t_Result Prims.unit Prims.unit\n\nlet test1 (_: Prims.unit) : Core_models.Result.t_Result Prims.unit Prims.unit =\n  let countdown:i32 = mk_i32 10 in\n  match\n    Rust_primitives.Hax.Folds.fold_range_return (mk_i32 0)\n      (mk_i32 10)\n      (fun countdown temp_1_ ->\n          let countdown:i32 = countdown in\n          let _:i32 = temp_1_ in\n          true)\n      countdown\n      (fun countdown temp_1_ ->\n          let countdown:i32 = countdown in\n          let _:i32 = temp_1_ in\n          let countdown:i32 = countdown -! mk_i32 1 in\n          if countdown <. 
mk_i32 5\n          then\n            match call true <: Core_models.Result.t_Result Prims.unit Prims.unit with\n            | Core_models.Result.Result_Ok _ ->\n              (match call false <: Core_models.Result.t_Result Prims.unit Prims.unit with\n                | Core_models.Result.Result_Ok _ ->\n                  Core_models.Ops.Control_flow.ControlFlow_Continue countdown\n                  <:\n                  Core_models.Ops.Control_flow.t_ControlFlow\n                    (Core_models.Ops.Control_flow.t_ControlFlow\n                        (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32\n                | Core_models.Result.Result_Err err ->\n                  Core_models.Ops.Control_flow.ControlFlow_Break\n                  (Core_models.Ops.Control_flow.ControlFlow_Break\n                    (Core_models.Result.Result_Err err\n                      <:\n                      Core_models.Result.t_Result Prims.unit Prims.unit)\n                    <:\n                    Core_models.Ops.Control_flow.t_ControlFlow\n                      (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32))\n                  <:\n                  Core_models.Ops.Control_flow.t_ControlFlow\n                    (Core_models.Ops.Control_flow.t_ControlFlow\n                        (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32)\n            | Core_models.Result.Result_Err err ->\n              Core_models.Ops.Control_flow.ControlFlow_Break\n              (Core_models.Ops.Control_flow.ControlFlow_Break\n                (Core_models.Result.Result_Err err\n                  <:\n                  Core_models.Result.t_Result Prims.unit Prims.unit)\n                <:\n                Core_models.Ops.Control_flow.t_ControlFlow\n                  (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32))\n              <:\n              Core_models.Ops.Control_flow.t_ControlFlow\n                
(Core_models.Ops.Control_flow.t_ControlFlow\n                    (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32\n          else\n            match call false <: Core_models.Result.t_Result Prims.unit Prims.unit with\n            | Core_models.Result.Result_Ok _ ->\n              Core_models.Ops.Control_flow.ControlFlow_Continue countdown\n              <:\n              Core_models.Ops.Control_flow.t_ControlFlow\n                (Core_models.Ops.Control_flow.t_ControlFlow\n                    (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32\n            | Core_models.Result.Result_Err err ->\n              Core_models.Ops.Control_flow.ControlFlow_Break\n              (Core_models.Ops.Control_flow.ControlFlow_Break\n                (Core_models.Result.Result_Err err\n                  <:\n                  Core_models.Result.t_Result Prims.unit Prims.unit)\n                <:\n                Core_models.Ops.Control_flow.t_ControlFlow\n                  (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32))\n              <:\n              Core_models.Ops.Control_flow.t_ControlFlow\n                (Core_models.Ops.Control_flow.t_ControlFlow\n                    (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32)\n    <:\n    Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit)\n      i32\n  with\n  | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret\n  | Core_models.Ops.Control_flow.ControlFlow_Continue countdown ->\n    Core_models.Result.Result_Ok (() <: Prims.unit)\n    <:\n    Core_models.Result.t_Result Prims.unit Prims.unit\n\ntype t_Thing1 = | Thing1 : t_Thing1\n\ntype t_Thing2 = | Thing2 : t_Thing2\n\nlet impl_Thing1__get_thing_2_ (self: t_Thing1) (return_error: bool)\n    : Core_models.Result.t_Result t_Thing2 Prims.unit =\n  if return_error\n  then\n    Core_models.Result.Result_Err (() <: Prims.unit)\n    
<:\n    Core_models.Result.t_Result t_Thing2 Prims.unit\n  else\n    Core_models.Result.Result_Ok (Thing2 <: t_Thing2)\n    <:\n    Core_models.Result.t_Result t_Thing2 Prims.unit\n\nlet impl_Thing2__call (self: t_Thing2) (return_error: bool)\n    : Core_models.Result.t_Result u32 Prims.unit =\n  if return_error\n  then\n    Core_models.Result.Result_Err (() <: Prims.unit) <: Core_models.Result.t_Result u32 Prims.unit\n  else Core_models.Result.Result_Ok (mk_u32 57) <: Core_models.Result.t_Result u32 Prims.unit\n\nlet test2 (_: Prims.unit) : Core_models.Result.t_Result Prims.unit Prims.unit =\n  let thing1:t_Thing1 = Thing1 <: t_Thing1 in\n  let countdown:i32 = mk_i32 10 in\n  match\n    Rust_primitives.Hax.Folds.fold_range_return (mk_i32 0)\n      (mk_i32 10)\n      (fun countdown temp_1_ ->\n          let countdown:i32 = countdown in\n          let _:i32 = temp_1_ in\n          true)\n      countdown\n      (fun countdown temp_1_ ->\n          let countdown:i32 = countdown in\n          let _:i32 = temp_1_ in\n          let countdown:i32 = countdown -! mk_i32 1 in\n          if countdown <. 
mk_i32 5\n          then\n            match\n              impl_Thing1__get_thing_2_ thing1 false\n              <:\n              Core_models.Result.t_Result t_Thing2 Prims.unit\n            with\n            | Core_models.Result.Result_Ok hoist1 ->\n              let _:Prims.unit =\n                Core_models.Result.impl__expect_err #u32\n                  #Prims.unit\n                  (impl_Thing2__call hoist1 true <: Core_models.Result.t_Result u32 Prims.unit)\n                  \"call should fail\"\n              in\n              (match\n                  impl_Thing1__get_thing_2_ thing1 false\n                  <:\n                  Core_models.Result.t_Result t_Thing2 Prims.unit\n                with\n                | Core_models.Result.Result_Ok hoist3 ->\n                  let _:Prims.unit =\n                    Core_models.Result.impl__expect_err #u32\n                      #Prims.unit\n                      (impl_Thing2__call hoist3 true <: Core_models.Result.t_Result u32 Prims.unit)\n                      \"call should fail\"\n                  in\n                  (match\n                      impl_Thing1__get_thing_2_ thing1 true\n                      <:\n                      Core_models.Result.t_Result t_Thing2 Prims.unit\n                    with\n                    | Core_models.Result.Result_Ok hoist5 ->\n                      (match\n                          impl_Thing2__call hoist5 true\n                          <:\n                          Core_models.Result.t_Result u32 Prims.unit\n                        with\n                        | Core_models.Result.Result_Ok v_val ->\n                          let _:Prims.unit =\n                            match v_val, mk_u32 57 <: (u32 & u32) with\n                            | left_val, right_val ->\n                              Hax_lib.v_assert (left_val =. 
right_val <: bool)\n                          in\n                          (match\n                              impl_Thing1__get_thing_2_ thing1 true\n                              <:\n                              Core_models.Result.t_Result t_Thing2 Prims.unit\n                            with\n                            | Core_models.Result.Result_Ok hoist7 ->\n                              (match\n                                  impl_Thing2__call hoist7 false\n                                  <:\n                                  Core_models.Result.t_Result u32 Prims.unit\n                                with\n                                | Core_models.Result.Result_Ok v_val ->\n                                  let _:Prims.unit =\n                                    match v_val, mk_u32 57 <: (u32 & u32) with\n                                    | left_val, right_val ->\n                                      Hax_lib.v_assert (left_val =. right_val <: bool)\n                                  in\n                                  Core_models.Ops.Control_flow.ControlFlow_Continue countdown\n                                  <:\n                                  Core_models.Ops.Control_flow.t_ControlFlow\n                                    (Core_models.Ops.Control_flow.t_ControlFlow\n                                        (Core_models.Result.t_Result Prims.unit Prims.unit)\n                                        (Prims.unit & i32)) i32\n                                | Core_models.Result.Result_Err err ->\n                                  Core_models.Ops.Control_flow.ControlFlow_Break\n                                  (Core_models.Ops.Control_flow.ControlFlow_Break\n                                    (Core_models.Result.Result_Err err\n                                      <:\n                                      Core_models.Result.t_Result Prims.unit Prims.unit)\n                                    <:\n                                    
Core_models.Ops.Control_flow.t_ControlFlow\n                                      (Core_models.Result.t_Result Prims.unit Prims.unit)\n                                      (Prims.unit & i32))\n                                  <:\n                                  Core_models.Ops.Control_flow.t_ControlFlow\n                                    (Core_models.Ops.Control_flow.t_ControlFlow\n                                        (Core_models.Result.t_Result Prims.unit Prims.unit)\n                                        (Prims.unit & i32)) i32)\n                            | Core_models.Result.Result_Err err ->\n                              Core_models.Ops.Control_flow.ControlFlow_Break\n                              (Core_models.Ops.Control_flow.ControlFlow_Break\n                                (Core_models.Result.Result_Err err\n                                  <:\n                                  Core_models.Result.t_Result Prims.unit Prims.unit)\n                                <:\n                                Core_models.Ops.Control_flow.t_ControlFlow\n                                  (Core_models.Result.t_Result Prims.unit Prims.unit)\n                                  (Prims.unit & i32))\n                              <:\n                              Core_models.Ops.Control_flow.t_ControlFlow\n                                (Core_models.Ops.Control_flow.t_ControlFlow\n                                    (Core_models.Result.t_Result Prims.unit Prims.unit)\n                                    (Prims.unit & i32)) i32)\n                        | Core_models.Result.Result_Err err ->\n                          Core_models.Ops.Control_flow.ControlFlow_Break\n                          (Core_models.Ops.Control_flow.ControlFlow_Break\n                            (Core_models.Result.Result_Err err\n                              <:\n                              Core_models.Result.t_Result Prims.unit Prims.unit)\n                            <:\n                  
          Core_models.Ops.Control_flow.t_ControlFlow\n                              (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)\n                          )\n                          <:\n                          Core_models.Ops.Control_flow.t_ControlFlow\n                            (Core_models.Ops.Control_flow.t_ControlFlow\n                                (Core_models.Result.t_Result Prims.unit Prims.unit)\n                                (Prims.unit & i32)) i32)\n                    | Core_models.Result.Result_Err err ->\n                      Core_models.Ops.Control_flow.ControlFlow_Break\n                      (Core_models.Ops.Control_flow.ControlFlow_Break\n                        (Core_models.Result.Result_Err err\n                          <:\n                          Core_models.Result.t_Result Prims.unit Prims.unit)\n                        <:\n                        Core_models.Ops.Control_flow.t_ControlFlow\n                          (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32))\n                      <:\n                      Core_models.Ops.Control_flow.t_ControlFlow\n                        (Core_models.Ops.Control_flow.t_ControlFlow\n                            (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32))\n                        i32)\n                | Core_models.Result.Result_Err err ->\n                  Core_models.Ops.Control_flow.ControlFlow_Break\n                  (Core_models.Ops.Control_flow.ControlFlow_Break\n                    (Core_models.Result.Result_Err err\n                      <:\n                      Core_models.Result.t_Result Prims.unit Prims.unit)\n                    <:\n                    Core_models.Ops.Control_flow.t_ControlFlow\n                      (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32))\n                  <:\n                  Core_models.Ops.Control_flow.t_ControlFlow\n                    
(Core_models.Ops.Control_flow.t_ControlFlow\n                        (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32)\n            | Core_models.Result.Result_Err err ->\n              Core_models.Ops.Control_flow.ControlFlow_Break\n              (Core_models.Ops.Control_flow.ControlFlow_Break\n                (Core_models.Result.Result_Err err\n                  <:\n                  Core_models.Result.t_Result Prims.unit Prims.unit)\n                <:\n                Core_models.Ops.Control_flow.t_ControlFlow\n                  (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32))\n              <:\n              Core_models.Ops.Control_flow.t_ControlFlow\n                (Core_models.Ops.Control_flow.t_ControlFlow\n                    (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32\n          else\n            match\n              impl_Thing1__get_thing_2_ thing1 false\n              <:\n              Core_models.Result.t_Result t_Thing2 Prims.unit\n            with\n            | Core_models.Result.Result_Ok hoist9 ->\n              (match\n                  impl_Thing2__call hoist9 false <: Core_models.Result.t_Result u32 Prims.unit\n                with\n                | Core_models.Result.Result_Ok v_val ->\n                  let _:Prims.unit =\n                    match v_val, mk_u32 57 <: (u32 & u32) with\n                    | left_val, right_val -> Hax_lib.v_assert (left_val =. 
right_val <: bool)\n                  in\n                  (match\n                      impl_Thing1__get_thing_2_ thing1 false\n                      <:\n                      Core_models.Result.t_Result t_Thing2 Prims.unit\n                    with\n                    | Core_models.Result.Result_Ok hoist11 ->\n                      (match\n                          impl_Thing2__call hoist11 false\n                          <:\n                          Core_models.Result.t_Result u32 Prims.unit\n                        with\n                        | Core_models.Result.Result_Ok v_val ->\n                          let _:Prims.unit =\n                            match v_val, mk_u32 57 <: (u32 & u32) with\n                            | left_val, right_val ->\n                              Hax_lib.v_assert (left_val =. right_val <: bool)\n                          in\n                          (match\n                              impl_Thing1__get_thing_2_ thing1 false\n                              <:\n                              Core_models.Result.t_Result t_Thing2 Prims.unit\n                            with\n                            | Core_models.Result.Result_Ok hoist13 ->\n                              (match\n                                  impl_Thing2__call hoist13 false\n                                  <:\n                                  Core_models.Result.t_Result u32 Prims.unit\n                                with\n                                | Core_models.Result.Result_Ok v_val ->\n                                  let _:Prims.unit =\n                                    match v_val, mk_u32 57 <: (u32 & u32) with\n                                    | left_val, right_val ->\n                                      Hax_lib.v_assert (left_val =. 
right_val <: bool)\n                                  in\n                                  Core_models.Ops.Control_flow.ControlFlow_Continue countdown\n                                  <:\n                                  Core_models.Ops.Control_flow.t_ControlFlow\n                                    (Core_models.Ops.Control_flow.t_ControlFlow\n                                        (Core_models.Result.t_Result Prims.unit Prims.unit)\n                                        (Prims.unit & i32)) i32\n                                | Core_models.Result.Result_Err err ->\n                                  Core_models.Ops.Control_flow.ControlFlow_Break\n                                  (Core_models.Ops.Control_flow.ControlFlow_Break\n                                    (Core_models.Result.Result_Err err\n                                      <:\n                                      Core_models.Result.t_Result Prims.unit Prims.unit)\n                                    <:\n                                    Core_models.Ops.Control_flow.t_ControlFlow\n                                      (Core_models.Result.t_Result Prims.unit Prims.unit)\n                                      (Prims.unit & i32))\n                                  <:\n                                  Core_models.Ops.Control_flow.t_ControlFlow\n                                    (Core_models.Ops.Control_flow.t_ControlFlow\n                                        (Core_models.Result.t_Result Prims.unit Prims.unit)\n                                        (Prims.unit & i32)) i32)\n                            | Core_models.Result.Result_Err err ->\n                              Core_models.Ops.Control_flow.ControlFlow_Break\n                              (Core_models.Ops.Control_flow.ControlFlow_Break\n                                (Core_models.Result.Result_Err err\n                                  <:\n                                  Core_models.Result.t_Result Prims.unit Prims.unit)\n        
                        <:\n                                Core_models.Ops.Control_flow.t_ControlFlow\n                                  (Core_models.Result.t_Result Prims.unit Prims.unit)\n                                  (Prims.unit & i32))\n                              <:\n                              Core_models.Ops.Control_flow.t_ControlFlow\n                                (Core_models.Ops.Control_flow.t_ControlFlow\n                                    (Core_models.Result.t_Result Prims.unit Prims.unit)\n                                    (Prims.unit & i32)) i32)\n                        | Core_models.Result.Result_Err err ->\n                          Core_models.Ops.Control_flow.ControlFlow_Break\n                          (Core_models.Ops.Control_flow.ControlFlow_Break\n                            (Core_models.Result.Result_Err err\n                              <:\n                              Core_models.Result.t_Result Prims.unit Prims.unit)\n                            <:\n                            Core_models.Ops.Control_flow.t_ControlFlow\n                              (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)\n                          )\n                          <:\n                          Core_models.Ops.Control_flow.t_ControlFlow\n                            (Core_models.Ops.Control_flow.t_ControlFlow\n                                (Core_models.Result.t_Result Prims.unit Prims.unit)\n                                (Prims.unit & i32)) i32)\n                    | Core_models.Result.Result_Err err ->\n                      Core_models.Ops.Control_flow.ControlFlow_Break\n                      (Core_models.Ops.Control_flow.ControlFlow_Break\n                        (Core_models.Result.Result_Err err\n                          <:\n                          Core_models.Result.t_Result Prims.unit Prims.unit)\n                        <:\n                        Core_models.Ops.Control_flow.t_ControlFlow\n         
                 (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32))\n                      <:\n                      Core_models.Ops.Control_flow.t_ControlFlow\n                        (Core_models.Ops.Control_flow.t_ControlFlow\n                            (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32))\n                        i32)\n                | Core_models.Result.Result_Err err ->\n                  Core_models.Ops.Control_flow.ControlFlow_Break\n                  (Core_models.Ops.Control_flow.ControlFlow_Break\n                    (Core_models.Result.Result_Err err\n                      <:\n                      Core_models.Result.t_Result Prims.unit Prims.unit)\n                    <:\n                    Core_models.Ops.Control_flow.t_ControlFlow\n                      (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32))\n                  <:\n                  Core_models.Ops.Control_flow.t_ControlFlow\n                    (Core_models.Ops.Control_flow.t_ControlFlow\n                        (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32)\n            | Core_models.Result.Result_Err err ->\n              Core_models.Ops.Control_flow.ControlFlow_Break\n              (Core_models.Ops.Control_flow.ControlFlow_Break\n                (Core_models.Result.Result_Err err\n                  <:\n                  Core_models.Result.t_Result Prims.unit Prims.unit)\n                <:\n                Core_models.Ops.Control_flow.t_ControlFlow\n                  (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32))\n              <:\n              Core_models.Ops.Control_flow.t_ControlFlow\n                (Core_models.Ops.Control_flow.t_ControlFlow\n                    (Core_models.Result.t_Result Prims.unit Prims.unit) (Prims.unit & i32)) i32)\n    <:\n    Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit Prims.unit)\n    
  i32\n  with\n  | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret\n  | Core_models.Ops.Control_flow.ControlFlow_Continue countdown ->\n    Core_models.Result.Result_Ok (() <: Prims.unit)\n    <:\n    Core_models.Result.t_Result Prims.unit Prims.unit\n\nlet main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit Prims.unit =\n  let _:Prims.unit =\n    Core_models.Result.impl__expect_err #Prims.unit\n      #Prims.unit\n      (test1 () <: Core_models.Result.t_Result Prims.unit Prims.unit)\n      \"test1 should fail\"\n  in\n  match test2 () <: Core_models.Result.t_Result Prims.unit Prims.unit with\n  | Core_models.Result.Result_Ok _ ->\n    Core_models.Result.Result_Ok (() <: Prims.unit)\n    <:\n    Core_models.Result.t_Result Prims.unit Prims.unit\n  | Core_models.Result.Result_Err err ->\n    Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Unicode.fst",
    "content": "module Coverage.Unicode\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet v_申し訳ございません (_: Prims.unit) : bool = Core_models.Hint.black_box #bool false\n\nlet v_サビ (_: Prims.unit) : Prims.unit = ()\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Ops.Range.t_RangeInclusive\n            FStar.Char.char)\n          #FStar.Tactics.Typeclasses.solve\n          (Core_models.Ops.Range.impl_7__new #FStar.Char.char 'Ð' 'Ð'\n            <:\n            Core_models.Ops.Range.t_RangeInclusive FStar.Char.char)\n        <:\n        Core_models.Ops.Range.t_RangeInclusive FStar.Char.char)\n      ()\n      (fun temp_0_ e_İ ->\n          let _:Prims.unit = temp_0_ in\n          let e_İ:FStar.Char.char = e_İ in\n          ())\n  in\n  let _:Prims.unit =\n    if v_申し訳ございません () && v_申し訳ございません ()\n    then\n      let _:Prims.unit =\n        Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n              (let list = [\"true\\n\"] in\n                FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n                Rust_primitives.Hax.array_of_list 1 list)\n            <:\n            Core_models.Fmt.t_Arguments)\n      in\n      let _:Prims.unit = () in\n      ()\n  in\n  let _:Prims.unit = v_サビ () in\n  ()\n\nlet v_他 (_: Prims.unit) : Prims.unit = ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Unused.fst",
    "content": "module Coverage.Unused\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet foo (#v_T: Type0) (x: v_T) : (i32 & Prims.unit) =\n  let i:i32 = mk_i32 0 in\n  Rust_primitives.Hax.while_loop (fun i ->\n        let i:i32 = i in\n        true)\n    (fun i ->\n        let i:i32 = i in\n        i <. mk_i32 10 <: bool)\n    (fun i ->\n        let i:i32 = i in\n        Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n    i\n    (fun i ->\n        let i:i32 = i in\n        let _:bool = i <>. mk_i32 0 || i <>. mk_i32 0 in\n        let i:i32 = i +! mk_i32 1 in\n        i),\n  ()\n  <:\n  (i32 & Prims.unit)\n\nlet unused_template_func (#v_T: Type0) (x: v_T) : (i32 & Prims.unit) =\n  let i:i32 = mk_i32 0 in\n  Rust_primitives.Hax.while_loop (fun i ->\n        let i:i32 = i in\n        true)\n    (fun i ->\n        let i:i32 = i in\n        i <. mk_i32 10 <: bool)\n    (fun i ->\n        let i:i32 = i in\n        Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n    i\n    (fun i ->\n        let i:i32 = i in\n        let _:bool = i <>. mk_i32 0 || i <>. mk_i32 0 in\n        let i:i32 = i +! mk_i32 1 in\n        i),\n  ()\n  <:\n  (i32 & Prims.unit)\n\nlet unused_func (a: u32) : Prims.unit =\n  if a <>. mk_u32 0\n  then\n    let a:u32 = a +! mk_u32 1 in\n    ()\n\nlet unused_func2 (a: u32) : Prims.unit =\n  if a <>. mk_u32 0\n  then\n    let a:u32 = a +! mk_u32 1 in\n    ()\n\nlet unused_func3 (a: u32) : Prims.unit =\n  if a <>. mk_u32 0\n  then\n    let a:u32 = a +! mk_u32 1 in\n    ()\n\nlet main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 =\n  let _:Prims.unit = foo #u32 (mk_u32 0) in\n  let _:Prims.unit = foo #float (mk_float \"0.0\") in\n  Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Unused_mod.Unused_module.fst",
    "content": "module Coverage.Unused_mod.Unused_module\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet never_called_function (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"I am never called\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.Unused_mod.fst",
    "content": "module Coverage.Unused_mod\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_const (mk_usize 1)\n          (let list = [\"hello world!\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n            Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.While_.fst",
    "content": "module Coverage.While_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main (_: Prims.unit) : (Prims.unit & Prims.unit) =\n  let num:i32 = mk_i32 9 in\n  Rust_primitives.Hax.while_loop (fun temp_0_ ->\n        let _:Prims.unit = temp_0_ in\n        true)\n    (fun temp_0_ ->\n        let _:Prims.unit = temp_0_ in\n        num >=. mk_i32 10 <: bool)\n    (fun temp_0_ ->\n        let _:Prims.unit = temp_0_ in\n        Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n    ()\n    (fun temp_0_ ->\n        let _:Prims.unit = temp_0_ in\n        ()),\n  ()\n  <:\n  (Prims.unit & Prims.unit)\n"
  },
  {
    "path": "rustc-coverage-tests/snapshots/fstar/Coverage.While_early_ret.fst",
    "content": "module Coverage.While_early_ret\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet main (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 =\n  let countdown:i32 = mk_i32 10 in\n  match\n    Rust_primitives.Hax.while_loop_return (fun countdown ->\n          let countdown:i32 = countdown in\n          true)\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          countdown >. mk_i32 0 <: bool)\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n      countdown\n      (fun countdown ->\n          let countdown:i32 = countdown in\n          if countdown <. mk_i32 5 <: bool\n          then\n            Core_models.Ops.Control_flow.ControlFlow_Break\n            (Core_models.Ops.Control_flow.ControlFlow_Break\n              (if countdown >. mk_i32 8 <: bool\n                then\n                  Core_models.Result.Result_Ok (() <: Prims.unit)\n                  <:\n                  Core_models.Result.t_Result Prims.unit u8\n                else\n                  Core_models.Result.Result_Err (mk_u8 1)\n                  <:\n                  Core_models.Result.t_Result Prims.unit u8)\n              <:\n              Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit u8)\n                (Prims.unit & i32))\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow\n              (Core_models.Ops.Control_flow.t_ControlFlow\n                  (Core_models.Result.t_Result Prims.unit u8) (Prims.unit & i32)) i32\n          else\n            Core_models.Ops.Control_flow.ControlFlow_Continue (countdown -! 
mk_i32 1 <: i32)\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow\n              (Core_models.Ops.Control_flow.t_ControlFlow\n                  (Core_models.Result.t_Result Prims.unit u8) (Prims.unit & i32)) i32)\n    <:\n    Core_models.Ops.Control_flow.t_ControlFlow (Core_models.Result.t_Result Prims.unit u8) i32\n  with\n  | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret\n  | Core_models.Ops.Control_flow.ControlFlow_Continue countdown ->\n    Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8\n"
  },
  {
    "path": "rustc-coverage-tests/src/abort.rs",
    "content": "#![allow(unused_assignments)]\n\nextern \"C\" fn might_abort(should_abort: bool) {\n    if should_abort {\n        println!(\"aborting...\");\n        panic!(\"panics and aborts\");\n    } else {\n        println!(\"Don't Panic\");\n    }\n}\n\n#[rustfmt::skip]\nfn main() -> Result<(), u8> {\n    let mut countdown = 10;\n    while countdown > 0 {\n        if countdown < 5 {\n            might_abort(false);\n        }\n        // See discussion (below the `Notes` section) on coverage results for the closing brace.\n        if countdown < 5 { might_abort(false); } // Counts for different regions on one line.\n        // For the following example, the closing brace is the last character on the line.\n        // This shows the character after the closing brace is highlighted, even if that next\n        // character is a newline.\n        if countdown < 5 { might_abort(false); }\n        countdown -= 1;\n    }\n    Ok(())\n}\n\n// Notes:\n//   1. Compare this program and its coverage results to those of the similar tests\n//      `panic_unwind.rs` and `try_error_result.rs`.\n//   2. This test confirms the coverage generated when a program includes `UnwindAction::Terminate`.\n//   3. The test does not invoke the abort. By executing to a successful completion, the coverage\n//      results show where the program did and did not execute.\n//   4. If the program actually aborted, the coverage counters would not be saved (which \"works as\n//      intended\"). Coverage results would show no executed coverage regions.\n//   6. 
If `should_abort` is `true` and the program aborts, the program exits with a `132` status\n//      (on Linux at least).\n\n/*\n\nExpect the following coverage results:\n\n```text\n    16|     11|    while countdown > 0 {\n    17|     10|        if countdown < 5 {\n    18|      4|            might_abort(false);\n    19|      6|        }\n```\n\nThis is actually correct.\n\nThe condition `countdown < 5` executed 10 times (10 loop iterations).\n\nIt evaluated to `true` 4 times, and executed the `might_abort()` call.\n\nIt skipped the body of the `might_abort()` call 6 times. If an `if` does not include an explicit\n`else`, the coverage implementation injects a counter, at the character immediately after the `if`s\nclosing brace, to count the \"implicit\" `else`. This is the only way to capture the coverage of the\nnon-true condition.\n\nAs another example of why this is important, say the condition was `countdown < 50`, which is always\n`true`. In that case, we wouldn't have a test for what happens if `might_abort()` is not called.\nThe closing brace would have a count of `0`, highlighting the missed coverage.\n*/\n"
  },
  {
    "path": "rustc-coverage-tests/src/assert-ne.rs",
    "content": "//@ edition: 2021\n\nuse core::hint::black_box;\n\n#[derive(Debug, PartialEq)]\nstruct Foo(u32);\n\nfn main() {\n    assert_ne!(\n        black_box(Foo(5)), // Make sure this expression's span isn't lost.\n        if black_box(false) {\n            Foo(0) //\n        } else {\n            Foo(1) //\n        }\n    );\n    ()\n}\n\n// This test is a short fragment extracted from `issue-84561.rs`, highlighting\n// a particular span of code that can easily be lost if overlapping spans are\n// processed incorrectly.\n"
  },
  {
    "path": "rustc-coverage-tests/src/assert.rs",
    "content": "#![allow(unused_assignments)]\n//@ failure-status: 101\n\nfn might_fail_assert(one_plus_one: u32) {\n    println!(\"does 1 + 1 = {}?\", one_plus_one);\n    assert_eq!(1 + 1, one_plus_one, \"the argument was wrong\");\n}\n\nfn main() -> Result<(), u8> {\n    let mut countdown = 10;\n    while countdown > 0 {\n        if countdown == 1 {\n            might_fail_assert(3);\n        } else if countdown < 5 {\n            might_fail_assert(2);\n        }\n        countdown -= 1;\n    }\n    Ok(())\n}\n\n// Notes:\n//   1. Compare this program and its coverage results to those of the very similar test\n//      `panic_unwind.rs`, and similar tests `abort.rs` and `try_error_result.rs`.\n//   2. This test confirms the coverage generated when a program passes or fails an `assert!()` or\n//      related `assert_*!()` macro.\n//   3. Notably, the `assert` macros *do not* generate `TerminatorKind::Assert`. The macros produce\n//      conditional expressions, `TerminatorKind::SwitchInt` branches, and a possible call to\n//      `begin_panic_fmt()` (that begins a panic unwind, if the assertion test fails).\n//   4. `TerminatoKind::Assert` is, however, also present in the MIR generated for this test\n//      (and in many other coverage tests). The `Assert` terminator is typically generated by the\n//      Rust compiler to check for runtime failures, such as numeric overflows.\n"
  },
  {
    "path": "rustc-coverage-tests/src/assert_not.rs",
    "content": "//@ edition: 2021\n\n// Regression test for <https://github.com/rust-lang/rust/issues/118904>.\n// `assert!(true)` and `assert!(!false)` should have similar coverage spans.\n\nfn main() {\n    assert!(true);\n    assert!(!false);\n    assert!(!!true);\n    assert!(!!!false);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/async.rs",
    "content": "#![feature(coverage_attribute)]\n#![feature(custom_inner_attributes)] // for #![rustfmt::skip]\n#![allow(unused_assignments, dead_code)]\n#![rustfmt::skip]\n//@ edition: 2018\n//@ compile-flags: -Copt-level=1\n\n//@ aux-build: executor.rs\nextern crate executor;\n\nasync fn c(x: u8) -> u8 {\n    if x == 8 {\n        1\n    } else {\n        0\n    }\n}\n\nasync fn d() -> u8 { 1 }\n\nasync fn e() -> u8 { 1 } // unused function; executor does not block on `g()`\n\nasync fn f() -> u8 { 1 }\n\nasync fn foo() -> [bool; 10] { [false; 10] } // unused function; executor does not block on `h()`\n\npub async fn g(x: u8) {\n    match x {\n        y if e().await == y => (),\n        y if f().await == y => (),\n        _ => (),\n    }\n}\n\nasync fn h(x: usize) { // The function signature is counted when called, but the body is not\n                       // executed (not awaited) so the open brace has a `0` count (at least when\n                       // displayed with `llvm-cov show` in color-mode).\n    match x {\n        y if foo().await[y] => (),\n        _ => (),\n    }\n}\n\nasync fn i(x: u8) { // line coverage is 1, but there are 2 regions:\n                    // (a) the function signature, counted when the function is called; and\n                    // (b) the open brace for the function body, counted once when the body is\n                    // executed asynchronously.\n    match x {\n        y if c(x).await == y + 1 => { d().await; }\n        y if f().await == y + 1 => (),\n        _ => (),\n    }\n}\n\nfn j(x: u8) {\n    // non-async versions of `c()`, `d()`, and `f()` to make it similar to async `i()`.\n    fn c(x: u8) -> u8 {\n        if x == 8 {\n            1\n        } else {\n            0\n        }\n    }\n    fn d() -> u8 { 1 } // inner function is defined in-line, but the function is not executed\n    fn f() -> u8 { 1 }\n    match x {\n        y if c(x) == y + 1 => { d(); }\n        y if f() == y + 1 => (),\n        _ => (),\n    
}\n}\n\nfn k(x: u8) { // unused function\n    match x {\n        1 => (),\n        2 => (),\n        _ => (),\n    }\n}\n\nfn l(x: u8) {\n    match x {\n        1 => (),\n        2 => (),\n        _ => (),\n    }\n}\n\nasync fn m(x: u8) -> u8 { x - 1 }\n\nfn main() {\n    let _ = g(10);\n    let _ = h(9);\n    let mut future = Box::pin(i(8));\n    j(7);\n    l(6);\n    let _ = m(5);\n    executor::block_on(future.as_mut());\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/async2.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2018\n\n//@ aux-build: executor.rs\nextern crate executor;\n\nfn non_async_func() {\n    println!(\"non_async_func was covered\");\n    let b = true;\n    if b {\n        println!(\"non_async_func println in block\");\n    }\n}\n\nasync fn async_func() {\n    println!(\"async_func was covered\");\n    let b = true;\n    if b {\n        println!(\"async_func println in block\");\n    }\n}\n\nasync fn async_func_just_println() {\n    println!(\"async_func_just_println was covered\");\n}\n\nfn main() {\n    println!(\"codecovsample::main\");\n\n    non_async_func();\n\n    executor::block_on(async_func());\n    executor::block_on(async_func_just_println());\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/async_block.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n\n//@ aux-build: executor.rs\nextern crate executor;\n\nfn main() {\n    for i in 0..16 {\n        let future = async {\n            if i >= 12 {\n                println!(\"big\");\n            } else {\n                println!(\"small\");\n            }\n        };\n        executor::block_on(future);\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/async_closure.rs",
    "content": "//@ edition: 2021\n\n//@ aux-build: executor.rs\nextern crate executor;\n\nasync fn call_once(f: impl AsyncFnOnce()) {\n    f().await;\n}\n\npub fn main() {\n    let async_closure = async || {};\n    executor::block_on(async_closure());\n    executor::block_on(call_once(async_closure));\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/attr/impl.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ reference: attributes.coverage.nesting\n\n// Checks that `#[coverage(..)]` can be applied to impl and impl-trait blocks,\n// and is inherited by any enclosed functions.\n\nstruct MyStruct;\n\n#[coverage(off)]\nimpl MyStruct {\n    fn off_inherit() {}\n\n    #[coverage(on)]\n    fn off_on() {}\n\n    #[coverage(off)]\n    fn off_off() {}\n}\n\n#[coverage(on)]\nimpl MyStruct {\n    fn on_inherit() {}\n\n    #[coverage(on)]\n    fn on_on() {}\n\n    #[coverage(off)]\n    fn on_off() {}\n}\n\ntrait MyTrait {\n    fn method();\n}\n\n#[coverage(off)]\nimpl MyTrait for MyStruct {\n    fn method() {}\n}\n\n#[coverage(off)]\nfn main() {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/attr/mod.rs",
    "content": "#[path = \"impl.rs\"]\nmod impl_;\nmod module;\n// mod nested;\n#[path = \"off-on-sandwich.rs\"]\nmod off_on_sandwich;\n#[path = \"trait-impl-inherit.rs\"]\n#[cfg(any(feature = \"json\"))]\nmod trait_impl_inherit;\n"
  },
  {
    "path": "rustc-coverage-tests/src/attr/module.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ reference: attributes.coverage.nesting\n\n// Checks that `#[coverage(..)]` can be applied to modules, and is inherited\n// by any enclosed functions.\n\n#[coverage(off)]\nmod off {\n    fn inherit() {}\n\n    #[coverage(on)]\n    fn on() {}\n\n    #[coverage(off)]\n    fn off() {}\n}\n\n#[coverage(on)]\nmod on {\n    fn inherit() {}\n\n    #[coverage(on)]\n    fn on() {}\n\n    #[coverage(off)]\n    fn off() {}\n}\n\n#[coverage(off)]\nmod nested_a {\n    mod nested_b {\n        fn inner() {}\n    }\n}\n\n#[coverage(off)]\nfn main() {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/attr/nested.rs",
    "content": "#![feature(coverage_attribute, stmt_expr_attributes)]\n//@ edition: 2021\n//@ reference: attributes.coverage.nesting\n\n// Demonstrates the interaction between #[coverage(off)] and various kinds of\n// nested function.\n\n#[coverage(off)]\nfn do_stuff() {}\n\n#[coverage(off)]\nfn outer_fn() {\n    fn middle_fn() {\n        fn inner_fn() {\n            do_stuff();\n        }\n        do_stuff();\n    }\n    do_stuff();\n}\n\nstruct MyOuter;\nimpl MyOuter {\n    #[coverage(off)]\n    fn outer_method(&self) {\n        struct MyMiddle;\n        impl MyMiddle {\n            fn middle_method(&self) {\n                struct MyInner;\n                impl MyInner {\n                    fn inner_method(&self) {\n                        do_stuff();\n                    }\n                }\n                do_stuff();\n            }\n        }\n        do_stuff();\n    }\n}\n\ntrait MyTrait {\n    fn trait_method(&self);\n}\nimpl MyTrait for MyOuter {\n    #[coverage(off)]\n    fn trait_method(&self) {\n        struct MyMiddle;\n        impl MyTrait for MyMiddle {\n            fn trait_method(&self) {\n                struct MyInner;\n                impl MyTrait for MyInner {\n                    fn trait_method(&self) {\n                        do_stuff();\n                    }\n                }\n                do_stuff();\n            }\n        }\n        do_stuff();\n    }\n}\n\nfn closure_expr() {\n    let _outer = #[coverage(off)]\n    || {\n        let _middle = || {\n            let _inner = || {\n                do_stuff();\n            };\n            do_stuff();\n        };\n        do_stuff();\n    };\n    do_stuff();\n}\n\n// This syntax is allowed, even without #![feature(stmt_expr_attributes)].\nfn closure_tail() {\n    let _outer = {\n        #[coverage(off)]\n        || {\n            let _middle = {\n                || {\n                    let _inner = {\n                        || {\n                            do_stuff();\n          
              }\n                    };\n                    do_stuff();\n                }\n            };\n            do_stuff();\n        }\n    };\n    do_stuff();\n}\n\n#[coverage(off)]\nfn main() {\n    outer_fn();\n    MyOuter.outer_method();\n    MyOuter.trait_method();\n    closure_expr();\n    closure_tail();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/attr/off-on-sandwich.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ reference: attributes.coverage.nesting\n\n// Demonstrates the interaction of `#[coverage(off)]` and `#[coverage(on)]`\n// in nested functions.\n\n#[coverage(off)]\nfn do_stuff() {}\n\n#[coverage(off)]\nfn dense_a() {\n    dense_b();\n    dense_b();\n    #[coverage(on)]\n    fn dense_b() {\n        dense_c();\n        dense_c();\n        #[coverage(off)]\n        fn dense_c() {\n            do_stuff();\n        }\n    }\n}\n\n#[coverage(off)]\nfn sparse_a() {\n    sparse_b();\n    sparse_b();\n    fn sparse_b() {\n        sparse_c();\n        sparse_c();\n        #[coverage(on)]\n        fn sparse_c() {\n            sparse_d();\n            sparse_d();\n            fn sparse_d() {\n                sparse_e();\n                sparse_e();\n                #[coverage(off)]\n                fn sparse_e() {\n                    do_stuff();\n                }\n            }\n        }\n    }\n}\n\n#[coverage(off)]\nfn main() {\n    dense_a();\n    sparse_a();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/attr/trait-impl-inherit.rs",
    "content": "#![feature(coverage_attribute)]\n// Checks that `#[coverage(..)]` in a trait method is not inherited in an\n// implementation.\n//@ edition: 2021\n//@ reference: attributes.coverage.trait-impl-inherit\n\ntrait T {\n    #[coverage(off)]\n    fn f(&self) {\n        println!(\"default\");\n    }\n}\n\nstruct S;\n\nimpl T for S {\n    fn f(&self) {\n        println!(\"impl S\");\n    }\n}\n\n#[coverage(off)]\nfn main() {\n    S.f();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/auxiliary/discard_all_helper.rs",
    "content": "//@ edition: 2021\n\n// Force this function to be generated in its home crate, so that it ends up\n// with normal coverage metadata.\n#[inline(never)]\npub fn external_function() {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/auxiliary/executor.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n\nuse core::future::Future;\nuse core::pin::pin;\nuse core::task::{Context, Poll, Waker};\n\n/// Dummy \"executor\" that just repeatedly polls a future until it's ready.\n#[coverage(off)]\npub fn block_on<F: Future>(mut future: F) -> F::Output {\n    let mut future = pin!(future);\n    let mut context = Context::from_waker(Waker::noop());\n\n    loop {\n        if let Poll::Ready(val) = future.as_mut().poll(&mut context) {\n            break val;\n        }\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/auxiliary/inline_always_with_dead_code.rs",
    "content": "//@ compile-flags: -Cinstrument-coverage -Ccodegen-units=4 -Copt-level=0\n\n#![allow(dead_code)]\n\nmod foo {\n    #[inline(always)]\n    pub fn called() {}\n\n    fn uncalled() {}\n}\n\npub mod bar {\n    pub fn call_me() {\n        super::foo::called();\n    }\n}\n\npub mod baz {\n    pub fn call_me() {\n        super::foo::called();\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/auxiliary/inline_mixed_helper.rs",
    "content": "//@ edition: 2021\n//@ compile-flags: -Cinstrument-coverage=on\n\n#[inline]\npub fn inline_me() {}\n\n#[inline(never)]\npub fn no_inlining_please() {}\n\npub fn generic<T>() {}\n\n// FIXME(#132436): Even though this doesn't ICE, it still produces coverage\n// reports that undercount the affected code.\n"
  },
  {
    "path": "rustc-coverage-tests/src/auxiliary/macro_name_span_helper.rs",
    "content": "//@ edition: 2021\n\n#[macro_export]\nmacro_rules! macro_that_defines_a_function {\n    (fn $name:ident () $body:tt) => {\n        fn $name () -> () $body\n    }\n}\n\n// Non-executable comment.\n"
  },
  {
    "path": "rustc-coverage-tests/src/auxiliary/mod.rs",
    "content": "mod discard_all_helper;\n// mod executor;\n// mod inline_always_with_dead_code;\n// mod inline_mixed_helper;\n// mod macro_name_span_helper;\n// mod unused_mod_helper;\n#[cfg(any(feature = \"json\", feature = \"fstar\", feature = \"coq\"))]\nmod used_crate;\n#[cfg(any(feature = \"json\", feature = \"fstar\", feature = \"coq\"))]\nmod used_inline_crate;\n"
  },
  {
    "path": "rustc-coverage-tests/src/auxiliary/unused_mod_helper.rs",
    "content": "#[allow(dead_code)]\npub fn never_called_function() {\n    println!(\"I am never called\");\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/auxiliary/used_crate.rs",
    "content": "#![allow(unused_assignments, unused_variables)]\n// Verify that coverage works with optimizations:\n//@ compile-flags: -C opt-level=3\n\nuse std::fmt::Debug;\n\npub fn used_function() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n    let mut countdown = 0;\n    if is_true {\n        countdown = 10;\n    }\n    use_this_lib_crate();\n}\n\npub fn used_only_from_bin_crate_generic_function<T: Debug>(arg: T) {\n    println!(\"used_only_from_bin_crate_generic_function with {arg:?}\");\n}\n// Expect for above function: `Unexecuted instantiation` (see below)\npub fn used_only_from_this_lib_crate_generic_function<T: Debug>(arg: T) {\n    println!(\"used_only_from_this_lib_crate_generic_function with {arg:?}\");\n}\n\npub fn used_from_bin_crate_and_lib_crate_generic_function<T: Debug>(arg: T) {\n    println!(\"used_from_bin_crate_and_lib_crate_generic_function with {arg:?}\");\n}\n\npub fn used_with_same_type_from_bin_crate_and_lib_crate_generic_function<T: Debug>(arg: T) {\n    println!(\"used_with_same_type_from_bin_crate_and_lib_crate_generic_function with {arg:?}\");\n}\n\npub fn unused_generic_function<T: Debug>(arg: T) {\n    println!(\"unused_generic_function with {arg:?}\");\n}\n\npub fn unused_function() {\n    let is_true = std::env::args().len() == 1;\n    let mut countdown = 2;\n    if !is_true {\n        countdown = 20;\n    }\n}\n\n#[allow(dead_code)]\nfn unused_private_function() {\n    let is_true = std::env::args().len() == 1;\n    let mut countdown = 2;\n    if !is_true {\n        countdown = 20;\n    }\n}\n\nfn use_this_lib_crate() {\n    used_from_bin_crate_and_lib_crate_generic_function(\"used from library used_crate.rs\");\n    used_with_same_type_from_bin_crate_and_lib_crate_generic_function(\n        \"used from 
library used_crate.rs\",\n    );\n    let some_vec = vec![5, 6, 7, 8];\n    used_only_from_this_lib_crate_generic_function(some_vec);\n    used_only_from_this_lib_crate_generic_function(\"used ONLY from library used_crate.rs\");\n}\n\n// FIXME(#79651): \"Unexecuted instantiation\" errors appear in coverage results,\n// for example:\n//\n// | Unexecuted instantiation: used_crate::used_only_from_bin_crate_generic_function::<_>\n//\n// These notices appear when `llvm-cov` shows instantiations. This may be a\n// default option, but it can be suppressed with:\n//\n// ```shell\n// $ `llvm-cov show --show-instantiations=0 ...`\n// ```\n//\n// The notice is triggered because the function is unused by the library itself,\n// so when the library is compiled, an \"unused\" set of mappings for that function\n// is included in the library's coverage metadata.\n//\n// Even though this function is used by `uses_crate.rs` (and\n// counted), with substitutions for `T`, those instantiations are only generated\n// when the generic function is actually used (from the binary, not from this\n// library crate). So the test result shows coverage for all instantiated\n// versions and their generic type substitutions, plus the `Unexecuted\n// instantiation` message for the non-substituted version. This is valid, but\n// unfortunately a little confusing.\n//\n// The library crate has its own coverage map, and the only way to show unused\n// coverage of a generic function is to include the generic function in the\n// coverage map, marked as an \"unused function\". If the library were used by\n// another binary that never used this generic function, then it would be valid\n// to show the unused generic, with unknown substitution (`_`).\n//\n// The alternative would be to exclude all generics from being included in the\n// \"unused functions\" list, which would then omit coverage results for\n// `unused_generic_function<T>()`.\n"
  },
  {
    "path": "rustc-coverage-tests/src/auxiliary/used_inline_crate.rs",
    "content": "#![allow(unused_assignments, unused_variables)]\n// Verify that coverage works with optimizations:\n//@ compile-flags: -C opt-level=3\n\nuse std::fmt::Debug;\n\npub fn used_function() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n    let mut countdown = 0;\n    if is_true {\n        countdown = 10;\n    }\n    use_this_lib_crate();\n}\n\n#[inline(always)]\npub fn used_inline_function() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n    let mut countdown = 0;\n    if is_true {\n        countdown = 10;\n    }\n    use_this_lib_crate();\n}\n\n#[inline(always)]\npub fn used_only_from_bin_crate_generic_function<T: Debug>(arg: T) {\n    println!(\"used_only_from_bin_crate_generic_function with {arg:?}\");\n}\n// Expect for above function: `Unexecuted instantiation` (see notes in `used_crate.rs`)\n\n#[inline(always)]\npub fn used_only_from_this_lib_crate_generic_function<T: Debug>(arg: T) {\n    println!(\"used_only_from_this_lib_crate_generic_function with {arg:?}\");\n}\n\n#[inline(always)]\npub fn used_from_bin_crate_and_lib_crate_generic_function<T: Debug>(arg: T) {\n    println!(\"used_from_bin_crate_and_lib_crate_generic_function with {arg:?}\");\n}\n\n#[inline(always)]\npub fn used_with_same_type_from_bin_crate_and_lib_crate_generic_function<T: Debug>(arg: T) {\n    println!(\"used_with_same_type_from_bin_crate_and_lib_crate_generic_function with {arg:?}\");\n}\n\n#[inline(always)]\npub fn unused_generic_function<T: Debug>(arg: T) {\n    println!(\"unused_generic_function with 
{arg:?}\");\n}\n\n#[inline(always)]\npub fn unused_function() {\n    let is_true = std::env::args().len() == 1;\n    let mut countdown = 2;\n    if !is_true {\n        countdown = 20;\n    }\n}\n\n#[inline(always)]\n#[allow(dead_code)]\nfn unused_private_function() {\n    let is_true = std::env::args().len() == 1;\n    let mut countdown = 2;\n    if !is_true {\n        countdown = 20;\n    }\n}\n\nfn use_this_lib_crate() {\n    used_from_bin_crate_and_lib_crate_generic_function(\"used from library used_crate.rs\");\n    used_with_same_type_from_bin_crate_and_lib_crate_generic_function(\n        \"used from library used_crate.rs\",\n    );\n    let some_vec = vec![5, 6, 7, 8];\n    used_only_from_this_lib_crate_generic_function(some_vec);\n    used_only_from_this_lib_crate_generic_function(\"used ONLY from library used_crate.rs\");\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/await_ready.rs",
    "content": "#![feature(coverage_attribute)]\n#![coverage(off)]\n//@ edition: 2021\n\n//@ aux-build: executor.rs\nextern crate executor;\n\nasync fn ready() -> u8 {\n    1\n}\n\n#[coverage(on)]\n#[rustfmt::skip]\nasync fn await_ready() -> u8 {\n    // await should be covered even if the function never yields\n    ready()\n        .await\n}\n\nfn main() {\n    let mut future = Box::pin(await_ready());\n    executor::block_on(future.as_mut());\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/bad_counter_ids.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Copt-level=0 -Zmir-opt-level=3\n\n// Regression test for <https://github.com/rust-lang/rust/issues/117012>.\n//\n// If some coverage counters were removed by MIR optimizations, we need to take\n// care not to refer to those counter IDs in coverage mappings, and instead\n// replace them with a constant zero value. If we don't, `llvm-cov` might see\n// a too-large counter ID and silently discard the entire function from its\n// coverage reports.\n\n#[derive(Debug, PartialEq, Eq)]\nstruct Foo(u32);\n\nfn eq_good() {\n    println!(\"a\");\n    assert_eq!(Foo(1), Foo(1));\n}\n\nfn eq_good_message() {\n    println!(\"b\");\n    assert_eq!(Foo(1), Foo(1), \"message b\");\n}\n\nfn ne_good() {\n    println!(\"c\");\n    assert_ne!(Foo(1), Foo(3));\n}\n\nfn ne_good_message() {\n    println!(\"d\");\n    assert_ne!(Foo(1), Foo(3), \"message d\");\n}\n\nfn eq_bad() {\n    println!(\"e\");\n    assert_eq!(Foo(1), Foo(3));\n}\n\nfn eq_bad_message() {\n    println!(\"f\");\n    assert_eq!(Foo(1), Foo(3), \"message f\");\n}\n\nfn ne_bad() {\n    println!(\"g\");\n    assert_ne!(Foo(1), Foo(1));\n}\n\nfn ne_bad_message() {\n    println!(\"h\");\n    assert_ne!(Foo(1), Foo(1), \"message h\");\n}\n\n#[coverage(off)]\nfn main() {\n    eq_good();\n    eq_good_message();\n    ne_good();\n    ne_good_message();\n\n    assert!(std::panic::catch_unwind(eq_bad).is_err());\n    assert!(std::panic::catch_unwind(eq_bad_message).is_err());\n    assert!(std::panic::catch_unwind(ne_bad).is_err());\n    assert!(std::panic::catch_unwind(ne_bad_message).is_err());\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/bench.rs",
    "content": "#![feature(test)]\n//@ edition: 2021\n//@ compile-flags: --test\n\nextern crate test;\n\n#[bench]\nfn my_bench(_b: &mut test::Bencher) {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/branch/generics.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=branch\n//@ llvm-cov-flags: --show-branches=count\n\nfn print_size<T>() {\n    if std::mem::size_of::<T>() > 4 {\n        println!(\"size > 4\");\n    } else {\n        println!(\"size <= 4\");\n    }\n}\n\n#[coverage(off)]\nfn main() {\n    print_size::<()>();\n    print_size::<u32>();\n    print_size::<u64>();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/branch/guard.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=branch\n//@ llvm-cov-flags: --show-branches=count\n\nmacro_rules! no_merge {\n    () => {\n        for _ in 0..1 {}\n    };\n}\n\nfn branch_match_guard(x: Option<u32>) {\n    no_merge!();\n\n    match x {\n        Some(0) => {\n            println!(\"zero\");\n        }\n        Some(x) if x % 2 == 0 => {\n            println!(\"is nonzero and even\");\n        }\n        Some(x) if x % 3 == 0 => {\n            println!(\"is nonzero and odd, but divisible by 3\");\n        }\n        _ => {\n            println!(\"something else\");\n        }\n    }\n}\n\n#[coverage(off)]\nfn main() {\n    branch_match_guard(Some(0));\n    branch_match_guard(Some(2));\n    branch_match_guard(Some(6));\n    branch_match_guard(Some(3));\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/branch/if-let.rs",
    "content": "#![feature(coverage_attribute, let_chains)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=branch\n//@ llvm-cov-flags: --show-branches=count\n\nmacro_rules! no_merge {\n    () => {\n        for _ in 0..1 {}\n    };\n}\n\nfn if_let(input: Option<&str>) {\n    no_merge!();\n\n    if let Some(x) = input {\n        say(x);\n    } else {\n        say(\"none\");\n    }\n    say(\"done\");\n}\n\nfn if_let_chain(a: Option<&str>, b: Option<&str>) {\n    if let Some(x) = a\n        && let Some(y) = b\n    {\n        say(x);\n        say(y);\n    } else {\n        say(\"not both\");\n    }\n    say(\"done\");\n}\n\n#[coverage(off)]\nfn say(message: &str) {\n    core::hint::black_box(message);\n}\n\n#[coverage(off)]\nfn main() {\n    if_let(Some(\"x\"));\n    if_let(Some(\"x\"));\n    if_let(None);\n\n    for _ in 0..8 {\n        if_let_chain(Some(\"a\"), Some(\"b\"));\n    }\n    for _ in 0..4 {\n        if_let_chain(Some(\"a\"), None);\n    }\n    for _ in 0..2 {\n        if_let_chain(None, Some(\"b\"));\n    }\n    if_let_chain(None, None);\n}\n\n// FIXME(#124118) Actually instrument if-let and let-chains for branch coverage.\n"
  },
  {
    "path": "rustc-coverage-tests/src/branch/if.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=branch\n//@ llvm-cov-flags: --show-branches=count\n\nmacro_rules! no_merge {\n    () => {\n        for _ in 0..1 {}\n    };\n}\n\nfn branch_not(a: bool) {\n    no_merge!();\n\n    if a {\n        say(\"a\")\n    }\n    if !a {\n        say(\"not a\");\n    }\n    if !!a {\n        say(\"not not a\");\n    }\n    if !!!a {\n        say(\"not not not a\");\n    }\n}\n\nfn branch_not_as(a: bool) {\n    no_merge!();\n\n    if !(a as bool) {\n        say(\"not (a as bool)\");\n    }\n    if !!(a as bool) {\n        say(\"not not (a as bool)\");\n    }\n    if !!!(a as bool) {\n        say(\"not not (a as bool)\");\n    }\n}\n\nfn branch_and(a: bool, b: bool) {\n    no_merge!();\n\n    if a && b {\n        say(\"both\");\n    } else {\n        say(\"not both\");\n    }\n}\n\nfn branch_or(a: bool, b: bool) {\n    no_merge!();\n\n    if a || b {\n        say(\"either\");\n    } else {\n        say(\"neither\");\n    }\n}\n\n#[coverage(off)]\nfn say(message: &str) {\n    core::hint::black_box(message);\n}\n\n#[coverage(off)]\nfn main() {\n    for a in [false, true, true] {\n        branch_not(a);\n        branch_not_as(a);\n    }\n\n    for a in [false, true, true, true, true] {\n        for b in [false, true, true] {\n            branch_and(a, b);\n            branch_or(a, b);\n        }\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/branch/lazy-boolean.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=branch\n//@ llvm-cov-flags: --show-branches=count\n\n// Tests for branch coverage of the lazy boolean operators `&&` and `||`,\n// as ordinary expressions that aren't part of an `if` condition or similar.\n\nuse core::hint::black_box;\n\n// Helper macro to prevent start-of-function spans from being merged into\n// spans on the lines we care about.\nmacro_rules! no_merge {\n    () => {\n        for _ in 0..1 {}\n    };\n}\n\nfn branch_and(a: bool, b: bool) {\n    no_merge!();\n\n    //      |13  |18 (no branch)\n    let c = a && b;\n    black_box(c);\n}\n\nfn branch_or(a: bool, b: bool) {\n    no_merge!();\n\n    //      |13  |18 (no branch)\n    let c = a || b;\n    black_box(c);\n}\n\n// Test for chaining one operator several times.\nfn chain(x: u32) {\n    no_merge!();\n\n    //      |13      |22      |31      |40 (no branch)\n    let c = x > 1 && x > 2 && x > 4 && x > 8;\n    black_box(c);\n\n    //      |13      |22      |31      |40 (no branch)\n    let d = x < 1 || x < 2 || x < 4 || x < 8;\n    black_box(d);\n}\n\n// Test for nested combinations of different operators.\nfn nested_mixed(x: u32) {\n    no_merge!();\n\n    //       |14      |23         |35      |44 (no branch)\n    let c = (x < 4 || x >= 9) && (x < 2 || x >= 10);\n    black_box(c);\n\n    //       |14      |23        |34       |44 (no branch)\n    let d = (x < 4 && x < 1) || (x >= 8 && x >= 10);\n    black_box(d);\n}\n\n#[coverage(off)]\nfn main() {\n    // Use each set of arguments (2^n) times, so that each combination has a\n    // unique sum, and we can use those sums to verify expected control flow.\n    // 1x (false, false)\n    // 2x (false, true)\n    // 4x (true, false)\n    // 8x (true, true)\n    for a in [false, true, true, true, true] {\n        for b in [false, true, true] {\n            branch_and(a, b);\n            branch_or(a, b);\n        }\n    }\n\n    for x in 0..16 {\n  
      chain(x);\n        nested_mixed(x);\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/branch/let-else.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=branch\n//@ llvm-cov-flags: --show-branches=count\n\nmacro_rules! no_merge {\n    () => {\n        for _ in 0..1 {}\n    };\n}\n\nfn let_else(value: Option<&str>) {\n    no_merge!();\n\n    let Some(x) = value else {\n        say(\"none\");\n        return;\n    };\n\n    say(x);\n}\n\n#[coverage(off)]\nfn say(message: &str) {\n    core::hint::black_box(message);\n}\n\n#[coverage(off)]\nfn main() {\n    let_else(Some(\"x\"));\n    let_else(Some(\"x\"));\n    let_else(None);\n}\n\n// FIXME(#124118) Actually instrument let-else for branch coverage.\n"
  },
  {
    "path": "rustc-coverage-tests/src/branch/match-arms.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=branch\n//@ llvm-cov-flags: --show-branches=count\n\n// Tests for branch coverage of various kinds of match arms.\n\n// Helper macro to prevent start-of-function spans from being merged into\n// spans on the lines we care about.\nmacro_rules! no_merge {\n    () => {\n        for _ in 0..1 {}\n    };\n}\n\n#[derive(Clone, Copy, Debug)]\nenum Enum {\n    A(u32),\n    B(u32),\n    C(u32),\n    D(u32),\n}\n\nfn match_arms(value: Enum) {\n    no_merge!();\n\n    match value {\n        Enum::D(d) => consume(d),\n        Enum::C(c) => consume(c),\n        Enum::B(b) => consume(b),\n        Enum::A(a) => consume(a),\n    }\n\n    consume(0);\n}\n\nfn or_patterns(value: Enum) {\n    no_merge!();\n\n    match value {\n        Enum::D(x) | Enum::C(x) => consume(x),\n        Enum::B(y) | Enum::A(y) => consume(y),\n    }\n\n    consume(0);\n}\n\nfn guards(value: Enum, cond: bool) {\n    no_merge!();\n\n    match value {\n        Enum::D(d) if cond => consume(d),\n        Enum::C(c) if cond => consume(c),\n        Enum::B(b) if cond => consume(b),\n        Enum::A(a) if cond => consume(a),\n        _ => consume(0),\n    }\n\n    consume(0);\n}\n\n#[coverage(off)]\nfn consume<T>(x: T) {\n    core::hint::black_box(x);\n}\n\n#[coverage(off)]\nfn main() {\n    #[coverage(off)]\n    fn call_everything(e: Enum) {\n        match_arms(e);\n        or_patterns(e);\n        for cond in [false, false, true] {\n            guards(e, cond);\n        }\n    }\n\n    call_everything(Enum::A(0));\n    for b in 0..2 {\n        call_everything(Enum::B(b));\n    }\n    for c in 0..4 {\n        call_everything(Enum::C(c));\n    }\n    for d in 0..8 {\n        call_everything(Enum::D(d));\n    }\n}\n\n// FIXME(#124118) Actually instrument match arms for branch coverage.\n"
  },
  {
    "path": "rustc-coverage-tests/src/branch/match-trivial.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=branch\n//@ llvm-cov-flags: --show-branches=count\n\n// When instrumenting match expressions for branch coverage, make sure we don't\n// cause an ICE or produce weird coverage output for matches with <2 arms.\n\n// Helper macro to prevent start-of-function spans from being merged into\n// spans on the lines we care about.\nmacro_rules! no_merge {\n    () => {\n        for _ in 0..1 {}\n    };\n}\n\nenum Uninhabited {}\nenum Trivial {\n    Value,\n}\n\nfn _uninhabited(x: Uninhabited) {\n    no_merge!();\n\n    match x {}\n\n    consume(\"done\");\n}\n\nfn trivial(x: Trivial) {\n    no_merge!();\n\n    match x {\n        Trivial::Value => consume(\"trivial\"),\n    }\n\n    consume(\"done\");\n}\n\n#[coverage(off)]\nfn consume<T>(x: T) {\n    core::hint::black_box(x);\n}\n\n#[coverage(off)]\nfn main() {\n    trivial(Trivial::Value);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/branch/mod.rs",
    "content": "mod generics;\nmod guard;\n// #[path = \"if-let.rs\"]\n// mod if_let;\n#[path = \"if.rs\"]\nmod if_;\n#[path = \"lazy-boolean.rs\"]\nmod lazy_boolean;\n#[path = \"let-else.rs\"]\nmod let_else;\n#[path = \"match-arms.rs\"]\nmod match_arms;\n#[path = \"match-trivial.rs\"]\nmod match_trivial;\n#[path = \"no-mir-spans.rs\"]\nmod no_mir_spans;\n#[path = \"while.rs\"]\nmod while_;\n"
  },
  {
    "path": "rustc-coverage-tests/src/branch/no-mir-spans.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=branch,no-mir-spans\n//@ llvm-cov-flags: --show-branches=count\n\n// Tests the behaviour of the `-Zcoverage-options=no-mir-spans` debugging flag.\n// The actual code below is just some non-trivial code copied from another test\n// (`while.rs`), and has no particular significance.\n\nmacro_rules! no_merge {\n    () => {\n        for _ in 0..1 {}\n    };\n}\n\nfn while_cond() {\n    no_merge!();\n\n    let mut a = 8;\n    while a > 0 {\n        a -= 1;\n    }\n}\n\nfn while_cond_not() {\n    no_merge!();\n\n    let mut a = 8;\n    while !(a == 0) {\n        a -= 1;\n    }\n}\n\nfn while_op_and() {\n    no_merge!();\n\n    let mut a = 8;\n    let mut b = 4;\n    while a > 0 && b > 0 {\n        a -= 1;\n        b -= 1;\n    }\n}\n\nfn while_op_or() {\n    no_merge!();\n\n    let mut a = 4;\n    let mut b = 8;\n    while a > 0 || b > 0 {\n        a -= 1;\n        b -= 1;\n    }\n}\n\n#[coverage(off)]\nfn main() {\n    while_cond();\n    while_cond_not();\n    while_op_and();\n    while_op_or();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/branch/while.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=branch\n//@ llvm-cov-flags: --show-branches=count\n\nmacro_rules! no_merge {\n    () => {\n        for _ in 0..1 {}\n    };\n}\n\nfn while_cond() {\n    no_merge!();\n\n    let mut a = 8;\n    while a > 0 {\n        a -= 1;\n    }\n}\n\nfn while_cond_not() {\n    no_merge!();\n\n    let mut a = 8;\n    while !(a == 0) {\n        a -= 1;\n    }\n}\n\nfn while_op_and() {\n    no_merge!();\n\n    let mut a = 8;\n    let mut b = 4;\n    while a > 0 && b > 0 {\n        a -= 1;\n        b -= 1;\n    }\n}\n\nfn while_op_or() {\n    no_merge!();\n\n    let mut a = 4;\n    let mut b = 8;\n    while a > 0 || b > 0 {\n        a -= 1;\n        b -= 1;\n    }\n}\n\n#[coverage(off)]\nfn main() {\n    while_cond();\n    while_cond_not();\n    while_op_and();\n    while_op_or();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/closure.rs",
    "content": "#![allow(unused_assignments, unused_variables)]\n//@ compile-flags: -C opt-level=2\n\n// This test used to be sensitive to certain coverage-specific hacks in\n// `rustc_middle/mir/mono.rs`, but those hacks were later cleaned up by\n// <https://github.com/rust-lang/rust/pull/83666>.\n\n#[rustfmt::skip]\nfn main() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n    let is_false = !is_true;\n\n    let mut some_string = Some(String::from(\"the string content\"));\n    println!(\n        \"The string or alt: {}\"\n        ,\n        some_string\n            .\n            unwrap_or_else\n        (\n            ||\n            {\n                let mut countdown = 0;\n                if is_false {\n                    countdown = 10;\n                }\n                \"alt string 1\".to_owned()\n            }\n        )\n    );\n\n    some_string = Some(String::from(\"the string content\"));\n    let\n        a\n    =\n        ||\n    {\n        let mut countdown = 0;\n        if is_false {\n            countdown = 10;\n        }\n        \"alt string 2\".to_owned()\n    };\n    println!(\n        \"The string or alt: {}\"\n        ,\n        some_string\n            .\n            unwrap_or_else\n        (\n            a\n        )\n    );\n\n    some_string = None;\n    println!(\n        \"The string or alt: {}\"\n        ,\n        some_string\n            .\n            unwrap_or_else\n        (\n            ||\n            {\n                let mut countdown = 0;\n                if is_false {\n                    countdown = 10;\n                }\n                \"alt string 3\".to_owned()\n            }\n        )\n    );\n\n    some_string = None;\n    let\n        a\n    =\n        ||\n    {\n        let mut countdown = 
0;\n        if is_false {\n            countdown = 10;\n        }\n        \"alt string 4\".to_owned()\n    };\n    println!(\n        \"The string or alt: {}\"\n        ,\n        some_string\n            .\n            unwrap_or_else\n        (\n            a\n        )\n    );\n\n    let\n        quote_closure\n    =\n        |val|\n    {\n        let mut countdown = 0;\n        if is_false {\n            countdown = 10;\n        }\n        format!(\"'{}'\", val)\n    };\n    println!(\n        \"Repeated, quoted string: {:?}\"\n        ,\n        std::iter::repeat(\"repeat me\")\n            .take(5)\n            .map\n        (\n            quote_closure\n        )\n            .collect::<Vec<_>>()\n    );\n\n    let\n        _unused_closure\n    =\n        |\n            mut countdown\n        |\n    {\n        if is_false {\n            countdown = 10;\n        }\n        \"closure should be unused\".to_owned()\n    };\n\n    let mut countdown = 10;\n    let _short_unused_closure = | _unused_arg: u8 | countdown += 1;\n\n\n    let short_used_covered_closure_macro = | used_arg: u8 | println!(\"called\");\n    let short_used_not_covered_closure_macro = | used_arg: u8 | println!(\"not called\");\n    let _short_unused_closure_macro = | _unused_arg: u8 | println!(\"not called\");\n\n\n\n\n    let _short_unused_closure_block = | _unused_arg: u8 | { println!(\"not called\") };\n\n    let _shortish_unused_closure = | _unused_arg: u8 | {\n        println!(\"not called\")\n    };\n\n    let _as_short_unused_closure = |\n        _unused_arg: u8\n    | { println!(\"not called\") };\n\n    let _almost_as_short_unused_closure = |\n        _unused_arg: u8\n    | { println!(\"not called\") }\n    ;\n\n\n\n\n\n    let _short_unused_closure_line_break_no_block = | _unused_arg: u8 |\nprintln!(\"not called\")\n    ;\n\n    let _short_unused_closure_line_break_no_block2 =\n        | _unused_arg: u8 |\n            println!(\n                \"not called\"\n            )\n    
;\n\n    let short_used_not_covered_closure_line_break_no_block_embedded_branch =\n        | _unused_arg: u8 |\n            println!(\n                \"not called: {}\",\n                if is_true { \"check\" } else { \"me\" }\n            )\n    ;\n\n    let short_used_not_covered_closure_line_break_block_embedded_branch =\n        | _unused_arg: u8 |\n        {\n            println!(\n                \"not called: {}\",\n                if is_true { \"check\" } else { \"me\" }\n            )\n        }\n    ;\n\n    let short_used_covered_closure_line_break_no_block_embedded_branch =\n        | _unused_arg: u8 |\n            println!(\n                \"not called: {}\",\n                if is_true { \"check\" } else { \"me\" }\n            )\n    ;\n\n    let short_used_covered_closure_line_break_block_embedded_branch =\n        | _unused_arg: u8 |\n        {\n            println!(\n                \"not called: {}\",\n                if is_true { \"check\" } else { \"me\" }\n            )\n        }\n    ;\n\n    if is_false {\n        short_used_not_covered_closure_macro(0);\n        short_used_not_covered_closure_line_break_no_block_embedded_branch(0);\n        short_used_not_covered_closure_line_break_block_embedded_branch(0);\n    }\n    short_used_covered_closure_macro(0);\n    short_used_covered_closure_line_break_no_block_embedded_branch(0);\n    short_used_covered_closure_line_break_block_embedded_branch(0);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/closure_bug.rs",
    "content": "// Regression test for #115930.\n// All of these closures are identical, and should produce identical output in\n// the coverage report. However, an unstable sort was causing them to be treated\n// inconsistently when preparing coverage spans.\n\n#[rustfmt::skip]\nfn main() {\n    let truthy = std::env::args().len() == 1;\n\n    let a\n        =\n        |\n        |\n        if truthy { true } else { false };\n\n    a();\n    if truthy { a(); }\n\n    let b\n        =\n        |\n        |\n        if truthy { true } else { false };\n\n    b();\n    if truthy { b(); }\n\n    let c\n        =\n        |\n        |\n        if truthy { true } else { false };\n\n    c();\n    if truthy { c(); }\n\n    let d\n        =\n        |\n        |\n        if truthy { true } else { false };\n\n    d();\n    if truthy { d(); }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/closure_macro.rs",
    "content": "//@ edition: 2018\n\nmacro_rules! bail {\n    ($msg:literal $(,)?) => {\n        if $msg.len() > 0 {\n            println!(\"no msg\");\n        } else {\n            println!($msg);\n        }\n        return Err(String::from($msg));\n    };\n}\n\nmacro_rules! on_error {\n    ($value:expr, $error_message:expr) => {\n        $value.or_else(|e| {\n            // This closure, which is declared in a macro, should be instrumented.\n            let message = format!($error_message, e);\n            if message.len() > 0 {\n                println!(\"{}\", message);\n                Ok(String::from(\"ok\"))\n            } else {\n                bail!(\"error\");\n            }\n        })\n    };\n}\n\nfn load_configuration_files() -> Result<String, String> {\n    Ok(String::from(\"config\"))\n}\n\npub fn main() -> Result<(), String> {\n    println!(\"Starting service\");\n    let config = on_error!(load_configuration_files(), \"Error loading configs: {}\")?;\n\n    let startup_delay_duration = String::from(\"arg\");\n    let _ = (config, startup_delay_duration);\n    Ok(())\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/closure_macro_async.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2018\n\n//@ aux-build: executor.rs\nextern crate executor;\n\nmacro_rules! bail {\n    ($msg:literal $(,)?) => {\n        if $msg.len() > 0 {\n            println!(\"no msg\");\n        } else {\n            println!($msg);\n        }\n        return Err(String::from($msg));\n    };\n}\n\nmacro_rules! on_error {\n    ($value:expr, $error_message:expr) => {\n        $value.or_else(|e| {\n            // This closure, which is declared in a macro, should be instrumented.\n            let message = format!($error_message, e);\n            if message.len() > 0 {\n                println!(\"{}\", message);\n                Ok(String::from(\"ok\"))\n            } else {\n                bail!(\"error\");\n            }\n        })\n    };\n}\n\nfn load_configuration_files() -> Result<String, String> {\n    Ok(String::from(\"config\"))\n}\n\npub async fn test() -> Result<(), String> {\n    println!(\"Starting service\");\n    let config = on_error!(load_configuration_files(), \"Error loading configs: {}\")?;\n\n    let startup_delay_duration = String::from(\"arg\");\n    let _ = (config, startup_delay_duration);\n    Ok(())\n}\n\n#[coverage(off)]\nfn main() {\n    executor::block_on(test()).unwrap();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/closure_unit_return.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n\n// Regression test for an inconsistency between functions that return the value\n// of their trailing expression, and functions that implicitly return `()`.\n\nfn explicit_unit() {\n    let closure = || {\n        ();\n    };\n\n    drop(closure);\n    () // explicit return of trailing value\n}\n\nfn implicit_unit() {\n    let closure = || {\n        ();\n    };\n\n    drop(closure);\n    // implicit return of `()`\n}\n\n#[coverage(off)]\nfn main() {\n    explicit_unit();\n    implicit_unit();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/color.rs",
    "content": "//@ edition: 2021\n//@ ignore-coverage-map\n//@ ignore-windows\n//@ llvm-cov-flags: --use-color\n\n// Verify that telling `llvm-cov` to use colored output actually works.\n// Ignored on Windows because we can't tell the tool to use ANSI escapes.\n\nfn main() {\n    for _i in 0..0 {}\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/condition/conditions.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=condition\n//@ llvm-cov-flags: --show-branches=count\n\nuse core::hint::black_box;\n\nfn simple_assign(a: bool) {\n    let x = a;\n    black_box(x);\n}\n\nfn assign_and(a: bool, b: bool) {\n    let x = a && b;\n    black_box(x);\n}\n\nfn assign_or(a: bool, b: bool) {\n    let x = a || b;\n    black_box(x);\n}\n\nfn assign_3_or_and(a: bool, b: bool, c: bool) {\n    let x = a || b && c;\n    black_box(x);\n}\n\nfn assign_3_and_or(a: bool, b: bool, c: bool) {\n    let x = a && b || c;\n    black_box(x);\n}\n\nfn foo(a: bool) -> bool {\n    black_box(a)\n}\n\nfn func_call(a: bool, b: bool) {\n    foo(a && b);\n}\n\n#[coverage(off)]\nfn main() {\n    simple_assign(true);\n    simple_assign(false);\n\n    assign_and(true, false);\n    assign_and(true, true);\n    assign_and(false, false);\n\n    assign_or(true, false);\n    assign_or(true, true);\n    assign_or(false, false);\n\n    assign_3_or_and(true, false, false);\n    assign_3_or_and(true, true, false);\n    assign_3_or_and(false, false, true);\n    assign_3_or_and(false, true, true);\n\n    assign_3_and_or(true, false, false);\n    assign_3_and_or(true, true, false);\n    assign_3_and_or(false, false, true);\n    assign_3_and_or(false, true, true);\n\n    func_call(true, false);\n    func_call(true, true);\n    func_call(false, false);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/condition/mod.rs",
    "content": "mod conditions;\n"
  },
  {
    "path": "rustc-coverage-tests/src/conditions.rs",
    "content": "#![allow(unused_assignments, unused_variables)]\n\nfn main() {\n    let mut countdown = 0;\n    if true {\n        countdown = 10;\n    }\n\n    const B: u32 = 100;\n    let x = if countdown > 7 {\n        countdown -= 4;\n        B\n    } else if countdown > 2 {\n        if countdown < 1 || countdown > 5 || countdown != 9 {\n            countdown = 0;\n        }\n        countdown -= 5;\n        countdown\n    } else {\n        return;\n    };\n\n    let mut countdown = 0;\n    if true {\n        countdown = 10;\n    }\n\n    if countdown > 7 {\n        countdown -= 4;\n    } else if countdown > 2 {\n        if countdown < 1 || countdown > 5 || countdown != 9 {\n            countdown = 0;\n        }\n        countdown -= 5;\n    } else {\n        return;\n    }\n\n    if true {\n        let mut countdown = 0;\n        if true {\n            countdown = 10;\n        }\n\n        if countdown > 7 {\n            countdown -= 4;\n        }\n        //\n        else if countdown > 2 {\n            if countdown < 1 || countdown > 5 || countdown != 9 {\n                countdown = 0;\n            }\n            countdown -= 5;\n        } else {\n            return;\n        }\n    }\n\n    let mut countdown = 0;\n    if true {\n        countdown = 1;\n    }\n\n    let z = if countdown > 7 {\n        countdown -= 4;\n    } else if countdown > 2 {\n        if countdown < 1 || countdown > 5 || countdown != 9 {\n            countdown = 0;\n        }\n        countdown -= 5;\n    } else {\n        let should_be_reachable = countdown;\n        println!(\"reached\");\n        return;\n    };\n\n    let w = if countdown > 7 {\n        countdown -= 4;\n    } else if countdown > 2 {\n        if countdown < 1 || countdown > 5 || countdown != 9 {\n            countdown = 0;\n        }\n        countdown -= 5;\n    } else {\n        return;\n    };\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/continue.rs",
    "content": "#![allow(unused_assignments, unused_variables)]\n\nfn main() {\n    let is_true = std::env::args().len() == 1;\n\n    let mut x = 0;\n    for _ in 0..10 {\n        match is_true {\n            true => {\n                continue;\n            }\n            _ => {\n                x = 1;\n            }\n        }\n        x = 3;\n    }\n    for _ in 0..10 {\n        match is_true {\n            false => {\n                x = 1;\n            }\n            _ => {\n                continue;\n            }\n        }\n        x = 3;\n    }\n    for _ in 0..10 {\n        match is_true {\n            true => {\n                x = 1;\n            }\n            _ => {\n                continue;\n            }\n        }\n        x = 3;\n    }\n    for _ in 0..10 {\n        if is_true {\n            continue;\n        }\n        x = 3;\n    }\n    for _ in 0..10 {\n        match is_true {\n            false => {\n                x = 1;\n            }\n            _ => {\n                let _ = x;\n            }\n        }\n        x = 3;\n    }\n    for _ in 0..10 {\n        match is_true {\n            false => {\n                x = 1;\n            }\n            _ => {\n                break;\n            }\n        }\n        x = 3;\n    }\n    let _ = x;\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/coroutine.rs",
    "content": "#![feature(coroutines, coroutine_trait, stmt_expr_attributes)]\n\nuse std::ops::{Coroutine, CoroutineState};\nuse std::pin::Pin;\n\n// The following implementation of a function called from a `yield` statement\n// (apparently requiring the Result and the `String` type or constructor)\n// creates conditions where the `coroutine::StateTransform` MIR transform will\n// drop all `Counter` `Coverage` statements from a MIR. `simplify.rs` has logic\n// to handle this condition, and still report dead block coverage.\nfn get_u32(val: bool) -> Result<u32, String> {\n    if val {\n        Ok(1) //\n    } else {\n        Err(String::from(\"some error\")) //\n    }\n}\n\nfn main() {\n    let is_true = std::env::args().len() == 1;\n    let mut coroutine = #[coroutine]\n    || {\n        yield get_u32(is_true);\n        return \"foo\";\n    };\n\n    match Pin::new(&mut coroutine).resume(()) {\n        CoroutineState::Yielded(Ok(1)) => {}\n        _ => panic!(\"unexpected return from resume\"),\n    }\n    match Pin::new(&mut coroutine).resume(()) {\n        CoroutineState::Complete(\"foo\") => {}\n        _ => panic!(\"unexpected return from resume\"),\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/coverage_attr_closure.rs",
    "content": "#![feature(coverage_attribute, stmt_expr_attributes)]\n#![allow(dead_code)]\n//@ edition: 2021\n\nstatic GLOBAL_CLOSURE_ON: fn(&str) = #[coverage(on)]\n|input: &str| {\n    println!(\"{input}\");\n};\nstatic GLOBAL_CLOSURE_OFF: fn(&str) = #[coverage(off)]\n|input: &str| {\n    println!(\"{input}\");\n};\n\n#[coverage(on)]\nfn contains_closures_on() {\n    let _local_closure_on = #[coverage(on)]\n    |input: &str| {\n        println!(\"{input}\");\n    };\n    let _local_closure_off = #[coverage(off)]\n    |input: &str| {\n        println!(\"{input}\");\n    };\n}\n\n#[coverage(off)]\nfn contains_closures_off() {\n    let _local_closure_on = #[coverage(on)]\n    |input: &str| {\n        println!(\"{input}\");\n    };\n    let _local_closure_off = #[coverage(off)]\n    |input: &str| {\n        println!(\"{input}\");\n    };\n}\n\n#[coverage(off)]\nfn main() {\n    contains_closures_on();\n    contains_closures_off();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/dead_code.rs",
    "content": "#![allow(dead_code, unused_assignments, unused_variables)]\n\npub fn unused_pub_fn_not_in_library() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n\n    let mut countdown = 0;\n    if is_true {\n        countdown = 10;\n    }\n}\n\nfn unused_fn() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n\n    let mut countdown = 0;\n    if is_true {\n        countdown = 10;\n    }\n}\n\nfn main() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n\n    let mut countdown = 0;\n    if is_true {\n        countdown = 10;\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/discard-all-issue-133606.rs",
    "content": "//! Regression test for <https://github.com/rust-lang/rust/issues/133606>.\n//!\n//! In rare cases, all of a function's coverage spans are discarded at a late\n//! stage during codegen. When that happens, the subsequent code needs to take\n//! special care to avoid emitting coverage metadata that would cause `llvm-cov`\n//! to fail with a fatal error.\n//!\n//! We currently don't know of a concise way to reproduce that scenario with\n//! ordinary Rust source code, so instead we set a special testing-only flag to\n//! force it to occur.\n\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=discard-all-spans-in-codegen\n\n// The `llvm-cov` tool will complain if the test binary ends up having no\n// coverage metadata at all. To prevent that, we also link to instrumented\n// code in an auxiliary crate that doesn't have the special flag set.\n\n//@ aux-build: discard_all_helper.rs\nextern crate discard_all_helper;\n\nfn main() {\n    discard_all_helper::external_function();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/drop_trait.rs",
    "content": "#![allow(unused_assignments)]\n//@ failure-status: 1\n\nstruct Firework {\n    strength: i32,\n}\n\nimpl Drop for Firework {\n    fn drop(&mut self) {\n        println!(\"BOOM times {}!!!\", self.strength);\n    }\n}\n\nfn main() -> Result<(), u8> {\n    let _firecracker = Firework { strength: 1 };\n\n    let _tnt = Firework { strength: 100 };\n\n    if true {\n        println!(\"Exiting with error...\");\n        return Err(1);\n    }\n\n    let _ = Firework { strength: 1000 };\n\n    Ok(())\n}\n\n// Expected program output:\n//   Exiting with error...\n//   BOOM times 100!!!\n//   BOOM times 1!!!\n//   Error: 1\n"
  },
  {
    "path": "rustc-coverage-tests/src/fn_sig_into_try.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n\n// Regression test for inconsistent handling of function signature spans that\n// are followed by code using the `?` operator.\n//\n// For each of these similar functions, the line containing the function\n// signature should be handled in the same way.\n\nfn a() -> Option<i32>\n//\n{\n    Some(7i32);\n    Some(0)\n}\n\nfn b() -> Option<i32>\n//\n{\n    Some(7i32)?;\n    Some(0)\n}\n\nfn c() -> Option<i32>\n//\n{\n    let _ = Some(7i32)?;\n    Some(0)\n}\n\nfn d() -> Option<i32>\n//\n{\n    let _: () = ();\n    Some(7i32)?;\n    Some(0)\n}\n\n#[coverage(off)]\nfn main() {\n    a();\n    b();\n    c();\n    d();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/generic-unused-impl.rs",
    "content": "// Regression test for #135235.\ntrait Foo {\n    type Assoc;\n\n    fn from(s: Self::Assoc) -> Self;\n}\n\nstruct W<T>(T);\n\nimpl<T: Foo> From<[T::Assoc; 1]> for W<T> {\n    fn from(from: [T::Assoc; 1]) -> Self {\n        let [item] = from;\n        W(Foo::from(item))\n    }\n}\n\nfn main() {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/generics.rs",
    "content": "#![allow(unused_assignments)]\n//@ failure-status: 1\n\nstruct Firework<T: Copy + std::fmt::Display> {\n    strength: T,\n}\n\nimpl<T: Copy + std::fmt::Display> Firework<T> {\n    #[inline(always)]\n    fn set_strength(&mut self, new_strength: T) {\n        self.strength = new_strength;\n    }\n}\n\nimpl<T: Copy + std::fmt::Display> Drop for Firework<T> {\n    #[inline(always)]\n    fn drop(&mut self) {\n        println!(\"BOOM times {}!!!\", self.strength);\n    }\n}\n\nfn main() -> Result<(), u8> {\n    let mut firecracker = Firework { strength: 1 };\n    firecracker.set_strength(2);\n\n    let mut tnt = Firework { strength: 100.1 };\n    tnt.set_strength(200.1);\n    tnt.set_strength(300.3);\n\n    if true {\n        println!(\"Exiting with error...\");\n        return Err(1);\n    }\n\n    let _ = Firework { strength: 1000 };\n\n    Ok(())\n}\n\n// Expected program output:\n//   Exiting with error...\n//   BOOM times 100!!!\n//   BOOM times 1!!!\n//   Error: 1\n"
  },
  {
    "path": "rustc-coverage-tests/src/holes.rs",
    "content": "//@ edition: 2021\n\n// Nested items/closures should be treated as \"holes\", so that their spans are\n// not displayed as executable code in the enclosing function.\n\nuse core::hint::black_box;\n\nfn main() {\n    black_box(());\n\n    static MY_STATIC: () = ();\n\n    black_box(());\n\n    const MY_CONST: () = ();\n\n    // Splitting this across multiple lines makes it easier to see where the\n    // coverage mapping regions begin and end.\n    #[rustfmt::skip]\n    let _closure =\n        |\n            _arg: (),\n        |\n        {\n            black_box(());\n        }\n        ;\n\n    black_box(());\n\n    fn _unused_fn() {}\n\n    black_box(());\n\n    struct MyStruct {\n        _x: u32,\n        _y: u32,\n    }\n\n    black_box(());\n\n    impl MyStruct {\n        fn _method(&self) {}\n    }\n\n    black_box(());\n\n    trait MyTrait {}\n\n    black_box(());\n\n    impl MyTrait for MyStruct {}\n\n    black_box(());\n\n    macro_rules! _my_macro {\n        () => {};\n    }\n\n    black_box(());\n\n    #[rustfmt::skip]\n    let _const =\n        const\n        {\n            7 + 4\n        }\n        ;\n\n    black_box(());\n\n    #[rustfmt::skip]\n    let _async =\n        async\n        {\n            7 + 4\n        }\n        ;\n\n    black_box(());\n\n    // This tests the edge case of a const block nested inside an \"anon const\",\n    // such as the length of an array literal. Handling this case requires\n    // `nested_filter::OnlyBodies` or equivalent.\n    #[rustfmt::skip]\n    let _const_block_inside_anon_const =\n        [\n            0\n            ;\n            7\n            +\n            const\n            {\n                3\n            }\n        ]\n        ;\n\n    black_box(());\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/if.rs",
    "content": "#![allow(unused_assignments, unused_variables)]\n\n#[rustfmt::skip]\nfn main() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let\n    is_true\n    =\n        std::env::args().len()\n    ==\n        1\n    ;\n    let\n        mut\n    countdown\n    =\n        0\n    ;\n    if\n        is_true\n    {\n        countdown\n        =\n            10\n        ;\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/if_else.rs",
    "content": "#![allow(unused_assignments, unused_variables)]\n\n#[rustfmt::skip]\nfn main() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n\n    let mut countdown = 0;\n    if\n        is_true\n    {\n        countdown\n        =\n            10\n        ;\n    }\n    else // Note coverage region difference without semicolon\n    {\n        countdown\n        =\n            100\n    }\n\n    if\n        is_true\n    {\n        countdown\n        =\n            10\n        ;\n    }\n    else\n    {\n        countdown\n        =\n            100\n        ;\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/if_not.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n\n#[rustfmt::skip]\nfn if_not(cond: bool) {\n    if\n        !\n        cond\n    {\n        println!(\"cond was false\");\n    }\n\n    if\n        !\n        cond\n    {\n        println!(\"cond was false\");\n    }\n\n    if\n        !\n        cond\n    {\n        println!(\"cond was false\");\n    } else {\n        println!(\"cond was true\");\n    }\n}\n\n#[coverage(off)]\nfn main() {\n    for _ in 0..8 {\n        if_not(std::hint::black_box(true));\n    }\n    for _ in 0..4 {\n        if_not(std::hint::black_box(false));\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/ignore_map.rs",
    "content": "//@ ignore-coverage-map\n\nfn main() {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/ignore_run.rs",
    "content": "//@ ignore-coverage-run\n\nfn main() {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/inline-dead.rs",
    "content": "// Regression test for issue #98833.\n//@ compile-flags: -Zinline-mir -Cdebug-assertions=off\n\nfn main() {\n    println!(\"{}\", live::<false>());\n\n    let f = |x: bool| {\n        debug_assert!(x);\n    };\n    f(false);\n}\n\n#[inline]\nfn live<const B: bool>() -> u32 {\n    if B {\n        dead() //\n    } else {\n        0\n    }\n}\n\n#[inline]\nfn dead() -> u32 {\n    42\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/inline.rs",
    "content": "//@ compile-flags: -Zinline-mir\n\nuse std::fmt::Display;\n\nfn main() {\n    permutations(&['a', 'b', 'c']);\n}\n\n#[inline(always)]\nfn permutations<T: Copy + Display>(xs: &[T]) {\n    let mut ys = xs.to_owned();\n    permutate(&mut ys, 0);\n}\n\nfn permutate<T: Copy + Display>(xs: &mut [T], k: usize) {\n    let n = length(xs);\n    if k == n {\n        display(xs);\n    } else if k < n {\n        for i in k..n {\n            swap(xs, i, k);\n            permutate(xs, k + 1);\n            swap(xs, i, k);\n        }\n    } else {\n        error();\n    }\n}\n\nfn length<T>(xs: &[T]) -> usize {\n    xs.len()\n}\n\n#[inline]\nfn swap<T: Copy>(xs: &mut [T], i: usize, j: usize) {\n    let t = xs[i];\n    xs[i] = xs[j];\n    xs[j] = t;\n}\n\nfn display<T: Display>(xs: &[T]) {\n    for x in xs {\n        print!(\"{}\", x);\n    }\n    println!();\n}\n\n#[inline(always)]\nfn error() {\n    panic!(\"error\");\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/inline_mixed.rs",
    "content": "//@ edition: 2021\n//@ compile-flags: -Cinstrument-coverage=off\n//@ ignore-coverage-run\n//@ aux-crate: inline_mixed_helper=inline_mixed_helper.rs\n\n// Regression test for <https://github.com/rust-lang/rust/pull/132395>.\n// Various forms of cross-crate inlining can cause coverage statements to be\n// inlined into crates that are being built without coverage instrumentation.\n// At the very least, we need to not ICE when that happens.\n\nfn main() {\n    inline_mixed_helper::inline_me();\n    inline_mixed_helper::no_inlining_please();\n    inline_mixed_helper::generic::<u32>();\n}\n\n// FIXME(#132437): We currently don't test this in coverage-run mode, because\n// whether or not it produces a `.profraw` file appears to differ between\n// platforms.\n"
  },
  {
    "path": "rustc-coverage-tests/src/inner_items.rs",
    "content": "#![allow(unused_assignments, unused_variables, dead_code)]\n\nfn main() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n\n    let mut countdown = 0;\n    if is_true {\n        countdown = 10;\n    }\n\n    mod in_mod {\n        const IN_MOD_CONST: u32 = 1000;\n    }\n\n    fn in_func(a: u32) {\n        let b = 1;\n        let c = a + b;\n        println!(\"c = {}\", c)\n    }\n\n    struct InStruct {\n        in_struct_field: u32,\n    }\n\n    const IN_CONST: u32 = 1234;\n\n    trait InTrait {\n        fn trait_func(&mut self, incr: u32);\n\n        fn default_trait_func(&mut self) {\n            in_func(IN_CONST);\n            self.trait_func(IN_CONST);\n        }\n    }\n\n    impl InTrait for InStruct {\n        fn trait_func(&mut self, incr: u32) {\n            self.in_struct_field += incr;\n            in_func(self.in_struct_field);\n        }\n    }\n\n    type InType = String;\n\n    if is_true {\n        in_func(countdown);\n    }\n\n    let mut val = InStruct {\n        in_struct_field: 101, //\n    };\n\n    val.default_trait_func();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/issue-83601.rs",
    "content": "// Shows that rust-lang/rust/83601 is resolved\n\n#[derive(Debug, PartialEq, Eq)]\nstruct Foo(u32);\n\nfn main() {\n    let bar = Foo(1);\n    assert_eq!(bar, Foo(1));\n    let baz = Foo(0);\n    assert_ne!(baz, Foo(1));\n    println!(\"{:?}\", Foo(1));\n    println!(\"{:?}\", bar);\n    println!(\"{:?}\", baz);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/issue-84561.rs",
    "content": "// This demonstrated Issue #84561: function-like macros produce unintuitive coverage results.\n\n//@ failure-status: 101\n#[derive(PartialEq, Eq)]\nstruct Foo(u32);\n\n#[rustfmt::skip]\nfn test3() {\n    let is_true = std::env::args().len() == 1;\n    let bar = Foo(1);\n    assert_eq!(bar, Foo(1));\n    let baz = Foo(0);\n    assert_ne!(baz, Foo(1));\n    println!(\"{:?}\", Foo(1));\n    println!(\"{:?}\", bar);\n    println!(\"{:?}\", baz);\n\n    assert_eq!(Foo(1), Foo(1));\n    assert_ne!(Foo(0), Foo(1));\n    assert_eq!(Foo(2), Foo(2));\n    let bar = Foo(0);\n    assert_ne!(bar, Foo(3));\n    assert_ne!(Foo(0), Foo(4));\n    assert_eq!(Foo(3), Foo(3), \"with a message\");\n    println!(\"{:?}\", bar);\n    println!(\"{:?}\", Foo(1));\n\n    assert_ne!(Foo(0), Foo(5), \"{}\", if is_true { \"true message\" } else { \"false message\" });\n    assert_ne!(\n        Foo(0)\n        ,\n        Foo(5)\n        ,\n        \"{}\"\n        ,\n        if\n        is_true\n        {\n            \"true message\"\n        } else {\n            \"false message\"\n        }\n    );\n\n    let is_true = std::env::args().len() == 1;\n\n    assert_eq!(\n        Foo(1),\n        Foo(1)\n    );\n    assert_ne!(\n        Foo(0),\n        Foo(1)\n    );\n    assert_eq!(\n        Foo(2),\n        Foo(2)\n    );\n    let bar = Foo(1);\n    assert_ne!(\n        bar,\n        Foo(3)\n    );\n    if is_true {\n        assert_ne!(\n            Foo(0),\n            Foo(4)\n        );\n    } else {\n        assert_eq!(\n            Foo(3),\n            Foo(3)\n        );\n    }\n    if is_true {\n        assert_ne!(\n            Foo(0),\n            Foo(4),\n            \"with a message\"\n        );\n    } else {\n        assert_eq!(\n            Foo(3),\n            Foo(3),\n            \"with a message\"\n        );\n    }\n    assert_ne!(\n        if is_true {\n            Foo(0)\n        } else {\n            Foo(1)\n        },\n        Foo(5)\n    );\n    assert_ne!(\n 
       Foo(5),\n        if is_true {\n            Foo(0)\n        } else {\n            Foo(1)\n        }\n    );\n    assert_ne!(\n        if is_true {\n            assert_eq!(\n                Foo(3),\n                Foo(3)\n            );\n            Foo(0)\n        } else {\n            assert_ne!(\n                if is_true {\n                    Foo(0)\n                } else {\n                    Foo(1)\n                },\n                Foo(5)\n            );\n            Foo(1)\n        },\n        Foo(5),\n        \"with a message\"\n    );\n    assert_eq!(\n        Foo(1),\n        Foo(3),\n        \"this assert should fail\"\n    );\n    assert_eq!(\n        Foo(3),\n        Foo(3),\n        \"this assert should not be reached\"\n    );\n}\n\nimpl std::fmt::Debug for Foo {\n    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n        write!(f, \"try and succeed\")?;\n        Ok(())\n    }\n}\n\nstatic mut DEBUG_LEVEL_ENABLED: bool = false;\n\nmacro_rules! debug {\n    ($($arg:tt)+) => (\n        if unsafe { DEBUG_LEVEL_ENABLED } {\n            println!($($arg)+);\n        }\n    );\n}\n\nfn test1() {\n    debug!(\"debug is enabled\");\n    debug!(\"debug is enabled\");\n    let _ = 0;\n    debug!(\"debug is enabled\");\n    unsafe {\n        DEBUG_LEVEL_ENABLED = true;\n    }\n    debug!(\"debug is enabled\");\n}\n\nmacro_rules! call_debug {\n    ($($arg:tt)+) => (\n        fn call_print(s: &str) {\n            print!(\"{}\", s);\n        }\n\n        call_print(\"called from call_debug: \");\n        debug!($($arg)+);\n    );\n}\n\nfn test2() {\n    call_debug!(\"debug is enabled\");\n}\n\nfn main() {\n    test1();\n    test2();\n    test3();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/issue-85461.rs",
    "content": "// Regression test for #85461: MSVC sometimes fail to link with dead code and #[inline(always)]\n\n//@ aux-build:inline_always_with_dead_code.rs\nextern crate inline_always_with_dead_code;\n\nuse inline_always_with_dead_code::{bar, baz};\n\nfn main() {\n    bar::call_me();\n    baz::call_me();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/issue-93054.rs",
    "content": "#![allow(dead_code, unreachable_code)]\n//@ edition: 2021\n\n// Regression test for #93054: Functions using uninhabited types often only have a single,\n// unreachable basic block which doesn't get instrumented. This should not cause llvm-cov to fail.\n// Since these kinds functions can't be invoked anyway, it's ok to not have coverage data for them.\n\nenum Never {}\n\nimpl Never {\n    fn foo(self) {\n        match self {}\n        make().map(|never| match never {});\n    }\n\n    fn bar(&self) {\n        match *self {}\n    }\n}\n\nasync fn foo2(never: Never) {\n    match never {}\n}\n\nfn make() -> Option<Never> {\n    None\n}\n\nfn main() {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/lazy_boolean.rs",
    "content": "#![allow(unused_assignments, unused_variables)]\n\n#[rustfmt::skip]\nfn main() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n\n    let (mut a, mut b, mut c) = (0, 0, 0);\n    if is_true {\n        a = 1;\n        b = 10;\n        c = 100;\n    }\n    let\n        somebool\n        =\n            a < b\n        ||\n            b < c\n    ;\n    let\n        somebool\n        =\n            b < a\n        ||\n            b < c\n    ;\n    let somebool = a < b && b < c;\n    let somebool = b < a && b < c;\n\n    if\n        !\n        is_true\n    {\n        a = 2\n        ;\n    }\n\n    if\n        is_true\n    {\n        b = 30\n        ;\n    }\n    else\n    {\n        c = 400\n        ;\n    }\n\n    if !is_true {\n        a = 2;\n    }\n\n    if is_true {\n        b = 30;\n    } else {\n        c = 400;\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/let_else_loop.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n\n// Regression test for <https://github.com/rust-lang/rust/issues/122738>.\n// These code patterns should not trigger an ICE when allocating a physical\n// counter to a node and also one of its in-edges, because that is allowed\n// when the node contains a tight loop to itself.\n\nfn loopy(cond: bool) {\n    let true = cond else { loop {} };\n}\n\n// Variant that also has `loop {}` on the success path.\n// This isn't needed to catch the original ICE, but might help detect regressions.\nfn _loop_either_way(cond: bool) {\n    let true = cond else { loop {} };\n    loop {}\n}\n\n// Variant using regular `if` instead of let-else.\n// This doesn't trigger the original ICE, but might help detect regressions.\nfn _if(cond: bool) {\n    if cond {\n        loop {}\n    } else {\n        loop {}\n    }\n}\n\n#[coverage(off)]\nfn main() {\n    loopy(true);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/lib.rs",
    "content": "#![feature(coverage_attribute)]\n#![allow(unused_attributes)]\n#![allow(dead_code)]\n#![allow(unreachable_code)]\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod attr;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod auxiliary;\n/* Modules that are commented out are not used by any test target.\nThey are kept in case they need to be added to a target in the future. */\n// mod branch;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\"\n))]\nmod abort;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\"\n))]\nmod assert;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\n#[path = \"assert-ne.rs\"]\nmod assert_ne;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod assert_not;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod condition;\nmod mcdc;\n// mod async_block;\n// mod async_closure;\n// mod r#async;\n// mod async2;\n// mod await_ready;\n// mod bad_counter_ids;\n// mod bench;\n// mod closure_bug;\n// mod closure_macro_async;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\nmod closure_macro;\n// mod closure;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\nmod closure_unit_return;\n#[cfg(any(feature = \"json\", feature = \"lean\"))]\nmod color;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\nmod 
conditions;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\"))]\n#[path = \"continue.rs\"]\nmod continue_;\n// mod coroutine;\n// mod coverage_attr_closure;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\nmod dead_code;\n// #[path = \"discard-all-issue-133606.rs\"]\n// mod discard_all_issue_133606;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\nmod drop_trait;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod fn_sig_into_try;\n#[cfg(any(feature = \"json\", feature = \"fstar\", feature = \"coq\"))]\nmod generics;\n// #[path = \"generic-unused-impl.rs\"]\n// mod generic_unused_impl;\n// mod holes;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\n#[path = \"if.rs\"]\nmod if_;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\nmod if_else;\n#[cfg(any(feature = \"json\", feature = \"lean\"))]\nmod if_not;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod ignore_map;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod ignore_run;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\n#[path = \"inline-dead.rs\"]\nmod inline_dead;\n// mod inline_mixed;\n#[cfg(any(feature = \"json\", feature = \"lean\"))]\nmod inline;\n#[cfg(any(feature = \"json\", feature = \"lean\"))]\nmod inner_items;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\n#[path = \"issue-83601.rs\"]\nmod issue_83601;\n// #[path = \"issue-84561.rs\"]\n// mod issue_84561;\n// 
#[path = \"issue-85461.rs\"]\n// mod issue_85461;\n// #[path = \"issue-93054.rs\"]\n// mod issue_93054;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\nmod lazy_boolean;\n#[cfg(any(feature = \"json\", feature = \"lean\"))]\nmod let_else_loop;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod long_and_wide;\n#[cfg(any(feature = \"json\", feature = \"lean\"))]\n#[path = \"loop-break.rs\"]\nmod loop_break;\n#[cfg(any(feature = \"json\", feature = \"lean\"))]\nmod loop_break_value;\n#[cfg(any(feature = \"json\", feature = \"lean\"))]\nmod loops_branches;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod macro_in_closure;\n// mod macro_name_span;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\nmod match_or_pattern;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\"))]\nmod nested_loops;\n// #[path = \"no-core.rs\"]\n// mod no_core;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\nmod no_cov_crate;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\nmod no_spans;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod no_spans_if_not;\n#[cfg(any(feature = \"json\", feature = \"fstar\"))]\nmod overflow;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\"\n))]\nmod panic_unwind;\n#[cfg(any(feature = \"json\", feature = \"fstar\", feature = \"coq\"))]\nmod partial_eq;\n#[cfg(any(feature = \"json\", feature = \"lean\"))]\nmod simple_loop;\n#[cfg(any(feature = \"json\", feature = \"lean\"))]\nmod 
simple_match;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\nmod sort_groups;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod test_harness;\n#[cfg(any(feature = \"json\", feature = \"lean\"))]\nmod tight_inf_loop;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod trivial;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\", feature = \"coq\"))]\nmod try_error_result;\n#[cfg(any(feature = \"json\"))]\nmod unicode;\n// mod unreachable;\n#[cfg(any(feature = \"json\", feature = \"lean\", feature = \"fstar\"))]\nmod unused;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"lean\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod unused_mod;\n// mod uses_crate;\n// mod uses_inline_crate;\n#[cfg(any(feature = \"json\", feature = \"lean\"))]\n#[path = \"while.rs\"]\nmod while_;\n#[cfg(any(feature = \"json\", feature = \"fstar\"))]\nmod while_early_ret;\n// mod r#yield;\n"
  },
  {
    "path": "rustc-coverage-tests/src/long_and_wide.rs",
    "content": "//@ edition: 2021\n// ignore-tidy-linelength\n\n// This file deliberately contains line and column numbers larger than 127,\n// to verify that `coverage-dump`'s ULEB128 parser can handle them.\n\nfn main() {\n    wide_function();\n    long_function();\n    far_function();\n}\n\n#[rustfmt::skip]\nfn wide_function() { /*                                                                                                           */ (); }\n\nfn long_function() {\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n    //\n}\n\nfn far_function() {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/loop-break.rs",
    "content": "//@ edition: 2021\n\nfn main() {\n    loop {\n        if core::hint::black_box(true) {\n            break;\n        }\n    }\n}\n\n// This test is a lightly-modified version of `tests/mir-opt/coverage/instrument_coverage.rs`.\n// If this test needs to be blessed, then the mir-opt version probably needs to\n// be blessed too!\n"
  },
  {
    "path": "rustc-coverage-tests/src/loop_break_value.rs",
    "content": "#![allow(unused_assignments, unused_variables)]\n\n#[rustfmt::skip]\nfn main() {\n    let result\n        =\n            loop\n        {\n            break\n            10\n            ;\n        }\n    ;\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/loops_branches.rs",
    "content": "#![allow(unused_assignments, unused_variables, while_true)]\n\n// This test confirms that (1) unexecuted infinite loops are handled correctly by the\n// InstrumentCoverage MIR pass; and (2) Counter Expressions that subtract from zero can be dropped.\n\nstruct DebugTest;\n\nimpl std::fmt::Debug for DebugTest {\n    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n        if true {\n            if false {\n                while true {}\n            }\n            write!(f, \"cool\")?;\n        } else {\n        }\n\n        for i in 0..10 {\n            if true {\n                if false {\n                    while true {}\n                }\n                write!(f, \"cool\")?;\n            } else {\n            }\n        }\n        Ok(())\n    }\n}\n\nstruct DisplayTest;\n\nimpl std::fmt::Display for DisplayTest {\n    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n        if false {\n        } else {\n            if false {\n                while true {}\n            }\n            write!(f, \"cool\")?;\n        }\n        for i in 0..10 {\n            if false {\n            } else {\n                if false {\n                    while true {}\n                }\n                write!(f, \"cool\")?;\n            }\n        }\n        Ok(())\n    }\n}\n\nfn main() {\n    let debug_test = DebugTest;\n    println!(\"{:?}\", debug_test);\n    let display_test = DisplayTest;\n    println!(\"{}\", display_test);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/macro_in_closure.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n\n// If a closure body consists entirely of a single bang-macro invocation, the\n// body span ends up inside the macro-expansion, so we need to un-expand it\n// back to the declaration site.\nstatic NO_BLOCK: fn() = || println!(\"hello\");\n\nstatic WITH_BLOCK: fn() = || {\n    println!(\"hello\");\n};\n\n#[coverage(off)]\nfn main() {\n    NO_BLOCK();\n    WITH_BLOCK();\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/macro_name_span.rs",
    "content": "//@ edition: 2021\n\n// Regression test for <https://github.com/rust-lang/rust/issues/117788>.\n// Under some circumstances, the heuristics that detect macro name spans can\n// get confused and produce incorrect spans beyond the bounds of the span\n// being processed.\n\n//@ aux-build: macro_name_span_helper.rs\nextern crate macro_name_span_helper;\n\nfn main() {\n    affected_function();\n}\n\nmacro_rules! macro_with_an_unreasonably_and_egregiously_long_name {\n    () => {\n        println!(\"hello\");\n    };\n}\n\nmacro_name_span_helper::macro_that_defines_a_function! {\n    fn affected_function() {\n        macro_with_an_unreasonably_and_egregiously_long_name!();\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/match_or_pattern.rs",
    "content": "fn main() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n\n    let mut a: u8 = 0;\n    let mut b: u8 = 0;\n    if is_true {\n        a = 2;\n        b = 0;\n    }\n    match (a, b) {\n        // Or patterns generate MIR `SwitchInt` with multiple targets to the same `BasicBlock`.\n        // This test confirms a fix for Issue #79569.\n        (0 | 1, 2 | 3) => {}\n        _ => {}\n    }\n    if is_true {\n        a = 0;\n        b = 0;\n    }\n    match (a, b) {\n        (0 | 1, 2 | 3) => {}\n        _ => {}\n    }\n    if is_true {\n        a = 2;\n        b = 2;\n    }\n    match (a, b) {\n        (0 | 1, 2 | 3) => {}\n        _ => {}\n    }\n    if is_true {\n        a = 0;\n        b = 2;\n    }\n    match (a, b) {\n        (0 | 1, 2 | 3) => {}\n        _ => {}\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/mcdc/condition-limit.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=mcdc\n//@ llvm-cov-flags: --show-branches=count --show-mcdc\n\nfn accept_7_conditions(bool_arr: [bool; 7]) {\n    let [a, b, c, d, e, f, g] = bool_arr;\n    if a && b && c && d && e && f && g {\n        core::hint::black_box(\"hello\");\n    }\n}\n\n#[coverage(off)]\nfn main() {\n    accept_7_conditions([false; 7]);\n    accept_7_conditions([true; 7]);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/mcdc/if.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=mcdc\n//@ llvm-cov-flags: --show-branches=count --show-mcdc\n\nfn mcdc_check_neither(a: bool, b: bool) {\n    if a && b {\n        say(\"a and b\");\n    } else {\n        say(\"not both\");\n    }\n}\n\nfn mcdc_check_a(a: bool, b: bool) {\n    if a && b {\n        say(\"a and b\");\n    } else {\n        say(\"not both\");\n    }\n}\n\nfn mcdc_check_b(a: bool, b: bool) {\n    if a && b {\n        say(\"a and b\");\n    } else {\n        say(\"not both\");\n    }\n}\n\nfn mcdc_check_both(a: bool, b: bool) {\n    if a && b {\n        say(\"a and b\");\n    } else {\n        say(\"not both\");\n    }\n}\n\nfn mcdc_check_tree_decision(a: bool, b: bool, c: bool) {\n    // This expression is intentionally written in a way\n    // where 100% branch coverage indicates 100% mcdc coverage.\n    if a && (b || c) {\n        say(\"pass\");\n    } else {\n        say(\"reject\");\n    }\n}\n\nfn mcdc_check_not_tree_decision(a: bool, b: bool, c: bool) {\n    // Contradict to `mcdc_check_tree_decision`,\n    // 100% branch coverage of this expression does not indicate 100% mcdc coverage.\n    if (a || b) && c {\n        say(\"pass\");\n    } else {\n        say(\"reject\");\n    }\n}\n\nfn mcdc_nested_if(a: bool, b: bool, c: bool) {\n    if a || b {\n        say(\"a or b\");\n        if b && c {\n            say(\"b and c\");\n        }\n    } else {\n        say(\"neither a nor b\");\n    }\n}\n\n#[coverage(off)]\nfn main() {\n    mcdc_check_neither(false, false);\n    mcdc_check_neither(false, true);\n\n    mcdc_check_a(true, true);\n    mcdc_check_a(false, true);\n\n    mcdc_check_b(true, true);\n    mcdc_check_b(true, false);\n\n    mcdc_check_both(false, true);\n    mcdc_check_both(true, true);\n    mcdc_check_both(true, false);\n\n    mcdc_check_tree_decision(false, true, true);\n    mcdc_check_tree_decision(true, true, false);\n    mcdc_check_tree_decision(true, false, 
false);\n    mcdc_check_tree_decision(true, false, true);\n\n    mcdc_check_not_tree_decision(false, true, true);\n    mcdc_check_not_tree_decision(true, true, false);\n    mcdc_check_not_tree_decision(true, false, false);\n    mcdc_check_not_tree_decision(true, false, true);\n\n    mcdc_nested_if(true, false, true);\n    mcdc_nested_if(true, true, true);\n    mcdc_nested_if(true, true, false);\n}\n\n#[coverage(off)]\nfn say(message: &str) {\n    core::hint::black_box(message);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/mcdc/inlined_expressions.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=mcdc -Copt-level=z -Cllvm-args=--inline-threshold=0\n//@ llvm-cov-flags: --show-branches=count --show-mcdc\n\n#[inline(always)]\nfn inlined_instance(a: bool, b: bool) -> bool {\n    a && b\n}\n\n#[coverage(off)]\nfn main() {\n    let _ = inlined_instance(true, false);\n    let _ = inlined_instance(false, true);\n    let _ = inlined_instance(true, true);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/mcdc/mod.rs",
    "content": "#[path = \"condition-limit.rs\"]\n#[cfg(any(feature = \"json\"))]\nmod condition_limit;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\n#[path = \"if.rs\"]\nmod if_;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod inlined_expressions;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod nested_if;\n#[cfg(any(\n    feature = \"json\",\n    feature = \"fstar\",\n    feature = \"fstar-lax\",\n    feature = \"coq\"\n))]\nmod non_control_flow;\n"
  },
  {
    "path": "rustc-coverage-tests/src/mcdc/nested_if.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=mcdc\n//@ llvm-cov-flags: --show-branches=count --show-mcdc\n\nfn nested_if_in_condition(a: bool, b: bool, c: bool) {\n    if a && if b || c { true } else { false } {\n        say(\"yes\");\n    } else {\n        say(\"no\");\n    }\n}\n\nfn doubly_nested_if_in_condition(a: bool, b: bool, c: bool, d: bool) {\n    if a && if b || if c && d { true } else { false } {\n        false\n    } else {\n        true\n    } {\n        say(\"yes\");\n    } else {\n        say(\"no\");\n    }\n}\n\nfn nested_single_condition_decision(a: bool, b: bool) {\n    // Decision with only 1 decision should not be instrumented by MCDC because\n    // branch-coverage is equivalent to MCDC coverage in this case, and we don't\n    // want to waste bitmap space for this.\n    if a && if b { false } else { true } {\n        say(\"yes\");\n    } else {\n        say(\"no\");\n    }\n}\n\nfn nested_in_then_block_in_condition(a: bool, b: bool, c: bool, d: bool, e: bool) {\n    if a && if b || c {\n        if d && e {\n            true\n        } else {\n            false\n        }\n    } else {\n        false\n    } {\n        say(\"yes\");\n    } else {\n        say(\"no\");\n    }\n}\n\n#[coverage(off)]\nfn main() {\n    nested_if_in_condition(true, false, false);\n    nested_if_in_condition(true, true, true);\n    nested_if_in_condition(true, false, true);\n    nested_if_in_condition(false, true, true);\n\n    doubly_nested_if_in_condition(true, false, false, true);\n    doubly_nested_if_in_condition(true, true, true, true);\n    doubly_nested_if_in_condition(true, false, true, true);\n    doubly_nested_if_in_condition(false, true, true, true);\n\n    nested_single_condition_decision(true, true);\n    nested_single_condition_decision(true, false);\n    nested_single_condition_decision(false, false);\n\n    nested_in_then_block_in_condition(false, false, false, false, false);\n    
nested_in_then_block_in_condition(true, false, false, false, false);\n    nested_in_then_block_in_condition(true, true, false, false, false);\n    nested_in_then_block_in_condition(true, false, true, false, false);\n    nested_in_then_block_in_condition(true, false, true, true, false);\n    nested_in_then_block_in_condition(true, false, true, false, true);\n    nested_in_then_block_in_condition(true, false, true, true, true);\n}\n\n#[coverage(off)]\nfn say(message: &str) {\n    core::hint::black_box(message);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/mcdc/non_control_flow.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n//@ compile-flags: -Zcoverage-options=mcdc\n//@ llvm-cov-flags: --show-branches=count --show-mcdc\n\n// This test ensures that boolean expressions that are not inside control flow\n// decisions are correctly instrumented.\n\nuse core::hint::black_box;\n\nfn assign_and(a: bool, b: bool) {\n    let x = a && b;\n    black_box(x);\n}\n\nfn assign_or(a: bool, b: bool) {\n    let x = a || b;\n    black_box(x);\n}\n\nfn assign_3(a: bool, b: bool, c: bool) {\n    let x = a || b && c;\n    black_box(x);\n}\n\nfn assign_3_bis(a: bool, b: bool, c: bool) {\n    let x = a && b || c;\n    black_box(x);\n}\n\nfn right_comb_tree(a: bool, b: bool, c: bool, d: bool, e: bool) {\n    let x = a && (b && (c && (d && (e))));\n    black_box(x);\n}\n\nfn foo(a: bool) -> bool {\n    black_box(a)\n}\n\nfn func_call(a: bool, b: bool) {\n    foo(a && b);\n}\n\n#[coverage(off)]\nfn main() {\n    assign_and(true, false);\n    assign_and(true, true);\n    assign_and(false, false);\n\n    assign_or(true, false);\n    assign_or(true, true);\n    assign_or(false, false);\n\n    assign_3(true, false, false);\n    assign_3(true, true, false);\n    assign_3(false, false, true);\n    assign_3(false, true, true);\n\n    assign_3_bis(true, false, false);\n    assign_3_bis(true, true, false);\n    assign_3_bis(false, false, true);\n    assign_3_bis(false, true, true);\n\n    right_comb_tree(false, false, false, true, true);\n    right_comb_tree(true, false, false, true, true);\n    right_comb_tree(true, true, true, true, true);\n\n    func_call(true, false);\n    func_call(true, true);\n    func_call(false, false);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/nested_loops.rs",
    "content": "fn main() {\n    let is_true = std::env::args().len() == 1;\n    let mut countdown = 10;\n\n    'outer: while countdown > 0 {\n        let mut a = 100;\n        let mut b = 100;\n        for _ in 0..50 {\n            if a < 30 {\n                break;\n            }\n            a -= 5;\n            b -= 5;\n            if b < 90 {\n                a -= 10;\n                if is_true {\n                    break 'outer;\n                } else {\n                    a -= 2;\n                }\n            }\n        }\n        countdown -= 1;\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/no-core.rs",
    "content": "#![feature(no_core)]\n#![no_core]\n//@ edition: 2021\n\n// Test that coverage instrumentation works for `#![no_core]` crates.\n\n// For this test, we pull in std anyway, to avoid having to set up our own\n// no-core or no-std environment. What's important is that the compiler allows\n// coverage for a crate with the `#![no_core]` annotation.\nextern crate std;\n\nfn main() {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/no_cov_crate.rs",
    "content": "#![feature(coverage_attribute)]\n// Enables `coverage(off)` on the entire crate\n//@ reference: attributes.coverage.intro\n//@ reference: attributes.coverage.nesting\n\n#[coverage(off)]\nfn do_not_add_coverage_1() {\n    println!(\"called but not covered\");\n}\n\nfn do_not_add_coverage_2() {\n    #![coverage(off)]\n    println!(\"called but not covered\");\n}\n\n#[coverage(off)]\n#[allow(dead_code)]\nfn do_not_add_coverage_not_called() {\n    println!(\"not called and not covered\");\n}\n\nfn add_coverage_1() {\n    println!(\"called and covered\");\n}\n\nfn add_coverage_2() {\n    println!(\"called and covered\");\n}\n\n#[allow(dead_code)]\nfn add_coverage_not_called() {\n    println!(\"not called but covered\");\n}\n\n// FIXME: These test-cases illustrate confusing results of nested functions.\n// See https://github.com/rust-lang/rust/issues/93319\nmod nested_fns {\n    #[coverage(off)]\n    pub fn outer_not_covered(is_true: bool) {\n        fn inner(is_true: bool) {\n            if is_true {\n                println!(\"called and covered\");\n            } else {\n                println!(\"absolutely not covered\");\n            }\n        }\n        println!(\"called but not covered\");\n        inner(is_true);\n    }\n\n    pub fn outer(is_true: bool) {\n        println!(\"called and covered\");\n        inner_not_covered(is_true);\n\n        #[coverage(off)]\n        fn inner_not_covered(is_true: bool) {\n            if is_true {\n                println!(\"called but not covered\");\n            } else {\n                println!(\"absolutely not covered\");\n            }\n        }\n    }\n\n    pub fn outer_both_covered(is_true: bool) {\n        println!(\"called and covered\");\n        inner(is_true);\n\n        fn inner(is_true: bool) {\n            if is_true {\n                println!(\"called and covered\");\n            } else {\n                println!(\"absolutely not covered\");\n            }\n        }\n    }\n}\n\nfn 
main() {\n    let is_true = std::env::args().len() == 1;\n\n    do_not_add_coverage_1();\n    do_not_add_coverage_2();\n    add_coverage_1();\n    add_coverage_2();\n\n    nested_fns::outer_not_covered(is_true);\n    nested_fns::outer(is_true);\n    nested_fns::outer_both_covered(is_true);\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/no_spans.rs",
    "content": "#![feature(coverage_attribute)]\n//@ edition: 2021\n\n// If the span extractor can't find any relevant spans for a function, the\n// refinement loop will terminate with nothing in its `prev` slot. If the\n// subsequent code tries to unwrap `prev`, it will panic.\n//\n// This scenario became more likely after #118525 started discarding spans that\n// can't be un-expanded back to within the function body.\n//\n// Regression test for \"invalid attempt to unwrap a None some_prev\", as seen\n// in issues such as #118643 and #118662.\n\n#[coverage(off)]\nfn main() {\n    affected_function()();\n}\n\nmacro_rules! macro_that_defines_a_function {\n    (fn $name:ident () $body:tt) => {\n        fn $name () -> impl Fn() $body\n    }\n}\n\nmacro_that_defines_a_function! {\n    fn affected_function() {\n        || ()\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/no_spans_if_not.rs",
    "content": "//@ edition: 2021\n\n// If the span extractor can't find any relevant spans for a function,\n// but the function contains coverage span-marker statements (e.g. inserted\n// for `if !`), coverage codegen may think that it is instrumented and\n// consequently complain that it has no spans.\n//\n// Regression test for <https://github.com/rust-lang/rust/issues/118850>,\n// \"A used function should have had coverage mapping data but did not\".\n\nfn main() {\n    affected_function();\n}\n\nmacro_rules! macro_that_defines_a_function {\n    (fn $name:ident () $body:tt) => {\n        fn $name () $body\n    }\n}\n\nmacro_that_defines_a_function! {\n    fn affected_function() {\n        if !false {\n            ()\n        } else {\n            ()\n        }\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/overflow.rs",
    "content": "#![allow(unused_assignments)]\n//@ compile-flags: -Coverflow-checks=yes\n//@ failure-status: 101\n\nfn might_overflow(to_add: u32) -> u32 {\n    if to_add > 5 {\n        println!(\"this will probably overflow\");\n    }\n    let add_to = u32::MAX - 5;\n    println!(\"does {} + {} overflow?\", add_to, to_add);\n    let result = to_add + add_to;\n    println!(\"continuing after overflow check\");\n    result\n}\n\nfn main() -> Result<(), u8> {\n    let mut countdown = 10;\n    while countdown > 0 {\n        if countdown == 1 {\n            let result = might_overflow(10);\n            println!(\"Result: {}\", result);\n        } else if countdown < 5 {\n            let result = might_overflow(1);\n            println!(\"Result: {}\", result);\n        }\n        countdown -= 1;\n    }\n    Ok(())\n}\n\n// Notes:\n//   1. Compare this program and its coverage results to those of the very similar test `assert.rs`,\n//      and similar tests `panic_unwind.rs`, abort.rs` and `try_error_result.rs`.\n//   2. This test confirms the coverage generated when a program passes or fails a\n//      compiler-generated `TerminatorKind::Assert` (based on an overflow check, in this case).\n//   3. Similar to how the coverage instrumentation handles `TerminatorKind::Call`,\n//      compiler-generated assertion failures are assumed to be a symptom of a program bug, not\n//      expected behavior. To simplify the coverage graphs and keep instrumented programs as\n//      small and fast as possible, `Assert` terminators are assumed to always succeed, and\n//      therefore are considered \"non-branching\" terminators. So, an `Assert` terminator does not\n//      get its own coverage counter.\n//   4. 
After an unhandled panic or failed Assert, coverage results may not always be intuitive.\n//      In this test, the final count for the statements after the `if` block in `might_overflow()`\n//      is 4, even though the lines after `to_add + add_to` were executed only 3 times. Depending\n//      on the MIR graph and the structure of the code, this count could have been 3 (which might\n//      have been valid for the overflowed add `+`, but should have been 4 for the lines before\n//      the overflow). The reason for this potential uncertainty is, a `CounterKind` is incremented\n//      via StatementKind::Counter at the end of the block, but (as in the case in this test),\n//      a CounterKind::Expression is always evaluated. In this case, the expression was based on\n//      a `Counter` incremented as part of the evaluation of the `if` expression, which was\n//      executed, and counted, 4 times, before reaching the overflow add.\n\n// If the program did not overflow, the coverage for `might_overflow()` would look like this:\n//\n//     4|       |fn might_overflow(to_add: u32) -> u32 {\n//     5|      4|    if to_add > 5 {\n//     6|      0|        println!(\"this will probably overflow\");\n//     7|      4|    }\n//     8|      4|    let add_to = u32::MAX - 5;\n//     9|      4|    println!(\"does {} + {} overflow?\", add_to, to_add);\n//    10|      4|    let result = to_add + add_to;\n//    11|      4|    println!(\"continuing after overflow check\");\n//    12|      4|    result\n//    13|      4|}\n"
  },
  {
    "path": "rustc-coverage-tests/src/panic_unwind.rs",
    "content": "#![allow(unused_assignments)]\n//@ failure-status: 101\n\nfn might_panic(should_panic: bool) {\n    if should_panic {\n        println!(\"panicking...\");\n        panic!(\"panics\");\n    } else {\n        println!(\"Don't Panic\");\n    }\n}\n\nfn main() -> Result<(), u8> {\n    let mut countdown = 10;\n    while countdown > 0 {\n        if countdown == 1 {\n            might_panic(true);\n        } else if countdown < 5 {\n            might_panic(false);\n        }\n        countdown -= 1;\n    }\n    Ok(())\n}\n\n// Notes:\n//   1. Compare this program and its coverage results to those of the similar tests `abort.rs` and\n//      `try_error_result.rs`.\n//   2. Since the `panic_unwind.rs` test is allowed to unwind, it is also allowed to execute the\n//      normal program exit cleanup, including writing out the current values of the coverage\n//      counters.\n"
  },
  {
    "path": "rustc-coverage-tests/src/partial_eq.rs",
    "content": "// This test confirms an earlier problem was resolved, supporting the MIR graph generated by the\n// structure of this test.\n\n#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\npub struct Version {\n    major: usize,\n    minor: usize,\n    patch: usize,\n}\n\nimpl Version {\n    pub fn new(major: usize, minor: usize, patch: usize) -> Self {\n        Self {\n            major,\n            minor,\n            patch,\n        }\n    }\n}\n\nfn main() {\n    let version_3_2_1 = Version::new(3, 2, 1);\n    let version_3_3_0 = Version::new(3, 3, 0);\n\n    println!(\n        \"{:?} < {:?} = {}\",\n        version_3_2_1,\n        version_3_3_0,\n        version_3_2_1 < version_3_3_0, //\n    );\n}\n\n/*\n\nThis test verifies a bug was fixed that otherwise generated this error:\n\nthread 'rustc' panicked at 'No counters provided the source_hash for function:\n    Instance {\n        def: Item(WithOptConstParam {\n            did: DefId(0:101 ~ autocfg[c44a]::version::{impl#2}::partial_cmp),\n            const_param_did: None\n        }),\n        args: []\n    }'\nThe `PartialOrd` derived by `Version` happened to generate a MIR that generated coverage\nwithout a code region associated with any `Counter`. Code regions were associated with at least\none expression, which is allowed, but the `function_source_hash` was only passed to the codegen\n(coverage mapgen) phase from a `Counter`s code region. A new method was added to pass the\n`function_source_hash` without a code region, if necessary.\n\n*/\n"
  },
  {
    "path": "rustc-coverage-tests/src/simple_loop.rs",
    "content": "#![allow(unused_assignments)]\n\n#[rustfmt::skip]\nfn main() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n\n    let mut countdown = 0;\n\n    if\n        is_true\n    {\n        countdown\n        =\n            10\n        ;\n    }\n\n    loop\n    {\n        if\n            countdown\n                ==\n            0\n        {\n            break\n            ;\n        }\n        countdown\n        -=\n        1\n        ;\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/simple_match.rs",
    "content": "#![allow(unused_assignments, unused_variables)]\n\n#[rustfmt::skip]\nfn main() {\n    // Initialize test constants in a way that cannot be determined at compile time, to ensure\n    // rustc and LLVM cannot optimize out statements (or coverage counters) downstream from\n    // dependent conditions.\n    let is_true = std::env::args().len() == 1;\n\n    let mut countdown = 1;\n    if is_true {\n        countdown = 0;\n    }\n\n    for\n        _\n    in\n        0..2\n    {\n        let z\n        ;\n        match\n            countdown\n        {\n            x\n            if\n                x\n                    <\n                1\n            =>\n            {\n                z = countdown\n                ;\n                let y = countdown\n                ;\n                countdown = 10\n                ;\n            }\n            _\n            =>\n            {}\n        }\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/sort_groups.rs",
    "content": "//@ edition: 2021\n\n// Demonstrate that `sort_subviews.py` can sort instantiation groups into a\n// predictable order, while preserving their heterogeneous contents.\n\nfn main() {\n    let cond = std::env::args().len() > 1;\n    generic_fn::<()>(cond);\n    generic_fn::<&'static str>(!cond);\n    if std::hint::black_box(false) {\n        generic_fn::<char>(cond);\n    }\n    generic_fn::<i32>(cond);\n    other_fn();\n}\n\nfn generic_fn<T>(cond: bool) {\n    if cond {\n        println!(\"{}\", std::any::type_name::<T>());\n    }\n}\n\nfn other_fn() {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/test_harness.rs",
    "content": "// Verify that the entry point injected by the test harness doesn't cause\n// weird artifacts in the coverage report (e.g. issue #10749).\n\n//@ compile-flags: --test\n\n#[allow(dead_code)]\nfn unused() {}\n\n#[test]\nfn my_test() {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/tight_inf_loop.rs",
    "content": "fn main() {\n    if false {\n        loop {}\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/trivial.rs",
    "content": "//@ edition: 2021\n\nfn main() {}\n"
  },
  {
    "path": "rustc-coverage-tests/src/try_error_result.rs",
    "content": "#![allow(unused_assignments)]\n#![cfg_attr(rustfmt, rustfmt::skip)]\n//@ failure-status: 1\n\nfn call(return_error: bool) -> Result<(), ()> {\n    if return_error {\n        Err(())\n    } else {\n        Ok(())\n    }\n}\n\nfn test1() -> Result<(), ()> {\n    let mut\n        countdown = 10\n    ;\n    for\n        _\n    in\n        0..10\n    {\n        countdown\n            -= 1\n        ;\n        if\n            countdown < 5\n        {\n            call(/*return_error=*/ true)?;\n            call(/*return_error=*/ false)?;\n        }\n        else\n        {\n            call(/*return_error=*/ false)?;\n        }\n    }\n    Ok(())\n}\n\nstruct Thing1;\nimpl Thing1 {\n    fn get_thing_2(&self, return_error: bool) -> Result<Thing2, ()> {\n        if return_error {\n            Err(())\n        } else {\n            Ok(Thing2 {})\n        }\n    }\n}\n\nstruct Thing2;\nimpl Thing2 {\n    fn call(&self, return_error: bool) -> Result<u32, ()> {\n        if return_error {\n            Err(())\n        } else {\n            Ok(57)\n        }\n    }\n}\n\nfn test2() -> Result<(), ()> {\n    let thing1 = Thing1{};\n    let mut\n        countdown = 10\n    ;\n    for\n        _\n    in\n        0..10\n    {\n        countdown\n            -= 1\n        ;\n        if\n            countdown < 5\n        {\n            thing1.get_thing_2(/*err=*/ false)?.call(/*err=*/ true).expect_err(\"call should fail\");\n            thing1\n                .\n                get_thing_2(/*return_error=*/ false)\n                ?\n                .\n                call(/*return_error=*/ true)\n                .\n                expect_err(\n                    \"call should fail\"\n                );\n            let val = thing1.get_thing_2(/*return_error=*/ true)?.call(/*return_error=*/ true)?;\n            assert_eq!(val, 57);\n            let val = thing1.get_thing_2(/*return_error=*/ true)?.call(/*return_error=*/ false)?;\n            assert_eq!(val, 57);\n    
    }\n        else\n        {\n            let val = thing1.get_thing_2(/*return_error=*/ false)?.call(/*return_error=*/ false)?;\n            assert_eq!(val, 57);\n            let val = thing1\n                .get_thing_2(/*return_error=*/ false)?\n                .call(/*return_error=*/ false)?;\n            assert_eq!(val, 57);\n            let val = thing1\n                .get_thing_2(/*return_error=*/ false)\n                ?\n                .call(/*return_error=*/ false)\n                ?\n                ;\n            assert_eq!(val, 57);\n        }\n    }\n    Ok(())\n}\n\nfn main() -> Result<(), ()> {\n    test1().expect_err(\"test1 should fail\");\n    test2()\n    ?\n    ;\n    Ok(())\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/unicode.rs",
    "content": "//@ edition: 2021\n//@ ignore-windows - we can't force `llvm-cov` to use ANSI escapes on Windows\n//@ llvm-cov-flags: --use-color\n\n// Check that column numbers are denoted in bytes, so that they don't cause\n// `llvm-cov` to fail or emit malformed output.\n//\n// Note that when `llvm-cov` prints ^ arrows on a subsequent line, it simply\n// inserts one space character for each \"column\", with no understanding of\n// Unicode or character widths. So those arrows will tend to be misaligned\n// for non-ASCII source code, regardless of whether column numbers are code\n// points or bytes.\n\nfn main() {\n    for _İ in 'А'..='Я' { /* Я */ }\n\n    if 申し訳ございません() && 申し訳ございません() {\n        println!(\"true\");\n    }\n\n    サビ();\n}\n\nfn 申し訳ございません() -> bool {\n    std::hint::black_box(false)\n}\n\nmacro_rules! macro_that_defines_a_function {\n    (fn $名:ident () $体:tt) => {\n        fn $名 () $体 fn 他 () {}\n    }\n}\n\nmacro_that_defines_a_function! {\n    fn サビ() {}\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/unreachable.rs",
    "content": "#![feature(core_intrinsics, coverage_attribute)]\n//@ edition: 2021\n\n// <https://github.com/rust-lang/rust/issues/116171>\n// If we instrument a function for coverage, but all of its counter-increment\n// statements are removed by MIR optimizations, LLVM will think it isn't\n// instrumented and it will disappear from coverage maps and coverage reports.\n// Most MIR opts won't cause this because they tend not to remove statements\n// from bb0, but `UnreachablePropagation` can do so if it sees that bb0 ends\n// with `TerminatorKind::Unreachable`.\n\nuse std::hint::{black_box, unreachable_unchecked};\n\nstatic UNREACHABLE_CLOSURE: fn() = || unsafe { unreachable_unchecked() };\n\nfn unreachable_function() {\n    unsafe { unreachable_unchecked() }\n}\n\n// Use an intrinsic to more reliably trigger unreachable-propagation.\nfn unreachable_intrinsic() {\n    unsafe { std::intrinsics::unreachable() }\n}\n\n#[coverage(off)]\nfn main() {\n    if black_box(false) {\n        UNREACHABLE_CLOSURE();\n    }\n    if black_box(false) {\n        unreachable_function();\n    }\n    if black_box(false) {\n        unreachable_intrinsic();\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/unused.rs",
    "content": "#![allow(dead_code, unused_assignments, unused_must_use, unused_variables)]\n\nfn foo<T>(x: T) {\n    let mut i = 0;\n    while i < 10 {\n        i != 0 || i != 0;\n        i += 1;\n    }\n}\n\nfn unused_template_func<T>(x: T) {\n    let mut i = 0;\n    while i < 10 {\n        i != 0 || i != 0;\n        i += 1;\n    }\n}\n\nfn unused_func(mut a: u32) {\n    if a != 0 {\n        a += 1;\n    }\n}\n\nfn unused_func2(mut a: u32) {\n    if a != 0 {\n        a += 1;\n    }\n}\n\nfn unused_func3(mut a: u32) {\n    if a != 0 {\n        a += 1;\n    }\n}\n\nfn main() -> Result<(), u8> {\n    foo::<u32>(0);\n    foo::<f32>(0.0);\n    Ok(())\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/unused_mod.rs",
    "content": "#[path = \"auxiliary/unused_mod_helper.rs\"]\nmod unused_module;\n\nfn main() {\n    println!(\"hello world!\");\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/uses_crate.rs",
    "content": "// This test was failing on Linux for a while due to #110393 somehow making\n// the unused functions not instrumented, but it seems to be fine now.\n\n// Validates coverage now works with optimizations\n//@ compile-flags: -C opt-level=3\n\n#![allow(unused_assignments, unused_variables)]\n\n//@ aux-build:used_crate.rs\nextern crate used_crate;\n\nfn main() {\n    used_crate::used_function();\n    let some_vec = vec![1, 2, 3, 4];\n    used_crate::used_only_from_bin_crate_generic_function(&some_vec);\n    used_crate::used_only_from_bin_crate_generic_function(\"used from bin uses_crate.rs\");\n    used_crate::used_from_bin_crate_and_lib_crate_generic_function(some_vec);\n    used_crate::used_with_same_type_from_bin_crate_and_lib_crate_generic_function(\"interesting?\");\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/uses_inline_crate.rs",
    "content": "// This test was failing on Linux for a while due to #110393 somehow making\n// the unused functions not instrumented, but it seems to be fine now.\n\n// Validates coverage now works with optimizations\n//@ compile-flags: -C opt-level=3\n\n#![allow(unused_assignments, unused_variables)]\n\n//@ aux-build:used_inline_crate.rs\nextern crate used_inline_crate;\n\nfn main() {\n    used_inline_crate::used_function();\n    used_inline_crate::used_inline_function();\n    let some_vec = vec![1, 2, 3, 4];\n    used_inline_crate::used_only_from_bin_crate_generic_function(&some_vec);\n    used_inline_crate::used_only_from_bin_crate_generic_function(\"used from bin uses_crate.rs\");\n    used_inline_crate::used_from_bin_crate_and_lib_crate_generic_function(some_vec);\n    used_inline_crate::used_with_same_type_from_bin_crate_and_lib_crate_generic_function(\n        \"interesting?\",\n    );\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/while.rs",
    "content": "fn main() {\n    let num = 9;\n    while num >= 10 {\n        // loop body\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/src/while_early_ret.rs",
    "content": "#![allow(unused_assignments)]\n//@ failure-status: 1\n\n#[rustfmt::skip]\nfn main() -> Result<(), u8> {\n    let mut countdown = 10;\n    while\n        countdown\n            >\n        0\n    {\n        if\n            countdown\n                <\n            5\n        {\n            return\n                if\n                    countdown\n                        >\n                    8\n                {\n                    Ok(())\n                }\n                else\n                {\n                    Err(1)\n                }\n                ;\n        }\n        countdown\n            -=\n        1\n        ;\n    }\n    Ok(())\n}\n\n// ISSUE(77553): Originally, this test had `Err(1)` on line 22 (instead of `Ok(())`) and\n// `std::process::exit(2)` on line 26 (instead of `Err(1)`); and this worked as expected on Linux\n// and MacOS. But on Windows (MSVC, at least), the call to `std::process::exit()` exits the program\n// without saving the InstrProf coverage counters. The use of `std::process:exit()` is not critical\n// to the coverage test for early returns, but this is a limitation that should be fixed.\n"
  },
  {
    "path": "rustc-coverage-tests/src/yield.rs",
    "content": "#![feature(coroutines, coroutine_trait, stmt_expr_attributes)]\n#![allow(unused_assignments)]\n\nuse std::ops::{Coroutine, CoroutineState};\nuse std::pin::Pin;\n\nfn main() {\n    let mut coroutine = #[coroutine]\n    || {\n        yield 1;\n        return \"foo\";\n    };\n\n    match Pin::new(&mut coroutine).resume(()) {\n        CoroutineState::Yielded(1) => {}\n        _ => panic!(\"unexpected value from resume\"),\n    }\n    match Pin::new(&mut coroutine).resume(()) {\n        CoroutineState::Complete(\"foo\") => {}\n        _ => panic!(\"unexpected value from resume\"),\n    }\n\n    let mut coroutine = #[coroutine]\n    || {\n        yield 1;\n        yield 2;\n        yield 3;\n        return \"foo\";\n    };\n\n    match Pin::new(&mut coroutine).resume(()) {\n        CoroutineState::Yielded(1) => {}\n        _ => panic!(\"unexpected value from resume\"),\n    }\n    match Pin::new(&mut coroutine).resume(()) {\n        CoroutineState::Yielded(2) => {}\n        _ => panic!(\"unexpected value from resume\"),\n    }\n}\n"
  },
  {
    "path": "rustc-coverage-tests/test_config.yaml",
    "content": "tests:\n  attr::impl_:\n    - json\n    - coq\n    - fstar\n    - fstar-lax\n    - lean\n    - lean-tc\n  attr::module:\n    - json\n    - coq\n    - lean\n    - lean-tc\n    - fstar\n    - fstar-lax\n  attr::off_on_sandwich:\n    - json\n    - coq\n    - lean\n    - lean-tc\n    - fstar\n    - fstar-lax\n  attr::trait_impl_inherit:\n    - json\n    - lean\n  auxiliary::discard_all_helper:\n    - json\n    - coq\n    - lean\n    - lean-tc\n    - fstar\n    - fstar-lax\n  auxiliary::used_crate:\n    - json\n    - coq\n    - fstar\n    - lean\n  auxiliary::used_inline_crate:\n    - json\n    - coq\n    - fstar\n    - lean\n  condition::conditions:\n    - json\n    - coq\n    - fstar\n    - fstar-lax\n    - lean\n  mcdc::condition_limit:\n    - json\n    - lean\n  mcdc::if_:\n    - json\n    - coq\n    - fstar\n    - fstar-lax\n    - lean\n  mcdc::inlined_expressions:\n    - json\n    - coq\n    - fstar\n    - fstar-lax\n    - lean\n    - lean-tc\n  mcdc::nested_if:\n    - json\n    - coq\n    - fstar\n    - fstar-lax\n  mcdc::non_control_flow:\n    - json\n    - coq\n    - fstar\n    - fstar-lax\n    - lean\n  abort:\n    - json\n    - lean\n    - fstar\n    - fstar-lax\n  assert:\n    - json\n    - lean\n    - fstar\n    - fstar-lax\n  assert_ne:\n    - json\n    - coq\n    - fstar\n    - fstar-lax\n  assert_not:\n    - json\n    - coq\n    - lean\n    - fstar\n    - fstar-lax\n  closure_macro:\n    - json\n    - coq\n    - fstar\n    - lean\n  closure_unit_return:\n    - json\n    - coq\n    - lean\n    - fstar\n    - fstar-lax\n  color:\n    - json\n    - lean\n    - coq\n    - fstar\n    - fstar-lax\n  conditions:\n    - json\n    - coq\n    - fstar\n    - lean\n  continue_:\n    - json\n    - fstar\n  dead_code:\n    - json\n    - coq\n    - fstar\n    - lean\n  drop_trait:\n    - json\n    - coq\n    - fstar\n    - fstar-lax\n    - lean\n  fn_sig_into_try:\n    - json\n    - coq\n    - lean\n    - lean-tc\n    - fstar\n    - fstar-lax\n  
generics:\n    - json\n    - coq\n    - fstar\n  if_:\n    - json\n    - coq\n    - fstar\n    - lean\n  if_else:\n    - json\n    - coq\n    - fstar\n    - lean\n  if_not:\n    - json\n    - lean\n    - coq\n    - fstar\n    - fstar-lax\n  ignore_map:\n    - json\n    - coq\n    - lean\n    - lean-tc\n    - fstar\n    - fstar-lax\n  ignore_run:\n    - json\n    - coq\n    - lean\n    - lean-tc\n    - fstar\n    - fstar-lax\n  inline_dead:\n    - json\n    - coq\n    - fstar\n  inline:\n    - json\n    - lean\n    - coq\n    - fstar\n  inner_items:\n    - json\n    - lean\n  issue_83601:\n    - json\n    - coq\n    - fstar\n    - fstar-lax\n    - lean\n  lazy_boolean:\n    - json\n    - coq\n    - fstar\n    - lean\n  let_else_loop:\n    - json\n  long_and_wide:\n    - json\n    - lean\n    - lean-tc\n    - fstar\n    - fstar-lax\n    - coq\n\n  loop_break:\n    - json\n    - lean\n\n  loop_break_value:\n    - json\n    - lean\n\n  loops_branches:\n    - json\n    - fstar\n    - lean\n\n  macro_in_closure:\n    - json\n    - lean\n    - fstar\n    - fstar-lax\n    - coq\n\n  match_or_pattern:\n    - json\n    - fstar\n    - coq\n\n  nested_loops:\n    - json\n    - fstar\n    - lean\n\n  no_cov_crate:\n    - json\n    - lean\n    - fstar\n    - coq\n\n  no_spans:\n    - json\n    - lean\n    - fstar\n    - coq\n\n  no_spans_if_not:\n    - json\n    - lean\n    - fstar\n    - fstar-lax\n    - coq\n\n  overflow:\n    - json\n    - fstar\n    - fstar-lax\n    - lean\n\n  panic_unwind:\n    - json\n    - lean\n    - fstar\n    - fstar-lax\n\n  partial_eq:\n    - json\n    - fstar\n    - coq\n    - fstar-lax\n    - lean\n\n  simple_loop:\n    - json\n    - lean\n\n  simple_match:\n    - json\n    - lean\n\n  sort_groups:\n    - json\n    - lean\n    - fstar\n    - coq\n\n  test_harness:\n    - json\n    - lean\n    - lean-tc\n    - fstar\n    - fstar-lax\n    - coq\n\n  tight_inf_loop:\n    - json\n    - lean\n\n  trivial:\n    - json\n    - lean\n    - lean-tc\n    - 
fstar\n    - fstar-lax\n    - coq\n\n  try_error_result:\n    - json\n    - lean\n    - fstar\n    - coq\n\n  unicode:\n    - json\n    - coq\n    - fstar\n\n  unused:\n    - json\n    - fstar\n\n  unused_mod:\n    - json\n    - lean\n    - fstar\n    - fstar-lax\n    - coq\n\n  while_:\n    - json\n    - lean\n    - fstar\n    - fstar-lax\n\n  while_early_ret:\n    - json\n    - fstar\n    - fstar-lax\n"
  },
  {
    "path": "rustc-coverage-tests/update-test-sources.sh",
    "content": "#!/bin/bash\n\n# Get the necessary part of the rust repo\ngit clone --depth 1 --filter=blob:none --no-checkout https://github.com/rust-lang/rust.git\ncd rust\ngit sparse-checkout init --cone\ngit checkout master\ngit sparse-checkout set tests/coverage\n\n\n# Copy the rust files\ncd ..\nfind rust/tests/coverage -type f -name \"*.rs\" -exec bash -c '\n  for file; do\n    dest=\"src/$(dirname \"$file\" | sed \"s|rust/tests/coverage||\")\"\n    mkdir -p \"$dest\"\n    cp -f \"$file\" \"$dest\"\n  done\n' bash {} +\n\n# Cleanup\ncargo fmt\nrm -rf rust\n"
  },
  {
    "path": "rustfmt.toml",
    "content": "style_edition = \"2024\"\nedition = \"2024\"\n"
  },
  {
    "path": "setup.sh",
    "content": "#!/usr/bin/env bash\n\nset -eu\n\nSCRIPTPATH=\"$( cd -- \"$(dirname \"$0\")\" >/dev/null 2>&1 ; pwd -P )\"\n\nopam_jobs=4\nCLEANUP_WORKSPACE=on\n\n# Parse command line arguments.\nall_args=(\"$@\")\nwhile [ $# -gt 0 ]; do\n    case \"$1\" in\n    -j)\n        opam_jobs=$2\n        shift\n        ;;\n    --no-cleanup)\n        CLEANUP_WORKSPACE=off\n        ;;\n    --help)\n        echo \"hax setup script\"\n        echo \"\"\n        echo \"Usage: $0 [OPTIONS]\"\n        echo \"\"\n        echo \"Options:\"\n        echo ' -j <JOBS>     The number of opam jobs to run in parallel'\n        echo ' --no-cleanup  Disables the default behavior that runs `cargo clean` and `opam clean`'\n        exit\n        ;;\n    esac\n    shift\ndone\n\n# Cleanup the cargo and dune workspace, to make sure we are in a clean\n# state\ncleanup_workspace() {\n    cargo clean\n    (\n        cd engine\n        opam clean\n    )\n}\n\n# Warns if we're building in a dirty checkout of hax: while hacking on\n# hax, we should really be using `just build`.\nwarn_if_dirty() {\n    (\n        cd \"$SCRIPTPATH\"\n        if ! git diff-index --quiet HEAD -- >& /dev/null; then\n            printf '\\e[33mWarning: This is a dirty checkout of hax!\\n         If you are hacking on hax, please use the \\e[1m`./.utils/rebuild.sh`\\e[0m\\e[33m script.\\e[0m\\n\\n'\n        fi\n    )\n}\n\n# Ensures a given binary is available in PATH\nensure_binary_available() {\n    command -v \"$1\" >/dev/null 2>&1 || {\n        printf '\\e[31mError: binary \\e[1m%s\\e[0m\\e[31m was not found.\\e[0m\\n' \"$1\"\n        printf '\\e[37m(Did you look at \\e[1mManual installation\\e[0m\\e[37m in \\e[1mREADME.md\\e[0m\\e[37m?)\\e[0m.\\n'\n        exit 1\n    }\n}\n\nNODE_VERSION_MIN_MAJOR=17\nensure_node_is_recent_enough() {\n    function strip_first_char () {\n        cut -c2-\n    }\n    function get_major () {\n        cut -d'.' 
-f1\n    }\n    VERSION=$(node --version)\n    MAJOR=$(echo \"$VERSION\" | strip_first_char | get_major)\n    if [[ \"$MAJOR\" -lt \"$NODE_VERSION_MIN_MAJOR\" ]]; then\n        printf '\\e[31mError: \\e[1m%s\\e[0m\\e[31m appears to be too old.\\e[0m\\n' \"NodeJS\"\n        printf '\\e[37m(the minimal version required is \\e[1m%s\\e[0m\\e[37m, yours is \\e[1m%s\\e[0m\\e[37m)\\e[0m.\\n' \"v${NODE_VERSION_MIN_MAJOR}.*.*\" \"$VERSION\"\n        exit 1\n    fi\n}\n\n# Installs the Rust CLI & frontend, providing `cargo-hax` and `driver-hax`\ninstall_rust_binaries() {\n    for i in driver subcommands ../engine/names/extract ../rust-engine; do\n        (\n            set -x\n            cargo install --locked --force --path \"cli/$i\"\n        )\n    done\n}\n\n# Provides the `hax-engine` binary\ninstall_ocaml_engine() {\n    # Fixes out of memory issues (https://github.com/hacspec/hax/issues/197)\n    {\n        # Limit the number of thread spawned by opam\n        export OPAMJOBS=$opam_jobs\n        # Make the garbadge collector of OCaml more agressive (see\n        # https://discuss.ocaml.org/t/how-to-limit-the-amount-of-memory-the-ocaml-compiler-is-allowed-to-use/797)\n        export OCAMLRUNPARAM=\"o=20\"\n    }\n    # Make opam show logs when an error occurs\n    export OPAMERRLOGLEN=0\n    # Make opam ignore system dependencies (it doesn't handle properly certain situations)\n    export OPAMASSUMEDEPEXTS=1\n    (\n        set -x\n        opam uninstall hax-engine || true\n        # Lift the soft stack limit for ocamlopt: large preprocessed\n        # files (e.g. `lib/types.pp.ml`) overflow the default stack on\n        # recent GitHub Actions runner images. 
macOS rejects\n        # `unlimited`, so try `hard` first.\n        ulimit -s hard 2>/dev/null || ulimit -s unlimited 2>/dev/null || true\n        opam install --yes ./engine\n    )\n}\n\nwarn_if_dirty\n\nfor binary in opam node rustup jq; do\n    ensure_binary_available $binary\ndone\nensure_node_is_recent_enough\n\n# Make sure the correct rust toolchain is installed\nrustup show active-toolchain || rustup toolchain install \n\nif [ \"$CLEANUP_WORKSPACE\" = \"on\" ]; then\n    cleanup_workspace\nfi\n\ninstall_rust_binaries\ninstall_ocaml_engine\n"
  },
  {
    "path": "test-harness/.gitignore",
    "content": "*.snap.new"
  },
  {
    "path": "test-harness/Cargo.toml",
    "content": "[package]\nname = \"hax-test-harness\"\nversion.workspace = true\nauthors.workspace = true\nlicense.workspace = true\nhomepage.workspace = true\nedition.workspace = true\nrepository.workspace = true\nreadme.workspace = true\n\n[[test]]\nname = \"toolchain\"\npath = \"src/harness.rs\"\nharness = false\ntest = false\n\n[dev-dependencies]\nlibtest-mimic = \"0.6\"\ncargo_metadata.workspace = true\nenum-iterator = \"1.4\"\nserde_json = \"1.0\"\nlazy_static = \"1.4\"\nassert_cmd = \"2.0\"\ninsta = {version = \"1.29.0\", features = [\"filters\", \"toml\"]}\nserde = { version = \"1.0\", features = [\"derive\"] }\nregex = \"1\"\nhax-types.workspace = true\n\n[package.metadata.release]\nrelease = false\n"
  },
  {
    "path": "test-harness/README.md",
    "content": "# Tests the whole toolchain\n\nThis crate defines a custom test harness[^1][^2] that scans for packages\nin the Cargo workspace `../tests/Cargo.toml`.\n\nEach package in that workspace should define a sequence of tests to be\nrun in the `package.metadata.hax-tests` dictionary of its\n`Cargo.toml` manifest.\n\nNote this cargo test is disabled by default, since it requires both\nthe Cargo and Dune package to be built. To run this test, please use\nthe command `cargo test --test toolchain`.\n\n## Format for `package.metadata.hax-tests`\n\n`package.metadata.hax-tests` is a map from a target (e.g. `into\nfstar` or `lint hacspec`) to a **test specification** (see below).\n\n`package.metadata.hax-tests` is expected to be a **dictionary** with\nthe following optional fields:\n\n- `lint`, a map from a **linter name** to a **test specification**.\n- `into`, a map from a **backend name** to a **test specification**.\n\nNote that instead of linter or backend names, conjunction are allowed,\nfor instance `fstar+coq`.\n\n### Test specifications\n\nA **test specification** is a dictionary with the following fields:\n\n- <code><b>positive</b>: bool <i>⟨true⟩</i></code>: is the test positive (the exit code of the `cargo hax` command is `0`) or negative (the exit code is non-null)?\n- <code><b>snapshots</b></code>: should we enforce the stability of the output of the `cargo hax` command?\n  - <code>snapshots.<b>stdout</b>: bool <i>⟨true⟩</i></code>\n  - <code>snapshots.<b>stderr</b>: bool <i>⟨true⟩</i></code>  \n    **Note:** this field can also be set to the following strings: `stdout`, `stderr`, `both` or `none`.\n- <code><b>optional</b>: bool <i>⟨false⟩</i></code>: is the test optional? 
(useful for slow tests for instance)\n- <code><b>broken</b>: bool <i>⟨false⟩</i></code>: is this test broken because of some feature not being implemented?\n- <code><b>issue_id</b>: u64 <i>⟨null⟩</i></code>: when the test has a companion issue on GitHub (closed or not)\n\n### Linter names\n\nThe available linters can be listed by running `cargo hax lint --help`.\n\n### Backend names\n\nThe available backends can be listed by running `cargo hax into --help`.\n\n## The `insta` tool and library\n\nThose tests are written using the [`insta`\nlibrary](https://insta.rs/). This allows us to enforce the stability\nof `stdout` and `stderr` for negative tests. In the future, we will\nalso ensure the files produced by the different backends remains the\nsame in positive tests of extraction.\n\nWhen some `stderr` changes, one can review (by interactively accepting\nor rejecting changes) changes using the [`cargo-insta`\nsubcommand](https://insta.rs/docs/cli/).\n\n[^1]: https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-harness-field\n[^2]: https://nexte.st/book/custom-test-harnesses.html\n\n## Miscelaneous\n - If the environment variable `DUNEJOBS` is set, it will set the `-j`\n   flag when `dune build`ing, controlling the maximum number of jobs\n   `dune build` will run in parallel.\n"
  },
  {
    "path": "test-harness/src/command_hax_ext.rs",
    "content": "use lazy_static::{__Deref, lazy_static};\nuse std::{ffi::OsStr, process::Command};\n\npub trait CommandHaxExt {\n    fn hax<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(args: I) -> Self;\n}\n\n/// Computes a list of arguments that setup the number of parallel\n/// jobs for dune accordingly to environment variable `DUNEJOBS`.\nfn dune_jobs_args() -> Vec<String> {\n    if let Ok(jobs) = std::env::var(\"DUNEJOBS\") {\n        vec![\"-j\".into(), jobs]\n    } else {\n        vec![]\n    }\n}\n\nimpl CommandHaxExt for Command {\n    fn hax<I: IntoIterator<Item = S>, S: AsRef<OsStr>>(args: I) -> Command {\n        use assert_cmd::cargo::cargo_bin;\n        use std::path::PathBuf;\n        struct Paths {\n            engine: PathBuf,\n            rust_engine: PathBuf,\n            cargo_hax: PathBuf,\n        }\n        const CARGO_HAX: &str = \"cargo-hax\";\n        const HAX_RUST_ENGINE: &str = \"hax-rust-engine\";\n        lazy_static! {\n            static ref PATHS: Option<Paths> = {\n                if let \"yes\" | \"y\" | \"true\" | \"1\" = std::env::var(\"CARGO_TESTS_ASSUME_BUILT\").unwrap_or(\"\".into()).to_lowercase().as_str() {\n                    return None;\n                }\n                let root = std::env::current_dir().unwrap();\n                let root = root.parent().unwrap();\n                let engine_dir = root.join(\"engine\");\n                // Make sure binaries are built. 
Note this doesn't\n                // include `hax-engine-names-extract`: its build\n                // script requires the driver and CLI of `hax` to be\n                // available.\n                assert!(Command::new(\"cargo\")\n                            .args(&[\"build\", \"--bins\"])\n                            .current_dir(&root)\n                            .status()\n                            .unwrap()\n                            .success());\n                let cargo_hax = cargo_bin(CARGO_HAX);\n                // Now the driver & CLI are installed, call `cargo\n                // build` injecting their paths\n                assert!(Command::new(\"cargo\")\n                        .args(&[\"build\", \"--workspace\", \"--bin\", \"hax-engine-names-extract\"])\n                        .env(\"HAX_CARGO_COMMAND_PATH\", &cargo_hax)\n                        .current_dir(&root)\n                        .status()\n                        .unwrap()\n                        .success());\n                assert!(Command::new(\"cargo\")\n                        .args(&[\"build\"])\n                        .current_dir(&root.join(\"rust-engine\"))\n                        .status()\n                        .unwrap()\n                        .success());\n                assert!(Command::new(\"dune\")\n                        .args(&[\"build\"])\n                        .args(dune_jobs_args())\n                        .env(\"HAX_JSON_SCHEMA_EXPORTER_BINARY\", cargo_bin(\"hax-export-json-schemas\"))\n                        .env(\"HAX_ENGINE_NAMES_EXTRACT_BINARY\", cargo_bin(\"hax-engine-names-extract\"))\n                        .current_dir(engine_dir.clone())\n                        .status()\n                        .unwrap()\n                        .success());\n                let rust_engine = cargo_bin(HAX_RUST_ENGINE);\n                Some(Paths {\n                    cargo_hax,\n                    rust_engine,\n                    engine: 
engine_dir.join(\"_build/install/default/bin/hax-engine\"),\n                })\n            };\n        }\n        let mut cmd = match PATHS.deref() {\n            Some(paths) => {\n                let mut cmd = Command::new(paths.cargo_hax.clone());\n                cmd.env(\"HAX_ENGINE_BINARY\", paths.engine.clone());\n                cmd.env(\"HAX_RUST_ENGINE_BINARY\", paths.rust_engine.clone());\n                cmd\n            }\n            None => Command::new(CARGO_HAX),\n        };\n        cmd.args(args);\n        // As documented in\n        // https://doc.rust-lang.org/cargo/reference/environment-variables.html#dynamic-library-paths,\n        // [cargo run] (and thus also [cargo test]) sets dynamic\n        // library paths, which causes some issues with dependencies\n        // when compiling without rustup\n        for env in [\"DYLD_FALLBACK_LIBRARY_PATH\", \"LD_LIBRARY_PATH\"] {\n            cmd.env_remove(env);\n        }\n        cmd\n    }\n}\n"
  },
  {
    "path": "test-harness/src/harness.rs",
    "content": "#![feature(rustc_private)]\nmod command_hax_ext;\nuse command_hax_ext::*;\nuse serde_json::{Map, Value};\nuse std::process::{Command, Stdio};\n\n#[derive(Clone, Debug, serde::Serialize)]\npub enum TestKind {\n    Translate { backend: String },\n}\n\nimpl TestKind {\n    fn as_name(&self) -> String {\n        (match self {\n            TestKind::Translate { backend } => [\"into\".to_string(), backend.clone()],\n        })\n        .join(\"-\")\n    }\n}\n\n#[allow(dead_code)]\nfn bool_true() -> bool {\n    true\n}\n\n#[derive(Clone, Debug, serde::Serialize)]\npub struct TestSnapshot {\n    #[serde(default = \"bool_true\")]\n    pub stderr: bool,\n    #[serde(default = \"bool_true\")]\n    pub stdout: bool,\n}\n\n#[derive(Clone, Debug, serde::Serialize)]\npub struct TestSpec {\n    /// is the test optional? (useful for slow tests for instance)\n    pub optional: bool,\n    /// a broken test a test that should succeed (or fail) but does\n    /// not dues to a bug to be fixed (see field [issue_id] below)\n    pub broken: bool,\n    /// Github issue ID\n    pub issue_id: Option<u64>,\n    /// Is that a positive or a negative test?\n    pub positive: bool,\n    pub snapshot: TestSnapshot,\n    pub include_flag: Option<String>,\n    pub backend_options: Option<Vec<String>>,\n}\n\nimpl From<Value> for TestSpec {\n    /// Parse a JSON value into a TestSpec\n    fn from(o: Value) -> Self {\n        fn as_opt_bool(v: &Value, def: bool) -> Option<bool> {\n            if v.is_null() {\n                return Some(def);\n            }\n            v.as_bool()\n        }\n        fn as_bool(o: &Value, k: &str, def: bool) -> bool {\n            let v = &o[k];\n            as_opt_bool(v, def)\n                .expect(format!(\"[{}] was expected to be a boolean, got {}\", k, v).as_str())\n        }\n        let snapshot = &o[\"snapshot\"];\n        TestSpec {\n            optional: as_bool(&o, \"optional\", false),\n            broken: as_bool(&o, \"broken\", 
false),\n            positive: as_bool(&o, \"positive\", true),\n            issue_id: o[\"positive\"].as_u64(),\n            include_flag: o[\"include-flag\"].as_str().map(|s| s.into()),\n            backend_options: serde_json::from_value(o[\"backend-options\"].clone()).unwrap(),\n            snapshot: as_opt_bool(snapshot, true)\n                .map(|b| TestSnapshot {\n                    stderr: b,\n                    stdout: b,\n                })\n                .or_else(|| match snapshot.as_str() {\n                    Some(v @ (\"stdout\" | \"stderr\" | \"both\" | \"none\")) => Some(TestSnapshot {\n                        stdout: matches!(v, \"stdout\" | \"both\"),\n                        stderr: matches!(v, \"stderr\" | \"both\"),\n                    }),\n                    Some(v) => panic!(\n                        \"[snapshot] is \\\"{}\\\" but was expected to be \\\"stderr\\\", \\\"stdout\\\" or \\\"both\\\"\", v\n                    ),\n                    None => None,\n                })\n                .unwrap_or_else(|| TestSnapshot {\n                    stderr: as_bool(&snapshot, \"stderr\", true),\n                    stdout: as_bool(&snapshot, \"stdout\", true),\n                }),\n        }\n    }\n}\n\n/// The information for a test is given by `cargo metadata`\n#[derive(Clone, Debug, serde::Serialize)]\npub struct TestInfo {\n    pub name: String,\n    pub manifest: std::path::PathBuf,\n    pub description: Option<String>,\n}\n\n#[derive(Clone, Debug, serde::Serialize)]\npub struct Test {\n    pub kind: TestKind,\n    pub info: TestInfo,\n    pub spec: TestSpec,\n}\n\nimpl Test {\n    fn as_args(&self) -> Vec<String> {\n        match &self.kind {\n            TestKind::Translate { backend } => {\n                let mut args = vec![];\n                args.push(\"into\".to_string());\n                if let Some(i) = self.spec.include_flag.as_ref() {\n                    args.push(\"-i\".to_string());\n                    
args.push(i.to_string());\n                }\n                args.push(\"--dry-run\".to_string());\n                args.push(backend.clone());\n                if let Some(backend_options) = &self.spec.backend_options {\n                    args.extend_from_slice(backend_options.clone().as_slice());\n                }\n                args\n            }\n        }\n    }\n}\n\nimpl std::fmt::Display for Test {\n    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n        write!(f, \"{} - {:?}\", self.info.name, self.kind)?;\n        if let Some(issue_id) = self.spec.issue_id {\n            write!(f, \" #{}\", issue_id)?;\n        };\n        Ok(())\n    }\n}\n\nimpl Test {\n    fn into_runner(self, workspace: String) -> Result<(), libtest_mimic::Failed> {\n        // 1. cook a command\n        let mut cmd = Command::hax(&[\"-C\"]);\n        cmd.arg(\"--manifest-path\").arg(self.info.manifest.clone());\n        cmd.arg(\";\");\n        cmd.stdout(Stdio::piped()).stderr(Stdio::piped());\n        cmd.args(self.as_args());\n\n        // 2. execute it (twice, idea of @franziskuskiefer, so that\n        // the messages related to building dependencies are not\n        // included in the second one)\n        let _ = cmd.output().unwrap();\n        let out = cmd.output().unwrap();\n\n        let command_successful = out.status.success();\n        let cleanup = |s: String| {\n            use lazy_static::lazy_static;\n            use regex::Regex;\n            lazy_static! 
{\n                // Regex [TIME] matches compile times\n                static ref TIME: Regex = Regex::new(r\"\\bin \\d+(\\.\\d+)?s\\b\").unwrap();\n                static ref LOCK: Regex = Regex::new(r\"Blocking waiting for \\w+ lock on (the registry index|build directory|package cache)\").unwrap();\n            }\n            TIME.replace_all(\n                LOCK.replace_all(\n                    &s.replace(r\"\\\", \"/\").replace(&workspace, \"WORKSPACE_ROOT\"),\n                    \"\",\n                )\n                .as_ref(),\n                \"in XXs\",\n            )\n            .trim()\n            .to_string()\n        };\n        let serr = cleanup(String::from_utf8_lossy(&out.stderr).to_string());\n        let sout = String::from_utf8_lossy(&out.stdout).to_string();\n\n        // 3. make sure the test is successful\n        let mut snapshot: Map<String, Value> = Map::new();\n        if self.spec.snapshot.stderr {\n            snapshot.insert(\"stderr\".to_string(), Value::String(serr.clone()));\n        }\n        if self.spec.snapshot.stdout {\n            snapshot.insert(\n                \"stdout\".to_string(),\n                serde_json::from_str(&sout)\n                    .map(|out: hax_types::engine_api::Output| {\n                        use serde_json::json;\n                        json!({\n                            \"diagnostics\": Value::Array(out.diagnostics.into_iter().map(|diag| json!({\n                                \"spans\": Value::Array(diag.span.clone().into_iter().map(|span| Value::String(format!(\"{:?}\", span))).collect()),\n                                \"message\": Value::String(format!(\"{}\", diag)),\n                            })).collect()),\n                            \"files\": Value::Object(out.files.into_iter().map(|file| (file.path, Value::String(file.contents))).collect())\n                        })\n                    })\n                    .unwrap_or_else(|_| 
Value::String(cleanup(sout.clone()))),\n            );\n        }\n\n        if !snapshot.is_empty() {\n            let exit = out.status.code().unwrap_or(std::i32::MAX);\n            snapshot.insert(\"exit\".to_string(), exit.into());\n            let snapshot = Value::Object(snapshot);\n            let name = format!(\"{} {}\", self.info.name, self.kind.as_name());\n\n            let mut info = self.clone();\n            info.info.manifest = info.info.manifest.strip_prefix(workspace).unwrap().into();\n            insta::with_settings!({\n                info => &info,\n            }, { insta::assert_toml_snapshot!(name, snapshot) })\n        }\n\n        let err = |s: &str| {\n            Err(format!(\n                \"Command {s}.\\nThe command was: {:?}{}\",\n                cmd,\n                if command_successful {\n                    \"\".to_string()\n                } else {\n                    format!(\"\\nSTDOUT:\\n{}\\nSTDERR:\\n{}\", sout, serr)\n                }\n            ))\n        };\n        match (command_successful, (self.spec.positive, self.spec.broken)) {\n            (true, (true, false) | (false, true)) => Ok(()),\n            (false, (false, false) | (true, true)) => Ok(()),\n            (false, (false, true)) => err(\"failed, but this is a negative test marked broken\")?,\n            (false, (true, false)) => err(\"failed\")?,\n            (true, (true, true)) => err(\"succeeded, but this is a positive test marked broken\")?,\n            (true, (false, false)) => err(\"succeeded, but this is a negative test\")?,\n        }\n    }\n\n    fn into_trial(&self, workspace: &String) -> libtest_mimic::Trial {\n        libtest_mimic::Trial::test(format!(\"{}\", &self), {\n            let test = self.clone();\n            let workspace = workspace.clone();\n            move || test.clone().into_runner(workspace)\n        })\n        .with_kind(if self.spec.positive {\n            \"positive\"\n        } else {\n            \"negative\"\n 
       })\n        .with_ignored_flag(self.spec.optional)\n    }\n}\n\n/// Given [metadata] the table declared in a test's [Cargo.toml]\n/// [workspace.hax-tests], this function returns a list of tests\nfn parse_hax_tests_metadata(info: TestInfo, metadata: &Value) -> Vec<Test> {\n    if metadata.is_null() {\n        return vec![];\n    }\n\n    metadata\n        .as_object()\n        .expect(\n            format!(\n                \"Expected value at key [hax-tests] to be a dictionary for package {:#?}\",\n                info\n            )\n            .as_str(),\n        )\n        .into_iter()\n        .flat_map(|(a, o)| {\n            o.as_object()\n                .expect(\n                    format!(\n                        \"Expected value at key [{}] be a dictionary for package {:#?}\",\n                        a, info\n                    )\n                    .as_str(),\n                )\n                .into_iter()\n                .flat_map(|(key, o)| key.split(\"+\").map(|k| (k.trim().to_string(), o.clone())))\n                .map(|(b, o)| (a.clone(), b, o))\n        })\n        .map(|(a, b, o)| Test {\n            spec: o.into(),\n            info: info.clone(),\n            kind: match a.as_str() {\n                \"into\" => TestKind::Translate { backend: b },\n                _ => panic!(\n                    \"unexpected metadata [hax-tests.{}.{}] for package {:#?}\",\n                    a, b, info\n                ),\n            },\n        })\n        .collect()\n}\n\nfn main() {\n    let metadata = cargo_metadata::MetadataCommand::new()\n        .manifest_path(\"../tests/Cargo.toml\")\n        .exec()\n        .unwrap();\n    let workspace_root: String = metadata.workspace_root.into();\n\n    let mut args = libtest_mimic::Arguments::from_args();\n    args.test_threads = Some(1);\n\n    libtest_mimic::run(\n        &args,\n        metadata\n            .packages\n            .into_iter()\n            .flat_map(|o| {\n                
parse_hax_tests_metadata(\n                    TestInfo {\n                        name: o.name,\n                        description: o.description,\n                        manifest: o.manifest_path.into(),\n                    },\n                    &o.metadata[\"hax-tests\"],\n                )\n            })\n            .map(|test| test.into_trial(&workspace_root))\n            .collect(),\n    )\n    .exit();\n}\n"
  },
  {
    "path": "test-harness/src/lib.rs",
    "content": "\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__assert into-coq.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: coq\n  info:\n    name: assert\n    manifest: assert/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Assert.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n(* NotImplementedYet *)\n\n\n\nDefinition asserts '(_ : unit) : unit :=\n  let _ := assert ((true : bool)) in\n  let _ := assert (f_eq ((1 : t_i32)) ((1 : t_i32))) in\n  let _ := match ((2 : t_i32),(2 : t_i32)) with\n  | (left_val,right_val) =>\n    assert (f_eq (left_val) (right_val))\n  end in\n  let _ := match ((1 : t_i32),(2 : t_i32)) with\n  | (left_val,right_val) =>\n    assert (f_not (f_eq (left_val) (right_val)))\n  end in\n  tt.\n'''\n_CoqProject = '''\n-R ./ TODO\n-arg -w\n-arg all\n\nAssert.v'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__assert into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: assert\n    manifest: assert/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Assert.fst\" = '''\nmodule Assert\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet asserts (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = Hax_lib.v_assert true in\n  let _:Prims.unit = Hax_lib.v_assert (mk_i32 1 =. mk_i32 1 <: bool) in\n  let _:Prims.unit =\n    match mk_i32 2, mk_i32 2 <: (i32 & i32) with\n    | left_val, right_val -> Hax_lib.v_assert (left_val =. right_val <: bool)\n  in\n  let _:Prims.unit =\n    match mk_i32 1, mk_i32 2 <: (i32 & i32) with\n    | left_val, right_val -> Hax_lib.v_assert (~.(left_val =. right_val <: bool) <: bool)\n  in\n  ()\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__assert into-ssprove.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: ssprove\n  info:\n    name: assert\n    manifest: assert/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Assert.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp Require Import word_ssrZ word.\n(* From Jasmin Require Import word. *)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? todo(item)*)\n\nEquations asserts (_ : both 'unit) : both 'unit :=\n  asserts _  :=\n    letb _ := assert (ret_both (true : 'bool)) in\n    letb _ := assert ((ret_both (1 : int32)) =.? (ret_both (1 : int32))) in\n    letb _ := matchb prod_b (ret_both (2 : int32),ret_both (2 : int32)) with\n    | '(left_val,right_val) =>\n      assert (left_val =.? right_val)\n    end in\n    letb _ := matchb prod_b (ret_both (1 : int32),ret_both (2 : int32)) with\n    | '(left_val,right_val) =>\n      assert (f_not (left_val =.? 
right_val))\n    end in\n    ret_both (tt : 'unit) : both 'unit.\nFail Next Obligation.\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__attribute-opaque into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: attribute-opaque\n    manifest: attribute-opaque/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options:\n      - \"--interfaces\"\n      - +**\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Attribute_opaque.fst\" = '''\nmodule Attribute_opaque\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nassume\nval t_OpaqueStruct': v_X: usize -> v_T: Type0 -> v_U: Type0 -> eqtype\n\nlet t_OpaqueStruct (v_X: usize) (v_T v_U: Type0) = t_OpaqueStruct' v_X v_T v_U\n\nassume\nval t_OpaqueEnum': v_X: usize -> v_T: Type0 -> v_U: Type0 -> eqtype\n\nlet t_OpaqueEnum (v_X: usize) (v_T v_U: Type0) = t_OpaqueEnum' v_X v_T v_U\n\nassume\nval ff_generic': v_X: usize -> #v_T: Type0 -> #v_U: Type0 -> x: v_U\n  -> Prims.Pure (t_OpaqueEnum v_X v_T v_U) Prims.l_True (fun _ -> Prims.l_True)\n\nlet ff_generic (v_X: usize) (#v_T #v_U: Type0) = ff_generic' v_X #v_T #v_U\n\nassume\nval f': x: bool -> y: bool -> Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True)\n\nlet f = f'\n\nassume\nval ff_pre_post': x: bool -> y: bool\n  -> Prims.Pure bool\n      (requires x)\n      (ensures\n        fun result ->\n          let result:bool = result in\n          result =. 
y)\n\nlet ff_pre_post = ff_pre_post'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_T_for_u8': t_T u8\n\nlet impl_T_for_u8 = impl_T_for_u8'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_2': #v_U: Type0 -> {| i0: Core_models.Clone.t_Clone v_U |} -> t_TrGeneric i32 v_U\n\nlet impl_2\n      (#v_U: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Clone.t_Clone v_U)\n     = impl_2' #v_U #i0\n\nassume\nval v_C': u8\n\nlet v_C = v_C'\n\nassume\nval impl_S1__ff_s1': Prims.unit -> Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True)\n\nlet impl_S1__ff_s1 = impl_S1__ff_s1'\n\nassume\nval impl_S2__ff_s2': Prims.unit -> Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True)\n\nlet impl_S2__ff_s2 = impl_S2__ff_s2'\n'''\n\"Attribute_opaque.fsti\" = '''\nmodule Attribute_opaque\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nval t_OpaqueStruct (v_X: usize) (v_T v_U: Type0) : eqtype\n\nval t_OpaqueEnum (v_X: usize) (v_T v_U: Type0) : eqtype\n\nval ff_generic (v_X: usize) (#v_T #v_U: Type0) (x: v_U)\n    : Prims.Pure (t_OpaqueEnum v_X v_T v_U) Prims.l_True (fun _ -> Prims.l_True)\n\nval f (x y: bool) : Prims.Pure bool Prims.l_True (fun _ -> Prims.l_True)\n\nval ff_pre_post (x y: bool)\n    : Prims.Pure bool\n      (requires x)\n      (ensures\n        fun result ->\n          let result:bool = result in\n          result =. y)\n\nclass t_T (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_U:Type0;\n  f_c:u8;\n  f_d_pre:Prims.unit -> Type0;\n  f_d_post:Prims.unit -> Prims.unit -> Type0;\n  f_d:x0: Prims.unit -> Prims.Pure Prims.unit (f_d_pre x0) (fun result -> f_d_post x0 result);\n  f_m_pre:self_: v_Self -> x: u8 -> pred: Type0{x =. 
mk_u8 0 ==> pred};\n  f_m_post:v_Self -> u8 -> bool -> Type0;\n  f_m:x0: v_Self -> x1: u8 -> Prims.Pure bool (f_m_pre x0 x1) (fun result -> f_m_post x0 x1 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_T_for_u8:t_T u8\n\nclass t_TrGeneric (v_Self: Type0) (v_U: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:Core_models.Clone.t_Clone v_U;\n  f_f_pre:v_U -> Type0;\n  f_f_post:v_U -> v_Self -> Type0;\n  f_f:x0: v_U -> Prims.Pure v_Self (f_f_pre x0) (fun result -> f_f_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) (v_U:Type0) {|i: t_TrGeneric v_Self v_U|} -> i._super_i0\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nval impl_2 (#v_U: Type0) {| i0: Core_models.Clone.t_Clone v_U |} : t_TrGeneric i32 v_U\n\nval v_C:u8\n\ntype t_S1 = | S1 : t_S1\n\nval impl_S1__ff_s1: Prims.unit -> Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True)\n\ntype t_S2 = | S2 : t_S2\n\nval impl_S2__ff_s2: Prims.unit -> Prims.Pure Prims.unit Prims.l_True (fun _ -> Prims.l_True)\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__attributes into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: attributes\n    manifest: attributes/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Attributes.Ensures_on_arity_zero_fns.fst\" = '''\nmodule Attributes.Ensures_on_arity_zero_fns\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet doing_nothing (_: Prims.unit)\n    : Prims.Pure Prims.unit\n      (requires true)\n      (ensures\n        fun e_x ->\n          let e_x:Prims.unit = e_x in\n          true) = ()\n\nlet basically_a_constant (_: Prims.unit)\n    : Prims.Pure u8\n      (requires true)\n      (ensures\n        fun x ->\n          let x:u8 = x in\n          x >. mk_u8 100) = mk_u8 127\n'''\n\"Attributes.Future_self.fst\" = '''\nmodule Attributes.Future_self\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Dummy = | Dummy : t_Dummy\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_1': Core_models.Marker.t_StructuralPartialEq t_Dummy\n\nunfold\nlet impl_1 = impl_1'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_2': Core_models.Cmp.t_PartialEq t_Dummy t_Dummy\n\nunfold\nlet impl_2 = impl_2'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl': Core_models.Cmp.t_Eq t_Dummy\n\nunfold\nlet impl = impl'\n\nlet impl_Dummy__f (self: t_Dummy)\n    : Prims.Pure t_Dummy\n      Prims.l_True\n      (ensures\n        fun self_e_future ->\n          let self_e_future:t_Dummy = self_e_future in\n          self_e_future =. 
self) = self\n'''\n\"Attributes.Inlined_code_ensures_requires.fst\" = '''\nmodule Attributes.Inlined_code_ensures_requires\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet increment_array (v: t_Array u8 (mk_usize 4))\n    : Prims.Pure (t_Array u8 (mk_usize 4))\n      (requires forall i. FStar.Seq.index v i <. mk_u8 254)\n      (ensures\n        fun vv_future ->\n          let vv_future:t_Array u8 (mk_usize 4) = vv_future in\n          let future_v:t_Array u8 (mk_usize 4) = vv_future in\n          forall i. FStar.Seq.index future_v i >. mk_u8 0) =\n  let v:t_Array u8 (mk_usize 4) =\n    Rust_primitives.Hax.Monomorphized_update_at.update_at_usize v\n      (mk_usize 0)\n      ((v.[ mk_usize 0 ] <: u8) +! mk_u8 1 <: u8)\n  in\n  let v:t_Array u8 (mk_usize 4) =\n    Rust_primitives.Hax.Monomorphized_update_at.update_at_usize v\n      (mk_usize 1)\n      ((v.[ mk_usize 1 ] <: u8) +! mk_u8 1 <: u8)\n  in\n  let v:t_Array u8 (mk_usize 4) =\n    Rust_primitives.Hax.Monomorphized_update_at.update_at_usize v\n      (mk_usize 2)\n      ((v.[ mk_usize 2 ] <: u8) +! mk_u8 1 <: u8)\n  in\n  let v:t_Array u8 (mk_usize 4) =\n    Rust_primitives.Hax.Monomorphized_update_at.update_at_usize v\n      (mk_usize 3)\n      ((v.[ mk_usize 3 ] <: u8) +! 
mk_u8 1 <: u8)\n  in\n  v\n'''\n\"Attributes.Int_model.fst\" = '''\nmodule Attributes.Int_model\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nunfold type t_Int = int\n\nlet impl_1: Core_models.Clone.t_Clone t_Int =\n  { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl': Core_models.Marker.t_Copy t_Int\n\nunfold\nlet impl = impl'\n\nunfold let add x y = x + y\n\nunfold instance impl: Core.Ops.Arith.t_Sub t_Int t_Int =\n  {\n    f_Output = t_Int;\n    f_sub_pre = (fun (self: t_Int) (other: t_Int) -> true);\n    f_sub_post = (fun (self: t_Int) (other: t_Int) (out: t_Int) -> true);\n    f_sub = fun (self: t_Int) (other: t_Int) -> self + other\n  }\n'''\n\"Attributes.Issue_1266_.fst\" = '''\nmodule Attributes.Issue_1266_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_T (v_Self: Type0) = {\n  f_v_pre:v_Self -> Type0;\n  f_v_post:x: v_Self -> x_future: v_Self -> pred: Type0{pred ==> true};\n  f_v:x0: v_Self -> Prims.Pure v_Self (f_v_pre x0) (fun result -> f_v_post x0 result)\n}\n'''\n\"Attributes.Issue_1276_.fst\" = '''\nmodule Attributes.Issue_1276_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_S = | S : u8 -> t_S\n\nlet impl_S__f (self: t_S) (self_ self_0_ self_1_ self_2_: u8)\n    : Prims.Pure Prims.unit\n      (requires self._0 =. mk_u8 0 && self_ =. self_1_ && self_2_ =. 
mk_u8 9)\n      (fun _ -> Prims.l_True) = ()\n'''\n\"Attributes.Issue_evit_57_.fst\" = '''\nmodule Attributes.Issue_evit_57_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Foo = | Foo : t_Foo\n\nlet impl_Foo__f (self: t_Foo) : Prims.Pure Prims.unit (requires true) (fun _ -> Prims.l_True) = ()\n'''\n\"Attributes.Nested_refinement_elim.fst\" = '''\nmodule Attributes.Nested_refinement_elim\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet t_DummyRefinement = x: u16{true}\n\nlet elim_twice (x: t_DummyRefinement) : u16 = ((x <: u16) <: t_DummyRefinement) <: u16\n'''\n\"Attributes.Newtype_pattern.fst\" = '''\nmodule Attributes.Newtype_pattern\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet v_MAX: usize = mk_usize 10\n\ntype t_SafeIndex = { f_i:f_i: usize{b2t (f_i <. v_MAX <: bool)} }\n\nlet impl_SafeIndex__new (i: usize) : Core_models.Option.t_Option t_SafeIndex =\n  if i <. 
v_MAX\n  then\n    Core_models.Option.Option_Some ({ f_i = i } <: t_SafeIndex)\n    <:\n    Core_models.Option.t_Option t_SafeIndex\n  else Core_models.Option.Option_None <: Core_models.Option.t_Option t_SafeIndex\n\nlet impl_SafeIndex__as_usize (self: t_SafeIndex) : usize = self.f_i\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1 (#v_T: Type0) : Core_models.Ops.Index.t_Index (t_Array v_T (mk_usize 10)) t_SafeIndex =\n  {\n    f_Output = v_T;\n    f_index_pre = (fun (self: t_Array v_T (mk_usize 10)) (index: t_SafeIndex) -> true);\n    f_index_post = (fun (self: t_Array v_T (mk_usize 10)) (index: t_SafeIndex) (out: v_T) -> true);\n    f_index = fun (self: t_Array v_T (mk_usize 10)) (index: t_SafeIndex) -> self.[ index.f_i ]\n  }\n'''\n\"Attributes.Postprocess_with.Somewhere.fst\" = '''\nmodule Attributes.Postprocess_with.Somewhere\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet some_hypothetical_tactic (some_param: u8) : Prims.unit = ()\n'''\n\"Attributes.Postprocess_with.fst\" = '''\nmodule Attributes.Postprocess_with\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\n[@@FStar.Tactics.postprocess_with (fun _ -> FStar.Tactics.trefl ())]\n\nlet f (_: Prims.unit) : Prims.unit = ()\n\n[@@FStar.Tactics.postprocess_with ( fun temp_0_ ->\n  let ():Prims.unit = temp_0_ in\n  Attributes.Postprocess_with.Somewhere.some_hypothetical_tactic (mk_u8 12) )]\n\nlet g (_: Prims.unit) : Prims.unit = ()\n'''\n\"Attributes.Pre_post_on_traits_and_impls.fst\" = '''\nmodule Attributes.Pre_post_on_traits_and_impls\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_Operation (v_Self: Type0) = {\n  f_double_pre:x: u8\n    -> pred:\n      Type0\n        { (Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) <=\n          (127 <: Hax_lib.Int.t_Int) ==>\n          pred };\n  f_double_post:x: u8 -> result: u8\n    -> pred:\n      Type0\n        { pred 
==>\n          ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (2 <: Hax_lib.Int.t_Int)\n            <:\n            Hax_lib.Int.t_Int) =\n          (Rust_primitives.Hax.Int.from_machine result <: Hax_lib.Int.t_Int) };\n  f_double:x0: u8 -> Prims.Pure u8 (f_double_pre x0) (fun result -> f_double_post x0 result)\n}\n\ntype t_ViaAdd = | ViaAdd : t_ViaAdd\n\ntype t_ViaMul = | ViaMul : t_ViaMul\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: t_Operation t_ViaAdd =\n  {\n    f_double_pre\n    =\n    (fun (x: u8) ->\n        (Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) <= (127 <: Hax_lib.Int.t_Int));\n    f_double_post\n    =\n    (fun (x: u8) (result: u8) ->\n        ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (2 <: Hax_lib.Int.t_Int)\n          <:\n          Hax_lib.Int.t_Int) =\n        (Rust_primitives.Hax.Int.from_machine result <: Hax_lib.Int.t_Int));\n    f_double = fun (x: u8) -> x +! x\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Operation_for_ViaMul: t_Operation t_ViaMul =\n  {\n    f_double_pre\n    =\n    (fun (x: u8) ->\n        (Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) <= (127 <: Hax_lib.Int.t_Int));\n    f_double_post\n    =\n    (fun (x: u8) (result: u8) ->\n        ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) * (2 <: Hax_lib.Int.t_Int)\n          <:\n          Hax_lib.Int.t_Int) =\n        (Rust_primitives.Hax.Int.from_machine result <: Hax_lib.Int.t_Int));\n    f_double = fun (x: u8) -> x *! mk_u8 2\n  }\n\nclass t_TraitWithRequiresAndEnsures (v_Self: Type0) = {\n  f_method_pre:self_: v_Self -> x: u8 -> pred: Type0{x <. mk_u8 100 ==> pred};\n  f_method_post:self_: v_Self -> x: u8 -> r: u8 -> pred: Type0{pred ==> r >. 
mk_u8 88};\n  f_method:x0: v_Self -> x1: u8\n    -> Prims.Pure u8 (f_method_pre x0 x1) (fun result -> f_method_post x0 x1 result)\n}\n\nlet test\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_TraitWithRequiresAndEnsures v_T)\n      (x: v_T)\n    : u8 = (f_method #v_T #FStar.Tactics.Typeclasses.solve x (mk_u8 99) <: u8) -! mk_u8 88\n'''\n\"Attributes.Props.fst\" = '''\nmodule Attributes.Props\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet f (x: Hax_lib.Prop.t_Prop) (y: bool) : Hax_lib.Prop.t_Prop =\n  let (xprop: Hax_lib.Prop.t_Prop):Hax_lib.Prop.t_Prop = b2t y in\n  let p:Hax_lib.Prop.t_Prop = b2t y /\\ xprop /\\ b2t y /\\ b2t y in\n  ~(p \\/ b2t y ==>\n    (forall (x: u8). b2t (x <=. Core_models.Num.impl_u8__MAX <: bool)) /\\\n    (exists (x: u16). b2t (x >. mk_u16 300 <: bool)))\n'''\n\"Attributes.Refined_arithmetic.fst\" = '''\nmodule Attributes.Refined_arithmetic\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Foo = | Foo : u8 -> t_Foo\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: Core_models.Ops.Arith.t_Add t_Foo t_Foo =\n  {\n    f_Output = t_Foo;\n    f_add_pre = (fun (self_: t_Foo) (rhs: t_Foo) -> self_._0 <. (mk_u8 255 -! rhs._0 <: u8));\n    f_add_post = (fun (self: t_Foo) (rhs: t_Foo) (out: t_Foo) -> true);\n    f_add = fun (self: t_Foo) (rhs: t_Foo) -> Foo (self._0 +! rhs._0) <: t_Foo\n  }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1: Core_models.Ops.Arith.t_Mul t_Foo t_Foo =\n  {\n    f_Output = t_Foo;\n    f_mul_pre\n    =\n    (fun (self_: t_Foo) (rhs: t_Foo) -> rhs._0 =. mk_u8 0 || self_._0 <. (mk_u8 255 /! rhs._0 <: u8)\n    );\n    f_mul_post = (fun (self: t_Foo) (rhs: t_Foo) (out: t_Foo) -> true);\n    f_mul = fun (self: t_Foo) (rhs: t_Foo) -> Foo (self._0 *! 
rhs._0) <: t_Foo\n  }\n'''\n\"Attributes.Refined_indexes.fst\" = '''\nmodule Attributes.Refined_indexes\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet v_MAX: usize = mk_usize 10\n\ntype t_MyArray = | MyArray : t_Array u8 (mk_usize 10) -> t_MyArray\n\n/// Triple dash comment\n(** Multiline double star comment Maecenas blandit accumsan feugiat.\n    Done vitae ullamcorper est.\n    Curabitur id dui eget sem viverra interdum. *)\nlet mutation_example\n      (uuse_generic_update_at: t_MyArray)\n      (uuse_specialized_update_at: t_Slice u8)\n      (specialized_as_well: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n    : (t_MyArray & t_Slice u8 & Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) =\n  let uuse_generic_update_at:t_MyArray =\n    Rust_primitives.Hax.update_at uuse_generic_update_at (mk_usize 2) (mk_u8 0)\n  in\n  let uuse_specialized_update_at:t_Slice u8 =\n    Rust_primitives.Hax.Monomorphized_update_at.update_at_usize uuse_specialized_update_at\n      (mk_usize 2)\n      (mk_u8 0)\n  in\n  let specialized_as_well:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Alloc.Slice.impl__to_vec (Rust_primitives.Hax.Monomorphized_update_at.update_at_usize (Alloc.Vec.impl_1__as_slice\n              specialized_as_well\n            <:\n            t_Slice u8)\n          (mk_usize 2)\n          (mk_u8 0)\n        <:\n        t_Slice u8)\n  in\n  uuse_generic_update_at, uuse_specialized_update_at, specialized_as_well\n  <:\n  (t_MyArray & t_Slice u8 & Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: Core_models.Ops.Index.t_Index t_MyArray usize =\n  {\n    f_Output = u8;\n    f_index_pre = (fun (self_: t_MyArray) (index: usize) -> index <. 
v_MAX);\n    f_index_post = (fun (self: t_MyArray) (index: usize) (out: u8) -> true);\n    f_index = fun (self: t_MyArray) (index: usize) -> self.[ index ]\n  }\n'''\n\"Attributes.Refinement_types.fst\" = '''\nmodule Attributes.Refinement_types\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet t_BoundedU8 (v_MIN v_MAX: u8) = x: u8{x >=. v_MIN && x <=. v_MAX}\n\nlet bounded_u8 (x: t_BoundedU8 (mk_u8 12) (mk_u8 15)) (y: t_BoundedU8 (mk_u8 10) (mk_u8 11))\n    : t_BoundedU8 (mk_u8 1) (mk_u8 23) = (x <: u8) +! (y <: u8) <: t_BoundedU8 (mk_u8 1) (mk_u8 23)\n\n/// Even `u8` numbers. Constructing pub Even values triggers static\n/// proofs in the extraction.\nlet t_Even = x: u8{(x %! mk_u8 2 <: u8) =. mk_u8 0}\n\nlet double (x: u8) : Prims.Pure t_Even (requires x <. mk_u8 127) (fun _ -> Prims.l_True) =\n  x +! x <: t_Even\n\nlet double_refine (x: u8) : Prims.Pure t_Even (requires x <. mk_u8 127) (fun _ -> Prims.l_True) =\n  x +! x <: t_Even\n\n/// A string that contains no space.\nlet t_NoE =\n  x:\n  Alloc.String.t_String\n    { let (_: Core_models.Str.Iter.t_Chars), (out: bool) =\n        Core_models.Iter.Traits.Iterator.f_any #Core_models.Str.Iter.t_Chars\n          #FStar.Tactics.Typeclasses.solve\n          #(FStar.Char.char -> bool)\n          (Core_models.Str.impl_str__chars (Core_models.Ops.Deref.f_deref #Alloc.String.t_String\n                  #FStar.Tactics.Typeclasses.solve\n                  x\n                <:\n                string)\n            <:\n            Core_models.Str.Iter.t_Chars)\n          (fun ch ->\n              let ch:FStar.Char.char = ch in\n              ch =. ' ' <: bool)\n      in\n      ~.out }\n\n/// A modular mutliplicative inverse\nlet t_ModInverse (v_MOD: u32) =\n  n:\n  u32\n    { (((cast (n <: u32) <: u128) *! 
(cast (v_MOD <: u32) <: u128) <: u128) %!\n        (cast (v_MOD <: u32) <: u128)\n        <:\n        u128) =.\n      mk_u128 1 }\n\n/// A field element\nlet t_FieldElement = x: u16{x <=. mk_u16 2347}\n\n/// Example of a specific constraint on a value\nlet t_CompressionFactor = x: u8{x =. mk_u8 4 || x =. mk_u8 5 || x =. mk_u8 10 || x =. mk_u8 11}\n\nlet t_BoundedAbsI16 (v_B: usize) =\n  x:\n  i16\n    { (Rust_primitives.Hax.Int.from_machine v_B <: Hax_lib.Int.t_Int) < (32768 <: Hax_lib.Int.t_Int) &&\n      (Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) >=\n      (- (Rust_primitives.Hax.Int.from_machine v_B <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int) &&\n      (Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) <=\n      (Rust_primitives.Hax.Int.from_machine v_B <: Hax_lib.Int.t_Int) }\n\nlet impl (v_B: usize) : Core_models.Clone.t_Clone (t_BoundedAbsI16 v_B) =\n  { f_clone = (fun x -> x); f_clone_pre = (fun _ -> True); f_clone_post = (fun _ _ -> True) }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_1': v_B: usize -> Core_models.Marker.t_Copy (t_BoundedAbsI16 v_B)\n\nunfold\nlet impl_1 (v_B: usize) = impl_1' v_B\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_3': v_B: usize -> Core_models.Marker.t_StructuralPartialEq (t_BoundedAbsI16 v_B)\n\nunfold\nlet impl_3 (v_B: usize) = impl_3' v_B\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_4': v_B: usize -> Core_models.Cmp.t_PartialEq (t_BoundedAbsI16 v_B) (t_BoundedAbsI16 v_B)\n\nunfold\nlet impl_4 (v_B: usize) = impl_4' v_B\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_2': v_B: usize -> Core_models.Cmp.t_Eq (t_BoundedAbsI16 v_B)\n\nunfold\nlet impl_2 (v_B: usize) = impl_2' v_B\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_6': v_B: usize -> Core_models.Cmp.t_PartialOrd (t_BoundedAbsI16 v_B) (t_BoundedAbsI16 v_B)\n\nunfold\nlet impl_6 (v_B: usize) = impl_6' v_B\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval 
impl_5': v_B: usize -> Core_models.Cmp.t_Ord (t_BoundedAbsI16 v_B)\n\nunfold\nlet impl_5 (v_B: usize) = impl_5' v_B\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_7': v_B: usize -> Core_models.Hash.t_Hash (t_BoundedAbsI16 v_B)\n\nunfold\nlet impl_7 (v_B: usize) = impl_7' v_B\n\nlet double_abs_i16 (v_N v_M: usize) (x: t_BoundedAbsI16 v_N)\n    : Prims.Pure (t_BoundedAbsI16 v_M)\n      (requires\n        (Rust_primitives.Hax.Int.from_machine v_M <: Hax_lib.Int.t_Int) <\n        (32768 <: Hax_lib.Int.t_Int) &&\n        (Rust_primitives.Hax.Int.from_machine v_M <: Hax_lib.Int.t_Int) =\n        ((Rust_primitives.Hax.Int.from_machine v_N <: Hax_lib.Int.t_Int) * (2 <: Hax_lib.Int.t_Int)\n          <:\n          Hax_lib.Int.t_Int))\n      (fun _ -> Prims.l_True) =\n  Core_models.Ops.Arith.f_mul #(t_BoundedAbsI16 v_N)\n    #i16\n    #FStar.Tactics.Typeclasses.solve\n    x\n    (mk_i16 2)\n  <:\n  t_BoundedAbsI16 v_M\n'''\n\"Attributes.Reorder.fst\" = '''\nmodule Attributes.Reorder\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Foo = {\n  f_field_3_:u8;\n  f_field_4_:u8;\n  f_field_2_:u8;\n  f_field_1_:u8\n}\n\ntype t_Bar =\n  | Bar_A {\n    f_a_field_3_:u8;\n    f_a_field_1_:u8;\n    f_a_field_2_:u8\n  }: t_Bar\n  | Bar_B {\n    f_b_field_1_:u8;\n    f_b_field_3_:u8;\n    f_b_field_2_:u8\n  }: t_Bar\n'''\n\"Attributes.Replace_body.fst\" = '''\nmodule Attributes.Replace_body\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet f (x y: u8) : u8 = magic x\n\ntype t_Foo = | Foo : t_Foo\n\nlet impl_Foo__assoc_fn (self: t_Foo) (x: u8) : Prims.unit = (magic (self <: t_Foo)) x\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1: Alloc.String.t_ToString t_Foo =\n  {\n    f_to_string_pre = (fun (self: t_Foo) -> true);\n    f_to_string_post = (fun (self: t_Foo) (out: Alloc.String.t_String) -> true);\n    f_to_string = fun (self: t_Foo) -> \"The type was t_Foo\"\n  
}\n'''\n\"Attributes.Requires_mut.fst\" = '''\nmodule Attributes.Requires_mut\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_Foo (v_Self: Type0) = {\n  f_f_pre:x: u8 -> y: u8\n    -> pred:\n      Type0\n        { ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n            (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n            <:\n            Hax_lib.Int.t_Int) <\n          (254 <: Hax_lib.Int.t_Int) ==>\n          pred };\n  f_f_post:x: u8 -> y: u8 -> x1: (u8 & u8)\n    -> pred:\n      Type0\n        { pred ==>\n          (let (y_future: u8), (output_variable: u8) = x1 in\n            output_variable =. y_future) };\n  f_f:x0: u8 -> x1: u8 -> Prims.Pure (u8 & u8) (f_f_pre x0 x1) (fun result -> f_f_post x0 x1 result);\n  f_g_pre:u8 -> u8 -> Type0;\n  f_g_post:u8 -> u8 -> u8 -> Type0;\n  f_g:x0: u8 -> x1: u8 -> Prims.Pure u8 (f_g_pre x0 x1) (fun result -> f_g_post x0 x1 result);\n  f_h_pre:u8 -> u8 -> Type0;\n  f_h_post:u8 -> u8 -> Prims.unit -> Type0;\n  f_h:x0: u8 -> x1: u8\n    -> Prims.Pure Prims.unit (f_h_pre x0 x1) (fun result -> f_h_post x0 x1 result);\n  f_i_pre:u8 -> u8 -> Type0;\n  f_i_post:u8 -> u8 -> u8 -> Type0;\n  f_i:x0: u8 -> x1: u8 -> Prims.Pure u8 (f_i_pre x0 x1) (fun result -> f_i_post x0 x1 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: t_Foo Prims.unit =\n  {\n    f_f_pre\n    =\n    (fun (x: u8) (y: u8) ->\n        ((Rust_primitives.Hax.Int.from_machine x <: Hax_lib.Int.t_Int) +\n          (Rust_primitives.Hax.Int.from_machine y <: Hax_lib.Int.t_Int)\n          <:\n          Hax_lib.Int.t_Int) <\n        (254 <: Hax_lib.Int.t_Int));\n    f_f_post\n    =\n    (fun (x: u8) (y: u8) (y_future, output_variable: (u8 & u8)) -> output_variable =. y_future);\n    f_f\n    =\n    (fun (x: u8) (y: u8) ->\n        let y:u8 = y +! 
x in\n        let hax_temp_output:u8 = y in\n        y, hax_temp_output <: (u8 & u8));\n    f_g_pre = (fun (x: u8) (y: u8) -> true);\n    f_g_post = (fun (x: u8) (y: u8) (output_variable: u8) -> output_variable =. y);\n    f_g = (fun (x: u8) (y: u8) -> y);\n    f_h_pre = (fun (x: u8) (y: u8) -> true);\n    f_h_post\n    =\n    (fun (x: u8) (y: u8) (output_variable: Prims.unit) -> output_variable =. (() <: Prims.unit));\n    f_h = (fun (x: u8) (y: u8) -> () <: Prims.unit);\n    f_i_pre = (fun (x: u8) (y: u8) -> true);\n    f_i_post = (fun (x: u8) (y: u8) (y_future: u8) -> y_future =. y);\n    f_i\n    =\n    fun (x: u8) (y: u8) ->\n      let _:Prims.unit = () <: Prims.unit in\n      y\n  }\n'''\n\"Attributes.Verifcation_status.fst\" = '''\nmodule Attributes.Verifcation_status\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\n#push-options \"--admit_smt_queries true\"\n\nlet a_function_which_only_laxes (_: Prims.unit) : Prims.unit = Hax_lib.v_assert false\n\n#pop-options\n\nlet a_panicfree_function (_: Prims.unit)\n    : Prims.Pure u8\n      Prims.l_True\n      (ensures\n        fun x ->\n          let x:u8 = x in\n          false) =\n  let a:u8 = mk_u8 3 in\n  let b:u8 = mk_u8 6 in\n  let result:u8 = a +! b in\n  let _:Prims.unit = admit () (* Panic freedom *) in\n  result\n\nlet another_panicfree_function (_: Prims.unit)\n    : Prims.Pure Prims.unit\n      Prims.l_True\n      (ensures\n        fun x ->\n          let x:Prims.unit = x in\n          false) =\n  let not_much:i32 = mk_i32 0 in\n  let nothing:i32 = mk_i32 0 in\n  let still_not_much:i32 = not_much +! nothing in\n  admit () (* Panic freedom *)\n'''\n\"Attributes.fst\" = '''\nmodule Attributes\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet u32_max: u32 = mk_u32 90000\n\n/// A doc comment on `add3`\n///another doc comment on add3\nlet add3 (x y z: u32)\n    : Prims.Pure u32\n      (requires\n        x >. mk_u32 10 && y >. 
mk_u32 10 && z >. mk_u32 10 &&\n        ((x +! y <: u32) +! z <: u32) <. u32_max)\n      (ensures\n        fun result ->\n          let result:u32 = result in\n          b2t true ==> b2t (result >. mk_u32 32 <: bool)) = (x +! y <: u32) +! z\n\nlet swap_and_mut_req_ens (x y: u32)\n    : Prims.Pure (u32 & u32 & u32)\n      (requires x <. mk_u32 40 && y <. mk_u32 300)\n      (ensures\n        fun temp_0_ ->\n          let (x_future: u32), (y_future: u32), (result: u32) = temp_0_ in\n          x_future =. y && y_future =. x && result =. (x +! y <: u32)) =\n  let x0:u32 = x in\n  let x:u32 = y in\n  let y:u32 = x0 in\n  let hax_temp_output:u32 = x +! y in\n  x, y, hax_temp_output <: (u32 & u32 & u32)\n\nlet issue_844_ (e_x: u8)\n    : Prims.Pure u8\n      Prims.l_True\n      (ensures\n        fun e_x_future ->\n          let e_x_future:u8 = e_x_future in\n          true) = e_x\n\nlet add3_lemma (x: u32)\n    : Lemma\n    (ensures\n      x <=. mk_u32 10 || x >=. (u32_max /! mk_u32 3 <: u32) ||\n      (add3 x x x <: u32) =. (x *! mk_u32 3 <: u32)) = ()\n\nlet dummy_function (x: u32) : u32 = x\n\nlet apply_dummy_function_lemma (x: u32) : Lemma (ensures x =. (dummy_function x <: u32)) [SMTPat x] =\n  ()\n\ntype t_Foo = {\n  f_x:u32;\n  f_y:f_y: u32{b2t (f_y >. mk_u32 3 <: bool)};\n  f_z:f_z: u32{b2t (((f_y +! f_x <: u32) +! f_z <: u32) >. 
mk_u32 3 <: bool)}\n}\n\nlet props (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = Hax_lib.v_assume True in\n  let _:Prims.unit = Hax_lib.assert_prop True in\n  let _:Prims.unit = () in\n  ()\n\nlet inlined_code__v_V: u8 = mk_u8 12\n\nlet before_inlined_code = \"example before\"\n\nlet inlined_code (foo: t_Foo) : Prims.unit =\n  let vv_a:i32 = mk_i32 13 in\n  let _:Prims.unit =\n    let x = foo.f_x in\n    let { f_x = _ ; f_y = y ; f_z = _ } = foo in\n    add3 ((fun _ -> 3ul) foo) vv_a inlined_code__v_V y\n  in\n  ()\n\nlet inlined_code_after = \"example after\"\n\nlet before_1 = \"example before 1\"\n\nlet before_2 = \"example before 2\"\n\nlet before_3 = \"example before 3\"\n\nlet mutliple_before_after (_: Prims.unit) : Prims.unit = ()\n\nlet after 1 = \"example after 1\"\n\nlet after 2 = \"example after 2\"\n\nlet after 3 = \"example after 3\"\n\nunfold let some_function _ = \"hello from F*\"\n\nlet rec fib (x: usize) : Prims.Tot usize (decreases x) =\n  if x <=. mk_usize 2\n  then x\n  else\n    Core_models.Num.impl_usize__wrapping_add (fib (x -! mk_usize 1 <: usize) <: usize)\n      (fib (x -! mk_usize 2 <: usize) <: usize)\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__constructor-as-closure into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: constructor-as-closure\n    manifest: constructor-as-closure/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Constructor_as_closure.fst\" = '''\nmodule Constructor_as_closure\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Test = | Test : i32 -> t_Test\n\nlet impl_Test__test (x: Core_models.Option.t_Option i32) : Core_models.Option.t_Option t_Test =\n  Core_models.Option.impl__map #i32 #t_Test #(i32 -> t_Test) x Test\n\ntype t_Context =\n  | Context_A : i32 -> t_Context\n  | Context_B : i32 -> t_Context\n\nlet impl_Context__test (x: Core_models.Option.t_Option i32) : Core_models.Option.t_Option t_Context =\n  Core_models.Option.impl__map #i32 #t_Context #(i32 -> t_Context) x Context_B\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__cyclic-modules into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: cyclic-modules\n    manifest: cyclic-modules/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Cyclic_modules.B.fst\" = '''\nmodule Cyclic_modules.B\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle {g as g}\n'''\n\"Cyclic_modules.Bundle.fst\" = '''\nmodule Cyclic_modules.Bundle\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet f (_: Prims.unit) : Prims.unit = ()\n\nlet h2 (_: Prims.unit) : Prims.unit = Cyclic_modules.C.i ()\n\nlet g (_: Prims.unit) : Prims.unit = f ()\n\nlet h (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = g () in\n  Cyclic_modules.C.i ()\n'''\n\"Cyclic_modules.Bundle_d.fst\" = '''\nmodule Cyclic_modules.Bundle_d\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet d1 (_: Prims.unit) : Prims.unit = ()\n\nlet e1 (_: Prims.unit) : Prims.unit = d1 ()\n\nlet de1 (_: Prims.unit) : Prims.unit = e1 ()\n\nlet d2 (_: Prims.unit) : Prims.unit = de1 ()\n'''\n\"Cyclic_modules.Bundle_disjoint_cycle_a.fst\" = '''\nmodule Cyclic_modules.Bundle_disjoint_cycle_a\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet g (_: Prims.unit) : Prims.unit = ()\n\nlet h (_: Prims.unit) : Prims.unit = ()\n\nlet f (_: Prims.unit) : Prims.unit = h ()\n\nlet i (_: Prims.unit) : Prims.unit = g ()\n'''\n\"Cyclic_modules.Bundle_enums_a.fst\" = '''\nmodule Cyclic_modules.Bundle_enums_a\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_U =\n  | U_A : t_U\n  | U_B : t_U\n 
 | U_C : Alloc.Vec.t_Vec t_T Alloc.Alloc.t_Global -> t_U\n\nand t_T__from__enums_b =\n  | T_A : t_T__from__enums_b\n  | T_B : t_T__from__enums_b\n  | T_C : Alloc.Vec.t_Vec t_T Alloc.Alloc.t_Global -> t_T__from__enums_b\n\nand t_T =\n  | T_A__from__enums_a : t_T\n  | T_B__from__enums_a : t_T\n  | T_C__from__enums_a : Alloc.Vec.t_Vec t_U Alloc.Alloc.t_Global -> t_T\n  | T_D : Alloc.Vec.t_Vec t_T__from__enums_b Alloc.Alloc.t_Global -> t_T\n\nlet f (_: Prims.unit) : t_T__from__enums_b = T_A <: t_T__from__enums_b\n'''\n\"Cyclic_modules.Bundle_late_skip_a.fst\" = '''\nmodule Cyclic_modules.Bundle_late_skip_a\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet rec f (_: Prims.unit) : Prims.Pure Prims.unit (requires true) (fun _ -> Prims.l_True) =\n  f__from__late_skip_a ()\n\nand f__from__late_skip_a (_: Prims.unit) : Prims.unit = f ()\n'''\n\"Cyclic_modules.Bundle_m1.fst\" = '''\nmodule Cyclic_modules.Bundle_m1\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet d (_: Prims.unit) : Prims.unit = ()\n\nlet c (_: Prims.unit) : Prims.unit = ()\n\nlet a (_: Prims.unit) : Prims.unit = c ()\n\nlet b (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = a () in\n  d ()\n'''\n\"Cyclic_modules.Bundle_rec1_same_name.fst\" = '''\nmodule Cyclic_modules.Bundle_rec1_same_name\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet rec f (x: i32) : i32 =\n  if x >. mk_i32 0 then f__from__rec1_same_name (x -! 
mk_i32 1 <: i32) else mk_i32 0\n\nand f__from__rec1_same_name (x: i32) : i32 = f x\n'''\n\"Cyclic_modules.Bundle_typ_a.fst\" = '''\nmodule Cyclic_modules.Bundle_typ_a\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_T1 = | T1_T1 : t_T1\n\ntype t_T = | T_T : t_T1 -> t_T\n\nlet t_T1_cast_to_repr (x: t_T1) : isize = match x <: t_T1 with | T1_T1  -> mk_isize 0\n\ntype t_T2 = | T2_T2 : t_T -> t_T2\n\ntype t_T2Rec = | T2Rec_T2 : t_TRec -> t_T2Rec\n\nand t_T1Rec = | T1Rec_T1 : t_T2Rec -> t_T1Rec\n\nand t_TRec =\n  | TRec_T : t_T1Rec -> t_TRec\n  | TRec_Empty : t_TRec\n'''\n\"Cyclic_modules.Bundle_variant_constructor_a.fst\" = '''\nmodule Cyclic_modules.Bundle_variant_constructor_a\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Context =\n  | Context_A : i32 -> t_Context\n  | Context_B : i32 -> t_Context\n\nlet impl__test (x: Core_models.Option.t_Option i32) : Core_models.Option.t_Option t_Context =\n  Core_models.Option.impl__map #i32 #t_Context #(i32 -> t_Context) x Context_A\n\nlet h (_: Prims.unit) : t_Context = Context_A (mk_i32 1) <: t_Context\n\nlet f (_: Prims.unit) : t_Context = h ()\n'''\n\"Cyclic_modules.C.fst\" = '''\nmodule Cyclic_modules.C\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet i (_: Prims.unit) : Prims.unit = ()\n'''\n\"Cyclic_modules.D.fst\" = '''\nmodule Cyclic_modules.D\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_d {d1 as d1}\n\ninclude Cyclic_modules.Bundle_d {d2 as d2}\n'''\n\"Cyclic_modules.De.fst\" = '''\nmodule Cyclic_modules.De\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_d {de1 as de1}\n'''\n\"Cyclic_modules.Disjoint_cycle_a.fst\" = '''\nmodule Cyclic_modules.Disjoint_cycle_a\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen 
Core_models\n\ninclude Cyclic_modules.Bundle_disjoint_cycle_a {f as f}\n\ninclude Cyclic_modules.Bundle_disjoint_cycle_a {g as g}\n'''\n\"Cyclic_modules.Disjoint_cycle_b.fst\" = '''\nmodule Cyclic_modules.Disjoint_cycle_b\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_disjoint_cycle_a {h as h}\n\ninclude Cyclic_modules.Bundle_disjoint_cycle_a {i as i}\n'''\n\"Cyclic_modules.E.fst\" = '''\nmodule Cyclic_modules.E\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_d {e1 as e1}\n'''\n\"Cyclic_modules.Enums_a.fst\" = '''\nmodule Cyclic_modules.Enums_a\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_enums_a {t_T as t_T}\n\ninclude Cyclic_modules.Bundle_enums_a {T_A__from__enums_a as T_A}\n\ninclude Cyclic_modules.Bundle_enums_a {T_B__from__enums_a as T_B}\n\ninclude Cyclic_modules.Bundle_enums_a {T_C__from__enums_a as T_C}\n\ninclude Cyclic_modules.Bundle_enums_a {T_D as T_D}\n'''\n\"Cyclic_modules.Enums_b.fst\" = '''\nmodule Cyclic_modules.Enums_b\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_enums_a {t_U as t_U}\n\ninclude Cyclic_modules.Bundle_enums_a {U_A as U_A}\n\ninclude Cyclic_modules.Bundle_enums_a {U_B as U_B}\n\ninclude Cyclic_modules.Bundle_enums_a {U_C as U_C}\n\ninclude Cyclic_modules.Bundle_enums_a {t_T__from__enums_b as t_T}\n\ninclude Cyclic_modules.Bundle_enums_a {T_A as T_A}\n\ninclude Cyclic_modules.Bundle_enums_a {T_B as T_B}\n\ninclude Cyclic_modules.Bundle_enums_a {T_C as T_C}\n\ninclude Cyclic_modules.Bundle_enums_a {f as f}\n'''\n\"Cyclic_modules.Issue_1823_.First_example.A.fst\" = '''\nmodule Cyclic_modules.Issue_1823_.First_example.A\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude 
Cyclic_modules.Issue_1823_.First_example.Bundle {t_A as t_A}\n\ninclude Cyclic_modules.Issue_1823_.First_example.Bundle {A as A}\n\ninclude Cyclic_modules.Issue_1823_.First_example.Bundle {impl__mkb as impl_A__mkb}\n'''\n\"Cyclic_modules.Issue_1823_.First_example.B.fst\" = '''\nmodule Cyclic_modules.Issue_1823_.First_example.B\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Issue_1823_.First_example.Bundle {t_B as t_B}\n\ninclude Cyclic_modules.Issue_1823_.First_example.Bundle {B as B}\n\ninclude Cyclic_modules.Issue_1823_.First_example.Bundle {impl__mka as impl_B__mka}\n'''\n\"Cyclic_modules.Issue_1823_.First_example.Bundle.fst\" = '''\nmodule Cyclic_modules.Issue_1823_.First_example.Bundle\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_A = | A : t_A\n\ntype t_B = | B : t_B\n\nlet impl__mkb (self: t_A) : t_B = B <: t_B\n\nlet impl__mka (self: t_B) : t_A = A <: t_A\n'''\n\"Cyclic_modules.Issue_1823_.Second_example.A.fst\" = '''\nmodule Cyclic_modules.Issue_1823_.Second_example.A\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Issue_1823_.Second_example.Bundle {call_b as call_b}\n\ninclude Cyclic_modules.Issue_1823_.Second_example.Bundle {a as a}\n'''\n\"Cyclic_modules.Issue_1823_.Second_example.B.fst\" = '''\nmodule Cyclic_modules.Issue_1823_.Second_example.B\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Issue_1823_.Second_example.Bundle {call_a as call_a}\n\ninclude Cyclic_modules.Issue_1823_.Second_example.Bundle {b as b}\n'''\n\"Cyclic_modules.Issue_1823_.Second_example.Bundle.fst\" = '''\nmodule Cyclic_modules.Issue_1823_.Second_example.Bundle\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet a (_: Prims.unit) : Prims.unit = ()\n\nlet call_a (_: Prims.unit) : Prims.unit = a ()\n\nlet b 
(_: Prims.unit) : Prims.unit = ()\n\nlet call_b (_: Prims.unit) : Prims.unit = b ()\n'''\n\"Cyclic_modules.Late_skip_a.fst\" = '''\nmodule Cyclic_modules.Late_skip_a\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_late_skip_a {f__from__late_skip_a as f}\n'''\n\"Cyclic_modules.Late_skip_b.fst\" = '''\nmodule Cyclic_modules.Late_skip_b\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_late_skip_a {f as f}\n'''\n\"Cyclic_modules.M1.fst\" = '''\nmodule Cyclic_modules.M1\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_m1 {a as a}\n'''\n\"Cyclic_modules.M2.fst\" = '''\nmodule Cyclic_modules.M2\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_m1 {d as d}\n\ninclude Cyclic_modules.Bundle_m1 {b as b}\n\ninclude Cyclic_modules.Bundle_m1 {c as c}\n'''\n\"Cyclic_modules.Rec.fst\" = '''\nmodule Cyclic_modules.Rec\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_T =\n  | T_t1 : t_T\n  | T_t2 : t_T\n\nlet t_T_cast_to_repr (x: t_T) : isize =\n  match x <: t_T with\n  | T_t1  -> mk_isize 0\n  | T_t2  -> mk_isize 1\n\nlet rec hf (x: t_T) : t_T =\n  match x <: t_T with\n  | T_t1  -> hf (T_t2 <: t_T)\n  | T_t2  -> x\n\nlet rec g2 (x: t_T) : t_T =\n  match x <: t_T with\n  | T_t1  -> g1 x\n  | T_t2  -> hf x\n\nand g1 (x: t_T) : t_T =\n  match x <: t_T with\n  | T_t1  -> g2 x\n  | T_t2  -> T_t1 <: t_T\n'''\n\"Cyclic_modules.Rec1_same_name.fst\" = '''\nmodule Cyclic_modules.Rec1_same_name\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_rec1_same_name {f__from__rec1_same_name as f}\n'''\n\"Cyclic_modules.Rec2_same_name.fst\" = '''\nmodule Cyclic_modules.Rec2_same_name\n#set-options \"--fuel 0 --ifuel 1 
--z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_rec1_same_name {f as f}\n'''\n\"Cyclic_modules.Typ_a.fst\" = '''\nmodule Cyclic_modules.Typ_a\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_typ_a {t_TRec as t_TRec}\n\ninclude Cyclic_modules.Bundle_typ_a {TRec_T as TRec_T}\n\ninclude Cyclic_modules.Bundle_typ_a {TRec_Empty as TRec_Empty}\n\ninclude Cyclic_modules.Bundle_typ_a {t_T as t_T}\n\ninclude Cyclic_modules.Bundle_typ_a {T_T as T_T}\n'''\n\"Cyclic_modules.Typ_b.fst\" = '''\nmodule Cyclic_modules.Typ_b\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_typ_a {t_T1Rec as t_T1Rec}\n\ninclude Cyclic_modules.Bundle_typ_a {T1Rec_T1 as T1Rec_T1}\n\ninclude Cyclic_modules.Bundle_typ_a {t_T2Rec as t_T2Rec}\n\ninclude Cyclic_modules.Bundle_typ_a {T2Rec_T2 as T2Rec_T2}\n\ninclude Cyclic_modules.Bundle_typ_a {t_T1_cast_to_repr as t_T1_cast_to_repr}\n\ninclude Cyclic_modules.Bundle_typ_a {t_T1 as t_T1}\n\ninclude Cyclic_modules.Bundle_typ_a {T1_T1 as T1_T1}\n\ninclude Cyclic_modules.Bundle_typ_a {t_T2 as t_T2}\n\ninclude Cyclic_modules.Bundle_typ_a {T2_T2 as T2_T2}\n'''\n\"Cyclic_modules.Variant_constructor_a.fst\" = '''\nmodule Cyclic_modules.Variant_constructor_a\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_variant_constructor_a {t_Context as t_Context}\n\ninclude Cyclic_modules.Bundle_variant_constructor_a {Context_A as Context_A}\n\ninclude Cyclic_modules.Bundle_variant_constructor_a {Context_B as Context_B}\n\ninclude Cyclic_modules.Bundle_variant_constructor_a {f as f}\n\ninclude Cyclic_modules.Bundle_variant_constructor_a {impl__test as impl_Context__test}\n'''\n\"Cyclic_modules.Variant_constructor_b.fst\" = '''\nmodule Cyclic_modules.Variant_constructor_b\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen 
FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle_variant_constructor_a {h as h}\n'''\n\"Cyclic_modules.fst\" = '''\nmodule Cyclic_modules\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ninclude Cyclic_modules.Bundle {f as f}\n\ninclude Cyclic_modules.Bundle {h as h}\n\ninclude Cyclic_modules.Bundle {h2 as h2}\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__cyclic-modules into-lean.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: lean\n  info:\n    name: cyclic-modules\n    manifest: cyclic-modules/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"cyclic_modules.lean\" = '''\n\n-- Experimental lean backend for Hax\n-- The Hax prelude library can be found in hax/proof-libs/lean\nimport Hax\nimport Std.Tactic.Do\nimport Std.Do.Triple\nimport Std.Tactic.Do.Syntax\nopen Std.Do\nopen Std.Tactic\n\nset_option mvcgen.warning false\nset_option linter.unusedVariables false\n\n\nnamespace cyclic_modules.typ_b\n\ninductive T1 : Type\n| T1 : T1\n\nend cyclic_modules.typ_b\n\n\nnamespace cyclic_modules.typ_a\n\ninductive T : Type\n| T : cyclic_modules.typ_b.T1 -> T\n\nend cyclic_modules.typ_a\n\n\nnamespace cyclic_modules.typ_b\n\n@[spec]\ndef T1_cast_to_repr (x : T1) : RustM isize := do\n  match x with | (T1.T1 ) => do (pure (0 : isize))\n\ninductive T2 : Type\n| T2 : cyclic_modules.typ_a.T -> T2\n\nend cyclic_modules.typ_b\n\n\nnamespace cyclic_modules\n\n@[spec]\ndef f (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules\n\n\nnamespace cyclic_modules.b\n\n@[spec]\ndef g (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (cyclic_modules.f rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.b\n\n\nnamespace cyclic_modules.c\n\n@[spec]\ndef i (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.c\n\n\nnamespace cyclic_modules\n\n@[spec]\ndef h (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  let _ ← (cyclic_modules.b.g 
rust_primitives.hax.Tuple0.mk);\n  (cyclic_modules.c.i rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef h2 (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (cyclic_modules.c.i rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules\n\n\nnamespace cyclic_modules.d\n\n@[spec]\ndef d1 (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.d\n\n\nnamespace cyclic_modules.e\n\n@[spec]\ndef e1 (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (cyclic_modules.d.d1 rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.e\n\n\nnamespace cyclic_modules.de\n\n@[spec]\ndef de1 (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  (cyclic_modules.e.e1 rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.de\n\n\nnamespace cyclic_modules.d\n\n@[spec]\ndef d2 (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (cyclic_modules.de.de1 rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.d\n\n\nnamespace cyclic_modules.rec\n\ninductive T : Type\n| t1 : T\n| t2 : T\n\n@[spec]\ndef T_cast_to_repr (x : T) : RustM isize := do\n  match x with\n    | (T.t1 ) => do (pure (0 : isize))\n    | (T.t2 ) => do (pure (1 : isize))\n\nend cyclic_modules.rec\n\n\nnamespace cyclic_modules.m2\n\n@[spec]\ndef d (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef c (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.m2\n\n\nnamespace cyclic_modules.m1\n\n@[spec]\ndef a (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (cyclic_modules.m2.c rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.m1\n\n\nnamespace cyclic_modules.m2\n\n@[spec]\ndef b (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  let _ ← 
(cyclic_modules.m1.a rust_primitives.hax.Tuple0.mk);\n  (d rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.m2\n\n\nnamespace cyclic_modules.disjoint_cycle_a\n\n@[spec]\ndef g (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.disjoint_cycle_a\n\n\nnamespace cyclic_modules.disjoint_cycle_b\n\n@[spec]\ndef h (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.disjoint_cycle_b\n\n\nnamespace cyclic_modules.disjoint_cycle_a\n\n@[spec]\ndef f (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (cyclic_modules.disjoint_cycle_b.h rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.disjoint_cycle_a\n\n\nnamespace cyclic_modules.disjoint_cycle_b\n\n@[spec]\ndef i (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (cyclic_modules.disjoint_cycle_a.g rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.disjoint_cycle_b\n\n\nnamespace cyclic_modules.variant_constructor_a\n\ninductive Context : Type\n| A : i32 -> Context\n| B : i32 -> Context\n\n@[spec]\ndef Impl.test (x : (core_models.option.Option i32)) :\n    RustM (core_models.option.Option Context) := do\n  (core_models.option.Impl.map i32 Context (i32 -> RustM Context) x Context.A)\n\nend cyclic_modules.variant_constructor_a\n\n\nnamespace cyclic_modules.variant_constructor_b\n\n@[spec]\ndef h (_ : rust_primitives.hax.Tuple0) :\n    RustM cyclic_modules.variant_constructor_a.Context := do\n  (pure (cyclic_modules.variant_constructor_a.Context.A (1 : i32)))\n\nend cyclic_modules.variant_constructor_b\n\n\nnamespace cyclic_modules.variant_constructor_a\n\n@[spec]\ndef f (_ : rust_primitives.hax.Tuple0) : RustM Context := do\n  (cyclic_modules.variant_constructor_b.h rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.variant_constructor_a\n\n\nnamespace 
cyclic_modules.issue_1823.first_example.a\n\nstructure A where\n  -- no fields\n\nend cyclic_modules.issue_1823.first_example.a\n\n\nnamespace cyclic_modules.issue_1823.first_example.b\n\nstructure B where\n  -- no fields\n\nend cyclic_modules.issue_1823.first_example.b\n\n\nnamespace cyclic_modules.issue_1823.first_example.a\n\n@[spec]\ndef Impl.mkb (self : A) :\n    RustM cyclic_modules.issue_1823.first_example.b.B := do\n  (pure cyclic_modules.issue_1823.first_example.b.B.mk)\n\nend cyclic_modules.issue_1823.first_example.a\n\n\nnamespace cyclic_modules.issue_1823.first_example.b\n\n@[spec]\ndef Impl.mka (self : B) :\n    RustM cyclic_modules.issue_1823.first_example.a.A := do\n  (pure cyclic_modules.issue_1823.first_example.a.A.mk)\n\nend cyclic_modules.issue_1823.first_example.b\n\n\nnamespace cyclic_modules.issue_1823.second_example.a\n\n@[spec]\ndef a (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.issue_1823.second_example.a\n\n\nnamespace cyclic_modules.issue_1823.second_example.b\n\n@[spec]\ndef call_a (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  (cyclic_modules.issue_1823.second_example.a.a rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef b (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.issue_1823.second_example.b\n\n\nnamespace cyclic_modules.issue_1823.second_example.a\n\n@[spec]\ndef call_b (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  (cyclic_modules.issue_1823.second_example.b.b rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.issue_1823.second_example.a\n\n\nnamespace cyclic_modules.typ_b\n\ninductive T2Rec : Type\n| T2 : cyclic_modules.typ_a.TRec -> T2Rec\n\ninductive T1Rec : Type\n| T1 : T2Rec -> T1Rec\n\nend cyclic_modules.typ_b\n\n\nnamespace cyclic_modules.typ_a\n\ninductive TRec : Type\n| T : 
cyclic_modules.typ_b.T1Rec -> TRec\n| Empty : TRec\n\nend cyclic_modules.typ_a\n\n\nnamespace cyclic_modules.rec\n\n@[spec]\ndef hf (x : T) : RustM T := do\n  match x with | (T.t1 ) => do (hf T.t2) | (T.t2 ) => do (pure x)\npartial_fixpoint\n\nend cyclic_modules.rec\n\n\nnamespace cyclic_modules.rec2_same_name\n\n@[spec]\ndef f (x : i32) : RustM i32 := do\n  if (← (x >? (0 : i32))) then do\n    (cyclic_modules.rec1_same_name.f (← (x -? (1 : i32))))\n  else do\n    (pure (0 : i32))\n\nend cyclic_modules.rec2_same_name\n\n\nnamespace cyclic_modules.rec1_same_name\n\n@[spec]\ndef f (x : i32) : RustM i32 := do (cyclic_modules.rec2_same_name.f x)\n\nend cyclic_modules.rec1_same_name\n\n\nnamespace cyclic_modules.late_skip_b\n\ndef f (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (cyclic_modules.late_skip_a.f rust_primitives.hax.Tuple0.mk)\n\nset_option hax_mvcgen.specset \"bv\" in\n@[hax_spec]\ndef f.spec (_ : rust_primitives.hax.Tuple0) :\n    Spec (requires := do (pure true)) (ensures := fun _ => pure True) (f ⟨⟩) :=\n{\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [f] <;> bv_decide\n}\n\nend cyclic_modules.late_skip_b\n\n\nnamespace cyclic_modules.late_skip_a\n\n@[spec]\ndef f (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (cyclic_modules.late_skip_b.f rust_primitives.hax.Tuple0.mk)\n\nend cyclic_modules.late_skip_a\n\n\nnamespace cyclic_modules.enums_b\n\ninductive U : Type\n| A : U\n| B : U\n| C : (alloc.vec.Vec cyclic_modules.enums_a.T alloc.alloc.Global) -> U\n\ninductive T : Type\n| A : T\n| B : T\n| C : (alloc.vec.Vec cyclic_modules.enums_a.T alloc.alloc.Global) -> T\n\nend cyclic_modules.enums_b\n\n\nnamespace cyclic_modules.enums_a\n\ninductive T : Type\n| A : T\n| B : T\n| C : (alloc.vec.Vec cyclic_modules.enums_b.U alloc.alloc.Global) -> T\n| D : (alloc.vec.Vec cyclic_modules.enums_b.T alloc.alloc.Global) -> 
T\n\nend cyclic_modules.enums_a\n\n\nnamespace cyclic_modules.enums_b\n\n@[spec]\ndef f (_ : rust_primitives.hax.Tuple0) : RustM T := do (pure T.A)\n\nend cyclic_modules.enums_b\n\n\nnamespace cyclic_modules.rec\n\n@[spec]\ndef g2 (x : T) : RustM T := do\n  match x with | (T.t1 ) => do (g1 x) | (T.t2 ) => do (hf x)\n\n@[spec]\ndef g1 (x : T) : RustM T := do\n  match x with | (T.t1 ) => do (g2 x) | (T.t2 ) => do (pure T.t1)\n\nend cyclic_modules.rec\n\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__dyn into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: dyn\n    manifest: dyn/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Dyn.fst\" = '''\nmodule Dyn\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_Printable (v_Self: Type0) (v_S: Type0) = {\n  f_stringify_pre:v_Self -> Type0;\n  f_stringify_post:v_Self -> v_S -> Type0;\n  f_stringify:x0: v_Self\n    -> Prims.Pure v_S (f_stringify_pre x0) (fun result -> f_stringify_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: t_Printable i32 Alloc.String.t_String =\n  {\n    f_stringify_pre = (fun (self: i32) -> true);\n    f_stringify_post = (fun (self: i32) (out: Alloc.String.t_String) -> true);\n    f_stringify\n    =\n    fun (self: i32) -> Alloc.String.f_to_string #i32 #FStar.Tactics.Typeclasses.solve self\n  }\n\nlet print (a: dyn 1 (fun z -> t_Printable z Alloc.String.t_String)) : Prims.unit =\n  let args:Alloc.String.t_String =\n    f_stringify #(dyn 1 (fun z -> t_Printable z Alloc.String.t_String))\n      #Alloc.String.t_String\n      #FStar.Tactics.Typeclasses.solve\n      a\n    <:\n    Alloc.String.t_String\n  in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 1) =\n    let list = [Core_models.Fmt.Rt.impl__new_display #Alloc.String.t_String args] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n    Rust_primitives.Hax.array_of_list 1 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 2)\n          (mk_usize 1)\n          (let list = [\"\"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length 
list) 2);\n            Rust_primitives.Hax.array_of_list 2 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  let _:Prims.unit = () in\n  ()\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__enum-repr into-coq.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: coq\n  info:\n    name: enum-repr\n    manifest: enum-repr/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Enum_repr.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n(* NotImplementedYet *)\n\n\n\nInductive t_EnumWithRepr : Type :=\n| EnumWithRepr_ExplicitDiscr1\n| EnumWithRepr_ExplicitDiscr2\n| EnumWithRepr_ImplicitDiscrEmptyTuple\n| EnumWithRepr_ImplicitDiscrEmptyStruct.\n\n\n\n\n\nDefinition anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0 : t_u16 :=\n  (1 : t_u16).\n\nDefinition anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 : t_u16 :=\n  (5 : t_u16).\n\nDefinition t_EnumWithRepr_cast_to_repr (x : t_EnumWithRepr) : t_u16 :=\n  match x with\n  | EnumWithRepr_ExplicitDiscr1 =>\n    anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0\n  | EnumWithRepr_ExplicitDiscr2 =>\n    anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0\n  | EnumWithRepr_ImplicitDiscrEmptyTuple =>\n    f_add (anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0) ((1 : t_u16))\n  | EnumWithRepr_ImplicitDiscrEmptyStruct =>\n    f_add (anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0) ((2 : t_u16))\n  end.\n\nInductive t_ImplicitReprs : Type :=\n| ImplicitReprs_A\n| ImplicitReprs_B\n| ImplicitReprs_C\n| ImplicitReprs_D\n| 
ImplicitReprs_E\n| ImplicitReprs_F\n| ImplicitReprs_G\n| ImplicitReprs_H\n| ImplicitReprs_I.\n\n\n\n\n\n\n\n\n\n\nDefinition anon_const_ImplicitReprs_E__anon_const_0 : t_u64 :=\n  (30 : t_u64).\n\nDefinition t_ImplicitReprs_cast_to_repr (x : t_ImplicitReprs) : t_u64 :=\n  match x with\n  | ImplicitReprs_A =>\n    (0 : t_u64)\n  | ImplicitReprs_B =>\n    (1 : t_u64)\n  | ImplicitReprs_C =>\n    (2 : t_u64)\n  | ImplicitReprs_D =>\n    (3 : t_u64)\n  | ImplicitReprs_E =>\n    anon_const_ImplicitReprs_E__anon_const_0\n  | ImplicitReprs_F =>\n    f_add (anon_const_ImplicitReprs_E__anon_const_0) ((1 : t_u64))\n  | ImplicitReprs_G =>\n    f_add (anon_const_ImplicitReprs_E__anon_const_0) ((2 : t_u64))\n  | ImplicitReprs_H =>\n    f_add (anon_const_ImplicitReprs_E__anon_const_0) ((3 : t_u64))\n  | ImplicitReprs_I =>\n    f_add (anon_const_ImplicitReprs_E__anon_const_0) ((4 : t_u64))\n  end.\n\nDefinition f '(_ : unit) : t_u32 :=\n  let e_x := cast (f_add (anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0) ((0 : t_u16))) in\n  f_add (cast (t_EnumWithRepr_cast_to_repr (EnumWithRepr_ImplicitDiscrEmptyTuple))) (cast (t_EnumWithRepr_cast_to_repr (EnumWithRepr_ImplicitDiscrEmptyStruct))).\n\nDefinition f__v_CONST : t_u16 :=\n  cast (f_add (anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0) ((0 : t_u16))).\n\nDefinition get_repr (x : t_EnumWithRepr) : t_u16 :=\n  t_EnumWithRepr_cast_to_repr (x).\n\nDefinition get_casted_repr (x : t_EnumWithRepr) : t_u64 :=\n  cast (t_EnumWithRepr_cast_to_repr (x)).\n'''\n_CoqProject = '''\n-R ./ TODO\n-arg -w\n-arg all\n\nEnum_repr.v'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__enum-repr into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: enum-repr\n    manifest: enum-repr/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Enum_repr.fst\" = '''\nmodule Enum_repr\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_EnumWithRepr =\n  | EnumWithRepr_ExplicitDiscr1 : t_EnumWithRepr\n  | EnumWithRepr_ExplicitDiscr2 : t_EnumWithRepr\n  | EnumWithRepr_ImplicitDiscrEmptyTuple : t_EnumWithRepr\n  | EnumWithRepr_ImplicitDiscrEmptyStruct : t_EnumWithRepr\n\nlet anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0: u16 = mk_u16 1\n\nlet anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0: u16 = mk_u16 5\n\nlet t_EnumWithRepr_cast_to_repr (x: t_EnumWithRepr) : u16 =\n  match x <: t_EnumWithRepr with\n  | EnumWithRepr_ExplicitDiscr1  -> anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0\n  | EnumWithRepr_ExplicitDiscr2  -> anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0\n  | EnumWithRepr_ImplicitDiscrEmptyTuple  ->\n    anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 +! mk_u16 1\n  | EnumWithRepr_ImplicitDiscrEmptyStruct  ->\n    anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 +! 
mk_u16 2\n\ntype t_ImplicitReprs =\n  | ImplicitReprs_A : t_ImplicitReprs\n  | ImplicitReprs_B : t_ImplicitReprs\n  | ImplicitReprs_C : t_ImplicitReprs\n  | ImplicitReprs_D : t_ImplicitReprs\n  | ImplicitReprs_E : t_ImplicitReprs\n  | ImplicitReprs_F : t_ImplicitReprs\n  | ImplicitReprs_G : t_ImplicitReprs\n  | ImplicitReprs_H : t_ImplicitReprs\n  | ImplicitReprs_I : t_ImplicitReprs\n\nlet anon_const_ImplicitReprs_E__anon_const_0: u64 = mk_u64 30\n\nlet t_ImplicitReprs_cast_to_repr (x: t_ImplicitReprs) : u64 =\n  match x <: t_ImplicitReprs with\n  | ImplicitReprs_A  -> mk_u64 0\n  | ImplicitReprs_B  -> mk_u64 1\n  | ImplicitReprs_C  -> mk_u64 2\n  | ImplicitReprs_D  -> mk_u64 3\n  | ImplicitReprs_E  -> anon_const_ImplicitReprs_E__anon_const_0\n  | ImplicitReprs_F  -> anon_const_ImplicitReprs_E__anon_const_0 +! mk_u64 1\n  | ImplicitReprs_G  -> anon_const_ImplicitReprs_E__anon_const_0 +! mk_u64 2\n  | ImplicitReprs_H  -> anon_const_ImplicitReprs_E__anon_const_0 +! mk_u64 3\n  | ImplicitReprs_I  -> anon_const_ImplicitReprs_E__anon_const_0 +! mk_u64 4\n\nlet f (_: Prims.unit) : u32 =\n  let e_x:u16 =\n    cast (anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 +! mk_u16 0 <: u16) <: u16\n  in\n  (cast (t_EnumWithRepr_cast_to_repr (EnumWithRepr_ImplicitDiscrEmptyTuple <: t_EnumWithRepr) <: u16\n      )\n    <:\n    u32) +!\n  (cast (t_EnumWithRepr_cast_to_repr (EnumWithRepr_ImplicitDiscrEmptyStruct <: t_EnumWithRepr)\n        <:\n        u16)\n    <:\n    u32)\n\nlet f__v_CONST: u16 =\n  cast (anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0 +! mk_u16 0 <: u16) <: u16\n\nlet get_repr (x: t_EnumWithRepr) : u16 = t_EnumWithRepr_cast_to_repr x\n\nlet get_casted_repr (x: t_EnumWithRepr) : u64 = cast (t_EnumWithRepr_cast_to_repr x <: u16) <: u64\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__enum-repr into-ssprove.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: ssprove\n  info:\n    name: enum-repr\n    manifest: enum-repr/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Enum_repr.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp Require Import word_ssrZ word.\n(* From Jasmin Require Import word. *)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? 
todo(item)*)\n\nDefinition t_EnumWithRepr : choice_type :=\n  ('unit ∐ 'unit ∐ 'unit ∐ 'unit).\nNotation \"'EnumWithRepr_ExplicitDiscr1_case'\" := (inl (inl (inl tt))) (at level 100).\nEquations EnumWithRepr_ExplicitDiscr1 : both t_EnumWithRepr :=\n  EnumWithRepr_ExplicitDiscr1  :=\n    ret_both (inl (inl (inl (tt : 'unit))) : t_EnumWithRepr) : both t_EnumWithRepr.\nFail Next Obligation.\nNotation \"'EnumWithRepr_ExplicitDiscr2_case'\" := (inl (inl (inr tt))) (at level 100).\nEquations EnumWithRepr_ExplicitDiscr2 : both t_EnumWithRepr :=\n  EnumWithRepr_ExplicitDiscr2  :=\n    ret_both (inl (inl (inr (tt : 'unit))) : t_EnumWithRepr) : both t_EnumWithRepr.\nFail Next Obligation.\nNotation \"'EnumWithRepr_ImplicitDiscrEmptyTuple_case'\" := (inl (inr tt)) (at level 100).\nEquations EnumWithRepr_ImplicitDiscrEmptyTuple : both t_EnumWithRepr :=\n  EnumWithRepr_ImplicitDiscrEmptyTuple  :=\n    ret_both (inl (inr (tt : 'unit)) : t_EnumWithRepr) : both t_EnumWithRepr.\nFail Next Obligation.\nNotation \"'EnumWithRepr_ImplicitDiscrEmptyStruct_case'\" := (inr tt) (at level 100).\nEquations EnumWithRepr_ImplicitDiscrEmptyStruct : both t_EnumWithRepr :=\n  EnumWithRepr_ImplicitDiscrEmptyStruct  :=\n    ret_both (inr (tt : 'unit) : t_EnumWithRepr) : both t_EnumWithRepr.\nFail Next Obligation.\n\nEquations anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0 : both int16 :=\n  anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0  :=\n    ret_both (1 : int16) : both int16.\nFail Next Obligation.\n\nEquations anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 : both int16 :=\n  anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0  :=\n    ret_both (5 : int16) : both int16.\nFail Next Obligation.\n\nEquations t_EnumWithRepr_cast_to_repr (x : both t_EnumWithRepr) : both int16 :=\n  t_EnumWithRepr_cast_to_repr x  :=\n    matchb x with\n    | EnumWithRepr_ExplicitDiscr1_case  =>\n      anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0\n    | EnumWithRepr_ExplicitDiscr2_case  =>\n  
    anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0\n    | EnumWithRepr_ImplicitDiscrEmptyTuple_case  =>\n      anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 .+ (ret_both (1 : int16))\n    | EnumWithRepr_ImplicitDiscrEmptyStruct_case  =>\n      anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 .+ (ret_both (2 : int16))\n    end : both int16.\nFail Next Obligation.\n\nDefinition t_ImplicitReprs : choice_type :=\n  ('unit ∐ 'unit ∐ 'unit ∐ 'unit ∐ 'unit ∐ 'unit ∐ 'unit ∐ 'unit ∐ 'unit).\nNotation \"'ImplicitReprs_A_case'\" := (inl (inl (inl (inl (inl (inl (inl (inl tt)))))))) (at level 100).\nEquations ImplicitReprs_A : both t_ImplicitReprs :=\n  ImplicitReprs_A  :=\n    ret_both (inl (inl (inl (inl (inl (inl (inl (inl (tt : 'unit)))))))) : t_ImplicitReprs) : both t_ImplicitReprs.\nFail Next Obligation.\nNotation \"'ImplicitReprs_B_case'\" := (inl (inl (inl (inl (inl (inl (inl (inr tt)))))))) (at level 100).\nEquations ImplicitReprs_B : both t_ImplicitReprs :=\n  ImplicitReprs_B  :=\n    ret_both (inl (inl (inl (inl (inl (inl (inl (inr (tt : 'unit)))))))) : t_ImplicitReprs) : both t_ImplicitReprs.\nFail Next Obligation.\nNotation \"'ImplicitReprs_C_case'\" := (inl (inl (inl (inl (inl (inl (inr tt))))))) (at level 100).\nEquations ImplicitReprs_C : both t_ImplicitReprs :=\n  ImplicitReprs_C  :=\n    ret_both (inl (inl (inl (inl (inl (inl (inr (tt : 'unit))))))) : t_ImplicitReprs) : both t_ImplicitReprs.\nFail Next Obligation.\nNotation \"'ImplicitReprs_D_case'\" := (inl (inl (inl (inl (inl (inr tt)))))) (at level 100).\nEquations ImplicitReprs_D : both t_ImplicitReprs :=\n  ImplicitReprs_D  :=\n    ret_both (inl (inl (inl (inl (inl (inr (tt : 'unit)))))) : t_ImplicitReprs) : both t_ImplicitReprs.\nFail Next Obligation.\nNotation \"'ImplicitReprs_E_case'\" := (inl (inl (inl (inl (inr tt))))) (at level 100).\nEquations ImplicitReprs_E : both t_ImplicitReprs :=\n  ImplicitReprs_E  :=\n    ret_both (inl (inl (inl (inl (inr (tt : 'unit))))) : 
t_ImplicitReprs) : both t_ImplicitReprs.\nFail Next Obligation.\nNotation \"'ImplicitReprs_F_case'\" := (inl (inl (inl (inr tt)))) (at level 100).\nEquations ImplicitReprs_F : both t_ImplicitReprs :=\n  ImplicitReprs_F  :=\n    ret_both (inl (inl (inl (inr (tt : 'unit)))) : t_ImplicitReprs) : both t_ImplicitReprs.\nFail Next Obligation.\nNotation \"'ImplicitReprs_G_case'\" := (inl (inl (inr tt))) (at level 100).\nEquations ImplicitReprs_G : both t_ImplicitReprs :=\n  ImplicitReprs_G  :=\n    ret_both (inl (inl (inr (tt : 'unit))) : t_ImplicitReprs) : both t_ImplicitReprs.\nFail Next Obligation.\nNotation \"'ImplicitReprs_H_case'\" := (inl (inr tt)) (at level 100).\nEquations ImplicitReprs_H : both t_ImplicitReprs :=\n  ImplicitReprs_H  :=\n    ret_both (inl (inr (tt : 'unit)) : t_ImplicitReprs) : both t_ImplicitReprs.\nFail Next Obligation.\nNotation \"'ImplicitReprs_I_case'\" := (inr tt) (at level 100).\nEquations ImplicitReprs_I : both t_ImplicitReprs :=\n  ImplicitReprs_I  :=\n    ret_both (inr (tt : 'unit) : t_ImplicitReprs) : both t_ImplicitReprs.\nFail Next Obligation.\n\nEquations anon_const_ImplicitReprs_E__anon_const_0 : both int64 :=\n  anon_const_ImplicitReprs_E__anon_const_0  :=\n    ret_both (30 : int64) : both int64.\nFail Next Obligation.\n\nEquations t_ImplicitReprs_cast_to_repr (x : both t_ImplicitReprs) : both int64 :=\n  t_ImplicitReprs_cast_to_repr x  :=\n    matchb x with\n    | ImplicitReprs_A_case  =>\n      ret_both (0 : int64)\n    | ImplicitReprs_B_case  =>\n      ret_both (1 : int64)\n    | ImplicitReprs_C_case  =>\n      ret_both (2 : int64)\n    | ImplicitReprs_D_case  =>\n      ret_both (3 : int64)\n    | ImplicitReprs_E_case  =>\n      anon_const_ImplicitReprs_E__anon_const_0\n    | ImplicitReprs_F_case  =>\n      anon_const_ImplicitReprs_E__anon_const_0 .+ (ret_both (1 : int64))\n    | ImplicitReprs_G_case  =>\n      anon_const_ImplicitReprs_E__anon_const_0 .+ (ret_both (2 : int64))\n    | ImplicitReprs_H_case  =>\n      
anon_const_ImplicitReprs_E__anon_const_0 .+ (ret_both (3 : int64))\n    | ImplicitReprs_I_case  =>\n      anon_const_ImplicitReprs_E__anon_const_0 .+ (ret_both (4 : int64))\n    end : both int64.\nFail Next Obligation.\n\nEquations f (_ : both 'unit) : both int32 :=\n  f _  :=\n    letb e_x := cast_int (WS2 := _) (anon_const_EnumWithRepr_ExplicitDiscr2__anon_const_0 .+ (ret_both (0 : int16))) in\n    (cast_int (WS2 := _) (t_EnumWithRepr_cast_to_repr EnumWithRepr_ImplicitDiscrEmptyTuple)) .+ (cast_int (WS2 := _) (t_EnumWithRepr_cast_to_repr EnumWithRepr_ImplicitDiscrEmptyStruct)) : both int32.\nFail Next Obligation.\n\nEquations f__v_CONST : both int16 :=\n  f__v_CONST  :=\n    cast_int (WS2 := _) (anon_const_EnumWithRepr_ExplicitDiscr1__anon_const_0 .+ (ret_both (0 : int16))) : both int16.\nFail Next Obligation.\n\nEquations get_repr (x : both t_EnumWithRepr) : both int16 :=\n  get_repr x  :=\n    t_EnumWithRepr_cast_to_repr x : both int16.\nFail Next Obligation.\n\nEquations get_casted_repr (x : both t_EnumWithRepr) : both int64 :=\n  get_casted_repr x  :=\n    cast_int (WS2 := _) (t_EnumWithRepr_cast_to_repr x) : both int64.\nFail Next Obligation.\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__functions into-coq.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: coq\n  info:\n    name: functions\n    manifest: functions/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 1\n[[stdout.diagnostics]]\nmessage = '''\n(Coq backend) something is not implemented yet.\n[ty] node typ'''\nspans = ['Span { lo: Loc { line: 11, col: 4 }, hi: Loc { line: 17, col: 5 }, filename: Real(LocalPath(\"functions/src/lib.rs\")), rust_span_data: None }']\n\n[stdout.files]\n\"Functions.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Hacspec Require Import Hacspec_Lib MachineIntegers.\nFrom Coq Require Import ZArith.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\n(*Not implemented yet? todo(item)*)\n\n(*Not implemented yet? todo(item)*)\n\nDefinition calling_function_pointer__f (_ : unit) : unit :=\n  tt.\n\nDefinition calling_function_pointer (_ : unit) : unit :=\n  let f_ptr := calling_function_pointer__f : unit -> unit in\n  let _ := calling_function_pointer__f tt : unit in\n  tt.\n'''\n\"Functions_Issue_1048_.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Hacspec Require Import Hacspec_Lib MachineIntegers.\nFrom Coq Require Import ZArith.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nRecord t_CallableViaDeref : Type := {\n}.\n\n(*item error backend*)\n\nDefinition call_via_deref (_ : unit) : bool :=\n  f_deref CallableViaDereft_CallableViaDeref_t tt.\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__functions into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: functions\n    manifest: functions/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Functions.Issue_1048_.fst\" = '''\nmodule Functions.Issue_1048_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_CallableViaDeref = | CallableViaDeref : t_CallableViaDeref\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: Core_models.Ops.Deref.t_Deref t_CallableViaDeref =\n  {\n    f_Target = Prims.unit -> bool;\n    f_deref_pre = (fun (self: t_CallableViaDeref) -> true);\n    f_deref_post = (fun (self: t_CallableViaDeref) (out: (Prims.unit -> bool)) -> true);\n    f_deref\n    =\n    fun (self: t_CallableViaDeref) ->\n      fun temp_0_ ->\n        let _:Prims.unit = temp_0_ in\n        true\n  }\n\nlet call_via_deref (_: Prims.unit) : bool =\n  Core_models.Ops.Deref.f_deref #t_CallableViaDeref\n    #FStar.Tactics.Typeclasses.solve\n    (CallableViaDeref <: t_CallableViaDeref)\n    ()\n'''\n\"Functions.fst\" = '''\nmodule Functions\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet calling_function_pointer__f (#v_T: Type0) (_: Prims.unit) : Prims.unit = ()\n\n/// Issue #757\nlet calling_function_pointer (_: Prims.unit) : Prims.unit =\n  let ff_ptr: Prims.unit -> Prims.unit = calling_function_pointer__f in\n  let _:Prims.unit = calling_function_pointer__f #i32 () in\n  ()\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__generics into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: generics\n    manifest: generics/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Generics.Assoc_const_param.fst\" = '''\nmodule Generics.Assoc_const_param\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Test (v_N: usize) = | Test : t_Test v_N\n\nlet impl__A (v_N: usize) : t_Test v_N = Test <: t_Test v_N\n\nlet test (_: Prims.unit) : t_Test (mk_usize 1) = impl__A (mk_usize 1)\n'''\n\"Generics.Defaults_generics.fst\" = '''\nmodule Generics.Defaults_generics\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Defaults (v_T: Type0) (v_N: usize) = | Defaults : t_Array v_T v_N -> t_Defaults v_T v_N\n\nlet f (_: t_Defaults Prims.unit (mk_usize 2)) : Prims.unit = ()\n'''\n\"Generics.Impl_generics.fst\" = '''\nmodule Generics.Impl_generics\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Test = | Test : t_Test\n\nlet impl_Test__set_ciphersuites\n      (#v_S #iimpl_995885649_: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_AsRef v_S string)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Iter.Traits.Collect.t_IntoIterator iimpl_995885649_)\n      (#_: unit{i1.Core_models.Iter.Traits.Collect.f_Item == v_S})\n      (self: t_Test)\n      (ciphers: iimpl_995885649_)\n    : Core_models.Result.t_Result Prims.unit Prims.unit =\n  Core_models.Result.Result_Ok (() <: Prims.unit)\n  <:\n  Core_models.Result.t_Result 
Prims.unit Prims.unit\n\nlet impl_Test__set_alpn_protocols\n      (#v_S #iimpl_995885649_: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Convert.t_AsRef v_S string)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Iter.Traits.Collect.t_IntoIterator iimpl_995885649_)\n      (#_: unit{i1.Core_models.Iter.Traits.Collect.f_Item == v_S})\n      (self: t_Test)\n      (e_protocols: iimpl_995885649_)\n    : Core_models.Result.t_Result Prims.unit Prims.unit =\n  Core_models.Result.Result_Ok (() <: Prims.unit)\n  <:\n  Core_models.Result.t_Result Prims.unit Prims.unit\n'''\n\"Generics.fst\" = '''\nmodule Generics\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet dup\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Clone.t_Clone v_T)\n      (x: v_T)\n    : (v_T & v_T) =\n  Core_models.Clone.f_clone #v_T #FStar.Tactics.Typeclasses.solve x,\n  Core_models.Clone.f_clone #v_T #FStar.Tactics.Typeclasses.solve x\n  <:\n  (v_T & v_T)\n\nlet foo (v_LEN: usize) (arr: t_Array usize v_LEN) : usize =\n  let acc:usize = v_LEN +! mk_usize 9 in\n  let acc:usize =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      v_LEN\n      (fun acc temp_1_ ->\n          let acc:usize = acc in\n          let _:usize = temp_1_ in\n          true)\n      acc\n      (fun acc i ->\n          let acc:usize = acc in\n          let i:usize = i in\n          acc +! (arr.[ i ] <: usize) <: usize)\n  in\n  acc\n\nlet repeat\n      (v_LEN: usize)\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Marker.t_Copy v_T)\n      (x: v_T)\n    : t_Array v_T v_LEN = Rust_primitives.Hax.repeat x v_LEN\n\nlet f (v_N x: usize) : usize = (v_N +! v_N <: usize) +! x\n\nlet call_f (_: Prims.unit) : usize = (f (mk_usize 10) (mk_usize 3) <: usize) +! 
mk_usize 3\n\nlet g\n      (v_N: usize)\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Core_models.Convert.t_Into v_T (t_Array usize v_N))\n      (arr: v_T)\n    : usize =\n  (Core_models.Option.impl__unwrap_or #usize\n      (Core_models.Iter.Traits.Iterator.f_max #(Core_models.Array.Iter.t_IntoIter usize v_N)\n          #FStar.Tactics.Typeclasses.solve\n          (Core_models.Iter.Traits.Collect.f_into_iter #(t_Array usize v_N)\n              #FStar.Tactics.Typeclasses.solve\n              (Core_models.Convert.f_into #v_T\n                  #(t_Array usize v_N)\n                  #FStar.Tactics.Typeclasses.solve\n                  arr\n                <:\n                t_Array usize v_N)\n            <:\n            Core_models.Array.Iter.t_IntoIter usize v_N)\n        <:\n        Core_models.Option.t_Option usize)\n      v_N\n    <:\n    usize) +!\n  v_N\n\nlet call_g (_: Prims.unit) : usize =\n  (g (mk_usize 3)\n      #(t_Array usize (mk_usize 3))\n      (let list = [mk_usize 42; mk_usize 3; mk_usize 49] in\n        FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3);\n        Rust_primitives.Hax.array_of_list 3 list)\n    <:\n    usize) +!\n  mk_usize 3\n\nclass t_Foo (v_Self: Type0) = {\n  f_const_add_pre:v_N: usize -> v_Self -> Type0;\n  f_const_add_post:v_N: usize -> v_Self -> usize -> Type0;\n  f_const_add:v_N: usize -> x0: v_Self\n    -> Prims.Pure usize (f_const_add_pre v_N x0) (fun result -> f_const_add_post v_N x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: t_Foo usize =\n  {\n    f_const_add_pre = (fun (v_N: usize) (self: usize) -> true);\n    f_const_add_post = (fun (v_N: usize) (self: usize) (out: usize) -> true);\n    f_const_add = fun (v_N: usize) (self: usize) -> self +! v_N\n  }\n\ntype t_Bar = | Bar : t_Bar\n\nlet impl_Bar__inherent_impl_generics (#v_T: Type0) (v_N: usize) (x: t_Array v_T v_N) : Prims.unit =\n  ()\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__guards into-coq.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: coq\n  info:\n    name: guards\n    manifest: guards/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Guards.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n(* NotImplementedYet *)\n\n\n\nDefinition if_let_guard (x : t_Option ((t_Result ((t_i32)) ((t_i32))))) : t_i32 :=\n  match x with\n  | Option_None =>\n    (0 : t_i32)\n  | _ =>\n    match match x with\n    | Option_Some (v) =>\n      match v with\n      | Result_Ok (y) =>\n        Option_Some (y)\n      | _ =>\n        Option_None\n      end\n    | _ =>\n      Option_None\n    end with\n    | Option_Some (x) =>\n      x\n    | Option_None =>\n      match x with\n      | Option_Some (Result_Err (y)) =>\n        y\n      | _ =>\n        (1 : t_i32)\n      end\n    end\n  end.\n\nDefinition equivalent (x : t_Option ((t_Result ((t_i32)) ((t_i32))))) : t_i32 :=\n  match x with\n  | Option_None =>\n    (0 : t_i32)\n  | _ =>\n    match match x with\n    | Option_Some (v) =>\n      match v with\n      | Result_Ok (y) =>\n        Option_Some (y)\n      | _ =>\n        Option_None\n      end\n    | _ =>\n      Option_None\n    end with\n    | Option_Some (y) =>\n      y\n    | Option_None =>\n      match x with\n      | Option_Some (Result_Err (y)) =>\n        y\n      | _ =>\n        (1 : t_i32)\n      end\n    end\n  end.\n\nDefinition 
multiple_guards (x : t_Option ((t_Result ((t_i32)) ((t_i32))))) : t_i32 :=\n  match x with\n  | Option_None =>\n    (0 : t_i32)\n  | _ =>\n    match match x with\n    | Option_Some (Result_Ok (v)) =>\n      match Option_Some (f_add (v) ((1 : t_i32))) with\n      | Option_Some (1) =>\n        Option_Some ((0 : t_i32))\n      | _ =>\n        Option_None\n      end\n    | _ =>\n      Option_None\n    end with\n    | Option_Some (x) =>\n      x\n    | Option_None =>\n      match match x with\n      | Option_Some (v) =>\n        match v with\n        | Result_Ok (y) =>\n          Option_Some (y)\n        | _ =>\n          Option_None\n        end\n      | _ =>\n        Option_None\n      end with\n      | Option_Some (x) =>\n        x\n      | Option_None =>\n        match x with\n        | Option_Some (Result_Err (y)) =>\n          y\n        | _ =>\n          (1 : t_i32)\n        end\n      end\n    end\n  end.\n\nDefinition if_guard (x : t_Option ((t_i32))) : t_i32 :=\n  match match x with\n  | Option_Some (v) =>\n    match f_gt (v) ((0 : t_i32)) with\n    | true =>\n      Option_Some (v)\n    | _ =>\n      Option_None\n    end\n  | _ =>\n    Option_None\n  end with\n  | Option_Some (x) =>\n    x\n  | Option_None =>\n    (0 : t_i32)\n  end.\n'''\n_CoqProject = '''\n-R ./ TODO\n-arg -w\n-arg all\n\nGuards.v'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__guards into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: guards\n    manifest: guards/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Guards.fst\" = '''\nmodule Guards\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet if_let_guard (x: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32)) : i32 =\n  match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with\n  | Core_models.Option.Option_None  -> mk_i32 0\n  | _ ->\n    match\n      (match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with\n        | Core_models.Option.Option_Some v ->\n          (match v <: Core_models.Result.t_Result i32 i32 with\n            | Core_models.Result.Result_Ok y ->\n              Core_models.Option.Option_Some y <: Core_models.Option.t_Option i32\n            | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32)\n        | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32)\n      <:\n      Core_models.Option.t_Option i32\n    with\n    | Core_models.Option.Option_Some x -> x\n    | Core_models.Option.Option_None  ->\n      match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with\n      | Core_models.Option.Option_Some (Core_models.Result.Result_Err y) -> y\n      | _ -> mk_i32 1\n\nlet equivalent (x: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32)) : i32 =\n  match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with\n  | Core_models.Option.Option_None  -> mk_i32 0\n  | _ ->\n    match\n      (match x <: Core_models.Option.t_Option 
(Core_models.Result.t_Result i32 i32) with\n        | Core_models.Option.Option_Some v ->\n          (match v <: Core_models.Result.t_Result i32 i32 with\n            | Core_models.Result.Result_Ok y ->\n              Core_models.Option.Option_Some y <: Core_models.Option.t_Option i32\n            | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32)\n        | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32)\n      <:\n      Core_models.Option.t_Option i32\n    with\n    | Core_models.Option.Option_Some y -> y\n    | Core_models.Option.Option_None  ->\n      match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with\n      | Core_models.Option.Option_Some (Core_models.Result.Result_Err y) -> y\n      | _ -> mk_i32 1\n\nlet multiple_guards (x: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32)) : i32 =\n  match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with\n  | Core_models.Option.Option_None  -> mk_i32 0\n  | _ ->\n    match\n      (match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with\n        | Core_models.Option.Option_Some (Core_models.Result.Result_Ok v) ->\n          (match\n              Core_models.Option.Option_Some (v +! 
mk_i32 1) <: Core_models.Option.t_Option i32\n            with\n            | Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 1) ->\n              Core_models.Option.Option_Some (mk_i32 0) <: Core_models.Option.t_Option i32\n            | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32)\n        | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32)\n      <:\n      Core_models.Option.t_Option i32\n    with\n    | Core_models.Option.Option_Some x -> x\n    | Core_models.Option.Option_None  ->\n      match\n        (match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with\n          | Core_models.Option.Option_Some v ->\n            (match v <: Core_models.Result.t_Result i32 i32 with\n              | Core_models.Result.Result_Ok y ->\n                Core_models.Option.Option_Some y <: Core_models.Option.t_Option i32\n              | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32)\n          | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32)\n        <:\n        Core_models.Option.t_Option i32\n      with\n      | Core_models.Option.Option_Some x -> x\n      | Core_models.Option.Option_None  ->\n        match x <: Core_models.Option.t_Option (Core_models.Result.t_Result i32 i32) with\n        | Core_models.Option.Option_Some (Core_models.Result.Result_Err y) -> y\n        | _ -> mk_i32 1\n\nlet if_guard (x: Core_models.Option.t_Option i32) : i32 =\n  match\n    (match x <: Core_models.Option.t_Option i32 with\n      | Core_models.Option.Option_Some v ->\n        (match v >. 
mk_i32 0 <: bool with\n          | true -> Core_models.Option.Option_Some v <: Core_models.Option.t_Option i32\n          | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32)\n      | _ -> Core_models.Option.Option_None <: Core_models.Option.t_Option i32)\n    <:\n    Core_models.Option.t_Option i32\n  with\n  | Core_models.Option.Option_Some x -> x\n  | Core_models.Option.Option_None  -> mk_i32 0\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__guards into-ssprove.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: ssprove\n  info:\n    name: guards\n    manifest: guards/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Guards.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp Require Import word_ssrZ word.\n(* From Jasmin Require Import word. *)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? 
todo(item)*)\n\nEquations if_let_guard (x : both (t_Option (t_Result int32 int32))) : both int32 :=\n  if_let_guard x  :=\n    matchb x with\n    | Option_None_case  =>\n      ret_both (0 : int32)\n    | _ =>\n      matchb matchb x with\n      | Option_Some_case v =>\n        letb v := ret_both ((v) : (t_Result int32 int32)) in\n        matchb v with\n        | Result_Ok_case y =>\n          letb y := ret_both ((y) : (int32)) in\n          Option_Some y\n        | _ =>\n          Option_None\n        end\n      | _ =>\n        Option_None\n      end with\n      | Option_Some_case x =>\n        letb x := ret_both ((x) : (int32)) in\n        x\n      | Option_None_case  =>\n        matchb x with\n        | Option_Some_case Result_Err y =>\n          letb y := ret_both ((((y))) : (t_Result int32 int32)) in\n          y\n        | _ =>\n          ret_both (1 : int32)\n        end\n      end\n    end : both int32.\nFail Next Obligation.\n\nEquations equivalent (x : both (t_Option (t_Result int32 int32))) : both int32 :=\n  equivalent x  :=\n    matchb x with\n    | Option_None_case  =>\n      ret_both (0 : int32)\n    | _ =>\n      matchb matchb x with\n      | Option_Some_case v =>\n        letb v := ret_both ((v) : (t_Result int32 int32)) in\n        matchb v with\n        | Result_Ok_case y =>\n          letb y := ret_both ((y) : (int32)) in\n          Option_Some y\n        | _ =>\n          Option_None\n        end\n      | _ =>\n        Option_None\n      end with\n      | Option_Some_case y =>\n        letb y := ret_both ((y) : (int32)) in\n        y\n      | Option_None_case  =>\n        matchb x with\n        | Option_Some_case Result_Err y =>\n          letb y := ret_both ((((y))) : (t_Result int32 int32)) in\n          y\n        | _ =>\n          ret_both (1 : int32)\n        end\n      end\n    end : both int32.\nFail Next Obligation.\n\nEquations multiple_guards (x : both (t_Option (t_Result int32 int32))) : both int32 :=\n  multiple_guards x  :=\n    
matchb x with\n    | Option_None_case  =>\n      ret_both (0 : int32)\n    | _ =>\n      matchb matchb x with\n      | Option_Some_case Result_Ok v =>\n        letb v := ret_both ((((v))) : (t_Result int32 int32)) in\n        matchb Option_Some (v .+ (ret_both (1 : int32))) with\n        | Option_Some_case 1 =>\n          letb 1 := ret_both ((1) : (int32)) in\n          Option_Some (ret_both (0 : int32))\n        | _ =>\n          Option_None\n        end\n      | _ =>\n        Option_None\n      end with\n      | Option_Some_case x =>\n        letb x := ret_both ((x) : (int32)) in\n        x\n      | Option_None_case  =>\n        matchb matchb x with\n        | Option_Some_case v =>\n          letb v := ret_both ((v) : (t_Result int32 int32)) in\n          matchb v with\n          | Result_Ok_case y =>\n            letb y := ret_both ((y) : (int32)) in\n            Option_Some y\n          | _ =>\n            Option_None\n          end\n        | _ =>\n          Option_None\n        end with\n        | Option_Some_case x =>\n          letb x := ret_both ((x) : (int32)) in\n          x\n        | Option_None_case  =>\n          matchb x with\n          | Option_Some_case Result_Err y =>\n            letb y := ret_both ((((y))) : (t_Result int32 int32)) in\n            y\n          | _ =>\n            ret_both (1 : int32)\n          end\n        end\n      end\n    end : both int32.\nFail Next Obligation.\n\nEquations if_guard (x : both (t_Option int32)) : both int32 :=\n  if_guard x  :=\n    matchb matchb x with\n    | Option_Some_case v =>\n      letb v := ret_both ((v) : (int32)) in\n      matchb v >.? (ret_both (0 : int32)) with\n      | true =>\n        Option_Some v\n      | _ =>\n        Option_None\n      end\n    | _ =>\n      Option_None\n    end with\n    | Option_Some_case x =>\n      letb x := ret_both ((x) : (int32)) in\n      x\n    | Option_None_case  =>\n      ret_both (0 : int32)\n    end : both int32.\nFail Next Obligation.\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__include-flag into-coq.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: coq\n  info:\n    name: include-flag\n    manifest: cli/include-flag/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Include_flag.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n(* NotImplementedYet *)\n\n\n\nRecord Foo_record : Type :=\n  {\n  }.\n\n#[export]\nNotation \"'Foo_Foo_record'\" := Build_Foo_record.\n\nClass t_Trait (v_Self : Type) : Type :=\n  {\n  }.\nArguments t_Trait (_).\n\nInstance t_Trait_810848144 : t_Trait ((t_Foo)) :=\n  {\n  }.\n\nDefinition main_a_a '(_ : unit) : unit :=\n  tt.\n\nDefinition main_b_a '(_ : unit) : unit :=\n  tt.\n\nDefinition main_c_a '(_ : unit) : unit :=\n  tt.\n\nDefinition main_a_b '(_ : unit) : unit :=\n  tt.\n\nDefinition main_b_b '(_ : unit) : unit :=\n  tt.\n\nDefinition main_c_b '(_ : unit) : unit :=\n  tt.\n\nDefinition main_a_c '(_ : unit) : unit :=\n  tt.\n\nDefinition main_a `{v_T : Type} `{t_Trait (v_T)} (x : v_T) : unit :=\n  let _ := main_a_a (tt) in\n  let _ := main_a_b (tt) in\n  let _ := main_a_c (tt) in\n  tt.\n\nDefinition main_b_c '(_ : unit) : unit :=\n  tt.\n\nDefinition main_b '(_ : unit) : unit :=\n  let _ := main_b_a (tt) in\n  let _ := main_b_b (tt) in\n  let _ := main_b_c (tt) in\n  tt.\n\nDefinition main_c_c '(_ : unit) : unit :=\n  tt.\n\nDefinition main_c '(_ : unit) : unit :=\n  let _ := main_c_a (tt) in\n  let _ := 
main_c_b (tt) in\n  let _ := main_c_c (tt) in\n  tt.\n\nDefinition main '(_ : unit) : unit :=\n  let _ := main_a (Foo) in\n  let _ := main_b (tt) in\n  let _ := main_c (tt) in\n  tt.\n'''\n_CoqProject = '''\n-R ./ TODO\n-arg -w\n-arg all\n\nInclude_flag.v'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__include-flag into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: include-flag\n    manifest: cli/include-flag/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Include_flag.fst\" = '''\nmodule Include_flag\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Foo = | Foo : t_Foo\n\nclass t_Trait (v_Self: Type0) = { __marker_trait_t_Trait:Prims.unit }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: t_Trait t_Foo = { __marker_trait_t_Trait = () }\n\n/// Indirect dependencies\nlet main_a_a (_: Prims.unit) : Prims.unit = ()\n\nlet main_b_a (_: Prims.unit) : Prims.unit = ()\n\nlet main_c_a (_: Prims.unit) : Prims.unit = ()\n\nlet main_a_b (_: Prims.unit) : Prims.unit = ()\n\nlet main_b_b (_: Prims.unit) : Prims.unit = ()\n\nlet main_c_b (_: Prims.unit) : Prims.unit = ()\n\nlet main_a_c (_: Prims.unit) : Prims.unit = ()\n\n/// Direct dependencies\nlet main_a (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Trait v_T) (x: v_T)\n    : Prims.unit =\n  let _:Prims.unit = main_a_a () in\n  let _:Prims.unit = main_a_b () in\n  let _:Prims.unit = main_a_c () in\n  ()\n\nlet main_b_c (_: Prims.unit) : Prims.unit = ()\n\nlet main_b (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = main_b_a () in\n  let _:Prims.unit = main_b_b () in\n  let _:Prims.unit = main_b_c () in\n  ()\n\nlet main_c_c (_: Prims.unit) : Prims.unit = ()\n\nlet main_c (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = main_c_a () in\n  let _:Prims.unit = main_c_b () in\n  let _:Prims.unit = main_c_c () in\n  ()\n\n/// Entrypoint\nlet main (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit = main_a #t_Foo (Foo <: t_Foo) in\n  let 
_:Prims.unit = main_b () in\n  let _:Prims.unit = main_c () in\n  ()\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__interface-only into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: interface-only\n    manifest: cli/interface-only/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: \"+:** -interface_only::Foo\"\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Interface_only.fst\" = '''\nmodule Interface_only\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\n/// This item contains unsafe blocks and raw references, two features\n/// not supported by hax. Thanks to the `-i` flag and the `+:`\n/// modifier, `f` is still extractable as an interface.\n/// Expressions within type are still extracted, as well as pre- and\n/// post-conditions.\nassume\nval f': x: u8\n  -> Prims.Pure (t_Array u8 (mk_usize 4))\n      (requires x <. mk_u8 254)\n      (ensures\n        fun r ->\n          let r:t_Array u8 (mk_usize 4) = r in\n          (r.[ mk_usize 0 ] <: u8) >. x)\n\nunfold\nlet f = f'\n\ntype t_Bar = | Bar : t_Bar\n\n/// Non-inherent implementations are extracted, their bodies are not\n/// dropped. 
This might be a bit surprising: see\n/// https://github.com/hacspec/hax/issues/616.\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl': Core_models.Convert.t_From t_Bar Prims.unit\n\nunfold\nlet impl = impl'\n\n/// If you need to drop the body of a method, please hoist it:\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_1': Core_models.Convert.t_From t_Bar u8\n\nunfold\nlet impl_1 = impl_1'\n\nassume\nval f_from__impl_1__from': u8 -> t_Bar\n\nunfold\nlet f_from__impl_1__from = f_from__impl_1__from'\n\ntype t_Holder (v_T: Type0) = { f_value:Alloc.Vec.t_Vec v_T Alloc.Alloc.t_Global }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_2': #v_T: Type0 -> Core_models.Convert.t_From (t_Holder v_T) Prims.unit\n\nunfold\nlet impl_2 (#v_T: Type0) = impl_2' #v_T\n\ntype t_Param (v_SIZE: usize) = { f_value:t_Array u8 v_SIZE }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_3': v_SIZE: usize -> Core_models.Convert.t_From (t_Param v_SIZE) Prims.unit\n\nunfold\nlet impl_3 (v_SIZE: usize) = impl_3' v_SIZE\n\nassume\nval ff_generic': v_X: usize -> #v_U: Type0 -> e_x: v_U -> t_Param v_X\n\nunfold\nlet ff_generic (v_X: usize) (#v_U: Type0) = ff_generic' v_X #v_U\n\nclass t_T (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Assoc:Type0;\n  f_d_pre:Prims.unit -> Type0;\n  f_d_post:Prims.unit -> Prims.unit -> Type0;\n  f_d:x0: Prims.unit -> Prims.Pure Prims.unit (f_d_pre x0) (fun result -> f_d_post x0 result)\n}\n\n/// Impls with associated types are not erased\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_T_for_u8: t_T u8 =\n  {\n    f_Assoc = u8;\n    f_d_pre = (fun (_: Prims.unit) -> true);\n    f_d_post = (fun (_: Prims.unit) (out: Prims.unit) -> true);\n    f_d = fun (_: Prims.unit) -> ()\n  }\n\nclass t_T2 (v_Self: Type0) = {\n  f_d_pre:Prims.unit -> Type0;\n  f_d_post:Prims.unit -> Prims.unit -> Type0;\n  f_d:x0: Prims.unit -> Prims.Pure Prims.unit (f_d_pre x0) (fun result -> f_d_post x0 result)\n}\n\n/// 
Items can be forced to be transparent\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_T2_for_u8: t_T2 u8 =\n  {\n    f_d_pre = (fun (_: Prims.unit) -> false);\n    f_d_post = (fun (_: Prims.unit) (out: Prims.unit) -> true);\n    f_d = fun (_: Prims.unit) -> ()\n  }\n\nassume\nval padlen': b: t_Slice u8 -> n: usize\n  -> Prims.Pure usize\n      (requires (Core_models.Slice.impl__len #u8 b <: usize) >=. n)\n      (ensures\n        fun out ->\n          let out:usize = out in\n          out <=. n)\n\nunfold\nlet padlen = padlen'\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__lean-core-models into-lean.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: lean\n  info:\n    name: lean-core-models\n    manifest: lean-core-models/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"lean_core_models.lean\" = '''\n\n-- Experimental lean backend for Hax\n-- The Hax prelude library can be found in hax/proof-libs/lean\nimport Hax\nimport Std.Tactic.Do\nimport Std.Do.Triple\nimport Std.Tactic.Do.Syntax\nopen Std.Do\nopen Std.Tactic\n\nset_option mvcgen.warning false\nset_option linter.unusedVariables false\n\n\nnamespace lean_core_models.default.structs\n\nstructure S where\n  f1 : usize\n\n@[reducible] instance Impl.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes S\n  where\n\ninstance Impl : core_models.default.Default S where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do\n    (pure (S.mk (f1 := (0 : usize))))\n\n@[spec]\ndef test (_ : rust_primitives.hax.Tuple0) : RustM S := do\n  (core_models.default.Default.default S rust_primitives.hax.Tuple0.mk)\n\nend lean_core_models.default.structs\n\n\nnamespace lean_core_models.default.enums\n\ninductive E (T : Type) : Type\n| C1 : u32 -> E (T : Type)\n| C2 : T -> E (T : Type)\n\n@[reducible] instance Impl.AssociatedTypes\n  (T : Type)\n  [trait_constr_Impl_associated_type_i0 :\n    core_models.default.Default.AssociatedTypes\n    T]\n  [trait_constr_Impl_i0 : core_models.default.Default T ] :\n  core_models.default.Default.AssociatedTypes (E T)\n  where\n\ninstance Impl\n  (T : Type)\n  [trait_constr_Impl_associated_type_i0 :\n    core_models.default.Default.AssociatedTypes\n    T]\n  [trait_constr_Impl_i0 : 
core_models.default.Default T ] :\n  core_models.default.Default (E T)\n  where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do\n    (pure (E.C2\n      (← (core_models.default.Default.default\n        T rust_primitives.hax.Tuple0.mk))))\n\nend lean_core_models.default.enums\n\n\nnamespace lean_core_models.function\n\n@[spec]\ndef test (_ : rust_primitives.hax.Tuple0) : RustM u32 := do\n  let f_1 : (u32 -> RustM u32) := (fun _ => (do (pure (9 : u32)) : RustM u32));\n  let f_2 : (u32 -> u32 -> RustM u32) := (fun x y => (do (x +? y) : RustM u32));\n  let f_2_tuple : ((rust_primitives.hax.Tuple2 u32 u32) -> RustM u32) :=\n    (fun ⟨x, y⟩ => (do (x +? y) : RustM u32));\n  ((← ((← (core_models.ops.function.Fn.call\n        (u32 -> RustM u32)\n        (rust_primitives.hax.Tuple1 u32)\n        f_1\n        (rust_primitives.hax.Tuple1.mk (0 : u32))))\n      +? (← (core_models.ops.function.Fn.call\n        (u32 -> u32 -> RustM u32)\n        (rust_primitives.hax.Tuple2 u32 u32)\n        f_2\n        (rust_primitives.hax.Tuple2.mk (1 : u32) (2 : u32))))))\n    +? 
(← (core_models.ops.function.Fn.call\n      ((rust_primitives.hax.Tuple2 u32 u32) -> RustM u32)\n      (rust_primitives.hax.Tuple1 (rust_primitives.hax.Tuple2 u32 u32))\n      f_2_tuple\n      (rust_primitives.hax.Tuple1.mk\n        (rust_primitives.hax.Tuple2.mk (1 : u32) (2 : u32))))))\n\nend lean_core_models.function\n\n\nnamespace lean_core_models.option\n\nstructure S where\n  f1 : u32\n\ninductive E : Type\n| C : u32 -> E\n\n@[reducible] instance Impl.AssociatedTypes :\n  core_models.default.Default.AssociatedTypes S\n  where\n\ninstance Impl : core_models.default.Default S where\n  default := fun (_ : rust_primitives.hax.Tuple0) => do\n    (pure (S.mk (f1 := (42 : u32))))\n\n@[spec]\ndef test (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let o1 : (core_models.option.Option i32) :=\n    (core_models.option.Option.Some (4 : i32));\n  let o2 : (core_models.option.Option i32) := core_models.option.Option.None;\n  let o3 : Bool ←\n    (core_models.option.Impl.is_some_and i32 (i32 -> RustM Bool)\n      (← (core_models.clone.Clone.clone (core_models.option.Option i32) o1))\n      (fun x => (do (x ==? (0 : i32)) : RustM Bool)));\n  let o3 : Bool ←\n    (core_models.option.Impl.is_none_or i32 (i32 -> RustM Bool)\n      (← (core_models.clone.Clone.clone (core_models.option.Option i32) o1))\n      (fun x => (do (x ==? 
(0 : i32)) : RustM Bool)));\n  let o4 : i32 ←\n    (core_models.option.Impl.unwrap i32\n      (core_models.option.Option.Some (0 : i32)));\n  let o5 : i32 ←\n    (core_models.option.Impl.unwrap_or i32\n      (core_models.option.Option.Some (0 : i32))\n      (9 : i32));\n  let o6 : i32 ←\n    (core_models.option.Impl.unwrap_or_else\n      i32\n      (rust_primitives.hax.Tuple0 -> RustM i32)\n      (core_models.option.Option.Some (0 : i32))\n      (fun _ => (do (pure (9 : i32)) : RustM i32)));\n  let o7 : S ←\n    (core_models.option.Impl.unwrap_or_default S\n      core_models.option.Option.None);\n  let o8 : (core_models.option.Option i32) ←\n    (core_models.option.Impl.map i32 i32 (i32 -> RustM i32)\n      (core_models.option.Option.Some (0 : i32))\n      (fun x => (do (x +? (1 : i32)) : RustM i32)));\n  let o9 : i32 ←\n    (core_models.option.Impl.map_or i32 i32 (i32 -> RustM i32)\n      (core_models.option.Option.Some (1 : i32))\n      (9 : i32)\n      (fun x => (do (x +? (1 : i32)) : RustM i32)));\n  let o10 : i32 ←\n    (core_models.option.Impl.map_or_else\n      i32\n      i32\n      (rust_primitives.hax.Tuple0 -> RustM i32)\n      (i32 -> RustM i32)\n      (core_models.option.Option.Some (2 : i32))\n      (fun _ => (do (pure (9 : i32)) : RustM i32))\n      (fun x => (do (x +? 
(1 : i32)) : RustM i32)));\n  let o11 : (core_models.result.Result i32 E) ←\n    (core_models.option.Impl.ok_or i32 E\n      (core_models.option.Option.Some (3 : i32))\n      (E.C (0 : u32)));\n  let o12 : (core_models.result.Result i32 E) ←\n    (core_models.option.Impl.ok_or_else\n      i32\n      E\n      (rust_primitives.hax.Tuple0 -> RustM E)\n      (core_models.option.Option.Some (1 : i32))\n      (fun _ => (do (pure (E.C (1 : u32))) : RustM E)));\n  let o13 : (core_models.option.Option u32) ←\n    (core_models.option.Impl.and_then\n      u32\n      u32\n      (u32 -> RustM (core_models.option.Option u32))\n      core_models.option.Option.None\n      (fun x =>\n        (do\n        (pure (core_models.option.Option.Some x)) :\n        RustM (core_models.option.Option u32))));\n  let ⟨_, out⟩ ←\n    (core_models.option.Impl.take S\n      (core_models.option.Option.Some (S.mk (f1 := (9 : u32)))));\n  let o14 : (core_models.option.Option S) := out;\n  let o15 : Bool ←\n    (core_models.option.Impl.is_some i32\n      (core_models.option.Option.Some (1 : i32)));\n  let o16 : Bool ←\n    (core_models.option.Impl.is_none i32\n      (core_models.option.Option.Some (2 : i32)));\n  let o17 : i32 ←\n    (core_models.option.Impl.expect i32\n      (core_models.option.Option.Some (3 : i32))\n      \"Should be Some\");\n  let o18 : i32 ←\n    (core_models.option.Impl.unwrap i32\n      (core_models.option.Option.Some (4 : i32)));\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend lean_core_models.option\n\n\nnamespace lean_core_models.phantom\n\nclass Foo.AssociatedTypes (Self : Type) where\n\nclass Foo (Self : Type)\n  [associatedTypes : outParam (Foo.AssociatedTypes (Self : Type))]\n  where\n\nstructure Bar\n  (F : Type)\n  [trait_constr_Bar_associated_type_i0 : Foo.AssociatedTypes F]\n  [trait_constr_Bar_i0 : Foo F ]\n  where\n  _phantom : (core_models.marker.PhantomData F)\n\n@[spec]\ndef Impl.new\n    (F : Type)\n    [trait_constr_new_associated_type_i0 : Foo.AssociatedTypes 
F]\n    [trait_constr_new_i0 : Foo F ]\n    (_ : rust_primitives.hax.Tuple0) :\n    RustM (Bar F) := do\n  (pure (Bar.mk (_phantom := core_models.marker.PhantomData.mk)))\n\nend lean_core_models.phantom\n\n\nnamespace lean_core_models.result\n\ninductive E1 : Type\n| C1 : E1\n| C2 : u32 -> E1\n\n@[instance] opaque Impl.AssociatedTypes :\n  core_models.clone.Clone.AssociatedTypes E1 :=\n  by constructor <;> exact Inhabited.default\n\n@[instance] opaque Impl :\n  core_models.clone.Clone E1 :=\n  by constructor <;> exact Inhabited.default\n\ninductive E2 : Type\n| C1 : E2\n| C2 : u32 -> E2\n\n@[spec]\ndef tests (_ : rust_primitives.hax.Tuple0) :\n    RustM (core_models.result.Result u32 E1) := do\n  let v1 : (core_models.result.Result u32 E1) :=\n    (core_models.result.Result.Ok (1 : u32));\n  let v2 : (core_models.result.Result u32 E1) :=\n    (core_models.result.Result.Err E1.C1);\n  let f : (u32 -> RustM u32) := (fun x => (do (x +? (1 : u32)) : RustM u32));\n  let v5 : (core_models.result.Result i32 E1) ←\n    (core_models.result.Impl.map i32 E1 i32 (i32 -> RustM i32)\n      (core_models.result.Result.Ok (1 : i32))\n      (fun v => (do (v +? (1 : i32)) : RustM i32)));\n  let v6 : u32 ←\n    (core_models.result.Impl.map_or u32 E1 u32 (u32 -> RustM u32)\n      (core_models.result.Result.Ok (1 : u32))\n      (9 : u32)\n      f);\n  let v7 : u32 ←\n    (core_models.result.Impl.map_or_else\n      u32\n      E1\n      u32\n      (E1 -> RustM u32)\n      (u32 -> RustM u32)\n      (core_models.result.Result.Ok (1 : u32))\n      (fun _ => (do (pure (10 : u32)) : RustM u32))\n      f);\n  let v8 : (core_models.result.Result i32 E2) ←\n    (core_models.result.Impl.map_err i32 E1 E2 (E1 -> RustM E2)\n      (core_models.result.Result.Ok (0 : i32))\n      (fun e =>\n        (do\n        match e with\n          | (E1.C1 ) => do (pure E2.C1)\n          | (E1.C2  x) => do (pure (E2.C2 (← (x +? 
(1 : u32))))) :\n        RustM E2)));\n  let v9 : Bool ← (core_models.result.Impl.is_ok u32 E1 v1);\n  let v10 : Bool ← (core_models.result.Impl.is_err u32 E1 v1);\n  let v11 : (core_models.result.Result u32 E1) ←\n    (core_models.result.Impl.and_then\n      u32\n      E1\n      u32\n      (u32 -> RustM (core_models.result.Result u32 E1))\n      (← (core_models.clone.Clone.clone (core_models.result.Result u32 E1) v1))\n      (fun x =>\n        (do\n        (pure (core_models.result.Result.Ok (← (x +? (1 : u32))))) :\n        RustM (core_models.result.Result u32 E1))));\n  let v12 : u32 ←\n    (core_models.result.Impl.unwrap u32 u32\n      (← (core_models.clone.Clone.clone\n        (core_models.result.Result u32 u32)\n        (core_models.result.Result.Ok (0 : u32)))));\n  let v13 : u32 ←\n    (core_models.result.Impl.expect u32 u32\n      (← (core_models.clone.Clone.clone\n        (core_models.result.Result u32 u32)\n        (core_models.result.Result.Ok (0 : u32))))\n      \"Should be Ok\");\n  match\n    (← (core_models.result.Impl.map u32 E1 u32 (u32 -> RustM u32) v1 f))\n  with\n    | (core_models.result.Result.Ok  hoist2) => do\n      match v2 with\n        | (core_models.result.Result.Ok  hoist1) => do\n          let v3 : u32 ← (hoist2 +? hoist1);\n          (pure (core_models.result.Result.Ok v3))\n        | (core_models.result.Result.Err  err) => do\n          (pure (core_models.result.Result.Err err))\n    | (core_models.result.Result.Err  err) => do\n      (pure (core_models.result.Result.Err err))\n\nend lean_core_models.result\n\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__lean-tests into-lean.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: lean\n  info:\n    name: lean-tests\n    manifest: lean-tests/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"lean_tests.lean\" = \"\"\"\n\n-- Experimental lean backend for Hax\n-- The Hax prelude library can be found in hax/proof-libs/lean\nimport Hax\nimport Std.Tactic.Do\nimport Std.Do.Triple\nimport Std.Tactic.Do.Syntax\nopen Std.Do\nopen Std.Tactic\n\nset_option mvcgen.warning false\nset_option linter.unusedVariables false\n\n\nnamespace lean_tests.array\n\n@[spec]\ndef f (N : usize) (x : (RustArray u8 N)) :\n    RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef g (N : usize) (x : (RustArray u8 N)) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let _ ← (f (N) x);\n  let _ ←\n    (f ((10 : usize)) (← (rust_primitives.hax.repeat (0 : u8) (10 : usize))));\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend lean_tests.array\n\n\nnamespace lean_tests.associated_types.projection\n\nclass T1.AssociatedTypes (Self : Type) where\n  A1 : Type\n\nattribute [reducible] T1.AssociatedTypes.A1\n\nabbrev T1.A1 :=\n  T1.AssociatedTypes.A1\n\nclass T1 (Self : Type)\n  [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))]\n  where\n\nend lean_tests.associated_types.projection\n\n\nnamespace lean_tests.associated_types.multiple_projections\n\nclass FnOnce.AssociatedTypes (Self : Type) (T : Type) where\n  Output : Type\n\nattribute [reducible] FnOnce.AssociatedTypes.Output\n\nabbrev FnOnce.Output :=\n  FnOnce.AssociatedTypes.Output\n\nclass FnOnce (Self : Type) (T : Type)\n  [associatedTypes : 
outParam (FnOnce.AssociatedTypes (Self : Type) (T : Type))]\n  where\n\n@[spec]\ndef func\n    (T : Type)\n    (U : Type)\n    (D : Type)\n    (F : Type)\n    [trait_constr_func_associated_type_i0 : FnOnce.AssociatedTypes F T]\n    [trait_constr_func_i0 : FnOnce\n      F\n      T\n      (associatedTypes := {\n        show FnOnce.AssociatedTypes F T\n        by infer_instance\n        with Output := U})]\n    [trait_constr_func_associated_type_i1 : FnOnce.AssociatedTypes D T]\n    [trait_constr_func_i1 : FnOnce\n      D\n      T\n      (associatedTypes := {\n        show FnOnce.AssociatedTypes D T\n        by infer_instance\n        with Output := U})]\n    (d : D)\n    (f : F)\n    (u : U) :\n    RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend lean_tests.associated_types.multiple_projections\n\n\nnamespace lean_tests.binops\n\n@[spec]\ndef noop (x : i32) : RustM i32 := do (pure x)\n\n@[spec]\ndef neg_int (x : i32) : RustM i32 := do (-? x)\n\n@[spec]\ndef not_int (x : i32) : RustM i32 := do (~? x)\n\n@[spec]\ndef not_bool (x : Bool) : RustM Bool := do (!? x)\n\n@[spec]\ndef index (x : (RustArray i32 1)) : RustM i32 := do x[(0 : usize)]_?\n\n@[spec]\ndef add_int (x : i32) (y : i32) : RustM i32 := do (x +? y)\n\n@[spec]\ndef sub_int (x : i32) (y : i32) : RustM i32 := do (x -? y)\n\n@[spec]\ndef mul_int (x : i32) (y : i32) : RustM i32 := do (x *? y)\n\n@[spec]\ndef div_int (x : i32) (y : i32) : RustM i32 := do (x /? y)\n\n@[spec]\ndef rem_int (x : i32) (y : i32) : RustM i32 := do (x %? y)\n\n@[spec]\ndef shr_int (x : i32) (y : i32) : RustM i32 := do (x >>>? y)\n\n@[spec]\ndef shl_int (x : i32) (y : i32) : RustM i32 := do (x <<<? y)\n\n@[spec]\ndef bitand_int (x : i32) (y : i32) : RustM i32 := do (x &&&? y)\n\n@[spec]\ndef bitand_bool (x : Bool) (y : Bool) : RustM Bool := do (x &&? y)\n\n@[spec]\ndef bitor_int (x : i32) (y : i32) : RustM i32 := do (x |||? y)\n\n@[spec]\ndef bitor_bool (x : Bool) (y : Bool) : RustM Bool := do (x ||? 
y)\n\n@[spec]\ndef bitxor_int (x : i32) (y : i32) : RustM i32 := do (x ^^^? y)\n\n@[spec]\ndef bitxor_bool (x : Bool) (y : Bool) : RustM Bool := do (x ^^? y)\n\n@[spec]\ndef logical_op_and (x : Bool) (y : Bool) : RustM Bool := do (x &&? y)\n\n@[spec]\ndef logical_op_or (x : Bool) (y : Bool) : RustM Bool := do (x ||? y)\n\n@[spec]\ndef eq_int (x : i32) (y : i32) : RustM Bool := do (x ==? y)\n\n@[spec]\ndef eq_bool (x : Bool) (y : Bool) : RustM Bool := do (x ==? y)\n\n@[spec]\ndef neq_int (x : i32) (y : i32) : RustM Bool := do (x !=? y)\n\n@[spec]\ndef neq_bool (x : Bool) (y : Bool) : RustM Bool := do (x !=? y)\n\n@[spec]\ndef lt_int (x : i32) (y : i32) : RustM Bool := do (x <? y)\n\n@[spec]\ndef le_int (x : i32) (y : i32) : RustM Bool := do (x <=? y)\n\n@[spec]\ndef gt_int (x : i32) (y : i32) : RustM Bool := do (x >? y)\n\n@[spec]\ndef ge_int (x : i32) (y : i32) : RustM Bool := do (x >=? y)\n\nstructure S where\n  -- no fields\n\n@[reducible] instance Impl.AssociatedTypes :\n  core_models.ops.bit.Not.AssociatedTypes S\n  where\n  Output := S\n\ninstance Impl : core_models.ops.bit.Not S where\n  not := fun (self : S) => do (pure self)\n\n@[reducible] instance Impl_1.AssociatedTypes :\n  core_models.ops.arith.Add.AssociatedTypes S S\n  where\n  Output := S\n\ninstance Impl_1 : core_models.ops.arith.Add S S where\n  add := fun (self : S) (rhs : S) => do (pure self)\n\n@[spec]\ndef not_s (x : S) : RustM S := do (core_models.ops.bit.Not.not S x)\n\n@[spec]\ndef add_s (x : S) (y : S) : RustM S := do\n  (core_models.ops.arith.Add.add S S x y)\n\nend lean_tests.binops\n\n\nnamespace lean_tests.casts\n\n--  Returns true if all casting edge cases behave as expected.\ndef casting_edge_cases (_dummy : Bool) : RustM Bool := do\n  let case1 : Bool ←\n    ((← (rust_primitives.hax.cast_op (256 : u16) : RustM u8)) ==? (0 : u8));\n  let case2 : Bool ←\n    ((← (rust_primitives.hax.cast_op (-1 : i16) : RustM u8)) ==? 
(255 : u8));\n  let case3 : Bool ←\n    ((← (rust_primitives.hax.cast_op (-1 : i8) : RustM i16)) ==? (-1 : i16));\n  let case4 : Bool ←\n    ((← (rust_primitives.hax.cast_op (128 : u8) : RustM i8)) ==? (-128 : i8));\n  let case5 : Bool ←\n    ((← (rust_primitives.hax.cast_op (4294967295 : u32) : RustM i32))\n      ==? (-1 : i32));\n  ((← ((← ((← (case1 &&? case2)) &&? case3)) &&? case4)) &&? case5)\n\nset_option hax_mvcgen.specset \\\"bv\\\" in\n@[hax_spec]\ndef casting_edge_cases.spec (_dummy : Bool) :\n    Spec\n      (requires := do pure True)\n      (ensures := fun result => do (pure result))\n      (casting_edge_cases (_dummy : Bool)) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [casting_edge_cases] <;> bv_decide\n}\n\n--  https://github.com/cryspen/hax/issues/1912\n@[spec]\ndef shift_after_cast (x : u16) (n : u8) : RustM u32 := do\n  ((← (rust_primitives.hax.cast_op x : RustM u32))\n    <<<? (← (rust_primitives.hax.cast_op n : RustM u32)))\n\n--  https://github.com/cryspen/hax/issues/1911\n@[spec]\ndef add_after_cast (a : u8) (b : u8) (c : u8) : RustM u16 := do\n  ((← ((← (rust_primitives.hax.cast_op a : RustM u16))\n      +? (← (rust_primitives.hax.cast_op b : RustM u16))))\n    +? (← (rust_primitives.hax.cast_op c : RustM u16)))\n\nend lean_tests.casts\n\n\nnamespace lean_tests.comments\n\n--  Single line doc comment\n@[spec]\ndef f (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\n/--\n   Block doc-comment : Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum rutrum\n  orci ac tellus ullamcorper sollicitudin. Sed fringilla mi id arcu suscipit rhoncus. Pellentesque et\n  metus a ante feugiat lobortis. Nam a mauris eget nisl congue egestas. Duis et gravida\n  nulla. Curabitur mattis leo vel molestie posuere. Etiam malesuada et augue eget\n  varius. 
Pellentesque quis tincidunt erat. Vestibulum id consectetur turpis. Cras elementum magna id\n  urna volutpat fermentum. In vel erat quis nunc rhoncus porta. Aliquam sed pellentesque\n  tellus. Quisque odio diam, mollis ut venenatis non, scelerisque at nulla. Nunc urna ante, tristique\n  quis nisi quis, congue maximus nisl. Curabitur non efficitur odio. \n  -/\n@[spec]\ndef heavily_documented (_ : rust_primitives.hax.Tuple0) : RustM u32 := do\n  (pure (4 : u32))\n\nend lean_tests.comments\n\n\nnamespace lean_tests.constants\n\ndef C1 : u32 := (5678 : u32)\n\ndef C2 : u32 := RustM.of_isOk (do (C1 +? (1 : u32))) (by rfl)\n\ndef C3 : u32 :=\n  RustM.of_isOk\n    (do if true then do (pure (890 : u32)) else do ((9 : u32) /? (0 : u32)))\n    (by rfl)\n\n@[spec]\ndef computation (x : u32) : RustM u32 := do ((← (x +? x)) +? (1 : u32))\n\ndef C4 : u32 := RustM.of_isOk (do ((← (computation C1)) +? C2)) (by rfl)\n\ndef C5 : (rust_primitives.hax.Tuple2 u32 u32) :=\n  RustM.of_isOk\n    (do\n    (pure (rust_primitives.hax.Tuple2.mk\n      (← ((0 : u32) +? (0 : u32)))\n      (0 : u32))))\n    (by rfl)\n\ndef C6 : (RustArray u32 1) :=\n  RustM.of_isOk (do (pure (RustArray.ofVec #v[(0 : u32)]))) (by rfl)\n\n@[spec]\ndef test (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let x : u32 ← (C1 +? (1 : u32));\n  let y : u32 ← (C2 +? C3);\n  let z : u32 ← (C4 -? C3);\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend lean_tests.constants\n\n\nnamespace lean_tests.constants.const_parameters\n\n--  Function with const parameter\n@[spec]\ndef f (N : usize) (_ : rust_primitives.hax.Tuple0) : RustM usize := do (pure N)\n\ndef N0 : usize := (1 : usize)\n\ndef N1 : usize := (10 : usize)\n\n@[spec]\ndef test (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let _ ←\n    ((← (f ((9 : usize)) rust_primitives.hax.Tuple0.mk))\n      +? 
(← (f ((10 : usize)) rust_primitives.hax.Tuple0.mk)));\n  (pure rust_primitives.hax.Tuple0.mk)\n\n--  Trait definition\nclass T.AssociatedTypes (Self : Type) (N_TRAIT : usize) where\n\nclass T (Self : Type) (N_TRAIT : usize)\n  [associatedTypes : outParam (T.AssociatedTypes (Self : Type) (N_TRAIT :\n      usize))]\n  where\n  f (Self) (N_TRAIT) (N_FIELD : usize) : (Self -> RustM usize)\n\n--  Struct definition\nstructure S (N : usize) where\n  _0 : u32\n\n@[reducible] instance Impl.AssociatedTypes (N_TRAIT : usize) :\n  T.AssociatedTypes (S (N_TRAIT)) (N_TRAIT)\n  where\n\ninstance Impl (N_TRAIT : usize) : T (S (N_TRAIT)) (N_TRAIT) where\n  f := fun (N_FIELD : usize) (self : (S (N_TRAIT))) => do (N_TRAIT -? N_FIELD)\n\n@[spec]\ndef test2\n    (N2 : usize)\n    (A : Type)\n    [trait_constr_test2_associated_type_i0 : T.AssociatedTypes A (N2)]\n    [trait_constr_test2_i0 : T A (N2) ]\n    (x : A) :\n    RustM usize := do\n  let s : (S ((10 : usize))) := (S.mk (9 : u32));\n  let _ ←\n    ((← (T.f (S ((10 : usize))) ((10 : usize)) ((1 : usize)) s))\n      +? 
(← (T.f A (N2) ((11 : usize)) x)));\n  let s : (S ((3 : usize))) := (S.mk (9 : u32));\n  (T.f A (N2) ((4 : usize)) x)\n\nend lean_tests.constants.const_parameters\n\n\nnamespace lean_tests.enums\n\ninductive E : Type\n| V1 : E\n| V2 : E\n| V3 : usize -> E\n| V4 : usize -> usize -> usize -> E\n| V5 (f1 : usize) (f2 : usize) : E\n| V6 (f1 : usize) (f2 : usize) : E\n\nend lean_tests.enums\n\n\nnamespace lean_tests.floats\n\ndef N : f32 := (1.0 : f32)\n\n@[spec]\ndef test (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let l0 : f64 := (1.0 : f64);\n  let l1 : f64 := (0.9 : f64);\n  let l2 : f32 := (5.0 : f32);\n  let l5 : f32 := N;\n  (pure rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef f (x : f64) (y : f32) : RustM f32 := do (pure y)\n\nend lean_tests.floats\n\n\nnamespace lean_tests.ite\n\n@[spec]\ndef test1 (_ : rust_primitives.hax.Tuple0) : RustM i32 := do\n  let x : i32 ← if true then do (pure (0 : i32)) else do (pure (1 : i32));\n  if false then do (pure (2 : i32)) else do (pure (3 : i32))\n\n@[spec]\ndef test2 (b : Bool) : RustM i32 := do\n  let x : i32 ← if b then do (pure (0 : i32)) else do (pure (9 : i32));\n  let y : i32 := (0 : i32);\n  let y : i32 ←\n    if true then do\n      ((← (y +? x)) +? (1 : i32))\n    else do\n      ((← (y -? x)) -? (1 : i32));\n  if b then do\n    let z : i32 ← (y +? y);\n    ((← (z +? y)) +? x)\n  else do\n    let z : i32 ← (y -? x);\n    ((← (z +? y)) +? x)\n\nend lean_tests.ite\n\n\nnamespace lean_tests.loops\n\n--  Simple for-loop\n@[spec]\ndef loop1 (_ : rust_primitives.hax.Tuple0) : RustM u32 := do\n  let x : u32 := (0 : u32);\n  let x : u32 ←\n    (rust_primitives.hax.folds.fold_range\n      (1 : u32)\n      (10 : u32)\n      (fun x _ => (do (pure true) : RustM Bool))\n      x\n      (fun x i => (do (x +? 
i) : RustM u32)));\n  (pure x)\n\n--  For-loop with a return\n@[spec]\ndef loop2 (_ : rust_primitives.hax.Tuple0) : RustM u32 := do\n  let x : u32 := (0 : u32);\n  match\n    (← (rust_primitives.hax.folds.fold_range_return\n      (1 : u32)\n      (10 : u32)\n      (fun x _ => (do (pure true) : RustM Bool))\n      x\n      (fun x i =>\n        (do\n        if (← (i ==? (5 : u32))) then do\n          (pure (core_models.ops.control_flow.ControlFlow.Break\n            (core_models.ops.control_flow.ControlFlow.Break x)))\n        else do\n          (pure (core_models.ops.control_flow.ControlFlow.Continue\n            (← (x +? i)))) :\n        RustM\n        (core_models.ops.control_flow.ControlFlow\n          (core_models.ops.control_flow.ControlFlow\n            u32\n            (rust_primitives.hax.Tuple2 rust_primitives.hax.Tuple0 u32))\n          u32)))))\n  with\n    | (core_models.ops.control_flow.ControlFlow.Break  ret) => do (pure ret)\n    | (core_models.ops.control_flow.ControlFlow.Continue  x) => do (pure x)\n\n--  For-loop with a spec\ndef for_loop_with_spec (y : u64) : RustM u64 := do\n  let x : u64 := y;\n  let x : u64 ←\n    (rust_primitives.hax.folds.fold_range\n      (0 : u64)\n      y\n      (fun x i => (do (x >? (0 : u64)) : RustM Bool))\n      x\n      (fun x i =>\n        (do\n        if (← ((← (x %? (5 : u64))) ==? (0 : u64))) then do\n          let x : u64 := (200 : u64);\n          (pure x)\n        else do\n          let x : u64 ← (x %? (5 : u64));\n          (pure x) :\n        RustM u64)));\n  (pure x)\n\nset_option hax_mvcgen.specset \\\"bv\\\" in\n@[hax_spec]\ndef for_loop_with_spec.spec (y : u64) :\n    Spec\n      (requires := do (y >? (0 : u64)))\n      (ensures := fun res => do (res >? 
(0 : u64)))\n      (for_loop_with_spec (y : u64)) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [for_loop_with_spec] <;> bv_decide\n}\n\n--  while-loop\ndef while_loop1 (s : u32) : RustM u32 := do\n  let x : u32 := s;\n  let x : u32 ←\n    (rust_primitives.hax.while_loop\n      (fun x => (do (pure true) : RustM Bool))\n      (fun x => (do (x >? (0 : u32)) : RustM Bool))\n      (fun x =>\n        (do (rust_primitives.hax.int.from_machine x) : RustM hax_lib.int.Int))\n      x\n      (fun x => (do let x : u32 ← (x -? (1 : u32)); (pure x) : RustM u32)));\n  (pure x)\n\nset_option hax_mvcgen.specset \\\"int\\\" in\n@[hax_spec]\ndef while_loop1.spec (s : u32) :\n    Spec\n      (requires := do pure True)\n      (ensures := fun r => do (r ==? (0 : u32)))\n      (while_loop1 (s : u32)) := {\n  pureRequires := by hax_construct_pure <;> grind\n  pureEnsures := by hax_construct_pure <;> grind\n  contract := by hax_mvcgen [while_loop1] <;> grind\n}\n\nend lean_tests.loops\n\n\nnamespace lean_tests.loops.errors\n\ninductive Error : Type\n| Foo : Error\n| Bar : u32 -> Error\n\n@[spec]\ndef loop3 (_ : rust_primitives.hax.Tuple0) :\n    RustM (core_models.result.Result u32 Error) := do\n  let x : u32 := (0 : u32);\n  let _end : u32 := (10 : u32);\n  match\n    (← (rust_primitives.hax.folds.fold_range_return\n      (1 : u32)\n      _end\n      (fun x _ => (do (pure true) : RustM Bool))\n      x\n      (fun x i =>\n        (do\n        if (← (i ==? (5 : u32))) then do\n          (pure (core_models.ops.control_flow.ControlFlow.Break\n            (core_models.ops.control_flow.ControlFlow.Break\n              (core_models.result.Result.Err Error.Foo))))\n        else do\n          (pure (core_models.ops.control_flow.ControlFlow.Continue\n            (← (x +? 
(5 : u32))))) :\n        RustM\n        (core_models.ops.control_flow.ControlFlow\n          (core_models.ops.control_flow.ControlFlow\n            (core_models.result.Result u32 Error)\n            (rust_primitives.hax.Tuple2 rust_primitives.hax.Tuple0 u32))\n          u32)))))\n  with\n    | (core_models.ops.control_flow.ControlFlow.Break  ret) => do (pure ret)\n    | (core_models.ops.control_flow.ControlFlow.Continue  x) => do\n      (pure (core_models.result.Result.Ok x))\n\n@[spec]\ndef loop4 (_ : rust_primitives.hax.Tuple0) :\n    RustM\n    (core_models.result.Result (rust_primitives.hax.Tuple2 u32 u32) Error)\n    := do\n  let e : u32 := (0 : u32);\n  let f : (rust_primitives.hax.Tuple0 -> RustM u32) :=\n    (fun ⟨⟩ => (do (pure (42 : u32)) : RustM u32));\n  match\n    (← (rust_primitives.hax.folds.fold_range_return\n      (0 : u32)\n      (← (core_models.ops.function.Fn.call\n        (rust_primitives.hax.Tuple0 -> RustM u32)\n        (rust_primitives.hax.Tuple1 rust_primitives.hax.Tuple0)\n        f\n        (rust_primitives.hax.Tuple1.mk rust_primitives.hax.Tuple0.mk)))\n      (fun e _ => (do (pure true) : RustM Bool))\n      e\n      (fun e i =>\n        (do\n        if (← (i >? (10 : u32))) then do\n          (pure (core_models.ops.control_flow.ControlFlow.Break\n            (core_models.ops.control_flow.ControlFlow.Break\n              (core_models.result.Result.Err (Error.Bar e)))))\n        else do\n          (pure (core_models.ops.control_flow.ControlFlow.Continue\n            (← (e +? 
i)))) :\n        RustM\n        (core_models.ops.control_flow.ControlFlow\n          (core_models.ops.control_flow.ControlFlow\n            (core_models.result.Result\n              (rust_primitives.hax.Tuple2 u32 u32)\n              Error)\n            (rust_primitives.hax.Tuple2 rust_primitives.hax.Tuple0 u32))\n          u32)))))\n  with\n    | (core_models.ops.control_flow.ControlFlow.Break  ret) => do (pure ret)\n    | (core_models.ops.control_flow.ControlFlow.Continue  e) => do\n      (pure (core_models.result.Result.Ok (rust_primitives.hax.Tuple2.mk e e)))\n\nend lean_tests.loops.errors\n\n\nnamespace lean_tests.matching\n\n@[spec]\ndef test_const_matching (x : u32) (c : Char) (s : String) (b : Bool) :\n    RustM u32 := do\n  let x : u32 ←\n    match x with | 0 => do (pure (42 : u32)) | _ => do (pure (0 : u32));\n  let c : u32 ←\n    match c with | 'a' => do (pure (42 : u32)) | _ => do (pure (0 : u32));\n  let s : u32 ←\n    match s with | \\\"Hello\\\" => do (pure (42 : u32)) | _ => do (pure (0 : u32));\n  let b : u32 ←\n    match b with | true => do (pure (42 : u32)) | false => do (pure (0 : u32));\n  ((← ((← (x +? c)) +? s)) +? b)\n\n@[spec]\ndef test_binding_subpattern_matching\n    (x : (rust_primitives.hax.Tuple2 u8 (rust_primitives.hax.Tuple2 u8 u8))) :\n    RustM u8 := do\n  match x with\n    | ⟨0, pair@⟨a, b⟩⟩ => do\n      ((← ((← (a +? b)) +? (rust_primitives.hax.Tuple2._0 pair)))\n        +? (rust_primitives.hax.Tuple2._1 pair))\n    | _ => do (pure (0 : u8))\n\ninductive test_ellipsis_records.E : Type\n| C (f1 : u8) (f2 : u8) (f3 : u8) (f4 : u8) : test_ellipsis_records.E\n\n@[spec]\ndef test_ellipsis_records (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let c : test_ellipsis_records.E :=\n    (test_ellipsis_records.E.C\n      (f1 := (1 : u8))\n      (f2 := (2 : u8))\n      (f3 := (3 : u8))\n      (f4 := (4 : u8)));\n  let _ ←\n    match c with | (test_ellipsis_records.E.C _ ..) 
=> do (hax_lib.assert true);\n  let _ ←\n    match c with\n      | (test_ellipsis_records.E.C (f1 := f1) ..) => do\n        (hax_lib.assert (← (f1 ==? (1 : u8))));\n  let _ ←\n    match c with\n      | (test_ellipsis_records.E.C (f1 := f1) (f2 := f2) ..) => do\n        (hax_lib.assert (← ((← (f1 ==? (1 : u8))) &&? (← (f2 ==? (2 : u8))))));\n  let _ ←\n    match c with\n      | (test_ellipsis_records.E.C (f2 := f2) (f4 := f4) ..) => do\n        (hax_lib.assert (← ((← (f2 ==? (2 : u8))) &&? (← (f4 ==? (4 : u8))))));\n  let _ ←\n    match c with\n      | (test_ellipsis_records.E.C  (f1 := f1) (f2 := f2) (f3 := f3) (f4 := f4))\n        => do\n        (hax_lib.assert\n          (← ((← ((← ((← (f1 ==? (1 : u8))) &&? (← (f2 ==? (2 : u8)))))\n              &&? (← (f3 ==? (3 : u8)))))\n            &&? (← (f4 ==? (4 : u8))))));\n  (pure rust_primitives.hax.Tuple0.mk)\n\nstructure test_ellipsis_structs.S where\n  f1 : u8\n  f2 : u8\n  f3 : u8\n  f4 : u8\n\n@[spec]\ndef test_ellipsis_structs (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let c : test_ellipsis_structs.S :=\n    (test_ellipsis_structs.S.mk\n      (f1 := (1 : u8))\n      (f2 := (2 : u8))\n      (f3 := (3 : u8))\n      (f4 := (4 : u8)));\n  let _ ← match c with | _ => do (hax_lib.assert true);\n  let _ ←\n    match c with | {f1 := f1, ..} => do (hax_lib.assert (← (f1 ==? (1 : u8))));\n  let _ ←\n    match c with\n      | {f1 := f1, f2 := f2, ..} => do\n        (hax_lib.assert (← ((← (f1 ==? (1 : u8))) &&? (← (f2 ==? (2 : u8))))));\n  let _ ←\n    match c with\n      | {f2 := f2, f4 := f4, ..} => do\n        (hax_lib.assert (← ((← (f2 ==? (2 : u8))) &&? (← (f4 ==? (4 : u8))))));\n  let _ ←\n    match c with\n      | {f1 := f1, f2 := f2, f3 := f3, f4 := f4} => do\n        (hax_lib.assert\n          (← ((← ((← ((← (f1 ==? (1 : u8))) &&? (← (f2 ==? (2 : u8)))))\n              &&? (← (f3 ==? (3 : u8)))))\n            &&? (← (f4 ==? 
(4 : u8))))));\n  (pure rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef test_ellipsis_bare_tuples (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let t : (rust_primitives.hax.Tuple4 u8 u8 u8 u8) :=\n    (rust_primitives.hax.Tuple4.mk (1 : u8) (2 : u8) (3 : u8) (4 : u8));\n  let _ ← match t with | ⟨_, _, _, _⟩ => do (hax_lib.assert true);\n  let _ ←\n    match t with | ⟨a, _, _, _⟩ => do (hax_lib.assert (← (a ==? (1 : u8))));\n  let _ ←\n    match t with\n      | ⟨a, b, _, _⟩ => do\n        (hax_lib.assert (← ((← (a ==? (1 : u8))) &&? (← (b ==? (2 : u8))))));\n  let _ ←\n    match t with | ⟨_, _, _, d⟩ => do (hax_lib.assert (← (d ==? (4 : u8))));\n  let _ ←\n    match t with\n      | ⟨_, _, c, d⟩ => do\n        (hax_lib.assert (← ((← (c ==? (3 : u8))) &&? (← (d ==? (4 : u8))))));\n  let _ ←\n    match t with\n      | ⟨a, _, _, d⟩ => do\n        (hax_lib.assert (← ((← (a ==? (1 : u8))) &&? (← (d ==? (4 : u8))))));\n  let _ ←\n    match t with\n      | ⟨a, b, c, d⟩ => do\n        (hax_lib.assert\n          (← ((← ((← ((← (a ==? (1 : u8))) &&? (← (b ==? (2 : u8)))))\n              &&? (← (c ==? (3 : u8)))))\n            &&? (← (d ==? (4 : u8))))));\n  (pure rust_primitives.hax.Tuple0.mk)\n\ninductive test_ellipsis_tuples.F : Type\n| D : u8 -> u8 -> u8 -> u8 -> test_ellipsis_tuples.F\n\n@[spec]\ndef test_ellipsis_tuples (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let d : test_ellipsis_tuples.F :=\n    (test_ellipsis_tuples.F.D (1 : u8) (2 : u8) (3 : u8) (4 : u8));\n  let _ ←\n    match d with\n      | (test_ellipsis_tuples.F.D  _ _ _ _) => do (hax_lib.assert true);\n  let _ ←\n    match d with\n      | (test_ellipsis_tuples.F.D  a _ _ _) => do\n        (hax_lib.assert (← (a ==? (1 : u8))));\n  let _ ←\n    match d with\n      | (test_ellipsis_tuples.F.D  a b _ _) => do\n        (hax_lib.assert (← ((← (a ==? (1 : u8))) &&? (← (b ==? 
(2 : u8))))));\n  let _ ←\n    match d with\n      | (test_ellipsis_tuples.F.D  _ _ _ d) => do\n        (hax_lib.assert (← (d ==? (4 : u8))));\n  let _ ←\n    match d with\n      | (test_ellipsis_tuples.F.D  _ _ c d) => do\n        (hax_lib.assert (← ((← (c ==? (3 : u8))) &&? (← (d ==? (4 : u8))))));\n  let _ ←\n    match d with\n      | (test_ellipsis_tuples.F.D  a _ _ d) => do\n        (hax_lib.assert (← ((← (a ==? (1 : u8))) &&? (← (d ==? (4 : u8))))));\n  let _ ←\n    match d with\n      | (test_ellipsis_tuples.F.D  a b c d) => do\n        (hax_lib.assert\n          (← ((← ((← ((← (a ==? (1 : u8))) &&? (← (b ==? (2 : u8)))))\n              &&? (← (c ==? (3 : u8)))))\n            &&? (← (d ==? (4 : u8))))));\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend lean_tests.matching\n\n\nnamespace lean_tests.monadic\n\nstructure S where\n  f : u32\n\n@[spec]\ndef test (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let _ := (9 : i32);\n  let _ ← ((9 : i32) +? (9 : i32));\n  let _ := (S.mk (f := (9 : u32)));\n  let _ := (S.mk (f := (← ((9 : u32) +? (9 : u32)))));\n  let _ := (S.f (S.mk (f := (← ((9 : u32) +? (9 : u32))))));\n  let _ ← ((S.f (S.mk (f := (← ((9 : u32) +? (9 : u32)))))) +? (9 : u32));\n  let _ ←\n    if true then do ((3 : i32) +? (4 : i32)) else do ((3 : i32) -? (4 : i32));\n  let _ ←\n    if (← ((← ((9 : i32) +? (9 : i32))) ==? (0 : i32))) then do\n      ((3 : i32) +? (4 : i32))\n    else do\n      ((3 : i32) -? (4 : i32));\n  let _ ←\n    if true then do\n      let x : i32 := (9 : i32);\n      let _ ← ((3 : i32) +? x);\n      (pure rust_primitives.hax.Tuple0.mk)\n    else do\n      let y : i32 := (19 : i32);\n      let _ ← ((← ((3 : i32) +? y)) -? 
(4 : i32));\n      (pure rust_primitives.hax.Tuple0.mk);\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend lean_tests.monadic\n\n\nnamespace lean_tests.monadic.trait_constants\n\nclass Foo.AssociatedTypes (Self : Type) where\n\nclass Foo (Self : Type)\n  [associatedTypes : outParam (Foo.AssociatedTypes (Self : Type))]\n  where\n  F (Self) : u32\n\nclass Bar.AssociatedTypes (Self : Type) where\n\nclass Bar (Self : Type)\n  [associatedTypes : outParam (Bar.AssociatedTypes (Self : Type))]\n  where\n  B (Self) : u32\n\nstructure Baz where\n  -- no fields\n\n@[reducible] instance Impl.AssociatedTypes : Foo.AssociatedTypes Baz where\n\ninstance Impl : Foo Baz where\n  F := (1 : u32)\n\n@[reducible] instance Impl_1.AssociatedTypes : Bar.AssociatedTypes Baz where\n\ninstance Impl_1 : Bar Baz where\n  B := RustM.of_isOk (do ((Foo.F Baz) -? (1 : u32))) (by rfl)\n\nend lean_tests.monadic.trait_constants\n\n\nnamespace lean_tests.nested_control_flow\n\n@[spec]\ndef nested_control_flow (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let x1 : i32 ←\n    ((1 : i32)\n      +? (← if true then do (pure (0 : i32)) else do (pure (1 : i32))));\n  let x2 : i32 ←\n    ((1 : i32)\n      +? (← match (rust_primitives.hax.Tuple2.mk (1 : i32) (2 : i32)) with\n        | _ => do (pure (0 : i32))));\n  let x : i32 := (9 : i32);\n  let x3 : i32 ← ((1 : i32) +? (← (x +? (1 : i32))));\n  (pure rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef explicit_hoisting (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let x1_tmp : i32 ← if true then do (pure (0 : i32)) else do (pure (1 : i32));\n  let x1 : i32 ← ((1 : i32) +? x1_tmp);\n  let x2_tmp : i32 ←\n    match (rust_primitives.hax.Tuple2.mk (1 : i32) (2 : i32)) with\n      | _ => do (pure (0 : i32));\n  let x2 : i32 ← ((1 : i32) +? x2_tmp);\n  let x3_tmp_x : i32 := (9 : i32);\n  let x3_tmp : i32 ← (x3_tmp_x +? (1 : i32));\n  let x3 : i32 ← ((1 : i32) +? 
x3_tmp);\n  (pure rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef complex_nesting (_ : rust_primitives.hax.Tuple0) :\n    RustM\n    (rust_primitives.hax.Tuple2\n      rust_primitives.hax.Tuple0\n      rust_primitives.hax.Tuple0)\n    := do\n  let x1 : i32 ←\n    if true then do\n      let y : i32 ←\n        if false then do\n          let z : i32 ←\n            match rust_primitives.hax.Tuple0.mk with | _ => do (pure (9 : i32));\n          let z : i32 ← ((1 : i32) +? z);\n          (z +? (1 : i32))\n        else do\n          let z : i32 := (9 : i32);\n          let z : i32 ← (z +? (1 : i32));\n          (pure z);\n      let y : i32 ← (y +? (1 : i32));\n      (y +? (1 : i32))\n    else do\n      (pure (0 : i32));\n  let x1 : i32 ← (x1 +? (1 : i32));\n  let x2 : i32 ←\n    match (core_models.option.Option.Some (89 : i32)) with\n      | (core_models.option.Option.Some  a) => do\n        let y : i32 ← ((1 : i32) +? a);\n        let y : i32 ← (y +? (1 : i32));\n        if (← (y ==? (0 : i32))) then do\n          let z : i32 := (9 : i32);\n          let z : i32 ← ((← (z +? y)) +? (1 : i32));\n          (pure z)\n        else do\n          (pure (10 : i32))\n      | (core_models.option.Option.None ) => do\n        let y : i32 ←\n          if false then do\n            (pure (9 : i32))\n          else do\n            let z : i32 := (9 : i32);\n            let z : i32 ← (z +? (1 : i32));\n            (z +? (9 : i32));\n        let y : i32 ← (y +? 
(1 : i32));\n        (pure y);\n  (pure (rust_primitives.hax.Tuple2.mk\n    rust_primitives.hax.Tuple0.mk\n    rust_primitives.hax.Tuple0.mk))\n\nend lean_tests.nested_control_flow\n\n\nnamespace lean_tests.opaque\n\nopaque an_opaque_fn (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0\n\nclass T.AssociatedTypes (Self : Type) where\n  A : Type\n\nattribute [reducible] T.AssociatedTypes.A\n\nabbrev T.A :=\n  T.AssociatedTypes.A\n\nclass T (Self : Type)\n  [associatedTypes : outParam (T.AssociatedTypes (Self : Type))]\n  where\n  f (Self) : (rust_primitives.hax.Tuple0 -> RustM rust_primitives.hax.Tuple0)\n\nstructure S where\n  -- no fields\n\n@[instance] opaque Impl.AssociatedTypes :\n  T.AssociatedTypes S :=\n  by constructor <;> exact Inhabited.default\n\n@[instance] opaque Impl : T S := by constructor <;> exact Inhabited.default\n\nopaque OpaqueStruct : Type\n\nend lean_tests.opaque\n\n\nnamespace lean_tests.specs\n\ndef test (x : u8) : RustM u8 := do (pure x)\n\nset_option hax_mvcgen.specset \\\"bv\\\" in\n@[hax_spec]\ndef test.spec (x : u8) :\n    Spec\n      (requires := do (x >? (0 : u8)))\n      (ensures := fun r => do (r ==? x))\n      (test (x : u8)) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [test] <;> bv_decide\n}\n\ndef use_previous_result (x : u8) : RustM u8 := do (test x)\n\nset_option hax_mvcgen.specset \\\"bv\\\" in\n@[hax_spec]\ndef use_previous_result.spec (x : u8) :\n    Spec\n      (requires := do (x >? (0 : u8)))\n      (ensures := fun r => do (r ==? 
x))\n      (use_previous_result (x : u8)) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [use_previous_result] <;> bv_decide\n}\n\ndef test_proof (x : u8) : RustM u8 := do (pure x)\n\nset_option hax_mvcgen.specset \\\"bv\\\" in\n@[hax_spec]\ndef test_proof.spec (x : u8) :\n    Spec\n      (requires := do (x >? (0 : u8)))\n      (ensures := fun r => do (r ==? x))\n      (test_proof (x : u8)) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by unfold lean_tests.specs.test_proof; hax_bv_decide\n}\n\ndef square (x : u8) : RustM u8 := do (x *? x)\n\nset_option hax_mvcgen.specset \\\"bv\\\" in\n@[hax_spec]\ndef square.spec (x : u8) :\n    Spec\n      (requires := do (x <? (16 : u8)))\n      (ensures := fun res => do (res >=? x))\n      (square (x : u8)) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [square] <;> bv_decide\n}\n\ndef forall_and_exists (x : u8) : RustM u8 := do (pure x)\n\nset_option hax_mvcgen.specset \\\"int\\\" in\n@[hax_spec]\ndef forall_and_exists.spec (x : u8) :\n    Spec\n      (requires := do\n        (hax_lib.prop.constructors.forall\n          (fun i =>\n            (do\n            (hax_lib.prop.constructors.implies\n              (← (hax_lib.prop.constructors.from_bool (← (i <? (20 : u8)))))\n              (← (hax_lib.prop.constructors.from_bool (← (x >? i))))) :\n            RustM hax_lib.prop.Prop))))\n      (ensures := fun\n          r => do\n          (hax_lib.prop.constructors.not\n            (← (hax_lib.prop.constructors.exists\n              (fun i =>\n                (do\n                (hax_lib.prop.constructors.not\n                  (← (hax_lib.prop.constructors.implies\n                    (← (hax_lib.prop.constructors.from_bool\n                      (← (i <? 
(20 : u8)))))\n                    (← (hax_lib.prop.constructors.from_bool (← (r >? i))))))) :\n                RustM hax_lib.prop.Prop))))))\n      (forall_and_exists (x : u8)) := {\n  pureRequires := by hax_construct_pure <;> grind\n  pureEnsures := by hax_construct_pure <;> grind\n  contract := by hax_mvcgen [forall_and_exists] <;> grind\n}\n\n--  Test function without arguments\n--  https://github.com/cryspen/hax/issues/1856\ndef fn_without_args (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\nset_option hax_mvcgen.specset \\\"bv\\\" in\n@[hax_spec]\ndef fn_without_args.spec (_ : rust_primitives.hax.Tuple0) :\n    Spec\n      (requires := do pure True)\n      (ensures := fun _ => do (pure true))\n      (fn_without_args ⟨⟩) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [fn_without_args] <;> bv_decide\n}\n\nend lean_tests.specs\n\n\nnamespace lean_tests.specs.issue_1852\n\nstructure T where\n  -- no fields\n\n@[spec]\ndef Impl.test (self : T) : RustM Bool := do (pure true)\n\ndef Impl.func (self : T) : RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\nset_option hax_mvcgen.specset \\\"bv\\\" in\n@[hax_spec]\ndef Impl.func.spec (self : T) :\n    Spec\n      (requires := do (Impl.test self))\n      (ensures := fun _ => pure True)\n      (Impl.func (self : T)) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [Impl.func] <;> bv_decide\n}\n\nend lean_tests.specs.issue_1852\n\n\nnamespace lean_tests.specs\n\ndef custom_pure_proofs (x : u8) : RustM rust_primitives.hax.Tuple0 := do\n  (pure rust_primitives.hax.Tuple0.mk)\n\nset_option hax_mvcgen.specset \\\"bv\\\" in\n@[hax_spec]\ndef custom_pure_proofs.spec (x : u8) :\n    Spec\n      (requires := do (pure true))\n      (ensures 
:= fun r => do (pure true))\n      (custom_pure_proofs (x : u8)) := {\n  pureRequires := ⟨True, by mvcgen⟩\n  pureEnsures := ⟨fun _ => True, by intros; mvcgen⟩\n  contract := by hax_mvcgen [custom_pure_proofs] <;> bv_decide\n}\n\nend lean_tests.specs\n\n\nnamespace lean_tests.specs.issue_1945\n\ndef mktuple (a : i32) : RustM Bool := do let x : i32 := a; (a ==? (0 : i32))\n\nset_option hax_mvcgen.specset \\\"bv\\\" in\n@[hax_spec]\ndef mktuple.spec (a : i32) :\n    Spec\n      (requires := do let x : i32 := a; (a ==? (0 : i32)))\n      (ensures := fun _ => pure True)\n      (mktuple (a : i32)) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [mktuple] <;> bv_decide\n}\n\nend lean_tests.specs.issue_1945\n\n\nnamespace lean_tests.structs\n\nstructure T0 where\n  -- no fields\n\nstructure T1 (A : Type) where\n  _0 : A\n\nstructure T2 (A : Type) (B : Type) where\n  _0 : A\n  _1 : B\n\nstructure T3 (A : Type) (B : Type) (C : Type) where\n  _0 : A\n  _1 : B\n  _2 : C\n\nstructure T3p (A : Type) (B : Type) (C : Type) where\n  _0 : A\n  _1 : (T2 B C)\n\n@[spec]\ndef tuple_structs (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let t0 : T0 := T0.mk;\n  let t1 : (T1 i32) := (T1.mk (1 : i32));\n  let t2 : (T2 i32 i32) := (T2.mk (1 : i32) (2 : i32));\n  let t3 : (T3 T0 (T1 i32) (T2 i32 i32)) :=\n    (T3.mk T0.mk (T1.mk (1 : i32)) (T2.mk (1 : i32) (2 : i32)));\n  let t3p : (T3p T0 (T1 i32) (T2 i32 i32)) :=\n    (T3p.mk T0.mk (T2.mk (T1.mk (1 : i32)) (T2.mk (1 : i32) (2 : i32))));\n  let ⟨⟩ := t0;\n  let ⟨u1⟩ := t1;\n  let ⟨u2, u3⟩ := t2;\n  let ⟨⟨⟩, ⟨_⟩, ⟨_, _⟩⟩ := t3;\n  let ⟨⟨⟩, ⟨⟨_⟩, ⟨_, _⟩⟩⟩ := t3p;\n  let _ := (T1._0 t1);\n  let _ := (T2._0 t2);\n  let _ := (T2._1 t2);\n  let _ := (T3._0 t3);\n  let _ := (T3._1 t3);\n  let _ := (T3._2 t3);\n  let _ := (T2._1 (T3._2 t3));\n  let _ := (T3p._0 t3p);\n  let _ := (T3p._1 t3p);\n  let _ := (T2._0 (T2._1 
(T3p._1 t3p)));\n  let _ := (T2._0 (T3p._1 t3p));\n  let _ := (T2._1 (T3p._1 t3p));\n  let _ ← match t0 with | ⟨⟩ => do (pure rust_primitives.hax.Tuple0.mk);\n  let _ ← match t1 with | ⟨u1⟩ => do (pure rust_primitives.hax.Tuple0.mk);\n  let _ ← match t2 with | ⟨u2, u3⟩ => do (pure rust_primitives.hax.Tuple0.mk);\n  let _ ←\n    match t3 with\n      | ⟨⟨⟩, ⟨u1⟩, ⟨u2, u3⟩⟩ => do (pure rust_primitives.hax.Tuple0.mk);\n  let _ ←\n    match t3p with\n      | ⟨⟨⟩, ⟨⟨u1⟩, ⟨u2, u3⟩⟩⟩ => do (pure rust_primitives.hax.Tuple0.mk);\n  (pure rust_primitives.hax.Tuple0.mk)\n\nstructure S1 where\n  f1 : usize\n  f2 : usize\n\nstructure S2 where\n  f1 : S1\n  f2 : usize\n\nstructure S3 where\n  _end : usize\n  _def : usize\n  _theorem : usize\n  _structure : usize\n  _inductive : usize\n\n@[spec]\ndef normal_structs (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let s1 : S1 := (S1.mk (f1 := (0 : usize)) (f2 := (1 : usize)));\n  let s2 : S2 :=\n    (S2.mk\n      (f1 := (S1.mk (f1 := (2 : usize)) (f2 := (3 : usize))))\n      (f2 := (4 : usize)));\n  let s3 : S3 :=\n    (S3.mk\n      (_end := (0 : usize))\n      (_def := (0 : usize))\n      (_theorem := (0 : usize))\n      (_structure := (0 : usize))\n      (_inductive := (0 : usize)));\n  let {f1 := f1, f2 := f2} := s1;\n  let {f1 := f1, f2 := other_name_for_f2} := s1;\n  let {f1 := {f1 := f1, f2 := f2}, f2 := other_name_for_f2} := s2;\n  let\n    {_end := _end,\n     _def := _def,\n     _theorem := _theorem,\n     _structure := _structure,\n     _inductive := _inductive} :=\n    s3;\n  let _ := (rust_primitives.hax.Tuple2.mk (S1.f1 s1) (S1.f2 s1));\n  let _ :=\n    (rust_primitives.hax.Tuple8.mk\n      (S1.f1 s1)\n      (S1.f2 s1)\n      (S1.f1 (S2.f1 s2))\n      (S1.f2 (S2.f1 s2))\n      (S2.f2 s2)\n      (S3._end s3)\n      (S3._def s3)\n      (S3._theorem s3));\n  let _ ←\n    match s1 with\n      | {f1 := f1, f2 := f2} => do (pure rust_primitives.hax.Tuple0.mk);\n  let _ ←\n    match s2 with\n  
    | {f1 := {f1 := f1, f2 := other_name_for_f2}, f2 := f2} => do\n        (pure rust_primitives.hax.Tuple0.mk);\n  match s3 with\n    | {_end := _end,\n       _def := _def,\n       _theorem := _theorem,\n       _structure := _structure,\n       _inductive := _inductive} => do\n      (pure rust_primitives.hax.Tuple0.mk)\n\nend lean_tests.structs\n\n\nnamespace lean_tests.structs.miscellaneous\n\nstructure S where\n  f : i32\n\n@[spec]\ndef test_tuples (_ : rust_primitives.hax.Tuple0) :\n    RustM (rust_primitives.hax.Tuple2 i32 i32) := do\n  let lit : i32 := (1 : i32);\n  let constr : S := (S.mk (f := (42 : i32)));\n  let proj : i32 := (S.f constr);\n  let ite : (rust_primitives.hax.Tuple2 i32 i32) ←\n    if true then do\n      (pure (rust_primitives.hax.Tuple2.mk (1 : i32) (2 : i32)))\n    else do\n      let z : i32 ← ((1 : i32) +? (2 : i32));\n      (pure (rust_primitives.hax.Tuple2.mk z z));\n  (pure (rust_primitives.hax.Tuple2.mk (1 : i32) (2 : i32)))\n\nend lean_tests.structs.miscellaneous\n\n\nnamespace lean_tests.structs.base_expressions\n\nstructure S where\n  f1 : u32\n  f2 : u32\n  f3 : u32\n\n@[spec]\ndef test (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let s1 : S := (S.mk (f1 := (1 : u32)) (f2 := (2 : u32)) (f3 := (3 : u32)));\n  let _ := {s1 with f1 := (0 : u32)};\n  let _ := {s1 with f2 := (0 : u32)};\n  let _ := {s1 with f3 := (0 : u32)};\n  let _ := {s1 with f1 := (0 : u32), f2 := (1 : u32)};\n  let _ := {s1 with f2 := (0 : u32), f3 := (1 : u32)};\n  let _ := {s1 with f3 := (0 : u32), f1 := (2 : u32)};\n  let _ := {s1 with f1 := (0 : u32), f2 := (1 : u32), f3 := (0 : u32)};\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend lean_tests.structs.base_expressions\n\n\nnamespace lean_tests.traits.basic\n\nclass T1.AssociatedTypes (Self : Type) where\n\nclass T1 (Self : Type)\n  [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))]\n  where\n  f1 (Self) : (Self -> RustM usize)\n  f2 (Self) : (Self -> Self -> 
RustM usize)\n\nstructure S where\n  -- no fields\n\n@[reducible] instance Impl.AssociatedTypes : T1.AssociatedTypes S where\n\ninstance Impl : T1 S where\n  f1 := fun (self : S) => do (pure (42 : usize))\n  f2 := fun (self : S) (other : S) => do (pure (43 : usize))\n\n@[spec]\ndef f\n    (T : Type)\n    [trait_constr_f_associated_type_i0 : T1.AssociatedTypes T]\n    [trait_constr_f_i0 : T1 T ]\n    (x : T) :\n    RustM usize := do\n  ((← (T1.f1 T x)) +? (← (T1.f2 T x x)))\n\nend lean_tests.traits.basic\n\n\nnamespace lean_tests.traits.bounds\n\nclass T1.AssociatedTypes (Self : Type) where\n\nclass T1 (Self : Type)\n  [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))]\n  where\n  f1 (Self) : (Self -> RustM usize)\n\nclass T2.AssociatedTypes (Self : Type) where\n\nclass T2 (Self : Type)\n  [associatedTypes : outParam (T2.AssociatedTypes (Self : Type))]\n  where\n  f2 (Self) : (Self -> RustM usize)\n\nclass Test.AssociatedTypes (Self : Type) (T : Type) where\n  [trait_constr_Test_i0 : T2.AssociatedTypes Self]\n  [trait_constr_Test_i1 : T1.AssociatedTypes T]\n\nattribute [instance_reducible, instance]\n  Test.AssociatedTypes.trait_constr_Test_i0\n\nattribute [instance_reducible, instance]\n  Test.AssociatedTypes.trait_constr_Test_i1\n\nclass Test (Self : Type) (T : Type)\n  [associatedTypes : outParam (Test.AssociatedTypes (Self : Type) (T : Type))]\n  where\n  [trait_constr_Test_i0 : T2 Self]\n  [trait_constr_Test_i1 : T1 T]\n  f_test (Self) (T) : (Self -> T -> RustM usize)\n\nattribute [instance_reducible, instance] Test.trait_constr_Test_i0\n\nattribute [instance_reducible, instance] Test.trait_constr_Test_i1\n\nstructure S1 where\n  -- no fields\n\n@[reducible] instance Impl.AssociatedTypes : T1.AssociatedTypes S1 where\n\ninstance Impl : T1 S1 where\n  f1 := fun (self : S1) => do (pure (0 : usize))\n\nstructure S2 where\n  -- no fields\n\n@[reducible] instance Impl_1.AssociatedTypes : T2.AssociatedTypes S2 where\n\ninstance Impl_1 : T2 S2 where\n  f2 
:= fun (self : S2) => do (pure (1 : usize))\n\n@[reducible] instance Impl_2.AssociatedTypes : Test.AssociatedTypes S2 S1 where\n\ninstance Impl_2 : Test S2 S1 where\n  f_test := fun (self : S2) (x : S1) => do\n    ((← ((← (T1.f1 S1 x)) +? (← (T2.f2 S2 self)))) +? (1 : usize))\n\n@[spec]\ndef test (x1 : S1) (x2 : S2) : RustM usize := do\n  ((← (Test.f_test S2 S1 x2 x1)) +? (← (T1.f1 S1 x1)))\n\nend lean_tests.traits.bounds\n\n\nnamespace lean_tests.traits.associated_types\n\nclass Foo.AssociatedTypes (Self : Type) (T : Type) where\n\nclass Foo (Self : Type) (T : Type)\n  [associatedTypes : outParam (Foo.AssociatedTypes (Self : Type) (T : Type))]\n  where\n\nclass Bar.AssociatedTypes (Self : Type) where\n\nclass Bar (Self : Type)\n  [associatedTypes : outParam (Bar.AssociatedTypes (Self : Type))]\n  where\n\nstructure S where\n  -- no fields\n\n@[reducible] instance Impl_2.AssociatedTypes : Bar.AssociatedTypes i16 where\n\ninstance Impl_2 : Bar i16 where\n\n@[reducible] instance Impl_3.AssociatedTypes (A : Type) :\n  Foo.AssociatedTypes (rust_primitives.hax.Tuple2 u32 A) i16\n  where\n\ninstance Impl_3 (A : Type) : Foo (rust_primitives.hax.Tuple2 u32 A) i16 where\n\nclass Chain0.AssociatedTypes (Self : Type) where\n\nclass Chain0 (Self : Type)\n  [associatedTypes : outParam (Chain0.AssociatedTypes (Self : Type))]\n  where\n\n@[reducible] instance Impl_4.AssociatedTypes : Chain0.AssociatedTypes u8 where\n\ninstance Impl_4 : Chain0 u8 where\n\nend lean_tests.traits.associated_types\n\n\nnamespace lean_tests.traits.overlapping_methods\n\nclass T1.AssociatedTypes (Self : Type) where\n\nclass T1 (Self : Type)\n  [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))]\n  where\n  f (Self) : (Self -> RustM usize)\n\nclass T2.AssociatedTypes (Self : Type) where\n\nclass T2 (Self : Type)\n  [associatedTypes : outParam (T2.AssociatedTypes (Self : Type))]\n  where\n  f (Self) : (Self -> RustM usize)\n\nclass T3.AssociatedTypes (Self : Type) where\n\nclass T3 (Self : 
Type)\n  [associatedTypes : outParam (T3.AssociatedTypes (Self : Type))]\n  where\n  f (Self) : (Self -> RustM usize)\n\n@[reducible] instance Impl.AssociatedTypes : T1.AssociatedTypes u32 where\n\ninstance Impl : T1 u32 where\n  f := fun (self : u32) => do (pure (0 : usize))\n\n@[reducible] instance Impl_1.AssociatedTypes : T2.AssociatedTypes u32 where\n\ninstance Impl_1 : T2 u32 where\n  f := fun (self : u32) => do (pure (1 : usize))\n\n@[reducible] instance Impl_2.AssociatedTypes : T3.AssociatedTypes u32 where\n\ninstance Impl_2 : T3 u32 where\n  f := fun (self : u32) => do (pure (2 : usize))\n\n@[spec]\ndef test (_ : rust_primitives.hax.Tuple0) : RustM usize := do\n  let x : u32 := (9 : u32);\n  ((← ((← (T1.f u32 x)) +? (← (T2.f u32 x)))) +? (← (T3.f u32 x)))\n\nend lean_tests.traits.overlapping_methods\n\n\nnamespace lean_tests.traits.inheritance\n\nclass T1.AssociatedTypes (Self : Type) where\n\nclass T1 (Self : Type)\n  [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))]\n  where\n  f1 (Self) : (Self -> RustM usize)\n\nclass T2.AssociatedTypes (Self : Type) where\n\nclass T2 (Self : Type)\n  [associatedTypes : outParam (T2.AssociatedTypes (Self : Type))]\n  where\n  f2 (Self) : (Self -> RustM usize)\n\nclass T3.AssociatedTypes (Self : Type) where\n  [trait_constr_T3_i0 : T2.AssociatedTypes Self]\n  [trait_constr_T3_i1 : T1.AssociatedTypes Self]\n\nattribute [instance_reducible, instance] T3.AssociatedTypes.trait_constr_T3_i0\n\nattribute [instance_reducible, instance] T3.AssociatedTypes.trait_constr_T3_i1\n\nclass T3 (Self : Type)\n  [associatedTypes : outParam (T3.AssociatedTypes (Self : Type))]\n  where\n  [trait_constr_T3_i0 : T2 Self]\n  [trait_constr_T3_i1 : T1 Self]\n  f3 (Self) : (Self -> RustM usize)\n\nattribute [instance_reducible, instance] T3.trait_constr_T3_i0\n\nattribute [instance_reducible, instance] T3.trait_constr_T3_i1\n\nclass Tp1.AssociatedTypes (Self : Type) where\n\nclass Tp1 (Self : Type)\n  [associatedTypes : outParam 
(Tp1.AssociatedTypes (Self : Type))]\n  where\n  f1 (Self) : (Self -> RustM usize)\n\nclass Tp2.AssociatedTypes (Self : Type) where\n  [trait_constr_Tp2_i0 : Tp1.AssociatedTypes Self]\n  [trait_constr_Tp2_i1 : T3.AssociatedTypes Self]\n\nattribute [instance_reducible, instance] Tp2.AssociatedTypes.trait_constr_Tp2_i0\n\nattribute [instance_reducible, instance] Tp2.AssociatedTypes.trait_constr_Tp2_i1\n\nclass Tp2 (Self : Type)\n  [associatedTypes : outParam (Tp2.AssociatedTypes (Self : Type))]\n  where\n  [trait_constr_Tp2_i0 : Tp1 Self]\n  [trait_constr_Tp2_i1 : T3 Self]\n  fp2 (Self) : (Self -> RustM usize)\n\nattribute [instance_reducible, instance] Tp2.trait_constr_Tp2_i0\n\nattribute [instance_reducible, instance] Tp2.trait_constr_Tp2_i1\n\nstructure S where\n  -- no fields\n\n@[reducible] instance Impl.AssociatedTypes : T1.AssociatedTypes S where\n\ninstance Impl : T1 S where\n  f1 := fun (self : S) => do (pure (1 : usize))\n\n@[reducible] instance Impl_1.AssociatedTypes : T2.AssociatedTypes S where\n\ninstance Impl_1 : T2 S where\n  f2 := fun (self : S) => do (pure (2 : usize))\n\n@[reducible] instance Impl_2.AssociatedTypes : T3.AssociatedTypes S where\n\ninstance Impl_2 : T3 S where\n  f3 := fun (self : S) => do (pure (3 : usize))\n\n@[reducible] instance Impl_3.AssociatedTypes : Tp1.AssociatedTypes S where\n\ninstance Impl_3 : Tp1 S where\n  f1 := fun (self : S) => do (pure (10 : usize))\n\n@[reducible] instance Impl_4.AssociatedTypes : Tp2.AssociatedTypes S where\n\ninstance Impl_4 : Tp2 S where\n  fp2 := fun (self : S) => do\n    ((← ((← ((← (Tp1.f1 S self)) +? (← (T1.f1 S self)))) +? (← (T2.f2 S self))))\n      +? (← (T3.f3 S self)))\n\n@[spec]\ndef test (_ : rust_primitives.hax.Tuple0) : RustM usize := do\n  let s : S := S.mk;\n  ((← (T3.f3 S s)) +? 
(1 : usize))\n\nend lean_tests.traits.inheritance\n\n\nnamespace lean_tests.traits.default\n\nclass Easy.AssociatedTypes (Self : Type) where\n\nclass Easy (Self : Type)\n  [associatedTypes : outParam (Easy.AssociatedTypes (Self : Type))]\n  where\n  dft (Self) (self : Self) :RustM usize := do (pure (32 : usize))\n\n@[reducible] instance Impl.AssociatedTypes : Easy.AssociatedTypes usize where\n\ninstance Impl : Easy usize where\n  dft := fun (self : usize) => do (self +? (1 : usize))\n\n@[reducible] instance Impl_1.AssociatedTypes : Easy.AssociatedTypes u32 where\n\ninstance Impl_1 : Easy u32 where\n\nclass T1.AssociatedTypes (Self : Type) where\n\nclass T1 (Self : Type)\n  [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))]\n  where\n  f1 (Self) : (Self -> RustM usize)\n  f2 (Self) (self : Self) :RustM usize := do (pure (1 : usize))\n  f3 (Self) (A : Type) (self : Self) (x : A) :RustM usize := do\n    (pure (1 : usize))\n  f4 (Self)\n    (A : Type)\n    [trait_constr_f4_associated_type_i1 : Easy.AssociatedTypes A]\n    [trait_constr_f4_i1 : Easy A ] (self : Self) (x : A) :RustM usize := do\n    ((← (Easy.dft A x)) +? (1 : usize))\n\nstructure S (A : Type) where\n  _0 : usize\n  _1 : A\n\n@[reducible] instance Impl_2.AssociatedTypes :\n  T1.AssociatedTypes (S usize)\n  where\n\ninstance Impl_2 : T1 (S usize) where\n  f1 := fun (self : (S usize)) => do ((S._0 self) +? (S._1 self))\n  f2 := fun (self : (S usize)) => do (pure (S._1 self))\n\n@[reducible] instance Impl_3.AssociatedTypes : T1.AssociatedTypes (S Bool) where\n\ninstance Impl_3 : T1 (S Bool) where\n  f1 := fun (self : (S Bool)) => do\n    if (S._1 self) then do (pure (S._0 self)) else do (pure (9 : usize))\n  f2 := fun (self : (S Bool)) => do ((S._0 self) +? 
(1 : usize))\n\n@[reducible] instance Impl_4.AssociatedTypes :\n  T1.AssociatedTypes (S alloc.string.String)\n  where\n\ninstance Impl_4 : T1 (S alloc.string.String) where\n  f1 := fun (self : (S alloc.string.String)) => do (pure (0 : usize))\n\nend lean_tests.traits.default\n\n\nnamespace lean_tests.traits.trait_level_args\n\nclass T1.AssociatedTypes (Self : Type) (A : Type) (B : Type) where\n\nclass T1 (Self : Type) (A : Type) (B : Type)\n  [associatedTypes : outParam (T1.AssociatedTypes (Self : Type) (A : Type) (B :\n      Type))]\n  where\n  f1 (Self) (A) (B) (C : Type) (D : Type) :\n    (Self -> RustM rust_primitives.hax.Tuple0)\n  f2 (Self) (A) (B) (C : Type) (D : Type) :\n    (Self -> A -> RustM rust_primitives.hax.Tuple0)\n  f3 (Self) (A) (B) (C : Type) (D : Type) :\n    (Self -> A -> B -> RustM rust_primitives.hax.Tuple0)\n\n@[reducible] instance Impl.AssociatedTypes :\n  T1.AssociatedTypes usize u32 u64\n  where\n\ninstance Impl : T1 usize u32 u64 where\n  f1 := fun (C : Type) (D : Type) (self : usize) => do\n    (pure rust_primitives.hax.Tuple0.mk)\n  f2 := fun (C : Type) (D : Type) (self : usize) (x : u32) => do\n    (pure rust_primitives.hax.Tuple0.mk)\n  f3 := fun (C : Type) (D : Type) (self : usize) (x : u32) (y : u64) => do\n    (pure rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef test\n    (A : Type)\n    (B : Type)\n    (C : Type)\n    (D : Type)\n    (U : Type)\n    [trait_constr_test_associated_type_i0 : T1.AssociatedTypes U A B]\n    [trait_constr_test_i0 : T1 U A B ]\n    (x : U)\n    (a : A)\n    (b : B) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let _ ← (T1.f1 U A B C D x);\n  let _ ← (T1.f2 U A B C D x a);\n  let _ ← (T1.f3 U A B C D x a b);\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend lean_tests.traits.trait_level_args\n\n\nnamespace lean_tests.traits.trait_with_constraints\n\nclass T1.AssociatedTypes (Self : Type) where\n\nclass T1 (Self : Type)\n  [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))]\n  where\n\nclass 
T2.AssociatedTypes (Self : Type) where\n\nclass T2 (Self : Type)\n  [associatedTypes : outParam (T2.AssociatedTypes (Self : Type))]\n  where\n  func (Self)\n    [trait_constr_func_associated_type_i1 : T1.AssociatedTypes Self]\n    [trait_constr_func_i1 : T1 Self ] :\n    (Self -> RustM Bool)\n\n@[reducible] instance Impl.AssociatedTypes\n  (A : Type)\n  [trait_constr_Impl_associated_type_i0 : T1.AssociatedTypes A]\n  [trait_constr_Impl_i0 : T1 A ] :\n  T2.AssociatedTypes A\n  where\n\ninstance Impl\n  (A : Type)\n  [trait_constr_Impl_associated_type_i0 : T1.AssociatedTypes A]\n  [trait_constr_Impl_i0 : T1 A ] :\n  T2 A\n  where\n  func :=\n    fun\n      [trait_constr_func_associated_type_i1 : T1.AssociatedTypes A]\n      [trait_constr_func_i1 : T1 A ] (self : A) => do\n    (pure true)\n\nend lean_tests.traits.trait_with_constraints\n\n\nnamespace lean_tests.traits.associated_constant\n\nclass Foo.AssociatedTypes (Self : Type) where\n\nclass Foo (Self : Type)\n  [associatedTypes : outParam (Foo.AssociatedTypes (Self : Type))]\n  where\n  f (Self) : Bool\n  x (Self) :u8 := (0 : u8)\n\nstructure Bar where\n  -- no fields\n\n@[reducible] instance Impl.AssociatedTypes : Foo.AssociatedTypes Bar where\n\ninstance Impl : Foo Bar where\n  f := true\n  x := RustM.of_isOk (do ((1 : u8) +? (1 : u8))) (by rfl)\n\nclass Baz.AssociatedTypes (Self : Type) where\n\nclass Baz (Self : Type)\n  [associatedTypes : outParam (Baz.AssociatedTypes (Self : Type))]\n  where\n  One (Self) :u32 := (1 : u32)\n\n@[spec]\ndef foo\n    (F : Type)\n    [trait_constr_foo_associated_type_i0 : Baz.AssociatedTypes F]\n    [trait_constr_foo_i0 : Baz F ]\n    (n : u32) :\n    RustM u32 := do\n  (n +? 
(Baz.One F))\n\nend lean_tests.traits.associated_constant\n\n\nnamespace lean_tests.types\n\nabbrev UsizeAlias : Type := usize\n\nabbrev MyOption (A : Type) : Type := (core_models.option.Option A)\n\nabbrev MyResult (A : Type) (B : Type) :\n  Type :=\n  (core_models.result.Result (core_models.option.Option A) B)\n\nabbrev ErrorMonad (A : Type) (E : Type) :\n  Type :=\n  (core_models.result.Result A E)\n\nabbrev StateMonad (A : Type) (S : Type) :\n  Type :=\n  (rust_primitives.hax.Tuple2 A S)\n\nabbrev ESMonad (A : Type) (S : Type) (E : Type) :\n  Type :=\n  (rust_primitives.hax.Tuple2 (core_models.result.Result A E) S)\n\nend lean_tests.types\n\n\nnamespace lean_tests\n\ndef FORTYTWO : usize := (42 : usize)\n\ndef MINUS_FORTYTWO : isize := (-42 : isize)\n\n@[spec]\ndef returns42 (_ : rust_primitives.hax.Tuple0) : RustM usize := do\n  (pure FORTYTWO)\n\n@[spec]\ndef add_two_numbers (x : usize) (y : usize) : RustM usize := do (x +? y)\n\n@[spec]\ndef letBinding (x : usize) (y : usize) : RustM usize := do\n  let useless : rust_primitives.hax.Tuple0 := rust_primitives.hax.Tuple0.mk;\n  let result1 : usize ← (x +? y);\n  let result2 : usize ← (result1 +? (2 : usize));\n  (result2 +? (1 : usize))\n\n@[spec]\ndef closure (_ : rust_primitives.hax.Tuple0) : RustM i32 := do\n  let x : i32 := (41 : i32);\n  let f1 : (i32 -> RustM i32) := (fun y => (do (y +? x) : RustM i32));\n  let f2 : (i32 -> i32 -> RustM i32) :=\n    (fun y z => (do ((← (y +? x)) +? z) : RustM i32));\n  let res1 : i32 ←\n    (core_models.ops.function.Fn.call\n      (i32 -> RustM i32)\n      (rust_primitives.hax.Tuple1 i32)\n      f1\n      (rust_primitives.hax.Tuple1.mk (1 : i32)));\n  let res2 : i32 ←\n    (core_models.ops.function.Fn.call\n      (i32 -> i32 -> RustM i32)\n      (rust_primitives.hax.Tuple2 i32 i32)\n      f2\n      (rust_primitives.hax.Tuple2.mk (2 : i32) (3 : i32)));\n  (res1 +? 
res2)\n\nexample : Nat := 42\n\n@[spec]\ndef test_before_verbatime_single_line (x : u8) : RustM u8 := do (pure (42 : u8))\n\n\ndef multiline : Unit := ()\n\n\n@[spec]\ndef test_before_verbatim_multi_line (x : u8) : RustM u8 := do (pure (32 : u8))\n\ndef NULL_CHAR : Char := '\\u0000'\n\n--  Test string literals with escape sequences\n@[spec]\ndef string_escapes (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let _empty : String := \\\"\\\";\n  let _plain : String := \\\"hello world\\\";\n  let _with_quotes : String := \\\"she said \\\\\\\"hello\\\\\\\"\\\";\n  let _with_single_quote : String := \\\"it\\\\'s fine\\\";\n  let _with_backslash : String := \\\"path\\\\\\\\to\\\\\\\\file\\\";\n  let _with_newline : String := \\\"line1\\\\nline2\\\";\n  let _with_tab : String := \\\"col1\\\\tcol2\\\";\n  let _with_carriage_return : String := \\\"before\\\\rafter\\\";\n  let _mixed : String := \\\"say \\\\\\\"hello\\\\\\\"\\\\nand\\\\t\\\\'goodbye\\\\'\\\\\\\\end\\\";\n  let _carriage_return : String := \\\"carriage\\\\rreturn\\\";\n  let _control_chars : String := \\\"null\\\\x00byte bell\\\\x07char font\\\\x1b[0mreset\\\";\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend lean_tests\n\n\nnamespace lean_tests.associated_types.basic\n\nclass Iterable.AssociatedTypes (Self : Type) where\n  Item : Type\n\nattribute [reducible] Iterable.AssociatedTypes.Item\n\nabbrev Iterable.Item :=\n  Iterable.AssociatedTypes.Item\n\nclass Iterable (Self : Type)\n  [associatedTypes : outParam (Iterable.AssociatedTypes (Self : Type))]\n  where\n  first (Self) : (Self -> RustM associatedTypes.Item)\n\nend lean_tests.associated_types.basic\n\n\nnamespace lean_tests.associated_types.projection\n\nclass T2.AssociatedTypes (Self : Type) where\n  A2 : Type\n\nattribute [reducible] T2.AssociatedTypes.A2\n\nabbrev T2.A2 :=\n  T2.AssociatedTypes.A2\n\nclass T2 (Self : Type)\n  [associatedTypes : outParam (T2.AssociatedTypes (Self : Type))]\n  where\n  
[trait_constr_A2_associated_type_i1 : T1.AssociatedTypes associatedTypes.A2]\n  [trait_constr_A2_i1 : T1 associatedTypes.A2 ]\n  f (Self) : (rust_primitives.hax.Tuple0 -> RustM (T1.A1 associatedTypes.A2))\n\nend lean_tests.associated_types.projection\n\n\nnamespace lean_tests.associated_types.multiple_associated_types\n\nclass Pair.AssociatedTypes (Self : Type) where\n  First : Type\n  Second : Type\n\nattribute [reducible] Pair.AssociatedTypes.First\n\nattribute [reducible] Pair.AssociatedTypes.Second\n\nabbrev Pair.First :=\n  Pair.AssociatedTypes.First\n\nabbrev Pair.Second :=\n  Pair.AssociatedTypes.Second\n\nclass Pair (Self : Type)\n  [associatedTypes : outParam (Pair.AssociatedTypes (Self : Type))]\n  where\n  first (Self) : (Self -> RustM associatedTypes.First)\n  second (Self) : (Self -> RustM associatedTypes.Second)\n\nend lean_tests.associated_types.multiple_associated_types\n\n\nnamespace lean_tests.enums\n\ninductive MyList (T : Type) : Type\n| Nil : MyList (T : Type)\n| Cons (hd : T) (tl : (MyList T)) : MyList (T : Type)\n\nend lean_tests.enums\n\n\nnamespace lean_tests.recursion\n\n@[spec]\ndef factorial (n : u32) : RustM u32 := do\n  if (← (n ==? (0 : u32))) then do\n    (pure (1 : u32))\n  else do\n    (n *? (← (factorial (← (n -? 
(1 : u32))))))\npartial_fixpoint\n\nend lean_tests.recursion\n\n\nnamespace lean_tests.traits.associated_types\n\nclass T1.AssociatedTypes (Self : Type) where\n  T : Type\n\nattribute [reducible] T1.AssociatedTypes.T\n\nabbrev T1.T :=\n  T1.AssociatedTypes.T\n\nclass T1 (Self : Type)\n  [associatedTypes : outParam (T1.AssociatedTypes (Self : Type))]\n  where\n  f (Self) : (Self -> associatedTypes.T -> RustM associatedTypes.T)\n\nclass T3.AssociatedTypes (Self : Type) where\n  T : Type\n  Tp : Type\n\nattribute [reducible] T3.AssociatedTypes.T\n\nattribute [reducible] T3.AssociatedTypes.Tp\n\nabbrev T3.T :=\n  T3.AssociatedTypes.T\n\nabbrev T3.Tp :=\n  T3.AssociatedTypes.Tp\n\nclass T3 (Self : Type)\n  [associatedTypes : outParam (T3.AssociatedTypes (Self : Type))]\n  where\n  [trait_constr_T_associated_type_i1 : Bar.AssociatedTypes associatedTypes.T]\n  [trait_constr_T_i1 : Bar associatedTypes.T ]\n  (A : Type)\n  [trait_constr_Tp_associated_type_i1 : Foo.AssociatedTypes\n    associatedTypes.Tp\n    associatedTypes.T]\n  [trait_constr_Tp_i1 : Foo associatedTypes.Tp associatedTypes.T ]\n  f (Self)\n    (A : Type)\n    [trait_constr_f_associated_type_i1 : Bar.AssociatedTypes A]\n    [trait_constr_f_i1 : Bar A ] :\n    (Self -> associatedTypes.T -> associatedTypes.Tp -> RustM usize)\n\nclass Chain1.AssociatedTypes (Self : Type) where\n  A : Type\n  B : Type\n\nattribute [reducible] Chain1.AssociatedTypes.A\n\nattribute [reducible] Chain1.AssociatedTypes.B\n\nabbrev Chain1.A :=\n  Chain1.AssociatedTypes.A\n\nabbrev Chain1.B :=\n  Chain1.AssociatedTypes.B\n\nclass Chain1 (Self : Type)\n  [associatedTypes : outParam (Chain1.AssociatedTypes (Self : Type))]\n  where\n  [trait_constr_A_associated_type_i1 : Chain0.AssociatedTypes associatedTypes.A]\n  [trait_constr_A_i1 : Chain0 associatedTypes.A ]\n  [trait_constr_B_associated_type_i1 : Chain0.AssociatedTypes associatedTypes.B]\n  [trait_constr_B_i1 : Chain0 associatedTypes.B ]\n\nend 
lean_tests.traits.associated_types\n\n\nnamespace lean_tests.associated_types.basic\n\n@[spec]\ndef just_the_first\n    (I : Type)\n    [trait_constr_just_the_first_associated_type_i0 : Iterable.AssociatedTypes\n      I]\n    [trait_constr_just_the_first_i0 : Iterable I ]\n    (iter : I) :\n    RustM (Iterable.Item I) := do\n  (Iterable.first I iter)\n\n@[spec]\ndef first_plus_1\n    (I : Type)\n    [trait_constr_first_plus_1_associated_type_i0 : Iterable.AssociatedTypes I]\n    [trait_constr_first_plus_1_i0 : Iterable\n      I\n      (associatedTypes := {\n        show Iterable.AssociatedTypes I\n        by infer_instance\n        with Item := i32})]\n    (iter : I) :\n    RustM i32 := do\n  ((← (Iterable.first I iter)) +? (1 : i32))\n\n@[reducible] instance Impl.AssociatedTypes : Iterable.AssociatedTypes Bool where\n  Item := i32\n\ninstance Impl : Iterable Bool where\n  first := fun (self : Bool) => do (pure (3 : i32))\n\n@[spec]\ndef a (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  let _ ← (first_plus_1 Bool true);\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend lean_tests.associated_types.basic\n\n\nnamespace lean_tests.associated_types.multiple_associated_types\n\n@[spec]\ndef get_both\n    (P : Type)\n    [trait_constr_get_both_associated_type_i0 : Pair.AssociatedTypes P]\n    [trait_constr_get_both_i0 : Pair P ]\n    (pair : P) :\n    RustM (rust_primitives.hax.Tuple2 (Pair.First P) (Pair.Second P)) := do\n  (pure (rust_primitives.hax.Tuple2.mk\n    (← (Pair.first P pair))\n    (← (Pair.second P pair))))\n\n@[reducible] instance Impl.AssociatedTypes :\n  Pair.AssociatedTypes (rust_primitives.hax.Tuple2 i32 Bool)\n  where\n  First := i32\n  Second := Bool\n\ninstance Impl : Pair (rust_primitives.hax.Tuple2 i32 Bool) where\n  first := fun (self : (rust_primitives.hax.Tuple2 i32 Bool)) => do\n    (pure (rust_primitives.hax.Tuple2._0 self))\n  second := fun (self : (rust_primitives.hax.Tuple2 i32 Bool)) => do\n    (pure 
(rust_primitives.hax.Tuple2._1 self))\n\n@[spec]\ndef b (_ : rust_primitives.hax.Tuple0) : RustM rust_primitives.hax.Tuple0 := do\n  let pair : (rust_primitives.hax.Tuple2 i32 Bool) :=\n    (rust_primitives.hax.Tuple2.mk (42 : i32) true);\n  let both : (rust_primitives.hax.Tuple2 i32 Bool) ←\n    (get_both (rust_primitives.hax.Tuple2 i32 Bool) pair);\n  (pure rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef get_first_as_i32\n    (P : Type)\n    [trait_constr_get_first_as_i32_associated_type_i0 : Pair.AssociatedTypes P]\n    [trait_constr_get_first_as_i32_i0 : Pair\n      P\n      (associatedTypes := {\n        show Pair.AssociatedTypes P\n        by infer_instance\n        with First := i32})]\n    (pair : P) :\n    RustM i32 := do\n  (Pair.first P pair)\n\nend lean_tests.associated_types.multiple_associated_types\n\n\nnamespace lean_tests.enums\n\n@[spec]\ndef enums (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let e_v1 : E := E.V1;\n  let e_v2 : E := E.V2;\n  let e_v3 : E := (E.V3 (23 : usize));\n  let e_v4 : E := (E.V4 (23 : usize) (12 : usize) (1 : usize));\n  let e_v5 : E := (E.V5 (f1 := (23 : usize)) (f2 := (43 : usize)));\n  let e_v6 : E := (E.V6 (f1 := (12 : usize)) (f2 := (13 : usize)));\n  let nil : (MyList usize) := MyList.Nil;\n  let cons_1 : (MyList usize) := (MyList.Cons (hd := (1 : usize)) (tl := nil));\n  let cons_2_1 : (MyList usize) :=\n    (MyList.Cons (hd := (2 : usize)) (tl := cons_1));\n  match e_v1 with\n    | (E.V1 ) => do (pure rust_primitives.hax.Tuple0.mk)\n    | (E.V2 ) => do (pure rust_primitives.hax.Tuple0.mk)\n    | (E.V3  _) => do (pure rust_primitives.hax.Tuple0.mk)\n    | (E.V4  x1 x2 x3) => do\n      let y1 : usize ← (x1 +? x2);\n      let y2 : usize ← (y1 -? x2);\n      let y3 : usize ← (y2 +? 
x3);\n      (pure rust_primitives.hax.Tuple0.mk)\n    | (E.V5  (f1 := f1) (f2 := f2)) => do (pure rust_primitives.hax.Tuple0.mk)\n    | (E.V6  (f1 := f1) (f2 := other_name_for_f2)) => do\n      (pure rust_primitives.hax.Tuple0.mk)\n\nend lean_tests.enums\n\n\nnamespace lean_tests.traits.associated_types\n\n@[reducible] instance Impl.AssociatedTypes : T1.AssociatedTypes S where\n  T := i32\n\ninstance Impl : T1 S where\n  f := fun (self : S) (x : i32) => do (pure (2121 : i32))\n\nclass Chain2.AssociatedTypes (Self : Type) where\n  [trait_constr_Chain2_i0 : Chain1.AssociatedTypes Self]\n\nattribute [instance_reducible, instance]\n  Chain2.AssociatedTypes.trait_constr_Chain2_i0\n\nclass Chain2 (Self : Type)\n  [associatedTypes : outParam (Chain2.AssociatedTypes (Self : Type))]\n  where\n  [trait_constr_Chain2_i0 : Chain1 Self]\n\nattribute [instance_reducible, instance] Chain2.trait_constr_Chain2_i0\n\nclass Chain3.AssociatedTypes (Self : Type) where\n  [trait_constr_Chain3_i0 : Chain2.AssociatedTypes Self]\n\nattribute [instance_reducible, instance]\n  Chain3.AssociatedTypes.trait_constr_Chain3_i0\n\nclass Chain3 (Self : Type)\n  [associatedTypes : outParam (Chain3.AssociatedTypes (Self : Type))]\n  where\n  [trait_constr_Chain3_i0 : Chain2 Self]\n  f (Self) : (rust_primitives.hax.Tuple0 -> RustM (Chain1.A Self))\n\nattribute [instance_reducible, instance] Chain3.trait_constr_Chain3_i0\n\n@[reducible] instance Impl_5.AssociatedTypes : Chain1.AssociatedTypes u8 where\n  A := u8\n  B := u8\n\ninstance Impl_5 : Chain1 u8 where\n\n@[reducible] instance Impl_6.AssociatedTypes : Chain2.AssociatedTypes u8 where\n\ninstance Impl_6 : Chain2 u8 where\n\n@[reducible] instance Impl_7.AssociatedTypes : Chain3.AssociatedTypes u8 where\n\ninstance Impl_7 : Chain3 u8 where\n  f := fun (_ : rust_primitives.hax.Tuple0) => do (pure (0 : u8))\n\nclass T2.AssociatedTypes (Self : Type) where\n  T : Type\n\nattribute [reducible] T2.AssociatedTypes.T\n\nabbrev T2.T :=\n  
T2.AssociatedTypes.T\n\nclass T2 (Self : Type)\n  [associatedTypes : outParam (T2.AssociatedTypes (Self : Type))]\n  where\n  [trait_constr_T_associated_type_i1 : T1.AssociatedTypes associatedTypes.T]\n  [trait_constr_T_i1 : T1 associatedTypes.T ]\n  f (Self) : (Self -> associatedTypes.T -> RustM usize)\n\n@[reducible] instance Impl_1.AssociatedTypes : T2.AssociatedTypes S where\n  T := S\n\ninstance Impl_1 : T2 S where\n  f := fun (self : S) (x : S) => do (pure (21 : usize))\n\nend lean_tests.traits.associated_types\n\n\"\"\"\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__let-else into-coq.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: coq\n  info:\n    name: let-else\n    manifest: let-else/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Let_else.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n(* NotImplementedYet *)\n\n\n\nDefinition let_else (opt : t_Option ((t_u32))) : bool :=\n  run (match opt with\n  | Option_Some (x) =>\n    ControlFlow_Continue ((true : bool))\n  | _ =>\n    ControlFlow_Break ((false : bool))\n  end).\n\nDefinition let_else_different_type (opt : t_Option ((t_u32))) : bool :=\n  run (let hoist1 := match opt with\n  | Option_Some (x) =>\n    ControlFlow_Continue (Option_Some (f_add (x) ((1 : t_u32))))\n  | _ =>\n    ControlFlow_Break ((false : bool))\n  end in\n  ControlFlow_Continue (let_else (hoist1))).\n'''\n_CoqProject = '''\n-R ./ TODO\n-arg -w\n-arg all\n\nLet_else.v'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__let-else into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: let-else\n    manifest: let-else/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Let_else.fst\" = '''\nmodule Let_else\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet let_else (opt: Core_models.Option.t_Option u32) : bool =\n  match opt <: Core_models.Option.t_Option u32 with\n  | Core_models.Option.Option_Some x -> true\n  | _ -> false\n\nlet let_else_different_type (opt: Core_models.Option.t_Option u32) : bool =\n  match opt <: Core_models.Option.t_Option u32 with\n  | Core_models.Option.Option_Some x ->\n    let_else (Core_models.Option.Option_Some (x +! mk_u32 1 <: u32)\n        <:\n        Core_models.Option.t_Option u32)\n  | _ -> false\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__let-else into-ssprove.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: ssprove\n  info:\n    name: let-else\n    manifest: let-else/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Let_else.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp Require Import word_ssrZ word.\n(* From Jasmin Require Import word. *)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? 
todo(item)*)\n\nEquations let_else (opt : both (t_Option int32)) : both 'bool :=\n  let_else opt  :=\n    run (matchb opt with\n    | Option_Some_case x =>\n      letb x := ret_both ((x) : (int32)) in\n      ControlFlow_Continue (ret_both (true : 'bool))\n    | _ =>\n      ControlFlow_Break (ret_both (false : 'bool))\n    end) : both 'bool.\nFail Next Obligation.\n\nEquations let_else_different_type (opt : both (t_Option int32)) : both 'bool :=\n  let_else_different_type opt  :=\n    run (letm[choice_typeMonad.result_bind_code 'bool] hoist1 := matchb opt with\n    | Option_Some_case x =>\n      letb x := ret_both ((x) : (int32)) in\n      ControlFlow_Continue (Option_Some (x .+ (ret_both (1 : int32))))\n    | _ =>\n      ControlFlow_Break (ret_both (false : 'bool))\n    end in\n    ControlFlow_Continue (let_else hoist1)) : both 'bool.\nFail Next Obligation.\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__literals into-coq.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: coq\n  info:\n    name: literals\n    manifest: literals/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Literals.v\" = \"\"\"\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n(* NotImplementedYet *)\n\n\n\n\n\nDefinition math_integers (x : t_Int) `{andb (f_gt (x) (impl_Int__e_unsafe_from_str ((\\\"0\\\"%string : string)))) (f_lt (x) (impl_Int__e_unsafe_from_str ((\\\"16\\\"%string : string)))) = true} : t_u8 :=\n  let _ : t_Int := f_lift ((3 : t_usize)) in\n  let e_neg_dec := impl_Int__e_unsafe_from_str ((\\\"-340282366920938463463374607431768211455000\\\"%string : string)) in\n  let e_pos_dec := impl_Int__e_unsafe_from_str ((\\\"340282366920938463463374607431768211455000\\\"%string : string)) in\n  let e_neg_hex := impl_Int__e_unsafe_from_str ((\\\"-340282366920938463463374607431768211455000\\\"%string : string)) in\n  let e_pos_hex := impl_Int__e_unsafe_from_str ((\\\"340282366920938463463374607431768211455000\\\"%string : string)) in\n  let e_neg_octal := impl_Int__e_unsafe_from_str ((\\\"-340282366920938463463374607431768211455000\\\"%string : string)) in\n  let e_pos_octal := impl_Int__e_unsafe_from_str ((\\\"340282366920938463463374607431768211455000\\\"%string : string)) in\n  let e_neg_bin := 
impl_Int__e_unsafe_from_str ((\\\"-340282366920938463463374607431768211455000\\\"%string : string)) in\n  let e_pos_bin := impl_Int__e_unsafe_from_str ((\\\"340282366920938463463374607431768211455000\\\"%string : string)) in\n  let _ := f_gt (impl_Int__e_unsafe_from_str ((\\\"-340282366920938463463374607431768211455000\\\"%string : string))) (impl_Int__e_unsafe_from_str ((\\\"340282366920938463463374607431768211455000\\\"%string : string))) in\n  let _ := f_lt (x) (x) in\n  let _ := f_ge (x) (x) in\n  let _ := f_le (x) (x) in\n  let _ := f_ne (x) (x) in\n  let _ := f_eq (x) (x) in\n  let _ := f_add (x) (x) in\n  let _ := f_sub (x) (x) in\n  let _ := f_mul (x) (x) in\n  let _ := f_div (x) (x) in\n  let _ : t_i16 := impl_Int__to_i16 (x) in\n  let _ : t_i32 := impl_Int__to_i32 (x) in\n  let _ : t_i64 := impl_Int__to_i64 (x) in\n  let _ : t_i128 := impl_Int__to_i128 (x) in\n  let _ : t_isize := impl_Int__to_isize (x) in\n  let _ : t_u16 := impl_Int__to_u16 (x) in\n  let _ : t_u32 := impl_Int__to_u32 (x) in\n  let _ : t_u64 := impl_Int__to_u64 (x) in\n  let _ : t_u128 := impl_Int__to_u128 (x) in\n  let _ : t_usize := impl_Int__to_usize (x) in\n  impl_Int__to_u8 (f_add (x) (f_mul (x) (x))).\n\nDefinition panic_with_msg '(_ : unit) : unit :=\n  never_to_any (panic_fmt (impl_1__new_const ([(\\\"with msg\\\"%string : string)]))).\n\nRecord Foo_record : Type :=\n  {\n    Foo_f_field : t_u8;\n  }.\n\n\n#[export] Instance settable_Foo_record : Settable _ :=\n  settable! 
(Build_Foo_record) <Foo_f_field>.\n\n\n\n\n\n\n\nDefinition v_CONSTANT : t_Foo :=\n  Foo ((3 : t_u8)).\n\nDefinition numeric '(_ : unit) : unit :=\n  let _ : t_usize := (123 : t_usize) in\n  let _ : t_isize := (-42 : t_isize) in\n  let _ : t_isize := (42 : t_isize) in\n  let _ : t_i32 := (-42 : t_i32) in\n  let _ : t_u128 := (22222222222222222222 : t_u128) in\n  tt.\n\nDefinition patterns '(_ : unit) : unit :=\n  let _ := match (1 : t_u8) with\n  | 2 =>\n    tt\n  | _ =>\n    tt\n  end in\n  let _ := match ((\\\"hello\\\"%string : string),((123 : t_i32),[(\\\"a\\\"%string : string); (\\\"b\\\"%string : string)])) with\n  | (\\\"hello\\\"%string,(123,e_todo)) =>\n    tt\n  | _ =>\n    tt\n  end in\n  let _ := match Foo ((4 : t_u8)) with\n  | Foo (3) =>\n    tt\n  | _ =>\n    tt\n  end in\n  tt.\n\nDefinition casts (x8 : t_u8) (x16 : t_u16) (x32 : t_u32) (x64 : t_u64) (xs : t_usize) : unit :=\n  let _ : t_u64 := f_add (f_add (f_add (f_add (cast (x8)) (cast (x16))) (cast (x32))) (x64)) (cast (xs)) in\n  let _ : t_u32 := f_add (f_add (f_add (f_add (cast (x8)) (cast (x16))) (x32)) (cast (x64))) (cast (xs)) in\n  let _ : t_u16 := f_add (f_add (f_add (f_add (cast (x8)) (x16)) (cast (x32))) (cast (x64))) (cast (xs)) in\n  let _ : t_u8 := f_add (f_add (f_add (f_add (x8) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in\n  let _ : t_i64 := f_add (f_add (f_add (f_add (cast (x8)) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in\n  let _ : t_i32 := f_add (f_add (f_add (f_add (cast (x8)) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in\n  let _ : t_i16 := f_add (f_add (f_add (f_add (cast (x8)) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in\n  let _ : t_i8 := f_add (f_add (f_add (f_add (cast (x8)) (cast (x16))) (cast (x32))) (cast (x64))) (cast (xs)) in\n  tt.\n\nDefinition empty_array '(_ : unit) : unit :=\n  let _ : t_Slice t_u8 := unsize ([]) in\n  tt.\n\nDefinition fn_pointer_cast '(_ : unit) : unit :=\n  let f : t_u32 -> t_u32 := fun x =>\n   
 x in\n  tt.\n\nDefinition strings '(_ : unit) : unit :=\n  let _ : string := (\\\"hello\\\"%string : string) in\n  let _ : string := (\\\"hello\\\"world\\\"%string : string) in\n  let _ : string := (\\\"it's\\\"%string : string) in\n  let _ : string := (\\\"back\\\\slash\\\"%string : string) in\n  let _ : string := (\\\"line\nbreak\\\"%string : string) in\n  let _ : string := (\\\"carriage\\rreturn\\\"%string : string) in\n  let _ : string := (\\\"tab\\there\\\"%string : string) in\n  let _ : string := (\\\"null\\u0000byte\\\"%string : string) in\n  let _ : string := (\\\"bell\\u0007char\\\"%string : string) in\n  let _ : string := (\\\"\\u001B[0m\\\"%string : string) in\n  let _ : string := (\\\"🦀\\\"%string : string) in\n  tt.\n\"\"\"\n_CoqProject = '''\n-R ./ TODO\n-arg -w\n-arg all\n\nLiterals.v'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__literals into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: literals\n    manifest: literals/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Literals.fst\" = \"\"\"\nmodule Literals\n#set-options \\\"--fuel 0 --ifuel 1 --z3rlimit 15\\\"\nopen FStar.Mul\nopen Core_models\n\nlet math_integers (x: Hax_lib.Int.t_Int)\n    : Prims.Pure u8\n      (requires x > (0 <: Hax_lib.Int.t_Int) && x < (16 <: Hax_lib.Int.t_Int))\n      (fun _ -> Prims.l_True) =\n  let _:Hax_lib.Int.t_Int = Rust_primitives.Hax.Int.from_machine (mk_usize 3) in\n  let e_neg_dec:Hax_lib.Int.t_Int = (-340282366920938463463374607431768211455000) in\n  let e_pos_dec:Hax_lib.Int.t_Int = 340282366920938463463374607431768211455000 in\n  let e_neg_hex:Hax_lib.Int.t_Int = (-340282366920938463463374607431768211455000) in\n  let e_pos_hex:Hax_lib.Int.t_Int = 340282366920938463463374607431768211455000 in\n  let e_neg_octal:Hax_lib.Int.t_Int = (-340282366920938463463374607431768211455000) in\n  let e_pos_octal:Hax_lib.Int.t_Int = 340282366920938463463374607431768211455000 in\n  let e_neg_bin:Hax_lib.Int.t_Int = (-340282366920938463463374607431768211455000) in\n  let e_pos_bin:Hax_lib.Int.t_Int = 340282366920938463463374607431768211455000 in\n  let _:bool =\n    ((-340282366920938463463374607431768211455000) <: Hax_lib.Int.t_Int) >\n    (340282366920938463463374607431768211455000 <: Hax_lib.Int.t_Int)\n  in\n  let _:bool = x < x in\n  let _:bool = x >= x in\n  let _:bool = x <= x in\n  let _:bool = x <> x in\n  let _:bool = x = x in\n  let _:Hax_lib.Int.t_Int = x + x in\n  let _:Hax_lib.Int.t_Int = x - x in\n  let 
_:Hax_lib.Int.t_Int = x * x in\n  let _:Hax_lib.Int.t_Int = x / x in\n  let _:i16 = Hax_lib.Int.impl_Int__to_i16 x in\n  let _:i32 = Hax_lib.Int.impl_Int__to_i32 x in\n  let _:i64 = Hax_lib.Int.impl_Int__to_i64 x in\n  let _:i128 = Hax_lib.Int.impl_Int__to_i128 x in\n  let _:isize = Hax_lib.Int.impl_Int__to_isize x in\n  let _:u16 = Hax_lib.Int.impl_Int__to_u16 x in\n  let _:u32 = Hax_lib.Int.impl_Int__to_u32 x in\n  let _:u64 = Hax_lib.Int.impl_Int__to_u64 x in\n  let _:u128 = Hax_lib.Int.impl_Int__to_u128 x in\n  let _:usize = Hax_lib.Int.impl_Int__to_usize x in\n  Hax_lib.Int.impl_Int__to_u8 (x + (x * x <: Hax_lib.Int.t_Int) <: Hax_lib.Int.t_Int)\n\nlet panic_with_msg (_: Prims.unit) : Prims.unit =\n  Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic_fmt (Core_models.Fmt.Rt.impl_1__new_const\n            (mk_usize 1)\n            (let list = [\\\"with msg\\\"] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n              Rust_primitives.Hax.array_of_list 1 list)\n          <:\n          Core_models.Fmt.t_Arguments)\n      <:\n      Rust_primitives.Hax.t_Never)\n\ntype t_Foo = { f_field:u8 }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl': Core_models.Marker.t_StructuralPartialEq t_Foo\n\nunfold\nlet impl = impl'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_1': Core_models.Cmp.t_PartialEq t_Foo t_Foo\n\nunfold\nlet impl_1 = impl_1'\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nassume\nval impl_2': Core_models.Cmp.t_Eq t_Foo\n\nunfold\nlet impl_2 = impl_2'\n\nlet v_CONSTANT: t_Foo = { f_field = mk_u8 3 } <: t_Foo\n\nlet numeric (_: Prims.unit) : Prims.unit =\n  let _:usize = mk_usize 123 in\n  let _:isize = mk_isize (-42) in\n  let _:isize = mk_isize 42 in\n  let _:i32 = mk_i32 (-42) in\n  let _:u128 = mk_u128 22222222222222222222 in\n  ()\n\nlet patterns (_: Prims.unit) : Prims.unit =\n  let _:Prims.unit =\n    match mk_u8 1 <: u8 with\n    | Rust_primitives.Integers.MkInt 2 -> () 
<: Prims.unit\n    | _ -> () <: Prims.unit\n  in\n  let _:Prims.unit =\n    match\n      \\\"hello\\\",\n      (mk_i32 123,\n        (let list = [\\\"a\\\"; \\\"b\\\"] in\n          FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n          Rust_primitives.Hax.array_of_list 2 list)\n        <:\n        (i32 & t_Array string (mk_usize 2)))\n      <:\n      (string & (i32 & t_Array string (mk_usize 2)))\n    with\n    | \\\"hello\\\", (Rust_primitives.Integers.MkInt 123, e_todo) -> () <: Prims.unit\n    | _ -> () <: Prims.unit\n  in\n  let _:Prims.unit =\n    match { f_field = mk_u8 4 } <: t_Foo with\n    | { f_field = Rust_primitives.Integers.MkInt 3 } -> () <: Prims.unit\n    | _ -> () <: Prims.unit\n  in\n  ()\n\nlet casts (x8: u8) (x16: u16) (x32: u32) (x64: u64) (xs: usize) : Prims.unit =\n  let _:u64 =\n    ((((cast (x8 <: u8) <: u64) +! (cast (x16 <: u16) <: u64) <: u64) +! (cast (x32 <: u32) <: u64)\n        <:\n        u64) +!\n      x64\n      <:\n      u64) +!\n    (cast (xs <: usize) <: u64)\n  in\n  let _:u32 =\n    ((((cast (x8 <: u8) <: u32) +! (cast (x16 <: u16) <: u32) <: u32) +! x32 <: u32) +!\n      (cast (x64 <: u64) <: u32)\n      <:\n      u32) +!\n    (cast (xs <: usize) <: u32)\n  in\n  let _:u16 =\n    ((((cast (x8 <: u8) <: u16) +! x16 <: u16) +! (cast (x32 <: u32) <: u16) <: u16) +!\n      (cast (x64 <: u64) <: u16)\n      <:\n      u16) +!\n    (cast (xs <: usize) <: u16)\n  in\n  let _:u8 =\n    (((x8 +! (cast (x16 <: u16) <: u8) <: u8) +! (cast (x32 <: u32) <: u8) <: u8) +!\n      (cast (x64 <: u64) <: u8)\n      <:\n      u8) +!\n    (cast (xs <: usize) <: u8)\n  in\n  let _:i64 =\n    ((((cast (x8 <: u8) <: i64) +! (cast (x16 <: u16) <: i64) <: i64) +! (cast (x32 <: u32) <: i64)\n        <:\n        i64) +!\n      (cast (x64 <: u64) <: i64)\n      <:\n      i64) +!\n    (cast (xs <: usize) <: i64)\n  in\n  let _:i32 =\n    ((((cast (x8 <: u8) <: i32) +! (cast (x16 <: u16) <: i32) <: i32) +! 
(cast (x32 <: u32) <: i32)\n        <:\n        i32) +!\n      (cast (x64 <: u64) <: i32)\n      <:\n      i32) +!\n    (cast (xs <: usize) <: i32)\n  in\n  let _:i16 =\n    ((((cast (x8 <: u8) <: i16) +! (cast (x16 <: u16) <: i16) <: i16) +! (cast (x32 <: u32) <: i16)\n        <:\n        i16) +!\n      (cast (x64 <: u64) <: i16)\n      <:\n      i16) +!\n    (cast (xs <: usize) <: i16)\n  in\n  let _:i8 =\n    ((((cast (x8 <: u8) <: i8) +! (cast (x16 <: u16) <: i8) <: i8) +! (cast (x32 <: u32) <: i8)\n        <:\n        i8) +!\n      (cast (x64 <: u64) <: i8)\n      <:\n      i8) +!\n    (cast (xs <: usize) <: i8)\n  in\n  ()\n\nlet empty_array (_: Prims.unit) : Prims.unit =\n  let _:t_Slice u8 =\n    (let list:Prims.list u8 = [] in\n      FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 0);\n      Rust_primitives.Hax.array_of_list 0 list)\n    <:\n    t_Slice u8\n  in\n  ()\n\n/// https://github.com/hacspec/hax/issues/500\nlet fn_pointer_cast (_: Prims.unit) : Prims.unit =\n  let (f: (u32 -> u32)): u32 -> u32 = fun x -> x in\n  ()\n\nlet strings (_: Prims.unit) : Prims.unit =\n  let _:string = \\\"hello\\\" in\n  let _:string = \\\"hello\\\\\\\"world\\\" in\n  let _:string = \\\"it's\\\" in\n  let _:string = \\\"back\\\\slash\\\" in\n  let _:string = \\\"line\\\\nbreak\\\" in\n  let _:string = \\\"carriage\\\\rreturn\\\" in\n  let _:string = \\\"tab\\\\there\\\" in\n  let _:string = \\\"null\\\\0byte\\\" in\n  let _:string = \\\"bell\\u0007char\\\" in\n  let _:string = \\\"\\u001B[0m\\\" in\n  let _:string = \\\"🦀\\\" in\n  ()\n\"\"\"\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__literals into-lean.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: lean\n  info:\n    name: literals\n    manifest: literals/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"literals.lean\" = '''\n\n-- Experimental lean backend for Hax\n-- The Hax prelude library can be found in hax/proof-libs/lean\nimport Hax\nimport Std.Tactic.Do\nimport Std.Do.Triple\nimport Std.Tactic.Do.Syntax\nopen Std.Do\nopen Std.Tactic\n\nset_option mvcgen.warning false\nset_option linter.unusedVariables false\n\n\nnamespace literals\n\ndef math_integers (x : hax_lib.int.Int) : RustM u8 := do\n  let _ : hax_lib.int.Int ← (rust_primitives.hax.int.from_machine (3 : usize));\n  let _neg_dec : hax_lib.int.Int ←\n    (hax_lib.int.Impl_7._unsafe_from_str\n      \"-340282366920938463463374607431768211455000\");\n  let _pos_dec : hax_lib.int.Int ←\n    (hax_lib.int.Impl_7._unsafe_from_str\n      \"340282366920938463463374607431768211455000\");\n  let _neg_hex : hax_lib.int.Int ←\n    (hax_lib.int.Impl_7._unsafe_from_str\n      \"-340282366920938463463374607431768211455000\");\n  let _pos_hex : hax_lib.int.Int ←\n    (hax_lib.int.Impl_7._unsafe_from_str\n      \"340282366920938463463374607431768211455000\");\n  let _neg_octal : hax_lib.int.Int ←\n    (hax_lib.int.Impl_7._unsafe_from_str\n      \"-340282366920938463463374607431768211455000\");\n  let _pos_octal : hax_lib.int.Int ←\n    (hax_lib.int.Impl_7._unsafe_from_str\n      \"340282366920938463463374607431768211455000\");\n  let _neg_bin : hax_lib.int.Int ←\n    (hax_lib.int.Impl_7._unsafe_from_str\n      \"-340282366920938463463374607431768211455000\");\n  let _pos_bin : 
hax_lib.int.Int ←\n    (hax_lib.int.Impl_7._unsafe_from_str\n      \"340282366920938463463374607431768211455000\");\n  let _ ←\n    (rust_primitives.hax.int.gt\n      (← (hax_lib.int.Impl_7._unsafe_from_str\n        \"-340282366920938463463374607431768211455000\"))\n      (← (hax_lib.int.Impl_7._unsafe_from_str\n        \"340282366920938463463374607431768211455000\")));\n  let _ ← (rust_primitives.hax.int.lt x x);\n  let _ ← (rust_primitives.hax.int.ge x x);\n  let _ ← (rust_primitives.hax.int.le x x);\n  let _ ← (rust_primitives.hax.int.ne x x);\n  let _ ← (rust_primitives.hax.int.eq x x);\n  let _ ← (rust_primitives.hax.int.add x x);\n  let _ ← (rust_primitives.hax.int.sub x x);\n  let _ ← (rust_primitives.hax.int.mul x x);\n  let _ ← (rust_primitives.hax.int.div x x);\n  let _ : i16 ← (hax_lib.int.Impl_55.to_i16 x);\n  let _ : i32 ← (hax_lib.int.Impl_57.to_i32 x);\n  let _ : i64 ← (hax_lib.int.Impl_59.to_i64 x);\n  let _ : i128 ← (hax_lib.int.Impl_61.to_i128 x);\n  let _ : isize ← (hax_lib.int.Impl_63.to_isize x);\n  let _ : u16 ← (hax_lib.int.Impl_43.to_u16 x);\n  let _ : u32 ← (hax_lib.int.Impl_45.to_u32 x);\n  let _ : u64 ← (hax_lib.int.Impl_47.to_u64 x);\n  let _ : u128 ← (hax_lib.int.Impl_49.to_u128 x);\n  let _ : usize ← (hax_lib.int.Impl_51.to_usize x);\n  (hax_lib.int.Impl_41.to_u8\n    (← (rust_primitives.hax.int.add x (← (rust_primitives.hax.int.mul x x)))))\n\nset_option hax_mvcgen.specset \"bv\" in\n@[hax_spec]\ndef math_integers.spec (x : hax_lib.int.Int) :\n    Spec\n      (requires := do\n        ((← (rust_primitives.hax.int.gt\n            x\n            (← (hax_lib.int.Impl_7._unsafe_from_str \"0\"))))\n          &&? 
(← (rust_primitives.hax.int.lt\n            x\n            (← (hax_lib.int.Impl_7._unsafe_from_str \"16\"))))))\n      (ensures := fun _ => pure True)\n      (math_integers (x : hax_lib.int.Int)) := {\n  pureRequires := by hax_construct_pure <;> bv_decide\n  pureEnsures := by hax_construct_pure <;> bv_decide\n  contract := by hax_mvcgen [math_integers] <;> bv_decide\n}\n\n@[spec]\ndef panic_with_msg (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  (rust_primitives.hax.never_to_any\n    (← (core_models.panicking.panic_fmt\n      (← (core_models.fmt.rt.Impl_1.new_const ((1 : usize))\n        (RustArray.ofVec #v[\"with msg\"]))))))\n\nstructure Foo where\n  field : u8\n\n@[instance] opaque Impl.AssociatedTypes :\n  core_models.marker.StructuralPartialEq.AssociatedTypes Foo :=\n  by constructor <;> exact Inhabited.default\n\n@[instance] opaque Impl :\n  core_models.marker.StructuralPartialEq Foo :=\n  by constructor <;> exact Inhabited.default\n\n@[instance] opaque Impl_1.AssociatedTypes :\n  core_models.cmp.PartialEq.AssociatedTypes Foo Foo :=\n  by constructor <;> exact Inhabited.default\n\n@[instance] opaque Impl_1 :\n  core_models.cmp.PartialEq Foo Foo :=\n  by constructor <;> exact Inhabited.default\n\n@[instance] opaque Impl_2.AssociatedTypes :\n  core_models.cmp.Eq.AssociatedTypes Foo :=\n  by constructor <;> exact Inhabited.default\n\n@[instance] opaque Impl_2 :\n  core_models.cmp.Eq Foo :=\n  by constructor <;> exact Inhabited.default\n\ndef CONSTANT : Foo :=\n  RustM.of_isOk (do (pure (Foo.mk (field := (3 : u8))))) (by rfl)\n\n@[spec]\ndef numeric (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let _ : usize := (123 : usize);\n  let _ : isize := (-42 : isize);\n  let _ : isize := (42 : isize);\n  let _ : i32 := (-42 : i32);\n  let _ : u128 := (22222222222222222222 : u128);\n  (pure rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef patterns (_ : rust_primitives.hax.Tuple0) :\n    RustM 
rust_primitives.hax.Tuple0 := do\n  let _ ←\n    match (1 : u8) with\n      | 2 => do (pure rust_primitives.hax.Tuple0.mk)\n      | _ => do (pure rust_primitives.hax.Tuple0.mk);\n  let _ ←\n    match\n      (rust_primitives.hax.Tuple2.mk\n        \"hello\"\n        (rust_primitives.hax.Tuple2.mk\n          (123 : i32)\n          (RustArray.ofVec #v[\"a\", \"b\"])))\n    with\n      | ⟨\"hello\", ⟨123, _todo⟩⟩ => do (pure rust_primitives.hax.Tuple0.mk)\n      | _ => do (pure rust_primitives.hax.Tuple0.mk);\n  let _ ←\n    match (Foo.mk (field := (4 : u8))) with\n      | {field := 3} => do (pure rust_primitives.hax.Tuple0.mk)\n      | _ => do (pure rust_primitives.hax.Tuple0.mk);\n  (pure rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef casts (x8 : u8) (x16 : u16) (x32 : u32) (x64 : u64) (xs : usize) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let _ : u64 ←\n    ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM u64))\n            +? (← (rust_primitives.hax.cast_op x16 : RustM u64))))\n          +? (← (rust_primitives.hax.cast_op x32 : RustM u64))))\n        +? x64))\n      +? (← (rust_primitives.hax.cast_op xs : RustM u64)));\n  let _ : u32 ←\n    ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM u32))\n            +? (← (rust_primitives.hax.cast_op x16 : RustM u32))))\n          +? x32))\n        +? (← (rust_primitives.hax.cast_op x64 : RustM u32))))\n      +? (← (rust_primitives.hax.cast_op xs : RustM u32)));\n  let _ : u16 ←\n    ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM u16)) +? x16))\n          +? (← (rust_primitives.hax.cast_op x32 : RustM u16))))\n        +? (← (rust_primitives.hax.cast_op x64 : RustM u16))))\n      +? (← (rust_primitives.hax.cast_op xs : RustM u16)));\n  let _ : u8 ←\n    ((← ((← ((← (x8 +? (← (rust_primitives.hax.cast_op x16 : RustM u8))))\n          +? (← (rust_primitives.hax.cast_op x32 : RustM u8))))\n        +? (← (rust_primitives.hax.cast_op x64 : RustM u8))))\n      +? 
(← (rust_primitives.hax.cast_op xs : RustM u8)));\n  let _ : i64 ←\n    ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM i64))\n            +? (← (rust_primitives.hax.cast_op x16 : RustM i64))))\n          +? (← (rust_primitives.hax.cast_op x32 : RustM i64))))\n        +? (← (rust_primitives.hax.cast_op x64 : RustM i64))))\n      +? (← (rust_primitives.hax.cast_op xs : RustM i64)));\n  let _ : i32 ←\n    ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM i32))\n            +? (← (rust_primitives.hax.cast_op x16 : RustM i32))))\n          +? (← (rust_primitives.hax.cast_op x32 : RustM i32))))\n        +? (← (rust_primitives.hax.cast_op x64 : RustM i32))))\n      +? (← (rust_primitives.hax.cast_op xs : RustM i32)));\n  let _ : i16 ←\n    ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM i16))\n            +? (← (rust_primitives.hax.cast_op x16 : RustM i16))))\n          +? (← (rust_primitives.hax.cast_op x32 : RustM i16))))\n        +? (← (rust_primitives.hax.cast_op x64 : RustM i16))))\n      +? (← (rust_primitives.hax.cast_op xs : RustM i16)));\n  let _ : i8 ←\n    ((← ((← ((← ((← (rust_primitives.hax.cast_op x8 : RustM i8))\n            +? (← (rust_primitives.hax.cast_op x16 : RustM i8))))\n          +? (← (rust_primitives.hax.cast_op x32 : RustM i8))))\n        +? (← (rust_primitives.hax.cast_op x64 : RustM i8))))\n      +? 
(← (rust_primitives.hax.cast_op xs : RustM i8)));\n  (pure rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef empty_array (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let _ : (RustSlice u8) ← (rust_primitives.unsize (RustArray.ofVec #v[]));\n  (pure rust_primitives.hax.Tuple0.mk)\n\n--  https://github.com/hacspec/hax/issues/500\n@[spec]\ndef fn_pointer_cast (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let f : (u32 -> RustM u32) := (fun x => (do (pure x) : RustM u32));\n  (pure rust_primitives.hax.Tuple0.mk)\n\n@[spec]\ndef strings (_ : rust_primitives.hax.Tuple0) :\n    RustM rust_primitives.hax.Tuple0 := do\n  let _ : String := \"hello\";\n  let _ : String := \"hello\\\"world\";\n  let _ : String := \"it\\'s\";\n  let _ : String := \"back\\\\slash\";\n  let _ : String := \"line\\nbreak\";\n  let _ : String := \"carriage\\rreturn\";\n  let _ : String := \"tab\\there\";\n  let _ : String := \"null\\x00byte\";\n  let _ : String := \"bell\\x07char\";\n  let _ : String := \"\\x1b[0m\";\n  let _ : String := \"🦀\";\n  (pure rust_primitives.hax.Tuple0.mk)\n\nend literals\n\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__loops into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: loops\n    manifest: loops/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Loops.And_mut_side_effect_loop.fst\" = '''\nmodule Loops.And_mut_side_effect_loop\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet looping (array: t_Array u8 (mk_usize 5)) : t_Array u8 (mk_usize 5) =\n  let array:t_Array u8 (mk_usize 5) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (Core_models.Slice.impl__len #u8 (array <: t_Slice u8) <: usize)\n      (fun array temp_1_ ->\n          let array:t_Array u8 (mk_usize 5) = array in\n          let _:usize = temp_1_ in\n          true)\n      array\n      (fun array i ->\n          let array:t_Array u8 (mk_usize 5) = array in\n          let i:usize = i in\n          Rust_primitives.Hax.Monomorphized_update_at.update_at_usize array\n            i\n            (cast (i <: usize) <: u8)\n          <:\n          t_Array u8 (mk_usize 5))\n  in\n  array\n\nlet looping_2_ (array: t_Array u8 (mk_usize 5)) : t_Array u8 (mk_usize 5) =\n  let (array: t_Array u8 (mk_usize 5)), (result: Prims.unit) =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (Core_models.Slice.impl__len #u8 (array <: t_Slice u8) <: usize)\n      (fun array temp_1_ ->\n          let array:t_Array u8 (mk_usize 5) = array in\n          let _:usize = temp_1_ in\n          true)\n      array\n      (fun array i ->\n          let array:t_Array u8 (mk_usize 5) = array in\n          let i:usize = i in\n          
Rust_primitives.Hax.Monomorphized_update_at.update_at_usize array\n            i\n            (cast (i <: usize) <: u8)\n          <:\n          t_Array u8 (mk_usize 5)),\n    ()\n    <:\n    (t_Array u8 (mk_usize 5) & Prims.unit)\n  in\n  let _:Prims.unit = admit () (* Panic freedom *) in\n  let _:Prims.unit = result in\n  array\n'''\n\"Loops.Control_flow.fst\" = '''\nmodule Loops.Control_flow\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet double_sum (_: Prims.unit) : i32 =\n  let sum:i32 = mk_i32 0 in\n  let sum:i32 =\n    Rust_primitives.Hax.Folds.fold_range_cf (mk_i32 1)\n      (mk_i32 10)\n      (fun sum temp_1_ ->\n          let sum:i32 = sum in\n          let _:i32 = temp_1_ in\n          true)\n      sum\n      (fun sum i ->\n          let sum:i32 = sum in\n          let i:i32 = i in\n          if i <. mk_i32 0 <: bool\n          then\n            Core_models.Ops.Control_flow.ControlFlow_Break ((), sum <: (Prims.unit & i32))\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32\n          else\n            Core_models.Ops.Control_flow.ControlFlow_Continue (sum +! i <: i32)\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32)\n  in\n  sum *! mk_i32 2\n\nlet double_sum2 (_: Prims.unit) : i32 =\n  let sum:i32 = mk_i32 0 in\n  let sum2:i32 = mk_i32 0 in\n  let (sum: i32), (sum2: i32) =\n    Rust_primitives.Hax.Folds.fold_range_cf (mk_i32 1)\n      (mk_i32 10)\n      (fun temp_0_ temp_1_ ->\n          let (sum: i32), (sum2: i32) = temp_0_ in\n          let _:i32 = temp_1_ in\n          true)\n      (sum, sum2 <: (i32 & i32))\n      (fun temp_0_ i ->\n          let (sum: i32), (sum2: i32) = temp_0_ in\n          let i:i32 = i in\n          if i <. 
mk_i32 0 <: bool\n          then\n            Core_models.Ops.Control_flow.ControlFlow_Break\n            ((), (sum, sum2 <: (i32 & i32)) <: (Prims.unit & (i32 & i32)))\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32)) (i32 & i32)\n          else\n            let sum:i32 = sum +! i in\n            Core_models.Ops.Control_flow.ControlFlow_Continue (sum, sum2 +! i <: (i32 & i32))\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & (i32 & i32)) (i32 & i32))\n  in\n  sum +! sum2\n\nlet double_sum_return (v: t_Slice i32) : i32 =\n  let sum:i32 = mk_i32 0 in\n  match\n    Rust_primitives.Hax.Folds.fold_return (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice i32\n          )\n          #FStar.Tactics.Typeclasses.solve\n          v\n        <:\n        Core_models.Slice.Iter.t_Iter i32)\n      sum\n      (fun sum i ->\n          let sum:i32 = sum in\n          let i:i32 = i in\n          if i <. mk_i32 0 <: bool\n          then\n            Core_models.Ops.Control_flow.ControlFlow_Break\n            (Core_models.Ops.Control_flow.ControlFlow_Break (mk_i32 0)\n              <:\n              Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32))\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow\n              (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32\n          else\n            Core_models.Ops.Control_flow.ControlFlow_Continue (sum +! i <: i32)\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow\n              (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32)\n    <:\n    Core_models.Ops.Control_flow.t_ControlFlow i32 i32\n  with\n  | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret\n  | Core_models.Ops.Control_flow.ControlFlow_Continue sum -> sum *! 
mk_i32 2\n\nlet double_sum2_return (v: t_Slice i32) : i32 =\n  let sum:i32 = mk_i32 0 in\n  let sum2:i32 = mk_i32 0 in\n  match\n    Rust_primitives.Hax.Folds.fold_return (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice i32\n          )\n          #FStar.Tactics.Typeclasses.solve\n          v\n        <:\n        Core_models.Slice.Iter.t_Iter i32)\n      (sum, sum2 <: (i32 & i32))\n      (fun temp_0_ i ->\n          let (sum: i32), (sum2: i32) = temp_0_ in\n          let i:i32 = i in\n          if i <. mk_i32 0 <: bool\n          then\n            Core_models.Ops.Control_flow.ControlFlow_Break\n            (Core_models.Ops.Control_flow.ControlFlow_Break (mk_i32 0)\n              <:\n              Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & (i32 & i32)))\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow\n              (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & (i32 & i32)))\n              (i32 & i32)\n          else\n            let sum:i32 = sum +! i in\n            Core_models.Ops.Control_flow.ControlFlow_Continue (sum, sum2 +! i <: (i32 & i32))\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow\n              (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & (i32 & i32)))\n              (i32 & i32))\n    <:\n    Core_models.Ops.Control_flow.t_ControlFlow i32 (i32 & i32)\n  with\n  | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret\n  | Core_models.Ops.Control_flow.ControlFlow_Continue (sum, sum2) -> sum +! sum2\n\nlet bigger_power_2_ (x: i32) : i32 =\n  let pow:i32 = mk_i32 1 in\n  Rust_primitives.Hax.while_loop_cf (fun pow ->\n        let pow:i32 = pow in\n        true)\n    (fun pow ->\n        let pow:i32 = pow in\n        pow <. 
mk_i32 1000000 <: bool)\n    (fun pow ->\n        let pow:i32 = pow in\n        Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n    pow\n    (fun pow ->\n        let pow:i32 = pow in\n        let pow:i32 = pow *! mk_i32 2 in\n        if pow <. x\n        then\n          let pow:i32 = pow *! mk_i32 3 in\n          if true\n          then\n            Core_models.Ops.Control_flow.ControlFlow_Break ((), pow <: (Prims.unit & i32))\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32\n          else\n            Core_models.Ops.Control_flow.ControlFlow_Continue (pow *! mk_i32 2)\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32\n        else\n          Core_models.Ops.Control_flow.ControlFlow_Continue (pow *! mk_i32 2)\n          <:\n          Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32)\n\ntype t_M = { f_m:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global }\n\nlet impl_M__decoded_message (self: t_M)\n    : Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) =\n  match\n    Rust_primitives.Hax.Folds.fold_range_return (mk_usize 0)\n      (Alloc.Vec.impl_1__len #u8 #Alloc.Alloc.t_Global self.f_m <: usize)\n      (fun temp_0_ temp_1_ ->\n          let _:Prims.unit = temp_0_ in\n          let _:usize = temp_1_ in\n          true)\n      ()\n      (fun temp_0_ i ->\n          let _:Prims.unit = temp_0_ in\n          let i:usize = i in\n          if i >. 
mk_usize 5 <: bool\n          then\n            Core_models.Ops.Control_flow.ControlFlow_Break\n            (Core_models.Ops.Control_flow.ControlFlow_Break\n              (Core_models.Option.Option_None\n                <:\n                Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global))\n              <:\n              Core_models.Ops.Control_flow.t_ControlFlow\n                (Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global))\n                (Prims.unit & Prims.unit))\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow\n              (Core_models.Ops.Control_flow.t_ControlFlow\n                  (Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global))\n                  (Prims.unit & Prims.unit)) Prims.unit\n          else\n            Core_models.Ops.Control_flow.ControlFlow_Continue ()\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow\n              (Core_models.Ops.Control_flow.t_ControlFlow\n                  (Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global))\n                  (Prims.unit & Prims.unit)) Prims.unit)\n    <:\n    Core_models.Ops.Control_flow.t_ControlFlow\n      (Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)) Prims.unit\n  with\n  | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret\n  | Core_models.Ops.Control_flow.ControlFlow_Continue _ ->\n    Core_models.Option.Option_Some\n    (Core_models.Clone.f_clone #(Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n        #FStar.Tactics.Typeclasses.solve\n        self.f_m)\n    <:\n    Core_models.Option.t_Option (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n\nlet nested (_: Prims.unit) : i32 =\n  let sum:i32 = mk_i32 0 in\n  let sum:i32 =\n    Rust_primitives.Hax.Folds.fold_range (mk_i32 1)\n      (mk_i32 10)\n      (fun sum temp_1_ ->\n          let sum:i32 = sum in\n          let _:i32 = temp_1_ in\n          true)\n      sum\n      (fun sum i ->\n        
  let sum:i32 = sum in\n          let i:i32 = i in\n          let sum:i32 =\n            Rust_primitives.Hax.Folds.fold_range_cf (mk_i32 1)\n              (mk_i32 10)\n              (fun sum temp_1_ ->\n                  let sum:i32 = sum in\n                  let _:i32 = temp_1_ in\n                  true)\n              sum\n              (fun sum j ->\n                  let sum:i32 = sum in\n                  let j:i32 = j in\n                  if j <. mk_i32 0 <: bool\n                  then\n                    Core_models.Ops.Control_flow.ControlFlow_Break ((), sum <: (Prims.unit & i32))\n                    <:\n                    Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32\n                  else\n                    Core_models.Ops.Control_flow.ControlFlow_Continue (sum +! j <: i32)\n                    <:\n                    Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32)\n          in\n          sum +! i)\n  in\n  sum *! mk_i32 2\n\nlet nested_return (_: Prims.unit) : i32 =\n  let sum:i32 = mk_i32 0 in\n  match\n    Rust_primitives.Hax.Folds.fold_range_return (mk_i32 1)\n      (mk_i32 10)\n      (fun sum temp_1_ ->\n          let sum:i32 = sum in\n          let _:i32 = temp_1_ in\n          true)\n      sum\n      (fun sum i ->\n          let sum:i32 = sum in\n          let i:i32 = i in\n          match\n            Rust_primitives.Hax.Folds.fold_range_return (mk_i32 1)\n              (mk_i32 10)\n              (fun sum temp_1_ ->\n                  let sum:i32 = sum in\n                  let _:i32 = temp_1_ in\n                  true)\n              sum\n              (fun sum j ->\n                  let sum:i32 = sum in\n                  let j:i32 = j in\n                  if j <. 
mk_i32 0 <: bool\n                  then\n                    Core_models.Ops.Control_flow.ControlFlow_Break\n                    (Core_models.Ops.Control_flow.ControlFlow_Break (mk_i32 0)\n                      <:\n                      Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32))\n                    <:\n                    Core_models.Ops.Control_flow.t_ControlFlow\n                      (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32\n                  else\n                    Core_models.Ops.Control_flow.ControlFlow_Continue (sum +! j <: i32)\n                    <:\n                    Core_models.Ops.Control_flow.t_ControlFlow\n                      (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32)\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow i32 i32\n          with\n          | Core_models.Ops.Control_flow.ControlFlow_Break ret ->\n            Core_models.Ops.Control_flow.ControlFlow_Break\n            (Core_models.Ops.Control_flow.ControlFlow_Break ret\n              <:\n              Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32))\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow\n              (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32\n          | Core_models.Ops.Control_flow.ControlFlow_Continue sum ->\n            Core_models.Ops.Control_flow.ControlFlow_Continue (sum +! i <: i32)\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow\n              (Core_models.Ops.Control_flow.t_ControlFlow i32 (Prims.unit & i32)) i32)\n    <:\n    Core_models.Ops.Control_flow.t_ControlFlow i32 i32\n  with\n  | Core_models.Ops.Control_flow.ControlFlow_Break ret -> ret\n  | Core_models.Ops.Control_flow.ControlFlow_Continue sum -> sum *! 
mk_i32 2\n\nlet continue_only (x: t_Slice i32) : (i32 & Prims.unit) =\n  let product:i32 = mk_i32 1 in\n  Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice i32\n        )\n        #FStar.Tactics.Typeclasses.solve\n        x\n      <:\n      Core_models.Slice.Iter.t_Iter i32)\n    product\n    (fun product i ->\n        let product:i32 = product in\n        let i:i32 = i in\n        if i =. mk_i32 0 <: bool\n        then product\n        else\n          Core_models.Ops.Arith.f_mul_assign #i32 #i32 #FStar.Tactics.Typeclasses.solve product i\n          <:\n          i32),\n  ()\n  <:\n  (i32 & Prims.unit)\n\nlet continue_and_break (x: t_Slice i32) : (i32 & Prims.unit) =\n  let product:i32 = mk_i32 1 in\n  Rust_primitives.Hax.Folds.fold_cf (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice i32)\n        #FStar.Tactics.Typeclasses.solve\n        x\n      <:\n      Core_models.Slice.Iter.t_Iter i32)\n    product\n    (fun product i ->\n        let product:i32 = product in\n        let i:i32 = i in\n        if i =. mk_i32 0 <: bool\n        then\n          Core_models.Ops.Control_flow.ControlFlow_Continue product\n          <:\n          Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32\n        else\n          if i <. 
mk_i32 0 <: bool\n          then\n            Core_models.Ops.Control_flow.ControlFlow_Break ((), product <: (Prims.unit & i32))\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32\n          else\n            Core_models.Ops.Control_flow.ControlFlow_Continue\n            (Core_models.Ops.Arith.f_mul_assign #i32 #i32 #FStar.Tactics.Typeclasses.solve product i\n              <:\n              i32)\n            <:\n            Core_models.Ops.Control_flow.t_ControlFlow (Prims.unit & i32) i32),\n  ()\n  <:\n  (i32 & Prims.unit)\n'''\n\"Loops.For_loops.fst\" = '''\nmodule Loops.For_loops\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet range1 (_: Prims.unit) : usize =\n  let acc:usize = mk_usize 0 in\n  let acc:usize =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (mk_usize 15)\n      (fun acc temp_1_ ->\n          let acc:usize = acc in\n          let _:usize = temp_1_ in\n          true)\n      acc\n      (fun acc i ->\n          let acc:usize = acc in\n          let i:usize = i in\n          acc +! i <: usize)\n  in\n  acc\n\nlet range2 (n: usize) : usize =\n  let acc:usize = mk_usize 0 in\n  let acc:usize =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (n +! mk_usize 10 <: usize)\n      (fun acc temp_1_ ->\n          let acc:usize = acc in\n          let _:usize = temp_1_ in\n          true)\n      acc\n      (fun acc i ->\n          let acc:usize = acc in\n          let i:usize = i in\n          (acc +! i <: usize) +! 
mk_usize 1 <: usize)\n  in\n  acc\n\nlet composed_range (n: usize) : usize =\n  let acc:usize = mk_usize 0 in\n  let acc:usize =\n    Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Iter.Adapters.Chain.t_Chain\n              (Core_models.Ops.Range.t_Range usize) (Core_models.Ops.Range.t_Range usize))\n          #FStar.Tactics.Typeclasses.solve\n          (Core_models.Iter.Traits.Iterator.f_chain #(Core_models.Ops.Range.t_Range usize)\n              #FStar.Tactics.Typeclasses.solve\n              #(Core_models.Ops.Range.t_Range usize)\n              ({ Core_models.Ops.Range.f_start = mk_usize 0; Core_models.Ops.Range.f_end = n }\n                <:\n                Core_models.Ops.Range.t_Range usize)\n              ({\n                  Core_models.Ops.Range.f_start = n +! mk_usize 10 <: usize;\n                  Core_models.Ops.Range.f_end = n +! mk_usize 50 <: usize\n                }\n                <:\n                Core_models.Ops.Range.t_Range usize)\n            <:\n            Core_models.Iter.Adapters.Chain.t_Chain (Core_models.Ops.Range.t_Range usize)\n              (Core_models.Ops.Range.t_Range usize))\n        <:\n        Core_models.Iter.Adapters.Chain.t_Chain (Core_models.Ops.Range.t_Range usize)\n          (Core_models.Ops.Range.t_Range usize))\n      acc\n      (fun acc i ->\n          let acc:usize = acc in\n          let i:usize = i in\n          (acc +! i <: usize) +! 
mk_usize 1 <: usize)\n  in\n  acc\n\nlet rev_range (n: usize) : usize =\n  let acc:usize = mk_usize 0 in\n  let acc:usize =\n    Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Iter.Adapters.Rev.t_Rev\n            (Core_models.Ops.Range.t_Range usize))\n          #FStar.Tactics.Typeclasses.solve\n          (Core_models.Iter.Traits.Iterator.f_rev #(Core_models.Ops.Range.t_Range usize)\n              #FStar.Tactics.Typeclasses.solve\n              ({ Core_models.Ops.Range.f_start = mk_usize 0; Core_models.Ops.Range.f_end = n }\n                <:\n                Core_models.Ops.Range.t_Range usize)\n            <:\n            Core_models.Iter.Adapters.Rev.t_Rev (Core_models.Ops.Range.t_Range usize))\n        <:\n        Core_models.Iter.Adapters.Rev.t_Rev (Core_models.Ops.Range.t_Range usize))\n      acc\n      (fun acc i ->\n          let acc:usize = acc in\n          let i:usize = i in\n          (acc +! i <: usize) +! mk_usize 1 <: usize)\n  in\n  acc\n\nlet chunks (v_CHUNK_LEN: usize) (arr: Alloc.Vec.t_Vec usize Alloc.Alloc.t_Global) : usize =\n  let acc:usize = mk_usize 0 in\n  let chunks:Core_models.Slice.Iter.t_ChunksExact usize =\n    Core_models.Slice.impl__chunks_exact #usize\n      (Alloc.Vec.impl_1__as_slice arr <: t_Slice usize)\n      v_CHUNK_LEN\n  in\n  let acc:usize =\n    Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Slice.Iter.t_ChunksExact\n            usize)\n          #FStar.Tactics.Typeclasses.solve\n          (Core_models.Clone.f_clone #(Core_models.Slice.Iter.t_ChunksExact usize)\n              #FStar.Tactics.Typeclasses.solve\n              chunks\n            <:\n            Core_models.Slice.Iter.t_ChunksExact usize)\n        <:\n        Core_models.Slice.Iter.t_ChunksExact usize)\n      acc\n      (fun acc chunk ->\n          let acc:usize = acc in\n          let chunk:t_Slice usize = chunk in\n          let mean:usize = mk_usize 0 
in\n          let mean:usize =\n            Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice\n                    usize)\n                  #FStar.Tactics.Typeclasses.solve\n                  chunk\n                <:\n                Core_models.Slice.Iter.t_Iter usize)\n              mean\n              (fun mean item ->\n                  let mean:usize = mean in\n                  let item:usize = item in\n                  mean +! item <: usize)\n          in\n          let acc:usize = acc +! (mean /! v_CHUNK_LEN <: usize) in\n          acc)\n  in\n  let acc:usize =\n    Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(t_Slice\n            usize)\n          #FStar.Tactics.Typeclasses.solve\n          (Core_models.Slice.Iter.impl_88__remainder #usize chunks <: t_Slice usize)\n        <:\n        Core_models.Slice.Iter.t_Iter usize)\n      acc\n      (fun acc item ->\n          let acc:usize = acc in\n          let item:usize = item in\n          acc -! item <: usize)\n  in\n  acc\n\nlet iterator (arr: Alloc.Vec.t_Vec usize Alloc.Alloc.t_Global) : usize =\n  let acc:usize = mk_usize 0 in\n  let acc:usize =\n    Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Slice.Iter.t_Iter\n            usize)\n          #FStar.Tactics.Typeclasses.solve\n          (Core_models.Slice.impl__iter #usize (Alloc.Vec.impl_1__as_slice arr <: t_Slice usize)\n            <:\n            Core_models.Slice.Iter.t_Iter usize)\n        <:\n        Core_models.Slice.Iter.t_Iter usize)\n      acc\n      (fun acc item ->\n          let acc:usize = acc in\n          let item:usize = item in\n          acc +! 
item <: usize)\n  in\n  acc\n\nlet nested (arr: Alloc.Vec.t_Vec usize Alloc.Alloc.t_Global) : usize =\n  let acc:usize = mk_usize 0 in\n  let acc:usize =\n    Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Slice.Iter.t_Iter\n            usize)\n          #FStar.Tactics.Typeclasses.solve\n          (Core_models.Slice.impl__iter #usize (Alloc.Vec.impl_1__as_slice arr <: t_Slice usize)\n            <:\n            Core_models.Slice.Iter.t_Iter usize)\n        <:\n        Core_models.Slice.Iter.t_Iter usize)\n      acc\n      (fun acc item ->\n          let acc:usize = acc in\n          let item:usize = item in\n          Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Iter.Adapters.Rev.t_Rev\n                  (Core_models.Ops.Range.t_Range usize))\n                #FStar.Tactics.Typeclasses.solve\n                (Core_models.Iter.Traits.Iterator.f_rev #(Core_models.Ops.Range.t_Range usize)\n                    #FStar.Tactics.Typeclasses.solve\n                    ({\n                        Core_models.Ops.Range.f_start = mk_usize 0;\n                        Core_models.Ops.Range.f_end = item\n                      }\n                      <:\n                      Core_models.Ops.Range.t_Range usize)\n                  <:\n                  Core_models.Iter.Adapters.Rev.t_Rev (Core_models.Ops.Range.t_Range usize))\n              <:\n              Core_models.Iter.Adapters.Rev.t_Rev (Core_models.Ops.Range.t_Range usize))\n            acc\n            (fun acc i ->\n                let acc:usize = acc in\n                let i:usize = i in\n                let acc:usize = acc +! 
mk_usize 1 in\n                Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter\n                      #(Core_models.Iter.Adapters.Zip.t_Zip (Core_models.Slice.Iter.t_Iter usize)\n                          (Core_models.Ops.Range.t_Range usize))\n                      #FStar.Tactics.Typeclasses.solve\n                      (Core_models.Iter.Traits.Iterator.f_zip #(Core_models.Slice.Iter.t_Iter usize)\n                          #FStar.Tactics.Typeclasses.solve\n                          #(Core_models.Ops.Range.t_Range usize)\n                          (Core_models.Slice.impl__iter #usize\n                              (Alloc.Vec.impl_1__as_slice arr <: t_Slice usize)\n                            <:\n                            Core_models.Slice.Iter.t_Iter usize)\n                          ({\n                              Core_models.Ops.Range.f_start = mk_usize 4;\n                              Core_models.Ops.Range.f_end = i\n                            }\n                            <:\n                            Core_models.Ops.Range.t_Range usize)\n                        <:\n                        Core_models.Iter.Adapters.Zip.t_Zip (Core_models.Slice.Iter.t_Iter usize)\n                          (Core_models.Ops.Range.t_Range usize))\n                    <:\n                    Core_models.Iter.Adapters.Zip.t_Zip (Core_models.Slice.Iter.t_Iter usize)\n                      (Core_models.Ops.Range.t_Range usize))\n                  acc\n                  (fun acc j ->\n                      let acc:usize = acc in\n                      let j:(usize & usize) = j in\n                      (((acc +! item <: usize) +! i <: usize) +! j._1 <: usize) +! 
j._2 <: usize))\n          <:\n          usize)\n  in\n  acc\n\nlet pattern (arr: Alloc.Vec.t_Vec (usize & usize) Alloc.Alloc.t_Global) : usize =\n  let acc:usize = mk_usize 0 in\n  let acc:usize =\n    Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Alloc.Vec.t_Vec\n              (usize & usize) Alloc.Alloc.t_Global)\n          #FStar.Tactics.Typeclasses.solve\n          arr\n        <:\n        Alloc.Vec.Into_iter.t_IntoIter (usize & usize) Alloc.Alloc.t_Global)\n      acc\n      (fun acc temp_1_ ->\n          let acc:usize = acc in\n          let (x: usize), (y: usize) = temp_1_ in\n          acc +! (x *! y <: usize) <: usize)\n  in\n  acc\n\nlet enumerate_chunks (arr: Alloc.Vec.t_Vec usize Alloc.Alloc.t_Global) : usize =\n  let acc:usize = mk_usize 0 in\n  let acc:usize =\n    Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Iter.Adapters.Enumerate.t_Enumerate\n            (Core_models.Slice.Iter.t_Chunks usize))\n          #FStar.Tactics.Typeclasses.solve\n          (Core_models.Iter.Traits.Iterator.f_enumerate #(Core_models.Slice.Iter.t_Chunks usize)\n              #FStar.Tactics.Typeclasses.solve\n              (Core_models.Slice.impl__chunks #usize\n                  (Alloc.Vec.impl_1__as_slice arr <: t_Slice usize)\n                  (mk_usize 4)\n                <:\n                Core_models.Slice.Iter.t_Chunks usize)\n            <:\n            Core_models.Iter.Adapters.Enumerate.t_Enumerate (Core_models.Slice.Iter.t_Chunks usize))\n        <:\n        Core_models.Iter.Adapters.Enumerate.t_Enumerate (Core_models.Slice.Iter.t_Chunks usize))\n      acc\n      (fun acc temp_1_ ->\n          let acc:usize = acc in\n          let (i: usize), (chunk: t_Slice usize) = temp_1_ in\n          Rust_primitives.Hax.Folds.fold_enumerated_slice chunk\n            (fun acc temp_1_ ->\n                let acc:usize = acc in\n                let _:usize = temp_1_ in\n               
 true)\n            acc\n            (fun acc temp_1_ ->\n                let acc:usize = acc in\n                let (j: usize), (x: usize) = temp_1_ in\n                (i +! j <: usize) +! x <: usize)\n          <:\n          usize)\n  in\n  acc\n\nlet bool_returning (x: u8) : bool = x <. mk_u8 10\n\nlet f (_: Prims.unit) : (u8 & Prims.unit) =\n  let acc:u8 = mk_u8 0 in\n  Rust_primitives.Hax.Folds.fold_range (mk_u8 1)\n    (mk_u8 10)\n    (fun acc temp_1_ ->\n        let acc:u8 = acc in\n        let _:u8 = temp_1_ in\n        true)\n    acc\n    (fun acc i ->\n        let acc:u8 = acc in\n        let i:u8 = i in\n        let acc:u8 = acc +! i in\n        let _:bool = bool_returning i in\n        acc),\n  ()\n  <:\n  (u8 & Prims.unit)\n'''\n\"Loops.Recognized_loops.fst\" = '''\nmodule Loops.Recognized_loops\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet range (_: Prims.unit) : (u64 & Prims.unit) =\n  let count:u64 = mk_u64 0 in\n  Rust_primitives.Hax.Folds.fold_range (mk_u8 0)\n    (mk_u8 10)\n    (fun count i ->\n        let count:u64 = count in\n        let i:u8 = i in\n        i <=. mk_u8 10 <: bool)\n    count\n    (fun count i ->\n        let count:u64 = count in\n        let i:u8 = i in\n        let count:u64 = count +! mk_u64 1 in\n        count),\n  ()\n  <:\n  (u64 & Prims.unit)\n\nlet range_step_by (_: Prims.unit) : (u64 & Prims.unit) =\n  let count:u64 = mk_u64 0 in\n  Rust_primitives.Hax.Folds.fold_range_step_by (mk_u8 0)\n    (mk_u8 10)\n    (mk_usize 2)\n    (fun count i ->\n        let count:u64 = count in\n        let i:u8 = i in\n        i <=. mk_u8 10 <: bool)\n    count\n    (fun count i ->\n        let count:u64 = count in\n        let i:u8 = i in\n        let count:u64 = count +! 
mk_u64 1 in\n        count),\n  ()\n  <:\n  (u64 & Prims.unit)\n\nlet enumerated_slice (#v_T: Type0) (slice: t_Slice v_T) : (u64 & Prims.unit) =\n  let count:u64 = mk_u64 0 in\n  Rust_primitives.Hax.Folds.fold_enumerated_slice slice\n    (fun count i ->\n        let count:u64 = count in\n        let i:usize = i in\n        i <=. mk_usize 10 <: bool)\n    count\n    (fun count i ->\n        let count:u64 = count in\n        let i:(usize & v_T) = i in\n        let count:u64 = count +! mk_u64 2 in\n        count),\n  ()\n  <:\n  (u64 & Prims.unit)\n\nlet enumerated_chunked_slice (#v_T: Type0) (slice: t_Slice v_T) : (u64 & Prims.unit) =\n  let count:u64 = mk_u64 0 in\n  Rust_primitives.Hax.Folds.fold_enumerated_chunked_slice (mk_usize 3)\n    slice\n    (fun count i ->\n        let count:u64 = count in\n        let i:usize = i in\n        i <= Core_models.Slice.impl__len #v_T slice)\n    count\n    (fun count i ->\n        let count:u64 = count in\n        let i:(usize & t_Slice v_T) = i in\n        let count:u64 = count +! mk_u64 3 in\n        count),\n  ()\n  <:\n  (u64 & Prims.unit)\n'''\n\"Loops.While_loops.fst\" = '''\nmodule Loops.While_loops\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet f (_: Prims.unit) : u8 =\n  let x:u8 = mk_u8 0 in\n  let x:u8 =\n    Rust_primitives.Hax.while_loop (fun x ->\n          let x:u8 = x in\n          true)\n      (fun x ->\n          let x:u8 = x in\n          x <. mk_u8 10 <: bool)\n      (fun x ->\n          let x:u8 = x in\n          Rust_primitives.Hax.Int.from_machine (mk_u32 0) <: Hax_lib.Int.t_Int)\n      x\n      (fun x ->\n          let x:u8 = x in\n          let x:u8 = x +! mk_u8 3 in\n          x)\n  in\n  x +! mk_u8 12\n\nlet while_invariant_decr (_: Prims.unit) : u8 =\n  let x:u8 = mk_u8 0 in\n  let x:u8 =\n    Rust_primitives.Hax.while_loop (fun x ->\n          let x:u8 = x in\n          b2t (x <=. 
mk_u8 10 <: bool))\n      (fun x ->\n          let x:u8 = x in\n          x <. mk_u8 10 <: bool)\n      (fun x ->\n          let x:u8 = x in\n          Rust_primitives.Hax.Int.from_machine (mk_u8 10 -! x <: u8) <: Hax_lib.Int.t_Int)\n      x\n      (fun x ->\n          let x:u8 = x in\n          let x:u8 = x +! mk_u8 3 in\n          x)\n  in\n  x +! mk_u8 12\n\nlet while_invariant_decr_rev (_: Prims.unit) : u8 =\n  let x:u8 = mk_u8 0 in\n  let x:u8 =\n    Rust_primitives.Hax.while_loop (fun x ->\n          let x:u8 = x in\n          b2t (x <=. mk_u8 10 <: bool))\n      (fun x ->\n          let x:u8 = x in\n          x <. mk_u8 10 <: bool)\n      (fun x ->\n          let x:u8 = x in\n          Rust_primitives.Hax.Int.from_machine (mk_u8 10 -! x <: u8) <: Hax_lib.Int.t_Int)\n      x\n      (fun x ->\n          let x:u8 = x in\n          let x:u8 = x +! mk_u8 3 in\n          x)\n  in\n  x +! mk_u8 12\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__mut-ref-functionalization into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: mut-ref-functionalization\n    manifest: mut-ref-functionalization/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Mut_ref_functionalization.fst\" = '''\nmodule Mut_ref_functionalization\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_S = { f_b:t_Array u8 (mk_usize 5) }\n\nlet foo (lhs rhs: t_S) : t_S =\n  let lhs:t_S =\n    Rust_primitives.Hax.Folds.fold_range (mk_usize 0)\n      (mk_usize 1)\n      (fun lhs temp_1_ ->\n          let lhs:t_S = lhs in\n          let _:usize = temp_1_ in\n          true)\n      lhs\n      (fun lhs i ->\n          let lhs:t_S = lhs in\n          let i:usize = i in\n          {\n            lhs with\n            f_b\n            =\n            Rust_primitives.Hax.Monomorphized_update_at.update_at_usize lhs.f_b\n              i\n              ((lhs.f_b.[ i ] <: u8) +! 
(rhs.f_b.[ i ] <: u8) <: u8)\n            <:\n            t_Array u8 (mk_usize 5)\n          }\n          <:\n          t_S)\n  in\n  lhs\n\nlet impl_S__update (self: t_S) (x: u8) : t_S =\n  let self:t_S =\n    {\n      self with\n      f_b = Rust_primitives.Hax.Monomorphized_update_at.update_at_usize self.f_b (mk_usize 0) x\n    }\n    <:\n    t_S\n  in\n  self\n\nlet index_mutation (x: Core_models.Ops.Range.t_Range usize) (a: t_Slice u8) : Prims.unit =\n  let v:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Alloc.Slice.impl__into_vec #u8\n      #Alloc.Alloc.t_Global\n      ((let list = [mk_u8 1] in\n          FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n          Rust_primitives.Hax.array_of_list 1 list)\n        <:\n        t_Slice u8)\n  in\n  let v:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Alloc.Slice.impl__to_vec (Rust_primitives.Hax.Monomorphized_update_at.update_at_range (Alloc.Vec.impl_1__as_slice\n              v\n            <:\n            t_Slice u8)\n          x\n          (Core_models.Slice.impl__copy_from_slice #u8 (v.[ x ] <: t_Slice u8) a <: t_Slice u8)\n        <:\n        t_Slice u8)\n  in\n  let v:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Alloc.Slice.impl__to_vec (Rust_primitives.Hax.Monomorphized_update_at.update_at_usize (Alloc.Vec.impl_1__as_slice\n              v\n            <:\n            t_Slice u8)\n          (mk_usize 1)\n          (mk_u8 3)\n        <:\n        t_Slice u8)\n  in\n  ()\n\nlet index_mutation_unsize (x: t_Array u8 (mk_usize 12)) : u8 =\n  let x:t_Array u8 (mk_usize 12) =\n    Rust_primitives.Hax.Monomorphized_update_at.update_at_range x\n      ({ Core_models.Ops.Range.f_start = mk_usize 4; Core_models.Ops.Range.f_end = mk_usize 5 }\n        <:\n        Core_models.Ops.Range.t_Range usize)\n      (Core_models.Slice.impl__copy_from_slice #u8\n          (x.[ {\n                Core_models.Ops.Range.f_start = mk_usize 4;\n                Core_models.Ops.Range.f_end = mk_usize 5\n             
 }\n              <:\n              Core_models.Ops.Range.t_Range usize ]\n            <:\n            t_Slice u8)\n          ((let list = [mk_u8 1; mk_u8 2] in\n              FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n              Rust_primitives.Hax.array_of_list 2 list)\n            <:\n            t_Slice u8)\n        <:\n        t_Slice u8)\n  in\n  mk_u8 42\n\nlet build_vec (_: Prims.unit) : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n  Alloc.Slice.impl__into_vec #u8\n    #Alloc.Alloc.t_Global\n    ((let list = [mk_u8 1; mk_u8 2; mk_u8 3] in\n        FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3);\n        Rust_primitives.Hax.array_of_list 3 list)\n      <:\n      t_Slice u8)\n\nlet test_append (_: Prims.unit) : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n  let vec1:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Vec.impl__new #u8 () in\n  let vec2:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Alloc.Slice.impl__into_vec #u8\n      #Alloc.Alloc.t_Global\n      ((let list = [mk_u8 1; mk_u8 2; mk_u8 3] in\n          FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3);\n          Rust_primitives.Hax.array_of_list 3 list)\n        <:\n        t_Slice u8)\n  in\n  let\n  (tmp0: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global), (tmp1: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) =\n    Alloc.Vec.impl_1__append #u8 #Alloc.Alloc.t_Global vec1 vec2\n  in\n  let vec1:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = tmp0 in\n  let vec2:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = tmp1 in\n  let _:Prims.unit = () in\n  let vec1:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Alloc.Vec.impl_1__append #u8\n      #Alloc.Alloc.t_Global\n      vec1\n      (build_vec () <: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n  in\n  vec1\n\nlet f (_: Prims.unit) : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n  let vec:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global = Alloc.Vec.impl__new #u8 () in\n  let vec:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    
Alloc.Vec.impl_1__push #u8 #Alloc.Alloc.t_Global vec (mk_u8 1)\n  in\n  let vec:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Alloc.Vec.impl_1__push #u8 #Alloc.Alloc.t_Global vec (mk_u8 2)\n  in\n  let vec:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Alloc.Slice.impl__to_vec (Core_models.Slice.impl__swap #u8\n          (Alloc.Vec.impl_1__as_slice vec <: t_Slice u8)\n          (mk_usize 0)\n          (mk_usize 1)\n        <:\n        t_Slice u8)\n  in\n  let vec:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Alloc.Slice.impl__to_vec (Core_models.Slice.impl__swap #u8\n          (Alloc.Vec.impl_1__as_slice vec <: t_Slice u8)\n          (mk_usize 0)\n          (mk_usize 1)\n        <:\n        t_Slice u8)\n  in\n  vec\n\ntype t_Foo = { f_field:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global }\n\ntype t_Pair (v_T: Type0) = {\n  f_a:v_T;\n  f_b:t_Foo\n}\n\nlet g (x: t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global))\n    : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n  let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = x in\n  let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) =\n    Rust_primitives.Hax.Folds.fold_range (mk_u8 1)\n      (mk_u8 10)\n      (fun x temp_1_ ->\n          let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = x in\n          let _:u8 = temp_1_ in\n          true)\n      x\n      (fun x i ->\n          let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) = x in\n          let i:u8 = i in\n          {\n            x with\n            f_a\n            =\n            Alloc.Vec.impl_1__push #u8 #Alloc.Alloc.t_Global x.f_a i\n            <:\n            Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global\n          }\n          <:\n          t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global))\n  in\n  let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) =\n    {\n      x with\n      f_a\n      =\n      Alloc.Slice.impl__to_vec (Core_models.Slice.impl__swap #u8\n            (Alloc.Vec.impl_1__as_slice x.f_a <: t_Slice u8)\n            (mk_usize 0)\n        
    (mk_usize 1)\n          <:\n          t_Slice u8)\n    }\n    <:\n    t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n  in\n  let x:t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) =\n    {\n      x with\n      f_b\n      =\n      {\n        x.f_b with\n        f_field\n        =\n        Alloc.Slice.impl__to_vec (Core_models.Slice.impl__swap #u8\n              (Alloc.Vec.impl_1__as_slice x.f_b.f_field <: t_Slice u8)\n              (mk_usize 0)\n              (mk_usize 1)\n            <:\n            t_Slice u8)\n      }\n      <:\n      t_Foo\n    }\n    <:\n    t_Pair (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n  in\n  x.f_a\n\nlet h (x: u8) : u8 =\n  let x:u8 = x +! mk_u8 10 in\n  x\n\ntype t_Bar = {\n  f_a:u8;\n  f_b:u8\n}\n\nlet i (bar: t_Bar) : (t_Bar & u8) =\n  let bar:t_Bar = { bar with f_b = bar.f_b +! bar.f_a } <: t_Bar in\n  let bar:t_Bar = { bar with f_a = h bar.f_a } <: t_Bar in\n  let hax_temp_output:u8 = bar.f_a +! bar.f_b in\n  bar, hax_temp_output <: (t_Bar & u8)\n\nlet j (x: t_Bar) : (t_Bar & u8) =\n  let out:u8 = mk_u8 123 in\n  let (tmp0: t_Bar), (out1: u8) = i x in\n  let x:t_Bar = tmp0 in\n  let hax_temp_output:u8 = out1 +! out in\n  x, hax_temp_output <: (t_Bar & u8)\n\nlet k\n      (vec: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n      (arg_1_wild3: u16)\n      (arg_1_wild: u8)\n      (arg_3_wild2: Prims.unit)\n    : (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global & u16 & Prims.unit & u64) =\n  let arg_1_wild2:u8 = vec.[ mk_usize 1 ] in\n  let arg_3_wild:u8 = vec.[ mk_usize 2 ] in\n  let arg_1_wild1:u8 = vec.[ mk_usize 3 ] in\n  let arg_3_wild1:u8 = vec.[ mk_usize 4 ] in\n  let vec:Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Alloc.Slice.impl__to_vec (Rust_primitives.Hax.Monomorphized_update_at.update_at_usize (Alloc.Vec.impl_1__as_slice\n              vec\n            <:\n            t_Slice u8)\n          (mk_usize 0)\n          ((((arg_1_wild +! arg_3_wild <: u8) +! arg_1_wild1 <: u8) +! 
arg_3_wild1 <: u8) +!\n            arg_1_wild\n            <:\n            u8)\n        <:\n        t_Slice u8)\n  in\n  let hax_temp_output:u64 = mk_u64 12345 in\n  vec, arg_1_wild3, arg_3_wild2, hax_temp_output\n  <:\n  (Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global & u16 & Prims.unit & u64)\n\nclass t_FooTrait (v_Self: Type0) = {\n  f_z_pre:v_Self -> Type0;\n  f_z_post:v_Self -> v_Self -> Type0;\n  f_z:x0: v_Self -> Prims.Pure v_Self (f_z_pre x0) (fun result -> f_z_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_FooTrait_for_Foo: t_FooTrait t_Foo =\n  {\n    f_z_pre = (fun (self: t_Foo) -> true);\n    f_z_post = (fun (self: t_Foo) (out: t_Foo) -> true);\n    f_z = fun (self: t_Foo) -> self\n  }\n\nlet array (x: t_Array u8 (mk_usize 10)) : t_Array u8 (mk_usize 10) =\n  let x:t_Array u8 (mk_usize 10) =\n    Rust_primitives.Hax.Monomorphized_update_at.update_at_usize x\n      (mk_usize 1)\n      (x.[ mk_usize 2 ] <: u8)\n  in\n  x\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__naming into-coq.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: coq\n  info:\n    name: naming\n    manifest: naming/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 1\n[[stdout.diagnostics]]\nmessage = '''\n(Coq backend) something is not implemented yet.\n[ty] node str'''\nspans = ['Span { lo: Loc { line: 160, col: 0 }, hi: Loc { line: 160, col: 43 }, filename: Real(LocalPath(\"naming/src/lib.rs\")), rust_span_data: None }']\n\n[stdout.files]\n\"Naming.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Hacspec Require Import Hacspec_Lib MachineIntegers.\nFrom Coq Require Import ZArith.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nInductive t_Foo : Type :=\n| Foo_A : t_Foo\n| Foo_B : Foo_B -> t_Foo.\n\nDefinition impl__Foo__f (self : t_Foo_t) : t_Foo_t :=\n  Foo_At_Foo_t.\n\nInductive t_Foo2 : Type :=\n| Foo2_A : t_Foo2\n| Foo2_B : Foo2_B -> t_Foo2.\n\nClass t_FooTrait (Self : Type) := {\n  f_ASSOCIATED_CONSTANT : uint_size ;\n}.\n\nClass t_T1 (Self : Type) := {\n}.\n\n#[global] Instance t_Foo_t_t_T1 : t_T1 t_Foo_t := {\n}.\n\n#[global] Instance t_Foo_t × int8_t_T1 : t_T1 (t_Foo_t × int8) := {\n}.\n\nClass t_T2_for_a (Self : Type) := {\n}.\n\nClass t_T3_e_for_a (Self : Type) := {\n}.\n\n#[global] Instance t_Foo_t_t_T3_e_for_a : t_T3_e_for_a t_Foo_t := {\n}.\n\n(*Not implemented yet? todo(item)*)\n\n(*Not implemented yet? 
todo(item)*)\n\nDefinition v_INHERENT_CONSTANT : uint_size :=\n  (@repr WORDSIZE32 3).\n\nDefinition constants (_ : unit) : uint_size :=\n  f_ASSOCIATED_CONSTANT.+v_INHERENT_CONSTANT.\n\nDefinition ff__g (_ : unit) : unit :=\n  tt.\n\nInductive t_f__g__impl__g__Foo : Type :=\n| C_f__g__impl__g__Foo_A : t_f__g__impl__g__Foo\n| C_f__g__impl__g__Foo_B : C_f__g__impl__g__Foo_B -> t_f__g__impl__g__Foo.\n\nDefinition ff__g__impl_1__g (self : t_Foo_t) : uint_size :=\n  (@repr WORDSIZE32 1).\n\n(*Not implemented yet? todo(item)*)\n\nDefinition reserved_names (val : int8) (noeq : int8) (of : int8) : int8 :=\n  (val.+noeq).+of.\n\n(*item error backend*)\n\nRecord t_Arity1 (T : _) : Type := {\n  0 : T;\n}.\n\n#[global] Instance t_Arity1_t (t_Foo_t × int8)_t_T2_for_a : t_T2_for_a (t_Arity1_t (t_Foo_t × int8)) := {\n}.\n\nRecord t_B : Type := {\n}.\n\nDefinition impl__B__f (self : t_B_t) : t_B_t :=\n  Bt_B_t.\n\nRecord t_C : Type := {\n  f_x : uint_size;\n}.\n\nRecord t_Foobar : Type := {\n  f_a : t_Foo_t;\n}.\n\nRecord t_StructA : Type := {\n  f_a : uint_size;\n}.\n\nRecord t_StructB : Type := {\n  f_a : uint_size;\n  f_b : uint_size;\n}.\n\nRecord t_StructC : Type := {\n  f_a : uint_size;\n}.\n\nRecord t_StructD : Type := {\n  f_a : uint_size;\n  f_b : uint_size;\n}.\n\nRecord t_X : Type := {\n}.\n\nDefinition construct_structs (a : uint_size) (b : uint_size) : unit :=\n  let _ := Build_StructA (f_a := a) : t_StructA_t in\n  let _ := Build_StructB (f_a := a) (f_b := b) : t_StructB_t in\n  let _ := Build_StructC (f_a := a) : t_StructC_t in\n  let _ := Build_StructD (f_a := a) (f_b := b) : t_StructD_t in\n  tt.\n\nDefinition f (x : t_Foobar_t) : uint_size :=\n  ff__g__impl_1__g (f_a x).\n\nDefinition ff__g__impl__g (self : t_B_t) : uint_size :=\n  (@repr WORDSIZE32 0).\n\nDefinition mk_c (_ : unit) : t_C_t :=\n  let _ := Build_Foo_B (f_x := (@repr WORDSIZE32 3)) : t_Foo_t in\n  let _ := Xt_X_t : t_X_t in\n  Build_C (f_x := (@repr WORDSIZE32 
3)).\n'''\n\"Naming_Ambiguous_names.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Hacspec Require Import Hacspec_Lib MachineIntegers.\nFrom Coq Require Import ZArith.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\n(*Not implemented yet? todo(item)*)\n\nDefinition debug (label : int32) (value : int32) : unit :=\n  let _ := v__print (impl_2__new_v1 (array_from_list [[;\n      ] a=;\n      \n]) (array_from_list [impl_1__new_display label;\n      impl_1__new_display value])) : unit in\n  tt.\n\nDefinition f (_ : unit) : unit :=\n  let a_1 := (@repr WORDSIZE32 104) : int32 in\n  let a_2 := (@repr WORDSIZE32 205) : int32 in\n  let a_3 := (@repr WORDSIZE32 306) : int32 in\n  let a := (@repr WORDSIZE32 123) : int32 in\n  let _ := debug (@repr WORDSIZE32 3) a_3 : unit in\n  let _ := debug (@repr WORDSIZE32 2) a_2 : unit in\n  let _ := debug (@repr WORDSIZE32 1) a_1 : unit in\n  debug (@repr WORDSIZE32 4) a.\n\nDefinition ff_expand (_ : unit) : unit :=\n  let a := (@repr WORDSIZE32 104) : int32 in\n  let a := (@repr WORDSIZE32 205) : int32 in\n  let a := (@repr WORDSIZE32 306) : int32 in\n  let a := (@repr WORDSIZE32 123) : int32 in\n  let _ := debug (@repr WORDSIZE32 3) a : unit in\n  let _ := debug (@repr WORDSIZE32 2) a : unit in\n  let _ := debug (@repr WORDSIZE32 1) a : unit in\n  debug (@repr WORDSIZE32 0) a.\n'''\n\"Naming_F_G_Impl_1_G_Hello.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Hacspec Require Import Hacspec_Lib MachineIntegers.\nFrom Coq Require Import ZArith.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nDefinition h (_ : unit) : unit :=\n  tt.\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__naming into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: naming\n    manifest: naming/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Naming.Ambiguous_names.fst\" = '''\nmodule Naming.Ambiguous_names\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet debug (label value: u32) : Prims.unit =\n  let args:(u32 & u32) = label, value <: (u32 & u32) in\n  let args:t_Array Core_models.Fmt.Rt.t_Argument (mk_usize 2) =\n    let list =\n      [\n        Core_models.Fmt.Rt.impl__new_display #u32 args._1;\n        Core_models.Fmt.Rt.impl__new_display #u32 args._2\n      ]\n    in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n    Rust_primitives.Hax.array_of_list 2 list\n  in\n  let _:Prims.unit =\n    Std.Io.Stdio.e_print (Core_models.Fmt.Rt.impl_1__new_v1 (mk_usize 3)\n          (mk_usize 2)\n          (let list = [\"[\"; \"] a=\"; \"\\n\"] in\n            FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 3);\n            Rust_primitives.Hax.array_of_list 3 list)\n          args\n        <:\n        Core_models.Fmt.t_Arguments)\n  in\n  ()\n\n/// `f` stacks mutliple let bindings declaring different `a`s.\nlet f (_: Prims.unit) : Prims.unit =\n  let a_1_:u32 = mk_u32 104 in\n  let a_2_:u32 = mk_u32 205 in\n  let a_3_:u32 = mk_u32 306 in\n  let a:u32 = mk_u32 123 in\n  let _:Prims.unit = debug (mk_u32 3) a_3_ in\n  let _:Prims.unit = debug (mk_u32 2) a_2_ in\n  let _:Prims.unit = debug (mk_u32 1) a_1_ in\n  debug (mk_u32 4) a\n\n/// `f` is expanded into `f_expand` below, while the execution of `f` gives:\n/// ```plaintext\n///  [3] a=306\n///  [2] a=205\n///  [1] a=104\n/// 
 [last] a=123\n/// ```\nlet ff_expand (_: Prims.unit) : Prims.unit =\n  let a:i32 = mk_i32 104 in\n  let a:i32 = mk_i32 205 in\n  let a:i32 = mk_i32 306 in\n  let a:u32 = mk_u32 123 in\n  let _:Prims.unit = debug (mk_u32 3) a in\n  let _:Prims.unit = debug (mk_u32 2) a in\n  let _:Prims.unit = debug (mk_u32 1) a in\n  debug (mk_u32 0) a\n'''\n\"Naming.Functions_defined_in_trait_impls.fst\" = '''\nmodule Naming.Functions_defined_in_trait_impls\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_A = | A : t_A\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: Core_models.Cmp.t_PartialEq t_A t_A =\n  {\n    f_eq_pre = (fun (self: t_A) (other: t_A) -> true);\n    f_eq_post = (fun (self: t_A) (other: t_A) (out: bool) -> true);\n    f_eq\n    =\n    fun (self: t_A) (other: t_A) ->\n      Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic \"explicit panic\"\n          <:\n          Rust_primitives.Hax.t_Never)\n  }\n\ntype t_B = | B : t_B\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_1: Core_models.Cmp.t_PartialEq t_B t_B =\n  {\n    f_eq_pre = (fun (self: t_B) (other: t_B) -> true);\n    f_eq_post = (fun (self: t_B) (other: t_B) (out: bool) -> true);\n    f_eq\n    =\n    fun (self: t_B) (other: t_B) ->\n      Rust_primitives.Hax.never_to_any (Core_models.Panicking.panic \"explicit panic\"\n          <:\n          Rust_primitives.Hax.t_Never)\n  }\n'''\n\"Naming.fst\" = '''\nmodule Naming\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Foo =\n  | Foo_A : t_Foo\n  | Foo_B { f_x:usize }: t_Foo\n\ntype t_Foo2 =\n  | Foo2_A : t_Foo2\n  | Foo2_B { f_x:usize }: t_Foo2\n\ntype t_B = | B : t_B\n\ntype t_C = { f_x:usize }\n\ntype t_X = | X : t_X\n\nlet mk_c (_: Prims.unit) : t_C =\n  let _:t_Foo = Foo_B ({ f_x = mk_usize 3 }) <: t_Foo in\n  let _:t_X = X <: t_X in\n  { f_x = mk_usize 3 } <: t_C\n\nlet impl_Foo__f (self: t_Foo) : t_Foo = Foo_A <: t_Foo\n\nlet 
impl_B__f (self: t_B) : t_B = B <: t_B\n\ntype t_Foobar = { f_a:t_Foo }\n\nlet f__g (_: Prims.unit) : Prims.unit = ()\n\nlet f__g__impl_B__g (self: t_B) : usize = mk_usize 0\n\ntype f__g__impl_B__g__t_Foo =\n  | C_f__g__impl_B__g__Foo_A : f__g__impl_B__g__t_Foo\n  | C_f__g__impl_B__g__Foo_B { f__g__impl_B__g__f_x:usize }: f__g__impl_B__g__t_Foo\n\nlet f__g__impl_Foo__g (self: t_Foo) : usize = mk_usize 1\n\nlet f (x: t_Foobar) : usize = f__g__impl_Foo__g x.f_a\n\nlet f__g__impl_Foo__g__t_hello__h (_: Prims.unit) : Prims.unit = ()\n\nlet reserved_names (v_val v_noeq v_of: u8) : u8 = (v_val +! v_noeq <: u8) +! v_of\n\ntype t_Arity1 (v_T: Type0) = | Arity1 : v_T -> t_Arity1 v_T\n\nclass t_T1 (v_Self: Type0) = { __marker_trait_t_T1:Prims.unit }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_T1_for_Foo: t_T1 t_Foo = { __marker_trait_t_T1 = () }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_T1_for_tuple_Foo_u8: t_T1 (t_Foo & u8) = { __marker_trait_t_T1 = () }\n\nclass t_T2_for_a (v_Self: Type0) = { __marker_trait_t_T2_for_a:Prims.unit }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_T2_ee_for_a_for_Arity1_of_tuple_Foo_u8: t_T2_for_a (t_Arity1 (t_Foo & u8)) =\n  { __marker_trait_t_T2_for_a = () }\n\nclass t_T3_ee_for_a (v_Self: Type0) = { __marker_trait_t_T3_ee_for_a:Prims.unit }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_T3_ee_e_for_a_for_Foo: t_T3_ee_for_a t_Foo = { __marker_trait_t_T3_ee_for_a = () }\n\ntype t_StructA = { f_a:usize }\n\ntype t_StructB = {\n  f_a:usize;\n  f_b:usize\n}\n\ntype t_StructC = { f_a:usize }\n\ntype t_StructD = {\n  f_a:usize;\n  f_b:usize\n}\n\nlet construct_structs (a b: usize) : Prims.unit =\n  let _:t_StructA = { f_a = a } <: t_StructA in\n  let _:t_StructB = { f_a = a; f_b = b } <: t_StructB in\n  let _:t_StructC = { f_a = a } <: t_StructC in\n  let _:t_StructD = { f_a = a; f_b = b } <: t_StructD in\n  ()\n\nlet v_INHERENT_CONSTANT: usize = mk_usize 3\n\nclass t_FooTrait (v_Self: Type0) = { 
f_ASSOCIATED_CONSTANT:usize }\n\nlet constants\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_FooTrait v_T)\n      (_: Prims.unit)\n    : usize =\n  (f_ASSOCIATED_CONSTANT #FStar.Tactics.Typeclasses.solve <: usize) +! v_INHERENT_CONSTANT\n\n/// From issue https://github.com/hacspec/hax/issues/839\nlet string_shadows (v_string n: string) : Prims.unit = ()\n\n/// From issue https://github.com/cryspen/hax/issues/1450\nlet items_under_closures (_: Prims.unit) : Prims.unit =\n  let _: Prims.unit -> Prims.unit =\n    fun temp_0_ ->\n      let _:Prims.unit = temp_0_ in\n      ()\n  in\n  ()\n\nlet items_under_closures__anon_const_0__nested_function (_: Prims.unit) : Prims.unit = ()\n\ntype items_under_closures__anon_const_0__t_NestedStruct =\n  | C_items_under_closures__anon_const_0__NestedStruct : items_under_closures__anon_const_0__t_NestedStruct\n\nlet items_under_closures__nested_function (_: Prims.unit) : Prims.unit = ()\n\ntype items_under_closures__t_NestedStruct =\n  | C_items_under_closures__NestedStruct : items_under_closures__t_NestedStruct\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__pattern-or into-coq.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: coq\n  info:\n    name: pattern-or\n    manifest: pattern-or/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Pattern_or.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n(* NotImplementedYet *)\n\n\n\nInductive t_E : Type :=\n| E_A\n| E_B.\n\n\n\nDefinition t_E_cast_to_repr (x : t_E) : t_isize :=\n  match x with\n  | E_A =>\n    (0 : t_isize)\n  | E_B =>\n    (1 : t_isize)\n  end.\n\nDefinition bar (x : t_E) : unit :=\n  match x with\n  | E_A\n  | E_B =>\n    tt\n  end.\n\nDefinition nested (x : t_Option ((t_i32))) : t_i32 :=\n  match x with\n  | Option_Some (1\n  | 2) =>\n    (1 : t_i32)\n  | Option_Some (x) =>\n    x\n  | Option_None =>\n    (0 : t_i32)\n  end.\n\nDefinition deep (x : (t_i32*t_Option ((t_i32)))) : t_i32 :=\n  match x with\n  | (1\n  | 2,Option_Some (3\n  | 4)) =>\n    (0 : t_i32)\n  | (x,_) =>\n    x\n  end.\n\nDefinition equivalent (x : (t_i32*t_Option ((t_i32)))) : t_i32 :=\n  match x with\n  | (1,Option_Some (3))\n  | (1,Option_Some (4))\n  | (2,Option_Some (3))\n  | (2,Option_Some (4)) =>\n    (0 : t_i32)\n  | (x,_) =>\n    x\n  end.\n\nDefinition deep_capture (x : t_Result (((t_i32*t_i32))) (((t_i32*t_i32)))) : t_i32 :=\n  match x with\n  | Result_Ok ((1\n  | 2,x))\n  | Result_Err ((3\n  | 
4,x)) =>\n    x\n  | Result_Ok ((x,_))\n  | Result_Err ((x,_)) =>\n    x\n  end.\n'''\n_CoqProject = '''\n-R ./ TODO\n-arg -w\n-arg all\n\nPattern_or.v'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__pattern-or into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: pattern-or\n    manifest: pattern-or/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Pattern_or.fst\" = '''\nmodule Pattern_or\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_E =\n  | E_A : t_E\n  | E_B : t_E\n\nlet t_E_cast_to_repr (x: t_E) : isize =\n  match x <: t_E with\n  | E_A  -> mk_isize 0\n  | E_B  -> mk_isize 1\n\nlet bar (x: t_E) : Prims.unit = match x <: t_E with | E_A  | E_B  -> () <: Prims.unit\n\nlet nested (x: Core_models.Option.t_Option i32) : i32 =\n  match x <: Core_models.Option.t_Option i32 with\n  | Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 1)\n  | Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 2) -> mk_i32 1\n  | Core_models.Option.Option_Some x -> x\n  | Core_models.Option.Option_None  -> mk_i32 0\n\nlet deep (x: (i32 & Core_models.Option.t_Option i32)) : i32 =\n  match x <: (i32 & Core_models.Option.t_Option i32) with\n  | Rust_primitives.Integers.MkInt 1,\n  Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 3)\n  | Rust_primitives.Integers.MkInt 1,\n  Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 4)\n  | Rust_primitives.Integers.MkInt 2,\n  Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 3)\n  | Rust_primitives.Integers.MkInt 2,\n  Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 4) ->\n    mk_i32 0\n  | x, _ -> x\n\nlet equivalent (x: (i32 & Core_models.Option.t_Option i32)) : i32 =\n  match x <: (i32 & Core_models.Option.t_Option i32) 
with\n  | Rust_primitives.Integers.MkInt 1,\n  Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 3)\n  | Rust_primitives.Integers.MkInt 1,\n  Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 4)\n  | Rust_primitives.Integers.MkInt 2,\n  Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 3)\n  | Rust_primitives.Integers.MkInt 2,\n  Core_models.Option.Option_Some (Rust_primitives.Integers.MkInt 4) ->\n    mk_i32 0\n  | x, _ -> x\n\nlet deep_capture (x: Core_models.Result.t_Result (i32 & i32) (i32 & i32)) : i32 =\n  match x <: Core_models.Result.t_Result (i32 & i32) (i32 & i32) with\n  | Core_models.Result.Result_Ok (Rust_primitives.Integers.MkInt 1, x)\n  | Core_models.Result.Result_Ok (Rust_primitives.Integers.MkInt 2, x)\n  | Core_models.Result.Result_Err (Rust_primitives.Integers.MkInt 3, x)\n  | Core_models.Result.Result_Err (Rust_primitives.Integers.MkInt 4, x) -> x\n  | Core_models.Result.Result_Ok (x, _) | Core_models.Result.Result_Err (x, _) -> x\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__patterns into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: patterns\n    manifest: patterns/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Patterns.fst\" = '''\nmodule Patterns\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Other = | Other : i32 -> t_Other\n\ntype t_Test = | Test_C1 : t_Other -> t_Test\n\nlet impl__test (self: t_Test) : i32 = match self <: t_Test with | Test_C1 c -> c._0\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__recursion into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: recursion\n    manifest: recursion/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Recursion.fst\" = '''\nmodule Recursion\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet rec f (n: u8) : u8 = if n =. mk_u8 0 then mk_u8 0 else n +! (f (n -! mk_u8 1 <: u8) <: u8)\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__reordering into-coq.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: coq\n  info:\n    name: reordering\n    manifest: reordering/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Reordering.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n(* NotImplementedYet *)\n\n\n\nDefinition no_dependency_1_ '(_ : unit) : unit :=\n  tt.\n\nDefinition no_dependency_2_ '(_ : unit) : unit :=\n  tt.\n\nInductive t_Foo : Type :=\n| Foo_A\n| Foo_B.\n\n\n\nDefinition f '(_ : t_u32) : t_Foo :=\n  Foo_A.\n\nRecord Bar_record : Type :=\n  {\n    Bar_0 : t_Foo;\n  }.\n\n\n#[export] Instance settable_Bar_record : Settable _ :=\n  settable! 
(Build_Bar_record) <Bar_0>.\nNotation \"'Bar_Bar_record'\" := Build_Bar_record.\n\nDefinition g '(_ : unit) : t_Bar :=\n  Bar (f ((32 : t_u32))).\n\nDefinition t_Foo_cast_to_repr (x : t_Foo) : t_isize :=\n  match x with\n  | Foo_A =>\n    (0 : t_isize)\n  | Foo_B =>\n    (1 : t_isize)\n  end.\n'''\n\"Reordering_Independent_cycles.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n(* NotImplementedYet *)\n\nDefinition c '(_ : unit) : unit :=\n  a (tt).\n\nDefinition a '(_ : unit) : unit :=\n  c (tt).\n\nDefinition d '(_ : unit) : unit :=\n  b (tt).\n\nDefinition b '(_ : unit) : unit :=\n  d (tt).\n'''\n\"Reordering_Mut_rec.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n(* NotImplementedYet *)\n\nDefinition g '(_ : unit) : unit :=\n  f (tt).\n\nDefinition f '(_ : unit) : unit :=\n  g (tt).\n\nDefinition ff_2_ '(_ : unit) : unit :=\n  f (tt).\n'''\n_CoqProject = '''\n-R ./ TODO\n-arg -w\n-arg all\n\nReordering_Mut_rec.v\nReordering_Independent_cycles.v\nReordering.v'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__reordering into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: reordering\n    manifest: reordering/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Reordering.Independent_cycles.fst\" = '''\nmodule Reordering.Independent_cycles\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet rec c (_: Prims.unit) : Prims.unit = a ()\n\nand a (_: Prims.unit) : Prims.unit = c ()\n\nlet rec d (_: Prims.unit) : Prims.unit = b ()\n\nand b (_: Prims.unit) : Prims.unit = d ()\n'''\n\"Reordering.Mut_rec.fst\" = '''\nmodule Reordering.Mut_rec\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet rec g (_: Prims.unit) : Prims.unit = f ()\n\nand f (_: Prims.unit) : Prims.unit = g ()\n\nlet ff_2_ (_: Prims.unit) : Prims.unit = f ()\n'''\n\"Reordering.fst\" = '''\nmodule Reordering\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet no_dependency_1_ (_: Prims.unit) : Prims.unit = ()\n\nlet no_dependency_2_ (_: Prims.unit) : Prims.unit = ()\n\ntype t_Foo =\n  | Foo_A : t_Foo\n  | Foo_B : t_Foo\n\nlet f (_: u32) : t_Foo = Foo_A <: t_Foo\n\ntype t_Bar = | Bar : t_Foo -> t_Bar\n\nlet g (_: Prims.unit) : t_Bar = Bar (f (mk_u32 32)) <: t_Bar\n\nlet t_Foo_cast_to_repr (x: t_Foo) : isize =\n  match x <: t_Foo with\n  | Foo_A  -> mk_isize 0\n  | Foo_B  -> mk_isize 1\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__reordering into-ssprove.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: ssprove\n  info:\n    name: reordering\n    manifest: reordering/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Reordering.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp Require Import word_ssrZ word.\n(* From Jasmin Require Import word. *)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? 
todo(item)*)\n\nEquations no_dependency_1_ (_ : both 'unit) : both 'unit :=\n  no_dependency_1_ _  :=\n    ret_both (tt : 'unit) : both 'unit.\nFail Next Obligation.\n\nEquations no_dependency_2_ (_ : both 'unit) : both 'unit :=\n  no_dependency_2_ _  :=\n    ret_both (tt : 'unit) : both 'unit.\nFail Next Obligation.\n\nDefinition t_Foo : choice_type :=\n  ('unit ∐ 'unit).\nNotation \"'Foo_A_case'\" := (inl tt) (at level 100).\nEquations Foo_A : both t_Foo :=\n  Foo_A  :=\n    ret_both (inl (tt : 'unit) : t_Foo) : both t_Foo.\nFail Next Obligation.\nNotation \"'Foo_B_case'\" := (inr tt) (at level 100).\nEquations Foo_B : both t_Foo :=\n  Foo_B  :=\n    ret_both (inr (tt : 'unit) : t_Foo) : both t_Foo.\nFail Next Obligation.\n\nEquations f (_ : both int32) : both t_Foo :=\n  f _  :=\n    Foo_A : both t_Foo.\nFail Next Obligation.\n\nDefinition t_Bar : choice_type :=\n  (t_Foo).\nEquations 0 (s : both t_Bar) : both t_Foo :=\n  0 s  :=\n    bind_both s (fun x =>\n      ret_both (x : t_Foo)) : both t_Foo.\nFail Next Obligation.\nEquations Build_t_Bar {0 : both t_Foo} : both (t_Bar) :=\n  Build_t_Bar  :=\n    bind_both 0 (fun 0 =>\n      ret_both ((0) : (t_Bar))) : both (t_Bar).\nFail Next Obligation.\nNotation \"'Build_t_Bar' '[' x ']' '(' '0' ':=' y ')'\" := (Build_t_Bar (0 := y)).\n\nEquations g (_ : both 'unit) : both t_Bar :=\n  g _  :=\n    Bar (f (ret_both (32 : int32))) : both t_Bar.\nFail Next Obligation.\n\nEquations t_Foo_cast_to_repr (x : both t_Foo) : both uint_size :=\n  t_Foo_cast_to_repr x  :=\n    matchb x with\n    | Foo_A_case  =>\n      ret_both (0 : uint_size)\n    | Foo_B_case  =>\n      ret_both (1 : uint_size)\n    end : both uint_size.\nFail Next Obligation.\n'''\n\"Reordering_Independent_cycles.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp 
Require Import word_ssrZ word.\n(* From Jasmin Require Import word. *)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? todo(item)*)\n\nEquations c (_ : both 'unit) : both 'unit :=\n  c _  :=\n    a : both 'unit.\nFail Next Obligation.\n\nEquations a (_ : both 'unit) : both 'unit :=\n  a _  :=\n    c : both 'unit.\nFail Next Obligation.\n\nEquations d (_ : both 'unit) : both 'unit :=\n  d _  :=\n    b : both 'unit.\nFail Next Obligation.\n\nEquations b (_ : both 'unit) : both 'unit :=\n  b _  :=\n    d : both 'unit.\nFail Next Obligation.\n'''\n\"Reordering_Mut_rec.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp Require Import word_ssrZ word.\n(* From Jasmin Require Import word. 
*)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? todo(item)*)\n\nEquations g (_ : both 'unit) : both 'unit :=\n  g _  :=\n    f : both 'unit.\nFail Next Obligation.\n\nEquations f (_ : both 'unit) : both 'unit :=\n  f _  :=\n    g : both 'unit.\nFail Next Obligation.\n\nEquations ff_2_ (_ : both 'unit) : both 'unit :=\n  ff_2_ _  :=\n    f : both 'unit.\nFail Next Obligation.\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__side-effects into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: side-effects\n    manifest: side-effects/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Side_effects.Issue_1083_.fst\" = '''\nmodule Side_effects.Issue_1083_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_MyFrom (v_Self: Type0) (v_T: Type0) = {\n  f_my_from_pre:v_T -> Type0;\n  f_my_from_post:v_T -> v_Self -> Type0;\n  f_my_from:x0: v_T -> Prims.Pure v_Self (f_my_from_pre x0) (fun result -> f_my_from_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: t_MyFrom u16 u8 =\n  {\n    f_my_from_pre = (fun (x: u8) -> true);\n    f_my_from_post = (fun (x: u8) (out: u16) -> true);\n    f_my_from = fun (x: u8) -> cast (x <: u8) <: u16\n  }\n\nlet f (x: u8) : Core_models.Result.t_Result u16 u16 =\n  match Core_models.Result.Result_Err (mk_u8 1) <: Core_models.Result.t_Result Prims.unit u8 with\n  | Core_models.Result.Result_Ok _ ->\n    Core_models.Result.Result_Ok (f_my_from #u16 #u8 #FStar.Tactics.Typeclasses.solve x)\n    <:\n    Core_models.Result.t_Result u16 u16\n  | Core_models.Result.Result_Err err ->\n    Core_models.Result.Result_Err\n    (Core_models.Convert.f_from #u16 #u8 #FStar.Tactics.Typeclasses.solve err)\n    <:\n    Core_models.Result.t_Result u16 u16\n'''\n\"Side_effects.Issue_1089_.fst\" = '''\nmodule Side_effects.Issue_1089_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet test (x y: Core_models.Option.t_Option i32) : Core_models.Option.t_Option i32 =\n  match\n    
Core_models.Option.impl__map #i32\n      #(Core_models.Option.t_Option i32)\n      #(i32 -> Core_models.Option.t_Option i32)\n      x\n      (fun i ->\n          let i:i32 = i in\n          match y <: Core_models.Option.t_Option i32 with\n          | Core_models.Option.Option_Some hoist38 ->\n            Core_models.Option.Option_Some (i +! hoist38 <: i32) <: Core_models.Option.t_Option i32\n          | Core_models.Option.Option_None  ->\n            Core_models.Option.Option_None <: Core_models.Option.t_Option i32)\n    <:\n    Core_models.Option.t_Option (Core_models.Option.t_Option i32)\n  with\n  | Core_models.Option.Option_Some some -> some\n  | Core_models.Option.Option_None  ->\n    Core_models.Option.Option_None <: Core_models.Option.t_Option i32\n'''\n\"Side_effects.Issue_1299_.fst\" = '''\nmodule Side_effects.Issue_1299_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Foo = { f_y:u8 }\n\ntype t_S = { f_g:t_Foo }\n\ntype t_OtherS = { f_g:Core_models.Option.t_Option t_Foo }\n\nlet impl_Foo__from (i: t_Foo) : t_Foo =\n  { f_y = Core_models.Clone.f_clone #u8 #FStar.Tactics.Typeclasses.solve i.f_y } <: t_Foo\n\ntype t_Error = | Error : t_Error\n\nlet impl_S__from (i: t_OtherS) : Core_models.Result.t_Result t_S t_Error =\n  match\n    Core_models.Option.impl__ok_or #t_Foo\n      #t_Error\n      (Core_models.Option.impl__as_ref #t_Foo i.f_g <: Core_models.Option.t_Option t_Foo)\n      (Error <: t_Error)\n    <:\n    Core_models.Result.t_Result t_Foo t_Error\n  with\n  | Core_models.Result.Result_Ok hoist47 ->\n    Core_models.Result.Result_Ok ({ f_g = impl_Foo__from hoist47 } <: t_S)\n    <:\n    Core_models.Result.t_Result t_S t_Error\n  | Core_models.Result.Result_Err err ->\n    Core_models.Result.Result_Err err <: Core_models.Result.t_Result t_S t_Error\n'''\n\"Side_effects.Issue_1300_.fst\" = '''\nmodule Side_effects.Issue_1300_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen 
Core_models\n\nlet v_fun (_: Prims.unit) : Core_models.Result.t_Result Prims.unit u8 =\n  match\n    Core_models.Iter.Traits.Iterator.f_collect #(Core_models.Iter.Adapters.Map.t_Map\n          (Core_models.Slice.Iter.t_Iter u8)\n          (u8 -> Core_models.Result.t_Result (u8 & t_Array u8 (mk_usize 32)) u8))\n      #FStar.Tactics.Typeclasses.solve\n      #(Core_models.Result.t_Result\n          (Alloc.Vec.t_Vec (u8 & t_Array u8 (mk_usize 32)) Alloc.Alloc.t_Global) u8)\n      (Core_models.Iter.Traits.Iterator.f_map #(Core_models.Slice.Iter.t_Iter u8)\n          #FStar.Tactics.Typeclasses.solve\n          #(Core_models.Result.t_Result (u8 & t_Array u8 (mk_usize 32)) u8)\n          #(u8 -> Core_models.Result.t_Result (u8 & t_Array u8 (mk_usize 32)) u8)\n          (Core_models.Slice.impl__iter #u8\n              (Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 5) <: t_Slice u8)\n            <:\n            Core_models.Slice.Iter.t_Iter u8)\n          (fun prev ->\n              let prev:u8 = prev in\n              match\n                Core_models.Result.Result_Ok\n                (Rust_primitives.Hax.repeat (mk_u8 0) (mk_usize 32) <: t_Array u8 (mk_usize 32))\n                <:\n                Core_models.Result.t_Result (t_Array u8 (mk_usize 32)) u8\n              with\n              | Core_models.Result.Result_Ok hoist45 ->\n                Core_models.Result.Result_Ok (prev, hoist45 <: (u8 & t_Array u8 (mk_usize 32)))\n                <:\n                Core_models.Result.t_Result (u8 & t_Array u8 (mk_usize 32)) u8\n              | Core_models.Result.Result_Err err ->\n                Core_models.Result.Result_Err err\n                <:\n                Core_models.Result.t_Result (u8 & t_Array u8 (mk_usize 32)) u8)\n        <:\n        Core_models.Iter.Adapters.Map.t_Map (Core_models.Slice.Iter.t_Iter u8)\n          (u8 -> Core_models.Result.t_Result (u8 & t_Array u8 (mk_usize 32)) u8))\n    <:\n    Core_models.Result.t_Result\n      (Alloc.Vec.t_Vec (u8 & 
t_Array u8 (mk_usize 32)) Alloc.Alloc.t_Global) u8\n  with\n  | Core_models.Result.Result_Ok v_val ->\n    Core_models.Result.Result_Ok (() <: Prims.unit) <: Core_models.Result.t_Result Prims.unit u8\n  | Core_models.Result.Result_Err err ->\n    Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit u8\n'''\n\"Side_effects.Nested_return.fst\" = '''\nmodule Side_effects.Nested_return\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet other_fun (rng: i8) : (i8 & Core_models.Result.t_Result Prims.unit Prims.unit) =\n  let hax_temp_output:Core_models.Result.t_Result Prims.unit Prims.unit =\n    Core_models.Result.Result_Ok (() <: Prims.unit)\n    <:\n    Core_models.Result.t_Result Prims.unit Prims.unit\n  in\n  rng, hax_temp_output <: (i8 & Core_models.Result.t_Result Prims.unit Prims.unit)\n\nlet v_fun (rng: i8) : (i8 & Core_models.Result.t_Result Prims.unit Prims.unit) =\n  let (tmp0: i8), (out: Core_models.Result.t_Result Prims.unit Prims.unit) = other_fun rng in\n  let rng:i8 = tmp0 in\n  match out <: Core_models.Result.t_Result Prims.unit Prims.unit with\n  | Core_models.Result.Result_Ok hoist41 ->\n    rng, (Core_models.Result.Result_Ok hoist41 <: Core_models.Result.t_Result Prims.unit Prims.unit)\n    <:\n    (i8 & Core_models.Result.t_Result Prims.unit Prims.unit)\n  | Core_models.Result.Result_Err err ->\n    rng, (Core_models.Result.Result_Err err <: Core_models.Result.t_Result Prims.unit Prims.unit)\n    <:\n    (i8 & Core_models.Result.t_Result Prims.unit Prims.unit)\n'''\n\"Side_effects.fst\" = '''\nmodule Side_effects\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\n/// Helper function\nlet add3 (x y z: u32) : u32 =\n  Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add x y <: u32) z\n\n/// Exercise local mutation with control flow and loops\nlet local_mutation (x: u32) : u32 =\n  let y:u32 = mk_u32 0 in\n  let x:u32 = 
Core_models.Num.impl_u32__wrapping_add x (mk_u32 1) in\n  if x >. mk_u32 3\n  then\n    let x:u32 = Core_models.Num.impl_u32__wrapping_sub x (mk_u32 3) in\n    let y:u32 = x /! mk_u32 2 in\n    let y:u32 = Core_models.Num.impl_u32__wrapping_add y (mk_u32 2) in\n    let y:u32 =\n      Core_models.Iter.Traits.Iterator.f_fold (Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Ops.Range.t_Range\n              u32)\n            #FStar.Tactics.Typeclasses.solve\n            ({ Core_models.Ops.Range.f_start = mk_u32 0; Core_models.Ops.Range.f_end = mk_u32 10 }\n              <:\n              Core_models.Ops.Range.t_Range u32)\n          <:\n          Core_models.Ops.Range.t_Range u32)\n        y\n        (fun y i ->\n            let y:u32 = y in\n            let i:u32 = i in\n            Core_models.Num.impl_u32__wrapping_add x i <: u32)\n    in\n    Core_models.Num.impl_u32__wrapping_add x y\n  else\n    let ((x: u32), (y: u32)), (hoist7: u32) =\n      match x <: u32 with\n      | Rust_primitives.Integers.MkInt 12 ->\n        let y:u32 = Core_models.Num.impl_u32__wrapping_add x y in\n        (x, y <: (u32 & u32)), mk_u32 3 <: ((u32 & u32) & u32)\n      | Rust_primitives.Integers.MkInt 13 ->\n        let x:u32 = Core_models.Num.impl_u32__wrapping_add x (mk_u32 1) in\n        (x, y <: (u32 & u32)),\n        add3 x (Core_models.Num.impl_u32__wrapping_add (mk_u32 123) x <: u32) x\n        <:\n        ((u32 & u32) & u32)\n      | _ -> (x, y <: (u32 & u32)), mk_u32 0 <: ((u32 & u32) & u32)\n    in\n    let x:u32 = hoist7 in\n    Core_models.Num.impl_u32__wrapping_add x y\n\n/// Exercise early returns with control flow and loops\nlet early_returns (x: u32) : u32 =\n  if x >. mk_u32 3\n  then mk_u32 0\n  else\n    if x >. 
mk_u32 30\n    then\n      match true <: bool with\n      | true -> mk_u32 34\n      | _ ->\n        let (x: u32), (hoist11: u32) = x, mk_u32 3 <: (u32 & u32) in\n        Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add (mk_u32 123)\n              hoist11\n            <:\n            u32)\n          x\n    else\n      let x:u32 = x +! mk_u32 9 in\n      let (x: u32), (hoist11: u32) = x, x +! mk_u32 1 <: (u32 & u32) in\n      Core_models.Num.impl_u32__wrapping_add (Core_models.Num.impl_u32__wrapping_add (mk_u32 123)\n            hoist11\n          <:\n          u32)\n        x\n\nlet simplifiable_return (c1 c2 c3: bool) : i32 =\n  let x:i32 = mk_i32 0 in\n  if c1\n  then\n    if c2\n    then\n      let x:i32 = x +! mk_i32 10 in\n      if c3 then mk_i32 1 else x +! mk_i32 1\n    else x +! mk_i32 1\n  else x\n\nlet simplifiable_question_mark (c: bool) (x: Core_models.Option.t_Option i32)\n    : Core_models.Option.t_Option i32 =\n  if c\n  then\n    match x <: Core_models.Option.t_Option i32 with\n    | Core_models.Option.Option_Some hoist16 ->\n      let a:i32 = hoist16 +! mk_i32 10 in\n      let b:i32 = mk_i32 20 in\n      Core_models.Option.Option_Some (a +! b) <: Core_models.Option.t_Option i32\n    | Core_models.Option.Option_None  ->\n      Core_models.Option.Option_None <: Core_models.Option.t_Option i32\n  else\n    let a:i32 = mk_i32 0 in\n    let b:i32 = mk_i32 20 in\n    Core_models.Option.Option_Some (a +! 
b) <: Core_models.Option.t_Option i32\n\n/// Question mark without error coercion\nlet direct_result_question_mark (y: Core_models.Result.t_Result Prims.unit u32)\n    : Core_models.Result.t_Result i8 u32 =\n  match y <: Core_models.Result.t_Result Prims.unit u32 with\n  | Core_models.Result.Result_Ok _ ->\n    Core_models.Result.Result_Ok (mk_i8 0) <: Core_models.Result.t_Result i8 u32\n  | Core_models.Result.Result_Err err ->\n    Core_models.Result.Result_Err err <: Core_models.Result.t_Result i8 u32\n\n/// Question mark with an error coercion\nlet direct_result_question_mark_coercion (y: Core_models.Result.t_Result i8 u16)\n    : Core_models.Result.t_Result i8 u32 =\n  match y <: Core_models.Result.t_Result i8 u16 with\n  | Core_models.Result.Result_Ok hoist17 ->\n    Core_models.Result.Result_Ok hoist17 <: Core_models.Result.t_Result i8 u32\n  | Core_models.Result.Result_Err err ->\n    Core_models.Result.Result_Err\n    (Core_models.Convert.f_from #u32 #u16 #FStar.Tactics.Typeclasses.solve err)\n    <:\n    Core_models.Result.t_Result i8 u32\n\n/// Test question mark on `Option`s with some control flow\nlet options (x y: Core_models.Option.t_Option u8) (z: Core_models.Option.t_Option u64)\n    : Core_models.Option.t_Option u8 =\n  match x <: Core_models.Option.t_Option u8 with\n  | Core_models.Option.Option_Some hoist21 ->\n    if hoist21 >. 
mk_u8 10\n    then\n      match x <: Core_models.Option.t_Option u8 with\n      | Core_models.Option.Option_Some hoist23 ->\n        (match\n            Core_models.Option.Option_Some (Core_models.Num.impl_u8__wrapping_add hoist23 (mk_u8 3))\n            <:\n            Core_models.Option.t_Option u8\n          with\n          | Core_models.Option.Option_Some hoist29 ->\n            (match hoist29 <: u8 with\n              | Rust_primitives.Integers.MkInt 3 ->\n                (match Core_models.Option.Option_None <: Core_models.Option.t_Option u8 with\n                  | Core_models.Option.Option_Some some ->\n                    let v:u8 = some in\n                    (match x <: Core_models.Option.t_Option u8 with\n                      | Core_models.Option.Option_Some hoist30 ->\n                        (match y <: Core_models.Option.t_Option u8 with\n                          | Core_models.Option.Option_Some hoist31 ->\n                            Core_models.Option.Option_Some\n                            (Core_models.Num.impl_u8__wrapping_add (Core_models.Num.impl_u8__wrapping_add\n                                    v\n                                    hoist30\n                                  <:\n                                  u8)\n                                hoist31)\n                            <:\n                            Core_models.Option.t_Option u8\n                          | Core_models.Option.Option_None  ->\n                            Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                      | Core_models.Option.Option_None  ->\n                        Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                  | Core_models.Option.Option_None  ->\n                    Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n              | Rust_primitives.Integers.MkInt 4 ->\n                (match z <: Core_models.Option.t_Option u64 with\n                  | 
Core_models.Option.Option_Some hoist18 ->\n                    let v:u8 =\n                      mk_u8 4 +! (if hoist18 >. mk_u64 4 <: bool then mk_u8 0 else mk_u8 3)\n                    in\n                    (match x <: Core_models.Option.t_Option u8 with\n                      | Core_models.Option.Option_Some hoist30 ->\n                        (match y <: Core_models.Option.t_Option u8 with\n                          | Core_models.Option.Option_Some hoist31 ->\n                            Core_models.Option.Option_Some\n                            (Core_models.Num.impl_u8__wrapping_add (Core_models.Num.impl_u8__wrapping_add\n                                    v\n                                    hoist30\n                                  <:\n                                  u8)\n                                hoist31)\n                            <:\n                            Core_models.Option.t_Option u8\n                          | Core_models.Option.Option_None  ->\n                            Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                      | Core_models.Option.Option_None  ->\n                        Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                  | Core_models.Option.Option_None  ->\n                    Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n              | _ ->\n                let v:u8 = mk_u8 12 in\n                match x <: Core_models.Option.t_Option u8 with\n                | Core_models.Option.Option_Some hoist30 ->\n                  (match y <: Core_models.Option.t_Option u8 with\n                    | Core_models.Option.Option_Some hoist31 ->\n                      Core_models.Option.Option_Some\n                      (Core_models.Num.impl_u8__wrapping_add (Core_models.Num.impl_u8__wrapping_add v\n                              hoist30\n                            <:\n                            u8)\n                          
hoist31)\n                      <:\n                      Core_models.Option.t_Option u8\n                    | Core_models.Option.Option_None  ->\n                      Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                | Core_models.Option.Option_None  ->\n                  Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n          | Core_models.Option.Option_None  ->\n            Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n      | Core_models.Option.Option_None  ->\n        Core_models.Option.Option_None <: Core_models.Option.t_Option u8\n    else\n      (match x <: Core_models.Option.t_Option u8 with\n        | Core_models.Option.Option_Some hoist26 ->\n          (match y <: Core_models.Option.t_Option u8 with\n            | Core_models.Option.Option_Some hoist25 ->\n              (match\n                  Core_models.Option.Option_Some\n                  (Core_models.Num.impl_u8__wrapping_add hoist26 hoist25)\n                  <:\n                  Core_models.Option.t_Option u8\n                with\n                | Core_models.Option.Option_Some hoist29 ->\n                  (match hoist29 <: u8 with\n                    | Rust_primitives.Integers.MkInt 3 ->\n                      (match Core_models.Option.Option_None <: Core_models.Option.t_Option u8 with\n                        | Core_models.Option.Option_Some some ->\n                          let v:u8 = some in\n                          (match x <: Core_models.Option.t_Option u8 with\n                            | Core_models.Option.Option_Some hoist30 ->\n                              (match y <: Core_models.Option.t_Option u8 with\n                                | Core_models.Option.Option_Some hoist31 ->\n                                  Core_models.Option.Option_Some\n                                  (Core_models.Num.impl_u8__wrapping_add (Core_models.Num.impl_u8__wrapping_add\n                                          
v\n                                          hoist30\n                                        <:\n                                        u8)\n                                      hoist31)\n                                  <:\n                                  Core_models.Option.t_Option u8\n                                | Core_models.Option.Option_None  ->\n                                  Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                            | Core_models.Option.Option_None  ->\n                              Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                        | Core_models.Option.Option_None  ->\n                          Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                    | Rust_primitives.Integers.MkInt 4 ->\n                      (match z <: Core_models.Option.t_Option u64 with\n                        | Core_models.Option.Option_Some hoist18 ->\n                          let v:u8 =\n                            mk_u8 4 +! (if hoist18 >. 
mk_u64 4 <: bool then mk_u8 0 else mk_u8 3)\n                          in\n                          (match x <: Core_models.Option.t_Option u8 with\n                            | Core_models.Option.Option_Some hoist30 ->\n                              (match y <: Core_models.Option.t_Option u8 with\n                                | Core_models.Option.Option_Some hoist31 ->\n                                  Core_models.Option.Option_Some\n                                  (Core_models.Num.impl_u8__wrapping_add (Core_models.Num.impl_u8__wrapping_add\n                                          v\n                                          hoist30\n                                        <:\n                                        u8)\n                                      hoist31)\n                                  <:\n                                  Core_models.Option.t_Option u8\n                                | Core_models.Option.Option_None  ->\n                                  Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                            | Core_models.Option.Option_None  ->\n                              Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                        | Core_models.Option.Option_None  ->\n                          Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                    | _ ->\n                      let v:u8 = mk_u8 12 in\n                      match x <: Core_models.Option.t_Option u8 with\n                      | Core_models.Option.Option_Some hoist30 ->\n                        (match y <: Core_models.Option.t_Option u8 with\n                          | Core_models.Option.Option_Some hoist31 ->\n                            Core_models.Option.Option_Some\n                            (Core_models.Num.impl_u8__wrapping_add (Core_models.Num.impl_u8__wrapping_add\n                                    v\n                                    hoist30\n          
                        <:\n                                  u8)\n                                hoist31)\n                            <:\n                            Core_models.Option.t_Option u8\n                          | Core_models.Option.Option_None  ->\n                            Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                      | Core_models.Option.Option_None  ->\n                        Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n                | Core_models.Option.Option_None  ->\n                  Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n            | Core_models.Option.Option_None  ->\n              Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n        | Core_models.Option.Option_None  ->\n          Core_models.Option.Option_None <: Core_models.Option.t_Option u8)\n  | Core_models.Option.Option_None  ->\n    Core_models.Option.Option_None <: Core_models.Option.t_Option u8\n\n/// Test question mark on `Result`s with local mutation\nlet question_mark (x: u32) : Core_models.Result.t_Result u32 u32 =\n  if x >. mk_u32 40\n  then\n    let y:u32 = mk_u32 0 in\n    let x:u32 = Core_models.Num.impl_u32__wrapping_add x (mk_u32 3) in\n    let y:u32 = Core_models.Num.impl_u32__wrapping_add x y in\n    let x:u32 = Core_models.Num.impl_u32__wrapping_add x y in\n    if x >. 
mk_u32 90\n    then\n      match\n        Core_models.Result.Result_Err (mk_u8 12) <: Core_models.Result.t_Result Prims.unit u8\n      with\n      | Core_models.Result.Result_Ok ok ->\n        Core_models.Result.Result_Ok (Core_models.Num.impl_u32__wrapping_add (mk_u32 3) x)\n        <:\n        Core_models.Result.t_Result u32 u32\n      | Core_models.Result.Result_Err err ->\n        Core_models.Result.Result_Err\n        (Core_models.Convert.f_from #u32 #u8 #FStar.Tactics.Typeclasses.solve err)\n        <:\n        Core_models.Result.t_Result u32 u32\n    else\n      Core_models.Result.Result_Ok (Core_models.Num.impl_u32__wrapping_add (mk_u32 3) x)\n      <:\n      Core_models.Result.t_Result u32 u32\n  else\n    Core_models.Result.Result_Ok (Core_models.Num.impl_u32__wrapping_add (mk_u32 3) x)\n    <:\n    Core_models.Result.t_Result u32 u32\n\ntype t_A = | A : t_A\n\ntype t_B = | B : t_B\n\n/// Combine `?` and early return\nlet monad_lifting (x: u8) : Core_models.Result.t_Result t_A t_B =\n  if x >. 
mk_u8 123\n  then\n    match Core_models.Result.Result_Err (B <: t_B) <: Core_models.Result.t_Result t_A t_B with\n    | Core_models.Result.Result_Ok hoist35 ->\n      Core_models.Result.Result_Ok hoist35 <: Core_models.Result.t_Result t_A t_B\n    | Core_models.Result.Result_Err err ->\n      Core_models.Result.Result_Err err <: Core_models.Result.t_Result t_A t_B\n  else Core_models.Result.Result_Ok (A <: t_A) <: Core_models.Result.t_Result t_A t_B\n\ntype t_Bar = {\n  f_a:bool;\n  f_b:(t_Array (bool & bool) (mk_usize 6) & bool)\n}\n\ntype t_Foo = {\n  f_x:bool;\n  f_y:(bool & Alloc.Vec.t_Vec t_Bar Alloc.Alloc.t_Global);\n  f_z:t_Array t_Bar (mk_usize 6);\n  f_bar:t_Bar\n}\n\n/// Test assignation on non-trivial places\nlet assign_non_trivial_lhs (foo: t_Foo) : t_Foo =\n  let foo:t_Foo = { foo with f_x = true } <: t_Foo in\n  let foo:t_Foo = { foo with f_bar = { foo.f_bar with f_a = true } <: t_Bar } <: t_Foo in\n  let foo:t_Foo =\n    {\n      foo with\n      f_bar\n      =\n      {\n        foo.f_bar with\n        f_b\n        =\n        {\n          foo.f_bar.f_b with\n          _1\n          =\n          Rust_primitives.Hax.Monomorphized_update_at.update_at_usize foo.f_bar.f_b._1\n            (mk_usize 3)\n            ({ (foo.f_bar.f_b._1.[ mk_usize 3 ] <: (bool & bool)) with _2 = true } <: (bool & bool))\n        }\n        <:\n        (t_Array (bool & bool) (mk_usize 6) & bool)\n      }\n      <:\n      t_Bar\n    }\n    <:\n    t_Foo\n  in\n  let foo:t_Foo =\n    {\n      foo with\n      f_z\n      =\n      Rust_primitives.Hax.Monomorphized_update_at.update_at_usize foo.f_z\n        (mk_usize 3)\n        ({ (foo.f_z.[ mk_usize 3 ] <: t_Bar) with f_a = true } <: t_Bar)\n    }\n    <:\n    t_Foo\n  in\n  let foo:t_Foo =\n    {\n      foo with\n      f_y\n      =\n      {\n        foo.f_y with\n        _2\n        =\n        Alloc.Slice.impl__to_vec (Rust_primitives.Hax.Monomorphized_update_at.update_at_usize (Alloc.Vec.impl_1__as_slice\n                  
foo.f_y._2\n                <:\n                t_Slice t_Bar)\n              (mk_usize 3)\n              ({\n                  (foo.f_y._2.[ mk_usize 3 ] <: t_Bar) with\n                  f_b\n                  =\n                  {\n                    (foo.f_y._2.[ mk_usize 3 ] <: t_Bar).f_b with\n                    _1\n                    =\n                    Rust_primitives.Hax.Monomorphized_update_at.update_at_usize (foo.f_y._2.[ mk_usize\n                          3 ]\n                        <:\n                        t_Bar)\n                        .f_b\n                        ._1\n                      (mk_usize 5)\n                      ({\n                          ((foo.f_y._2.[ mk_usize 3 ] <: t_Bar).f_b._1.[ mk_usize 5 ]\n                            <:\n                            (bool & bool)) with\n                          _1 = true\n                        }\n                        <:\n                        (bool & bool))\n                    <:\n                    t_Array (bool & bool) (mk_usize 6)\n                  }\n                  <:\n                  (t_Array (bool & bool) (mk_usize 6) & bool)\n                }\n                <:\n                t_Bar)\n            <:\n            t_Slice t_Bar)\n      }\n      <:\n      (bool & Alloc.Vec.t_Vec t_Bar Alloc.Alloc.t_Global)\n    }\n    <:\n    t_Foo\n  in\n  foo\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__side-effects into-ssprove.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: ssprove\n  info:\n    name: side-effects\n    manifest: side-effects/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 1\nstderr = \"\"\"\nFinished `dev` profile [unoptimized + debuginfo] target(s) in XXs\n\\u001B[1m\\u001B[91merror\\u001B[0m: \\u001B[1m[HAX0001] (SSProve backend) something is not implemented yet.\n[expr] node app global vcar projector tuple\\u001B[0m\n   \\u001B[1m\\u001B[94m-->\\u001B[0m side-effects/src/lib.rs:156:5\n\\u001B[1m\\u001B[94m    |\\u001B[0m\n\\u001B[1m\\u001B[94m156 |\\u001B[0m     foo.y.1[3].b.0[5].0 = true;\n\\u001B[1m\\u001B[94m    |\\u001B[0m\\u001B[1m\\u001B[91m     ^^^^^^^^^^^^^^^^^^^^^^^^^^\\u001B[0m\n\\u001B[1m\\u001B[94m    |\\u001B[0m\"\"\"\n[[stdout.diagnostics]]\nmessage = '''\n(SSProve backend) something is not implemented yet.\n[expr] node app global vcar projector tuple'''\nspans = ['Span { lo: Loc { line: 156, col: 4 }, hi: Loc { line: 156, col: 30 }, filename: Real(LocalPath(\"side-effects/src/lib.rs\")), rust_span_data: None }']\n\n[stdout.files]\n\"Side_effects.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp Require Import word_ssrZ word.\n(* From Jasmin Require Import word. 
*)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? todo(item)*)\n\nEquations add3 (x : both int32) (y : both int32) (z : both int32) : both int32 :=\n  add3 x y z  :=\n    impl_u32__wrapping_add (impl_u32__wrapping_add x y) z : both int32.\nFail Next Obligation.\n\nEquations local_mutation (x : both int32) : both int32 :=\n  local_mutation x  :=\n    letb y := ret_both (0 : int32) in\n    letb x := impl_u32__wrapping_add x (ret_both (1 : int32)) in\n    letb hoist1 := x >.? 
(ret_both (3 : int32)) in\n    ifb hoist1\n    then letb x := impl_u32__wrapping_sub x (ret_both (3 : int32)) in\n    letb y := x ./ (ret_both (2 : int32)) in\n    letb y := impl_u32__wrapping_add y (ret_both (2 : int32)) in\n    letb hoist2 := ret_both (0 : int32) in\n    letb hoist3 := Build_t_Range (f_start := hoist2) (f_end := ret_both (10 : int32)) in\n    letb hoist4 := f_into_iter hoist3 in\n    letb y := foldi_both_list hoist4 (fun i =>\n      ssp (fun y =>\n        impl_u32__wrapping_add x i : (both int32))) y in\n    impl_u32__wrapping_add x y\n    else letb '((x,y),hoist7) := matchb x with\n    | 12 =>\n      letb y := impl_u32__wrapping_add x y in\n      prod_b (prod_b (x,y),ret_both (3 : int32))\n    | 13 =>\n      letb hoist6 := x in\n      letb x := impl_u32__wrapping_add x (ret_both (1 : int32)) in\n      letb hoist5 := impl_u32__wrapping_add (ret_both (123 : int32)) x in\n      prod_b (prod_b (x,y),add3 hoist6 hoist5 x)\n    | _ =>\n      prod_b (prod_b (x,y),ret_both (0 : int32))\n    end in\n    letb x := hoist7 in\n    impl_u32__wrapping_add x y : both int32.\nFail Next Obligation.\n\nEquations early_returns (x : both int32) : both int32 :=\n  early_returns x  :=\n    run (letm[choice_typeMonad.result_bind_code int32] _ := ifb x >.? (ret_both (3 : int32))\n    then letm[choice_typeMonad.result_bind_code int32] hoist8 := ControlFlow_Break (ret_both (0 : int32)) in\n    ControlFlow_Continue (never_to_any hoist8)\n    else ControlFlow_Continue (ret_both (tt : 'unit)) in\n    letb hoist9 := x >.? 
(ret_both (30 : int32)) in\n    letm[choice_typeMonad.result_bind_code int32] '(x,hoist11) := ifb hoist9\n    then matchb ret_both (true : 'bool) with\n    | true =>\n      letm[choice_typeMonad.result_bind_code int32] hoist10 := ControlFlow_Break (ret_both (34 : int32)) in\n      ControlFlow_Continue (prod_b (x,never_to_any hoist10))\n    | _ =>\n      ControlFlow_Continue (prod_b (x,ret_both (3 : int32)))\n    end\n    else ControlFlow_Continue (letb x := x .+ (ret_both (9 : int32)) in\n    prod_b (x,x .+ (ret_both (1 : int32)))) in\n    letb hoist12 := impl_u32__wrapping_add (ret_both (123 : int32)) hoist11 in\n    letb hoist13 := impl_u32__wrapping_add hoist12 x in\n    letm[choice_typeMonad.result_bind_code int32] hoist14 := ControlFlow_Break hoist13 in\n    ControlFlow_Continue (never_to_any hoist14)) : both int32.\nFail Next Obligation.\n\nEquations simplifiable_return (c1 : both 'bool) (c2 : both 'bool) (c3 : both 'bool) : both int32 :=\n  simplifiable_return c1 c2 c3  :=\n    run (letb x := ret_both (0 : int32) in\n    letm[choice_typeMonad.result_bind_code int32] x := ifb c1\n    then letm[choice_typeMonad.result_bind_code int32] x := ifb c2\n    then letb x := x .+ (ret_both (10 : int32)) in\n    ifb c3\n    then letm[choice_typeMonad.result_bind_code int32] hoist15 := ControlFlow_Break (ret_both (1 : int32)) in\n    ControlFlow_Continue x\n    else ControlFlow_Continue x\n    else ControlFlow_Continue x in\n    ControlFlow_Continue (letb x := x .+ (ret_both (1 : int32)) in\n    x)\n    else ControlFlow_Continue x in\n    ControlFlow_Continue x) : both int32.\nFail Next Obligation.\n\nEquations simplifiable_question_mark (c : both 'bool) (x : both (t_Option int32)) : both (t_Option int32) :=\n  simplifiable_question_mark c x  :=\n    run (letm[choice_typeMonad.option_bind_code] a := ifb c\n    then letm[choice_typeMonad.option_bind_code] hoist16 := x in\n    Option_Some (hoist16 .+ (ret_both (10 : int32)))\n    else Option_Some (ret_both (0 : int32)) 
in\n    Option_Some (letb b := ret_both (20 : int32) in\n    Option_Some (a .+ b))) : both (t_Option int32).\nFail Next Obligation.\n\nEquations direct_result_question_mark (y : both (t_Result 'unit int32)) : both (t_Result int8 int32) :=\n  direct_result_question_mark y  :=\n    run (letm[choice_typeMonad.result_bind_code int32] _ := y in\n    Result_Ok (Result_Ok (ret_both (0 : int8)))) : both (t_Result int8 int32).\nFail Next Obligation.\n\nEquations direct_result_question_mark_coercion (y : both (t_Result int8 int16)) : both (t_Result int8 int32) :=\n  direct_result_question_mark_coercion y  :=\n    run (letm[choice_typeMonad.result_bind_code int32] hoist17 := impl__map_err y f_from in\n    Result_Ok (Result_Ok hoist17)) : both (t_Result int8 int32).\nFail Next Obligation.\n\nEquations options (x : both (t_Option int8)) (y : both (t_Option int8)) (z : both (t_Option int64)) : both (t_Option int8) :=\n  options x y z  :=\n    run (letm[choice_typeMonad.option_bind_code] hoist21 := x in\n    letb hoist22 := hoist21 >.? (ret_both (10 : int8)) in\n    letm[choice_typeMonad.option_bind_code] hoist28 := ifb hoist22\n    then letm[choice_typeMonad.option_bind_code] hoist23 := x in\n    Option_Some (letb hoist24 := impl_u8__wrapping_add hoist23 (ret_both (3 : int8)) in\n    Option_Some hoist24)\n    else letm[choice_typeMonad.option_bind_code] hoist26 := x in\n    letm[choice_typeMonad.option_bind_code] hoist25 := y in\n    Option_Some (letb hoist27 := impl_u8__wrapping_add hoist26 hoist25 in\n    Option_Some hoist27) in\n    letm[choice_typeMonad.option_bind_code] hoist29 := hoist28 in\n    letm[choice_typeMonad.option_bind_code] v := matchb hoist29 with\n    | 3 =>\n      Option_None\n    | 4 =>\n      letm[choice_typeMonad.option_bind_code] hoist18 := z in\n      Option_Some (letb hoist19 := hoist18 >.? 
(ret_both (4 : int64)) in\n      letb hoist20 := ifb hoist19\n      then ret_both (0 : int8)\n      else ret_both (3 : int8) in\n      (ret_both (4 : int8)) .+ hoist20)\n    | _ =>\n      Option_Some (ret_both (12 : int8))\n    end in\n    letm[choice_typeMonad.option_bind_code] hoist30 := x in\n    letb hoist32 := impl_u8__wrapping_add v hoist30 in\n    letm[choice_typeMonad.option_bind_code] hoist31 := y in\n    Option_Some (letb hoist33 := impl_u8__wrapping_add hoist32 hoist31 in\n    Option_Some hoist33)) : both (t_Option int8).\nFail Next Obligation.\n\nEquations question_mark (x : both int32) : both (t_Result int32 int32) :=\n  question_mark x  :=\n    run (letm[choice_typeMonad.result_bind_code int32] x := ifb x >.? (ret_both (40 : int32))\n    then letb y := ret_both (0 : int32) in\n    letb x := impl_u32__wrapping_add x (ret_both (3 : int32)) in\n    letb y := impl_u32__wrapping_add x y in\n    letb x := impl_u32__wrapping_add x y in\n    letb hoist34 := x >.? (ret_both (90 : int32)) in\n    ifb hoist34\n    then letm[choice_typeMonad.result_bind_code int32] _ := impl__map_err (Result_Err (ret_both (12 : int8))) f_from in\n    Result_Ok x\n    else Result_Ok x\n    else Result_Ok x in\n    Result_Ok (Result_Ok (impl_u32__wrapping_add (ret_both (3 : int32)) x))) : both (t_Result int32 int32).\nFail Next Obligation.\n\nDefinition t_A : choice_type :=\n  'unit.\nEquations Build_t_A : both (t_A) :=\n  Build_t_A  :=\n    ret_both (tt (* Empty tuple *) : (t_A)) : both (t_A).\nFail Next Obligation.\n\nDefinition t_B : choice_type :=\n  'unit.\nEquations Build_t_B : both (t_B) :=\n  Build_t_B  :=\n    ret_both (tt (* Empty tuple *) : (t_B)) : both (t_B).\nFail Next Obligation.\n\nEquations monad_lifting (x : both int8) : both (t_Result t_A t_B) :=\n  monad_lifting x  :=\n    run (ifb x >.? 
(ret_both (123 : int8))\n    then letm[choice_typeMonad.result_bind_code (t_Result t_A t_B)] hoist35 := ControlFlow_Continue (Result_Err B) in\n    letb hoist36 := Result_Ok hoist35 in\n    letm[choice_typeMonad.result_bind_code (t_Result t_A t_B)] hoist37 := ControlFlow_Break hoist36 in\n    ControlFlow_Continue (never_to_any hoist37)\n    else ControlFlow_Continue (Result_Ok A)) : both (t_Result t_A t_B).\nFail Next Obligation.\n\nDefinition t_Bar : choice_type :=\n  ('bool × nseq ('bool × 'bool) 6 × 'bool).\nEquations f_a (s : both t_Bar) : both 'bool :=\n  f_a s  :=\n    bind_both s (fun x =>\n      ret_both (fst x : 'bool)) : both 'bool.\nFail Next Obligation.\nEquations f_b (s : both t_Bar) : both (nseq ('bool × 'bool) 6 × 'bool) :=\n  f_b s  :=\n    bind_both s (fun x =>\n      ret_both (snd x : (nseq ('bool × 'bool) 6 × 'bool))) : both (nseq ('bool × 'bool) 6 × 'bool).\nFail Next Obligation.\nEquations Build_t_Bar {f_a : both 'bool} {f_b : both (nseq ('bool × 'bool) 6 × 'bool)} : both (t_Bar) :=\n  Build_t_Bar  :=\n    bind_both f_b (fun f_b =>\n      bind_both f_a (fun f_a =>\n        ret_both ((f_a,f_b) : (t_Bar)))) : both (t_Bar).\nFail Next Obligation.\nNotation \"'Build_t_Bar' '[' x ']' '(' 'f_a' ':=' y ')'\" := (Build_t_Bar (f_a := y) (f_b := f_b x)).\nNotation \"'Build_t_Bar' '[' x ']' '(' 'f_b' ':=' y ')'\" := (Build_t_Bar (f_a := f_a x) (f_b := y)).\n\nDefinition t_Foo : choice_type :=\n  ('bool × 'bool × t_Vec t_Bar t_Global × nseq t_Bar 6 × t_Bar).\nEquations f_x (s : both t_Foo) : both 'bool :=\n  f_x s  :=\n    bind_both s (fun x =>\n      ret_both (fst (fst (fst x)) : 'bool)) : both 'bool.\nFail Next Obligation.\nEquations f_y (s : both t_Foo) : both ('bool × t_Vec t_Bar t_Global) :=\n  f_y s  :=\n    bind_both s (fun x =>\n      ret_both (snd (fst (fst x)) : ('bool × t_Vec t_Bar t_Global))) : both ('bool × t_Vec t_Bar t_Global).\nFail Next Obligation.\nEquations f_z (s : both t_Foo) : both (nseq t_Bar 6) :=\n  f_z s  :=\n    bind_both s (fun 
x =>\n      ret_both (snd (fst x) : (nseq t_Bar 6))) : both (nseq t_Bar 6).\nFail Next Obligation.\nEquations f_bar (s : both t_Foo) : both t_Bar :=\n  f_bar s  :=\n    bind_both s (fun x =>\n      ret_both (snd x : t_Bar)) : both t_Bar.\nFail Next Obligation.\nEquations Build_t_Foo {f_x : both 'bool} {f_y : both ('bool × t_Vec t_Bar t_Global)} {f_z : both (nseq t_Bar 6)} {f_bar : both t_Bar} : both (t_Foo) :=\n  Build_t_Foo  :=\n    bind_both f_bar (fun f_bar =>\n      bind_both f_z (fun f_z =>\n        bind_both f_y (fun f_y =>\n          bind_both f_x (fun f_x =>\n            ret_both ((f_x,f_y,f_z,f_bar) : (t_Foo)))))) : both (t_Foo).\nFail Next Obligation.\nNotation \"'Build_t_Foo' '[' x ']' '(' 'f_x' ':=' y ')'\" := (Build_t_Foo (f_x := y) (f_y := f_y x) (f_z := f_z x) (f_bar := f_bar x)).\nNotation \"'Build_t_Foo' '[' x ']' '(' 'f_y' ':=' y ')'\" := (Build_t_Foo (f_x := f_x x) (f_y := y) (f_z := f_z x) (f_bar := f_bar x)).\nNotation \"'Build_t_Foo' '[' x ']' '(' 'f_z' ':=' y ')'\" := (Build_t_Foo (f_x := f_x x) (f_y := f_y x) (f_z := y) (f_bar := f_bar x)).\nNotation \"'Build_t_Foo' '[' x ']' '(' 'f_bar' ':=' y ')'\" := (Build_t_Foo (f_x := f_x x) (f_y := f_y x) (f_z := f_z x) (f_bar := y)).\n\n(*item error backend*)\n'''\n\"Side_effects_Issue_1083_.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp Require Import word_ssrZ word.\n(* From Jasmin Require Import word. 
*)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? todo(item)*)\n\nClass t_MyFrom (Self : choice_type) (v_Self : v_Self) {v_T : v_T} `{ t_Sized v_T} := {\n  f_my_from : (both v_T -> both v_Self) ;\n}.\n\n#[global] Program Instance int16_t_MyFrom : t_MyFrom int16 int8 :=\n  let f_my_from := fun  (x : both int8) => cast_int (WS2 := _) x : both int16 in\n  {| f_my_from := (@f_my_from)|}.\nFail Next Obligation.\nHint Unfold int16_t_MyFrom.\n\nEquations f (x : both int8) : both (t_Result int16 int16) :=\n  f x  :=\n    run (letm[choice_typeMonad.result_bind_code int16] _ := impl__map_err (Result_Err (ret_both (1 : int8))) f_from in\n    Result_Ok (Result_Ok (f_my_from x))) : both (t_Result int16 int16).\nFail Next Obligation.\n'''\n\"Side_effects_Issue_1089_.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp Require Import word_ssrZ word.\n(* From Jasmin Require Import word. 
*)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? todo(item)*)\n\nEquations test (x : both (t_Option int32)) (y : both (t_Option int32)) : both (t_Option int32) :=\n  test x y  :=\n    run (impl__map x (fun i =>\n      letm[choice_typeMonad.option_bind_code] hoist38 := y in\n      Option_Some (letb hoist39 := i .+ hoist38 in\n      Option_Some hoist39))) : both (t_Option int32).\nFail Next Obligation.\n'''\n\"Side_effects_Issue_1299_.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp Require Import word_ssrZ word.\n(* From Jasmin Require Import word. 
*)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? todo(item)*)\n\nDefinition t_Foo : choice_type :=\n  (int8).\nEquations f_y (s : both t_Foo) : both int8 :=\n  f_y s  :=\n    bind_both s (fun x =>\n      ret_both (x : int8)) : both int8.\nFail Next Obligation.\nEquations Build_t_Foo {f_y : both int8} : both (t_Foo) :=\n  Build_t_Foo  :=\n    bind_both f_y (fun f_y =>\n      ret_both ((f_y) : (t_Foo))) : both (t_Foo).\nFail Next Obligation.\nNotation \"'Build_t_Foo' '[' x ']' '(' 'f_y' ':=' y ')'\" := (Build_t_Foo (f_y := y)).\n\nDefinition t_S : choice_type :=\n  (t_Foo).\nEquations f_g (s : both t_S) : both t_Foo :=\n  f_g s  :=\n    bind_both s (fun x =>\n      ret_both (x : t_Foo)) : both t_Foo.\nFail Next Obligation.\nEquations Build_t_S {f_g : both t_Foo} : both (t_S) :=\n  Build_t_S  :=\n    bind_both f_g (fun f_g =>\n      ret_both ((f_g) : (t_S))) : both (t_S).\nFail Next Obligation.\nNotation \"'Build_t_S' '[' x ']' '(' 'f_g' ':=' y ')'\" := (Build_t_S (f_g := y)).\n\nDefinition t_OtherS : choice_type :=\n  (t_Option t_Foo).\nEquations f_g (s : both t_OtherS) : both (t_Option t_Foo) :=\n  f_g s  :=\n    bind_both s (fun x =>\n      ret_both (x : (t_Option t_Foo))) : both (t_Option t_Foo).\nFail Next Obligation.\nEquations Build_t_OtherS {f_g : both (t_Option t_Foo)} : both (t_OtherS) :=\n  Build_t_OtherS  :=\n    bind_both f_g (fun f_g =>\n      ret_both ((f_g) : 
(t_OtherS))) : both (t_OtherS).\nFail Next Obligation.\nNotation \"'Build_t_OtherS' '[' x ']' '(' 'f_g' ':=' y ')'\" := (Build_t_OtherS (f_g := y)).\n\nEquations impl_Foo__from (i : both t_Foo) : both t_Foo :=\n  impl_Foo__from i  :=\n    Build_t_Foo (f_y := f_clone (f_y i)) : both t_Foo.\nFail Next Obligation.\n\nDefinition t_Error : choice_type :=\n  'unit.\nEquations Build_t_Error : both (t_Error) :=\n  Build_t_Error  :=\n    ret_both (tt (* Empty tuple *) : (t_Error)) : both (t_Error).\nFail Next Obligation.\n\nEquations impl_S__from (i : both t_OtherS) : both (t_Result t_S t_Error) :=\n  impl_S__from i  :=\n    run (letm[choice_typeMonad.result_bind_code t_Error] hoist49 := impl__ok_or (impl__as_ref (f_g i)) Error in\n    Result_Ok (letb hoist50 := impl_Foo__from hoist49 in\n    letb hoist51 := Build_t_S (f_g := hoist50) in\n    Result_Ok hoist51)) : both (t_Result t_S t_Error).\nFail Next Obligation.\n'''\n\"Side_effects_Issue_1300_.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp Require Import word_ssrZ word.\n(* From Jasmin Require Import word. *)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? 
todo(item)*)\n\nEquations fun (_ : both 'unit) : both (t_Result 'unit int8) :=\n  fun _  :=\n    letb val := f_collect (f_map (impl__iter (unsize (repeat (ret_both (0 : int8)) (ret_both (5 : uint_size))))) (fun prev =>\n      letb hoist47 := Result_Ok (repeat (ret_both (0 : int8)) (ret_both (32 : uint_size))) in\n      letb hoist48 := prod_b (prev,hoist47) in\n      Result_Ok hoist48)) in\n    Result_Ok (ret_both (tt : 'unit)) : both (t_Result 'unit int8).\nFail Next Obligation.\n'''\n\"Side_effects_Nested_return.v\" = '''\n(* File automatically generated by Hacspec *)\nSet Warnings \"-notation-overridden,-ambiguous-paths\".\nFrom Crypt Require Import choice_type Package Prelude.\nImport PackageNotation.\nFrom extructures Require Import ord fset.\nFrom mathcomp Require Import word_ssrZ word.\n(* From Jasmin Require Import word. *)\n\nFrom Coq Require Import ZArith.\nFrom Coq Require Import Strings.String.\nImport List.ListNotations.\nOpen Scope list_scope.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\n\nFrom Hacspec Require Import ChoiceEquality.\nFrom Hacspec Require Import LocationUtility.\nFrom Hacspec Require Import Hacspec_Lib_Comparable.\nFrom Hacspec Require Import Hacspec_Lib_Pre.\nFrom Hacspec Require Import Hacspec_Lib.\n\nOpen Scope hacspec_scope.\nImport choice.Choice.Exports.\n\nFrom RecordUpdate Require Import RecordUpdate.\n\nImport RecordSetNotations.\n\nObligation Tactic := (* try timeout 8 *) solve_ssprove_obligations.\n\n(*Not implemented yet? 
todo(item)*)\n\nEquations other_fun (rng : both int8) : both (int8 × t_Result 'unit 'unit) :=\n  other_fun rng  :=\n    letb hax_temp_output := Result_Ok (ret_both (tt : 'unit)) in\n    prod_b (rng,hax_temp_output) : both (int8 × t_Result 'unit 'unit).\nFail Next Obligation.\n\nEquations fun (rng : both int8) : both (int8 × t_Result 'unit 'unit) :=\n  fun rng  :=\n    run (letb '(tmp0,out) := other_fun rng in\n    letb rng := tmp0 in\n    letb hoist41 := out in\n    letb hoist42 := f_branch hoist41 in\n    letm[choice_typeMonad.result_bind_code (int8 × t_Result 'unit 'unit)] hoist43 := matchb hoist42 with\n    | ControlFlow_Break_case residual =>\n      letb residual := ret_both ((residual) : (t_Result t_Infallible 'unit)) in\n      letm[choice_typeMonad.result_bind_code (int8 × t_Result 'unit 'unit)] hoist40 := ControlFlow_Break (prod_b (rng,f_from_residual residual)) in\n      ControlFlow_Continue (never_to_any hoist40)\n    | ControlFlow_Continue_case val =>\n      letb val := ret_both ((val) : ('unit)) in\n      ControlFlow_Continue val\n    end in\n    letb hoist44 := Result_Ok hoist43 in\n    letb hoist45 := prod_b (rng,hoist44) in\n    letm[choice_typeMonad.result_bind_code (int8 × t_Result 'unit 'unit)] hoist46 := ControlFlow_Break hoist45 in\n    ControlFlow_Continue (letb hax_temp_output := never_to_any hoist46 in\n    prod_b (rng,hax_temp_output))) : both (int8 × t_Result 'unit 'unit).\nFail Next Obligation.\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__slices into-coq.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: coq\n  info:\n    name: slices\n    manifest: slices/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Slices.v\" = '''\n(* File automatically generated by Hacspec *)\nFrom Coq Require Import ZArith.\nRequire Import List.\nImport List.ListNotations.\nOpen Scope Z_scope.\nOpen Scope bool_scope.\nRequire Import Ascii.\nRequire Import String.\nRequire Import Coq.Floats.Floats.\nFrom RecordUpdate Require Import RecordSet.\nImport RecordSetNotations.\nFrom Core Require Import Core.\n\n(* NotImplementedYet *)\n\n\n\nDefinition v_VERSION : t_Slice t_u8 :=\n  unsize ([(118 : t_u8); (49 : t_u8)]).\n\nDefinition do_something '(_ : t_Slice t_u8) : unit :=\n  tt.\n\nDefinition r#unsized '(_ : t_Array (t_Slice t_u8) ((1 : t_usize))) : unit :=\n  tt.\n\nDefinition sized (x : t_Array (t_Array (t_u8) ((4 : t_usize))) ((1 : t_usize))) : unit :=\n  r#unsized ([unsize (f_index (x) ((0 : t_usize)))]).\n'''\n_CoqProject = '''\n-R ./ TODO\n-arg -w\n-arg all\n\nSlices.v'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__slices into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: slices\n    manifest: slices/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Slices.fst\" = '''\nmodule Slices\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet v_VERSION: t_Slice u8 =\n  (let list = [mk_u8 118; mk_u8 49] in\n    FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 2);\n    Rust_primitives.Hax.array_of_list 2 list)\n  <:\n  t_Slice u8\n\nlet do_something (_: t_Slice u8) : Prims.unit = ()\n\nlet r#unsized (_: t_Array (t_Slice u8) (mk_usize 1)) : Prims.unit = ()\n\nlet sized (x: t_Array (t_Array u8 (mk_usize 4)) (mk_usize 1)) : Prims.unit =\n  r#unsized (let list = [x.[ mk_usize 0 ] <: t_Slice u8] in\n      FStar.Pervasives.assert_norm (Prims.eq2 (List.Tot.length list) 1);\n      Rust_primitives.Hax.array_of_list 1 list)\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__statics into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: statics\n    manifest: statics/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Statics.fst\" = '''\nmodule Statics\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet v_FOO: usize = mk_usize 0\n\nlet get_foo (_: Prims.unit) : usize = v_FOO\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__traits into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: traits\n    manifest: traits/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: false\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Traits.Block_size.fst\" = '''\nmodule Traits.Block_size\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_BlockSizeUser (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_BlockSize:Type0\n}\n\nclass t_ParBlocksSizeUser (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_BlockSizeUser v_Self\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) {|i: t_ParBlocksSizeUser v_Self|} -> i._super_i0\n\nclass t_BlockBackend (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_ParBlocksSizeUser v_Self;\n  f_proc_block_pre:Alloc.Vec.t_Vec _ Alloc.Alloc.t_Global -> Type0;\n  f_proc_block_post:Alloc.Vec.t_Vec _ Alloc.Alloc.t_Global -> Prims.unit -> Type0;\n  f_proc_block:x0: Alloc.Vec.t_Vec _ Alloc.Alloc.t_Global\n    -> Prims.Pure Prims.unit (f_proc_block_pre x0) (fun result -> f_proc_block_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) {|i: t_BlockBackend v_Self|} -> i._super_i0\n'''\n\"Traits.Default_traits_parameters.fst\" = '''\nmodule Traits.Default_traits_parameters\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_Bar (v_Self: Type0) (v_T: Type0) = { __marker_trait_t_Bar:Prims.unit }\n\nclass t_Foo (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_Bar v_Self f_U;\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_U:Type0\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ 
= fun (v_Self:Type0) {|i: t_Foo v_Self|} -> i._super_i0\n'''\n\"Traits.For_clauses.Issue_495_.Minimized_3_.fst\" = '''\nmodule Traits.For_clauses.Issue_495_.Minimized_3_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_Trait (v_Self: Type0) = { __marker_trait_t_Trait:Prims.unit }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl\n      (#v_P: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: Core_models.Ops.Function.t_FnMut v_P u8)\n    : t_Trait v_P = { __marker_trait_t_Trait = () }\n'''\n\"Traits.For_clauses.Issue_495_.fst\" = '''\nmodule Traits.For_clauses.Issue_495_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet original_function_from_495_ (list: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global) : Prims.unit =\n  let (e_indices: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global):Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Core_models.Iter.Traits.Iterator.f_collect #(Core_models.Iter.Adapters.Filter.t_Filter\n          (Core_models.Ops.Range.t_Range u8) (u8 -> bool))\n      #FStar.Tactics.Typeclasses.solve\n      #(Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n      (Core_models.Iter.Traits.Iterator.f_filter #(Core_models.Ops.Range.t_Range u8)\n          #FStar.Tactics.Typeclasses.solve\n          #(u8 -> bool)\n          ({ Core_models.Ops.Range.f_start = mk_u8 0; Core_models.Ops.Range.f_end = mk_u8 5 }\n            <:\n            Core_models.Ops.Range.t_Range u8)\n          (fun i ->\n              let i:u8 = i in\n              let (_: Core_models.Slice.Iter.t_Iter u8), (out: bool) =\n                Core_models.Iter.Traits.Iterator.f_any #(Core_models.Slice.Iter.t_Iter u8)\n                  #FStar.Tactics.Typeclasses.solve\n                  #(u8 -> bool)\n                  (Core_models.Slice.impl__iter #u8 (Alloc.Vec.impl_1__as_slice list <: t_Slice u8)\n                    <:\n                    Core_models.Slice.Iter.t_Iter u8)\n                  (fun n ->\n         
             let n:u8 = n in\n                      n =. i <: bool)\n              in\n              out)\n        <:\n        Core_models.Iter.Adapters.Filter.t_Filter (Core_models.Ops.Range.t_Range u8) (u8 -> bool))\n  in\n  ()\n\nlet minimized_1_ (list: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n    : Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n  Core_models.Iter.Traits.Iterator.f_collect #(Core_models.Iter.Adapters.Filter.t_Filter\n        (Core_models.Ops.Range.t_Range u8) (u8 -> bool))\n    #FStar.Tactics.Typeclasses.solve\n    #(Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n    (Core_models.Iter.Traits.Iterator.f_filter #(Core_models.Ops.Range.t_Range u8)\n        #FStar.Tactics.Typeclasses.solve\n        #(u8 -> bool)\n        ({ Core_models.Ops.Range.f_start = mk_u8 0; Core_models.Ops.Range.f_end = mk_u8 5 }\n          <:\n          Core_models.Ops.Range.t_Range u8)\n        (fun temp_0_ ->\n            let _:u8 = temp_0_ in\n            true)\n      <:\n      Core_models.Iter.Adapters.Filter.t_Filter (Core_models.Ops.Range.t_Range u8) (u8 -> bool))\n\nlet minimized_2_\n      (it:\n          Core_models.Iter.Adapters.Filter.t_Filter (Core_models.Ops.Range.t_Range u8) (u8 -> bool))\n    : Prims.unit =\n  let (e_indices: Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global):Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global =\n    Core_models.Iter.Traits.Iterator.f_collect #(Core_models.Iter.Adapters.Filter.t_Filter\n          (Core_models.Ops.Range.t_Range u8) (u8 -> bool))\n      #FStar.Tactics.Typeclasses.solve\n      #(Alloc.Vec.t_Vec u8 Alloc.Alloc.t_Global)\n      it\n  in\n  ()\n'''\n\"Traits.For_clauses.fst\" = '''\nmodule Traits.For_clauses\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_Foo (v_Self: Type0) (v_T: Type0) = {\n  f_to_t_pre:v_Self -> Type0;\n  f_to_t_post:v_Self -> v_T -> Type0;\n  f_to_t:x0: v_Self -> Prims.Pure v_T (f_to_t_pre x0) (fun result -> f_to_t_post x0 result)\n}\n\nlet e_f (#v_X: Type0) 
(#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Foo v_X u8) (x: v_X)\n    : Prims.unit =\n  let _:u8 = f_to_t #v_X #u8 #FStar.Tactics.Typeclasses.solve x in\n  ()\n'''\n\"Traits.Impl_expr_in_goal.fst\" = '''\nmodule Traits.Impl_expr_in_goal\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_T1 (v_Self: Type0) = { [@@@ FStar.Tactics.Typeclasses.no_method]f_Assoc:Type0 }\n\nclass t_T2 (v_Self: Type0) = { __marker_trait_t_T2:Prims.unit }\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl\n      (#v_U: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_T1 v_U)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i1: t_T2 i0.f_Assoc)\n    : t_T2 v_U = { __marker_trait_t_T2 = () }\n'''\n\"Traits.Implement_arithmetic_trait.fst\" = '''\nmodule Traits.Implement_arithmetic_trait\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Wrapped = | Wrapped : i32 -> t_Wrapped\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: Core_models.Ops.Arith.t_Add t_Wrapped t_Wrapped =\n  {\n    f_Output = t_Wrapped;\n    f_add_pre = (fun (self: t_Wrapped) (rhs: t_Wrapped) -> true);\n    f_add_post = (fun (self: t_Wrapped) (rhs: t_Wrapped) (out: t_Wrapped) -> true);\n    f_add = fun (self: t_Wrapped) (rhs: t_Wrapped) -> Wrapped (self._0 +! 
rhs._0) <: t_Wrapped\n  }\n\nlet test (x y: t_Wrapped) : t_Wrapped =\n  Core_models.Ops.Arith.f_add #t_Wrapped #t_Wrapped #FStar.Tactics.Typeclasses.solve x y\n'''\n\"Traits.Implicit_dependencies_issue_667_.Define_type.fst\" = '''\nmodule Traits.Implicit_dependencies_issue_667_.Define_type\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_MyType = | MyType : t_MyType\n'''\n\"Traits.Implicit_dependencies_issue_667_.Impl_type.fst\" = '''\nmodule Traits.Implicit_dependencies_issue_667_.Impl_type\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: Traits.Implicit_dependencies_issue_667_.Trait_definition.t_MyTrait\nTraits.Implicit_dependencies_issue_667_.Define_type.t_MyType =\n  {\n    f_my_method_pre\n    =\n    (fun (self: Traits.Implicit_dependencies_issue_667_.Define_type.t_MyType) -> true);\n    f_my_method_post\n    =\n    (fun (self: Traits.Implicit_dependencies_issue_667_.Define_type.t_MyType) (out: Prims.unit) ->\n        true);\n    f_my_method = fun (self: Traits.Implicit_dependencies_issue_667_.Define_type.t_MyType) -> ()\n  }\n'''\n\"Traits.Implicit_dependencies_issue_667_.Trait_definition.fst\" = '''\nmodule Traits.Implicit_dependencies_issue_667_.Trait_definition\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_MyTrait (v_Self: Type0) = {\n  f_my_method_pre:v_Self -> Type0;\n  f_my_method_post:v_Self -> Prims.unit -> Type0;\n  f_my_method:x0: v_Self\n    -> Prims.Pure Prims.unit (f_my_method_pre x0) (fun result -> f_my_method_post x0 result)\n}\n'''\n\"Traits.Implicit_dependencies_issue_667_.Use_type.fst\" = '''\nmodule Traits.Implicit_dependencies_issue_667_.Use_type\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. 
*)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Traits.Implicit_dependencies_issue_667_.Impl_type in\n  let open Traits.Implicit_dependencies_issue_667_.Trait_definition in\n  ()\n\nlet some_function (x: Traits.Implicit_dependencies_issue_667_.Define_type.t_MyType) : Prims.unit =\n  Traits.Implicit_dependencies_issue_667_.Trait_definition.f_my_method #Traits.Implicit_dependencies_issue_667_.Define_type.t_MyType\n    #FStar.Tactics.Typeclasses.solve\n    x\n'''\n\"Traits.Implicit_explicit_calling_conventions.fst\" = '''\nmodule Traits.Implicit_explicit_calling_conventions\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Type (v_TypeArg: Type0) (v_ConstArg: usize) = { f_field:t_Array v_TypeArg v_ConstArg }\n\nclass t_Trait (v_Self: Type0) (v_TypeArg: Type0) (v_ConstArg: usize) = {\n  f_method_pre:\n      #v_MethodTypeArg: Type0 ->\n      v_MethodConstArg: usize ->\n      v_Self ->\n      v_TypeArg ->\n      t_Type v_TypeArg v_ConstArg\n    -> Type0;\n  f_method_post:\n      #v_MethodTypeArg: Type0 ->\n      v_MethodConstArg: usize ->\n      v_Self ->\n      v_TypeArg ->\n      t_Type v_TypeArg v_ConstArg ->\n      Prims.unit\n    -> Type0;\n  f_method:\n      #v_MethodTypeArg: Type0 ->\n      v_MethodConstArg: usize ->\n      x0: v_Self ->\n      x1: v_TypeArg ->\n      x2: t_Type v_TypeArg v_ConstArg\n    -> Prims.Pure Prims.unit\n        (f_method_pre #v_MethodTypeArg v_MethodConstArg x0 x1 x2)\n        (fun result -> f_method_post #v_MethodTypeArg v_MethodConstArg x0 x1 x2 result);\n  f_associated_function_pre:\n      #v_MethodTypeArg: Type0 ->\n      v_MethodConstArg: usize ->\n      v_Self ->\n      v_TypeArg ->\n      t_Type v_TypeArg v_ConstArg\n    -> Type0;\n  f_associated_function_post:\n      #v_MethodTypeArg: Type0 ->\n      v_MethodConstArg: usize ->\n      v_Self ->\n      v_TypeArg ->\n      t_Type v_TypeArg v_ConstArg ->\n      Prims.unit\n    -> Type0;\n  
f_associated_function:\n      #v_MethodTypeArg: Type0 ->\n      v_MethodConstArg: usize ->\n      x0: v_Self ->\n      x1: v_TypeArg ->\n      x2: t_Type v_TypeArg v_ConstArg\n    -> Prims.Pure Prims.unit\n        (f_associated_function_pre #v_MethodTypeArg v_MethodConstArg x0 x1 x2)\n        (fun result -> f_associated_function_post #v_MethodTypeArg v_MethodConstArg x0 x1 x2 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl (#v_TypeArg: Type0) (v_ConstArg: usize) : t_Trait Prims.unit v_TypeArg v_ConstArg =\n  {\n    f_method_pre\n    =\n    (fun\n        (#v_MethodTypeArg: Type0)\n        (v_MethodConstArg: usize)\n        (self: Prims.unit)\n        (value_TypeArg: v_TypeArg)\n        (value_Type: t_Type v_TypeArg v_ConstArg)\n        ->\n        true);\n    f_method_post\n    =\n    (fun\n        (#v_MethodTypeArg: Type0)\n        (v_MethodConstArg: usize)\n        (self: Prims.unit)\n        (value_TypeArg: v_TypeArg)\n        (value_Type: t_Type v_TypeArg v_ConstArg)\n        (out: Prims.unit)\n        ->\n        true);\n    f_method\n    =\n    (fun\n        (#v_MethodTypeArg: Type0)\n        (v_MethodConstArg: usize)\n        (self: Prims.unit)\n        (value_TypeArg: v_TypeArg)\n        (value_Type: t_Type v_TypeArg v_ConstArg)\n        ->\n        ());\n    f_associated_function_pre\n    =\n    (fun\n        (#v_MethodTypeArg: Type0)\n        (v_MethodConstArg: usize)\n        (e_self: Prims.unit)\n        (value_TypeArg: v_TypeArg)\n        (value_Type: t_Type v_TypeArg v_ConstArg)\n        ->\n        true);\n    f_associated_function_post\n    =\n    (fun\n        (#v_MethodTypeArg: Type0)\n        (v_MethodConstArg: usize)\n        (e_self: Prims.unit)\n        (value_TypeArg: v_TypeArg)\n        (value_Type: t_Type v_TypeArg v_ConstArg)\n        (out: Prims.unit)\n        ->\n        true);\n    f_associated_function\n    =\n    fun\n      (#v_MethodTypeArg: Type0)\n      (v_MethodConstArg: usize)\n      (e_self: Prims.unit)\n      
(value_TypeArg: v_TypeArg)\n      (value_Type: t_Type v_TypeArg v_ConstArg)\n      ->\n      ()\n  }\n\nlet method_caller\n      (#v_MethodTypeArg #v_TypeArg: Type0)\n      (v_ConstArg v_MethodConstArg: usize)\n      (#v_ImplTrait: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Trait v_ImplTrait v_TypeArg v_ConstArg)\n      (x: v_ImplTrait)\n      (value_TypeArg: v_TypeArg)\n      (value_Type: t_Type v_TypeArg v_ConstArg)\n    : Prims.unit =\n  let _:Prims.unit =\n    f_method #v_ImplTrait\n      #v_TypeArg\n      #v_ConstArg\n      #FStar.Tactics.Typeclasses.solve\n      #v_MethodTypeArg\n      v_MethodConstArg\n      x\n      value_TypeArg\n      value_Type\n  in\n  ()\n\nlet associated_function_caller\n      (#v_MethodTypeArg #v_TypeArg: Type0)\n      (v_ConstArg v_MethodConstArg: usize)\n      (#v_ImplTrait: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Trait v_ImplTrait v_TypeArg v_ConstArg)\n      (x: v_ImplTrait)\n      (value_TypeArg: v_TypeArg)\n      (value_Type: t_Type v_TypeArg v_ConstArg)\n    : Prims.unit =\n  let _:Prims.unit =\n    f_associated_function #v_ImplTrait\n      #v_TypeArg\n      #v_ConstArg\n      #FStar.Tactics.Typeclasses.solve\n      #v_MethodTypeArg\n      v_MethodConstArg\n      x\n      value_TypeArg\n      value_Type\n  in\n  ()\n\nclass t_SubTrait (v_Self: Type0) (v_TypeArg: Type0) (v_ConstArg: usize) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_Trait v_Self v_TypeArg v_ConstArg;\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_AssocType:Type0;\n  f_AssocType_i0:t_Trait f_AssocType v_TypeArg v_ConstArg\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) (v_TypeArg:Type0) (v_ConstArg:usize) {|i: t_SubTrait v_Self v_TypeArg v_ConstArg|} -> i._super_i0\n'''\n\"Traits.Interlaced_consts_types.fst\" = '''\nmodule Traits.Interlaced_consts_types\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Bar (v_FooConst: usize) 
(v_FooType: Type0) =\n  | Bar : t_Array v_FooType v_FooConst -> t_Bar v_FooConst v_FooType\n\nclass t_Foo (v_Self: Type0) (v_FooConst: usize) (v_FooType: Type0) = {\n  f_fun_pre:\n      v_FunConst: usize ->\n      #v_FunType: Type0 ->\n      t_Array v_FooType v_FooConst ->\n      t_Array v_FunType v_FunConst\n    -> Type0;\n  f_fun_post:\n      v_FunConst: usize ->\n      #v_FunType: Type0 ->\n      t_Array v_FooType v_FooConst ->\n      t_Array v_FunType v_FunConst ->\n      Prims.unit\n    -> Type0;\n  f_fun:\n      v_FunConst: usize ->\n      #v_FunType: Type0 ->\n      x0: t_Array v_FooType v_FooConst ->\n      x1: t_Array v_FunType v_FunConst\n    -> Prims.Pure Prims.unit\n        (f_fun_pre v_FunConst #v_FunType x0 x1)\n        (fun result -> f_fun_post v_FunConst #v_FunType x0 x1 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl (v_FooConst: usize) (#v_FooType #v_SelfType: Type0) : t_Foo v_SelfType v_FooConst v_FooType =\n  {\n    f_fun_pre\n    =\n    (fun\n        (v_FunConst: usize)\n        (#v_FunType: Type0)\n        (x: t_Array v_FooType v_FooConst)\n        (y: t_Array v_FunType v_FunConst)\n        ->\n        true);\n    f_fun_post\n    =\n    (fun\n        (v_FunConst: usize)\n        (#v_FunType: Type0)\n        (x: t_Array v_FooType v_FooConst)\n        (y: t_Array v_FunType v_FunConst)\n        (out: Prims.unit)\n        ->\n        true);\n    f_fun\n    =\n    fun\n      (v_FunConst: usize)\n      (#v_FunType: Type0)\n      (x: t_Array v_FooType v_FooConst)\n      (y: t_Array v_FunType v_FunConst)\n      ->\n      ()\n  }\n'''\n\"Traits.Recursive_trait_with_assoc_type.fst\" = '''\nmodule Traits.Recursive_trait_with_assoc_type\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_Trait1 (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_T:Type0;\n  f_T_i0:t_Trait1 f_T\n}\n\nclass t_Trait2 (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:t_Trait1 
v_Self;\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_U:Type0\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) {|i: t_Trait2 v_Self|} -> i._super_i0\n'''\n\"Traits.Type_alias_bounds_issue_707_.fst\" = '''\nmodule Traits.Type_alias_bounds_issue_707_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_StructWithGenericBounds (v_T: Type0) {| i0: Core_models.Clone.t_Clone v_T |} =\n  | StructWithGenericBounds : v_T -> t_StructWithGenericBounds v_T\n'''\n\"Traits.Typenum_perf.fst\" = '''\nmodule Traits.Typenum_perf\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nlet _ =\n  (* This module has implicit dependencies, here we make them explicit. *)\n  (* The implicit dependencies arise from typeclasses instances. *)\n  let open Typenum.Type_operators in\n  ()\n\nlet e_f\n      (#v_T: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Typenum.Type_operators.t_IsLess v_T\n            (Typenum.Uint.t_UInt\n                (Typenum.Uint.t_UInt\n                    (Typenum.Uint.t_UInt\n                        (Typenum.Uint.t_UInt\n                            (Typenum.Uint.t_UInt\n                                (Typenum.Uint.t_UInt\n                                    (Typenum.Uint.t_UInt\n                                        (Typenum.Uint.t_UInt\n                                            (Typenum.Uint.t_UInt\n                                                (Typenum.Uint.t_UInt\n                                                    (Typenum.Uint.t_UInt\n                                                        (Typenum.Uint.t_UInt\n                                                            (Typenum.Uint.t_UInt\n                                                                (Typenum.Uint.t_UInt\n                                                                    (Typenum.Uint.t_UInt\n                                                      
                  (Typenum.Uint.t_UInt\n                                                                            (Typenum.Uint.t_UInt\n                                                                                (Typenum.Uint.t_UInt\n                                                                                    (Typenum.Uint.t_UInt\n                                                                                        (Typenum.Uint.t_UInt\n                                                                                            Typenum.Uint.t_UTerm\n                                                                                            Typenum.Bit.t_B1\n                                                                                        )\n                                                                                        Typenum.Bit.t_B1\n                                                                                    )\n                                                                                    Typenum.Bit.t_B1\n                                                                                ) Typenum.Bit.t_B1)\n                                                                            Typenum.Bit.t_B1)\n                                                                        Typenum.Bit.t_B1)\n                                                                    Typenum.Bit.t_B1)\n                                                                Typenum.Bit.t_B1) Typenum.Bit.t_B1)\n                                                        Typenum.Bit.t_B1) Typenum.Bit.t_B1)\n                                                Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1\n                                    ) Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1)\n                        Typenum.Bit.t_B1) Typenum.Bit.t_B1) Typenum.Bit.t_B1))\n      (_: Prims.unit)\n    : Prims.unit = 
()\n'''\n\"Traits.Unconstrainted_types_issue_677_.fst\" = '''\nmodule Traits.Unconstrainted_types_issue_677_\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_PolyOp (v_Self: Type0) = {\n  f_op_pre:u32 -> u32 -> Type0;\n  f_op_post:u32 -> u32 -> u32 -> Type0;\n  f_op:x0: u32 -> x1: u32 -> Prims.Pure u32 (f_op_pre x0 x1) (fun result -> f_op_post x0 x1 result)\n}\n\ntype t_Plus = | Plus : t_Plus\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: t_PolyOp t_Plus =\n  {\n    f_op_pre = (fun (x: u32) (y: u32) -> true);\n    f_op_post = (fun (x: u32) (y: u32) (out: u32) -> true);\n    f_op = fun (x: u32) (y: u32) -> x +! y\n  }\n\ntype t_Times = | Times : t_Times\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_PolyOp_for_Times: t_PolyOp t_Times =\n  {\n    f_op_pre = (fun (x: u32) (y: u32) -> true);\n    f_op_post = (fun (x: u32) (y: u32) (out: u32) -> true);\n    f_op = fun (x: u32) (y: u32) -> x *! y\n  }\n\nlet twice (#v_OP: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_PolyOp v_OP) (x: u32)\n    : u32 = f_op #v_OP #FStar.Tactics.Typeclasses.solve x x\n\nlet both (x: u32) : (u32 & u32) = twice #t_Plus x, twice #t_Times x <: (u32 & u32)\n'''\n\"Traits.fst\" = '''\nmodule Traits\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\nclass t_SuperTrait (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]_super_i0:Core_models.Clone.t_Clone v_Self;\n  f_function_of_super_trait_pre:v_Self -> Type0;\n  f_function_of_super_trait_post:v_Self -> u32 -> Type0;\n  f_function_of_super_trait:x0: v_Self\n    -> Prims.Pure u32\n        (f_function_of_super_trait_pre x0)\n        (fun result -> f_function_of_super_trait_post x0 result)\n}\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet _ = fun (v_Self:Type0) {|i: t_SuperTrait v_Self|} -> i._super_i0\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl: t_SuperTrait i32 =\n  {\n    _super_i0 = 
FStar.Tactics.Typeclasses.solve;\n    f_function_of_super_trait_pre = (fun (self: i32) -> true);\n    f_function_of_super_trait_post = (fun (self: i32) (out: u32) -> true);\n    f_function_of_super_trait\n    =\n    fun (self: i32) -> cast (Core_models.Num.impl_i32__abs self <: i32) <: u32\n  }\n\ntype t_Struct = | Struct : t_Struct\n\nclass t_Bar (v_Self: Type0) = {\n  f_bar_pre:v_Self -> Type0;\n  f_bar_post:v_Self -> Prims.unit -> Type0;\n  f_bar:x0: v_Self -> Prims.Pure Prims.unit (f_bar_pre x0) (fun result -> f_bar_post x0 result)\n}\n\nlet impl_2__method (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Bar v_T) (x: v_T)\n    : Prims.unit = f_bar #v_T #FStar.Tactics.Typeclasses.solve x\n\nlet cclosure_iimpl_expr\n      (#v_I: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Core_models.Iter.Traits.Iterator.t_Iterator v_I)\n      (#_: unit{i0.Core_models.Iter.Traits.Iterator.f_Item == Prims.unit})\n      (it: v_I)\n    : Alloc.Vec.t_Vec Prims.unit Alloc.Alloc.t_Global =\n  Core_models.Iter.Traits.Iterator.f_collect #(Core_models.Iter.Adapters.Map.t_Map v_I\n        (Prims.unit -> Prims.unit))\n    #FStar.Tactics.Typeclasses.solve\n    #(Alloc.Vec.t_Vec Prims.unit Alloc.Alloc.t_Global)\n    (Core_models.Iter.Traits.Iterator.f_map #v_I\n        #FStar.Tactics.Typeclasses.solve\n        #Prims.unit\n        #(Prims.unit -> Prims.unit)\n        it\n        (fun x -> x)\n      <:\n      Core_models.Iter.Adapters.Map.t_Map v_I (Prims.unit -> Prims.unit))\n\nlet cclosure_iimpl_expr_fngen\n      (#v_I #v_F: Type0)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i0:\n          Core_models.Iter.Traits.Iterator.t_Iterator v_I)\n      (#[FStar.Tactics.Typeclasses.tcresolve ()]\n          i1:\n          Core_models.Ops.Function.t_FnMut v_F Prims.unit)\n      (#_: unit{i0.Core_models.Iter.Traits.Iterator.f_Item == Prims.unit})\n      (it: v_I)\n      (f: v_F)\n    : Alloc.Vec.t_Vec Prims.unit Alloc.Alloc.t_Global 
=\n  Core_models.Iter.Traits.Iterator.f_collect #(Core_models.Iter.Adapters.Map.t_Map v_I v_F)\n    #FStar.Tactics.Typeclasses.solve\n    #(Alloc.Vec.t_Vec Prims.unit Alloc.Alloc.t_Global)\n    (Core_models.Iter.Traits.Iterator.f_map #v_I\n        #FStar.Tactics.Typeclasses.solve\n        #Prims.unit\n        #v_F\n        it\n        f\n      <:\n      Core_models.Iter.Adapters.Map.t_Map v_I v_F)\n\ntype t_Error = | Error_Fail : t_Error\n\nlet t_Error_cast_to_repr (x: t_Error) : isize = match x <: t_Error with | Error_Fail  -> mk_isize 0\n\nlet impl_Error__for_application_callback (_: Prims.unit) :  Prims.unit -> t_Error =\n  fun temp_0_ ->\n    let _:Prims.unit = temp_0_ in\n    Error_Fail <: t_Error\n\nlet iter_option (#v_T: Type0) (x: Core_models.Option.t_Option v_T)\n    : Core_models.Option.t_IntoIter v_T =\n  Core_models.Iter.Traits.Collect.f_into_iter #(Core_models.Option.t_Option v_T)\n    #FStar.Tactics.Typeclasses.solve\n    (Core_models.Option.impl__as_ref #v_T x <: Core_models.Option.t_Option v_T)\n\nlet uuse_iimpl_trait (_: Prims.unit) : Prims.unit =\n  let iter:Core_models.Option.t_IntoIter bool =\n    iter_option #bool (Core_models.Option.Option_Some false <: Core_models.Option.t_Option bool)\n  in\n  let (tmp0: Core_models.Option.t_IntoIter bool), (out: Core_models.Option.t_Option bool) =\n    Core_models.Iter.Traits.Iterator.f_next #(Core_models.Option.t_IntoIter bool)\n      #FStar.Tactics.Typeclasses.solve\n      iter\n  in\n  let iter:Core_models.Option.t_IntoIter bool = tmp0 in\n  let _:Core_models.Option.t_Option bool = out in\n  ()\n\nclass t_Foo (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_AssocType:Type0;\n  f_AssocType_i0:t_SuperTrait f_AssocType;\n  f_N:usize;\n  f_assoc_f_pre:Prims.unit -> Type0;\n  f_assoc_f_post:Prims.unit -> Prims.unit -> Type0;\n  f_assoc_f:x0: Prims.unit\n    -> Prims.Pure Prims.unit (f_assoc_f_pre x0) (fun result -> f_assoc_f_post x0 result);\n  f_method_f_pre:v_Self -> Type0;\n  
f_method_f_post:v_Self -> Prims.unit -> Type0;\n  f_method_f:x0: v_Self\n    -> Prims.Pure Prims.unit (f_method_f_pre x0) (fun result -> f_method_f_post x0 result);\n  f_assoc_type_pre:{| i1: Core_models.Marker.t_Copy f_AssocType |} -> f_AssocType -> Type0;\n  f_assoc_type_post:{| i1: Core_models.Marker.t_Copy f_AssocType |} -> f_AssocType -> Prims.unit\n    -> Type0;\n  f_assoc_type:{| i1: Core_models.Marker.t_Copy f_AssocType |} -> x0: f_AssocType\n    -> Prims.Pure Prims.unit\n        (f_assoc_type_pre #i1 x0)\n        (fun result -> f_assoc_type_post #i1 x0 result)\n}\n\nclass t_Lang (v_Self: Type0) = {\n  [@@@ FStar.Tactics.Typeclasses.no_method]f_Var:Type0;\n  f_s_pre:v_Self -> i32 -> Type0;\n  f_s_post:v_Self -> i32 -> (v_Self & f_Var) -> Type0;\n  f_s:x0: v_Self -> x1: i32\n    -> Prims.Pure (v_Self & f_Var) (f_s_pre x0 x1) (fun result -> f_s_post x0 x1 result)\n}\n\nlet f (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Foo v_T) (x: v_T) : Prims.unit =\n  let _:Prims.unit = f_assoc_f #v_T #FStar.Tactics.Typeclasses.solve () in\n  f_method_f #v_T #FStar.Tactics.Typeclasses.solve x\n\nlet g (#v_T: Type0) (#[FStar.Tactics.Typeclasses.tcresolve ()] i0: t_Foo v_T) (x: i0.f_AssocType)\n    : u32 = f_function_of_super_trait #i0.f_AssocType #FStar.Tactics.Typeclasses.solve x\n\n[@@ FStar.Tactics.Typeclasses.tcinstance]\nlet impl_Foo_for_tuple_: t_Foo Prims.unit =\n  {\n    f_AssocType = i32;\n    f_AssocType_i0 = FStar.Tactics.Typeclasses.solve;\n    f_N = mk_usize 32;\n    f_assoc_f_pre = (fun (_: Prims.unit) -> true);\n    f_assoc_f_post = (fun (_: Prims.unit) (out: Prims.unit) -> true);\n    f_assoc_f = (fun (_: Prims.unit) -> () <: Prims.unit);\n    f_method_f_pre = (fun (self: Prims.unit) -> true);\n    f_method_f_post = (fun (self: Prims.unit) (out: Prims.unit) -> true);\n    f_method_f\n    =\n    (fun (self: Prims.unit) -> f_assoc_f #Prims.unit #FStar.Tactics.Typeclasses.solve ());\n    f_assoc_type_pre = (fun (_: i32) -> true);\n    
f_assoc_type_post = (fun (_: i32) (out: Prims.unit) -> true);\n    f_assoc_type = fun (_: i32) -> ()\n  }\n'''\n"
  },
  {
    "path": "test-harness/src/snapshots/toolchain__unsafe into-fstar.snap",
    "content": "---\nsource: test-harness/src/harness.rs\nexpression: snapshot\ninfo:\n  kind:\n    Translate:\n      backend: fstar\n  info:\n    name: unsafe\n    manifest: unsafe/Cargo.toml\n    description: ~\n  spec:\n    optional: false\n    broken: false\n    issue_id: ~\n    positive: true\n    snapshot:\n      stderr: true\n      stdout: true\n    include_flag: ~\n    backend_options: ~\n---\nexit = 0\nstderr = 'Finished `dev` profile [unoptimized + debuginfo] target(s) in XXs'\n\n[stdout]\ndiagnostics = []\n\n[stdout.files]\n\"Unsafe.fst\" = '''\nmodule Unsafe\n#set-options \"--fuel 0 --ifuel 1 --z3rlimit 15\"\nopen FStar.Mul\nopen Core_models\n\ntype t_Impossible =\n\nlet t_Impossible_cast_to_repr (x: t_Impossible) : Rust_primitives.Hax.t_Never =\n  match x <: t_Impossible with\n\nlet impossible (_: Prims.unit) : Prims.Pure t_Impossible (requires false) (fun _ -> Prims.l_True) =\n  Rust_primitives.Hax.never_to_any (Core_models.Hint.unreachable_unchecked ()\n      <:\n      Rust_primitives.Hax.t_Never)\n\nlet get_unchecked_example (slice: t_Slice u8)\n    : Prims.Pure u8\n      (requires (Core_models.Slice.impl__len #u8 slice <: usize) >. mk_usize 10)\n      (fun _ -> Prims.l_True) = Core_models.Slice.impl__get_unchecked #u8 #usize slice (mk_usize 6)\n'''\n"
  },
  {
    "path": "tests/.gitignore",
    "content": "# ignore all output folder generated by the tool\nproofs/\n"
  },
  {
    "path": "tests/Cargo.toml",
    "content": "[workspace]\nmembers = [\n        \"assert\",\n        \"enum-struct-variant\",\n        \"literals\",\n        \"slices\",\n        \"naming\",\n        \"if-let\",\n        \"let-else\",\n        \"enum-repr\",\n        \"pattern-or\",\n        \"side-effects\",\n        \"mut-ref-functionalization\",\n        \"generics\",\n        \"lean-tests\",\n        \"lean-core-models\",\n        \"loops\",\n        \"even\",\n        \"odd\",\n        \"never-type\",\n        \"attributes\",\n        \"attribute-opaque\",\n        \"raw-attributes\",\n        \"traits\",\n        \"dyn\",\n        \"reordering\",\n        \"nested-derefs\",\n        \"patterns\",\n        \"proverif-minimal\",\n        \"proverif-basic-structs\",\n        \"proverif-ping-pong\",\n        \"proverif-noise\",\n        \"proverif-fn-to-letfun\",\n        \"cli/include-flag\",\n        \"cli/interface-only\",\n        \"recursion\",\n        \"functions\",\n        \"guards\",\n        \"cyclic-modules\",\n        \"unsafe\",\n        \"constructor-as-closure\",\n        \"statics\",\n]\nresolver = \"2\"\n"
  },
  {
    "path": "tests/README.md",
    "content": "# Tests\n\nThis directory contains tests for the engine and the frontend.\nFor examples of verification using hax, see `../examples`.\n"
  },
  {
    "path": "tests/assert/Cargo.toml",
    "content": "[package]\nname = \"assert\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar+coq+ssprove\" = { broken = false, snapshot = \"stdout\", issue_id = \"285\" }\n"
  },
  {
    "path": "tests/assert/src/lib.rs",
    "content": "#![allow(dead_code)]\n\npub fn asserts() {\n    assert!({\n        assert!(true);\n        1 == 1\n    });\n    assert_eq!(2, 2);\n    assert_ne!(1, 2);\n}\n"
  },
  {
    "path": "tests/attribute-opaque/Cargo.toml",
    "content": "[package]\nname = \"attribute-opaque\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../../hax-lib\" }\nserde = { version = \"1.0\", features = [\"derive\"] }\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { backend-options = [\"--interfaces\", \"+**\"] }\n"
  },
  {
    "path": "tests/attribute-opaque/src/lib.rs",
    "content": "#[hax_lib::opaque]\nstruct OpaqueStruct<const X: usize, T, U> {\n    field: [T; X],\n    other_field: U,\n}\n\n#[hax_lib::opaque]\nenum OpaqueEnum<const X: usize, T, U> {\n    A([T; X]),\n    B(U),\n}\n\n#[hax_lib::opaque]\nfn f_generic<const X: usize, T, U>(x: U) -> OpaqueEnum<X, T, U> {\n    OpaqueEnum::B(x)\n}\n\n#[hax_lib::opaque]\nfn f(x: bool, y: bool) -> bool {\n    x && y\n}\n\n#[hax_lib::opaque]\n#[hax_lib::requires(x)]\n#[hax_lib::ensures(|result| result == y)]\nfn f_pre_post(x: bool, y: bool) -> bool {\n    x && y\n}\n\n#[hax_lib::attributes]\ntrait T {\n    type U;\n    const c: u8;\n    fn d();\n    #[hax_lib::requires(x == 0)]\n    fn m(&self, x: u8) -> bool;\n}\n\n#[hax_lib::attributes]\n#[hax_lib::opaque]\nimpl T for u8 {\n    type U = u8;\n    const c: u8 = 0;\n    fn d() {\n        unsafe {\n            let my_num: i32 = 10;\n            let _my_num_ptr: *const i32 = &my_num;\n            let mut my_speed: i32 = 88;\n            let _my_speed_ptr: *mut i32 = &mut my_speed;\n        }\n    }\n    #[hax_lib::requires(x == 0)]\n    #[hax_lib::ensures(|result| result)]\n    fn m(&self, x: u8) -> bool {\n        *self >= x\n    }\n}\n\ntrait TrGeneric<U: Clone> {\n    fn f(x: U) -> Self;\n}\n\n#[hax_lib::opaque]\nimpl<U: Clone> TrGeneric<U> for i32 {\n    fn f(_x: U) -> Self {\n        0\n    }\n}\n\n#[hax_lib::opaque]\nconst C: u8 = 0 + 0;\n\nstruct S1();\n\nimpl S1 {\n    #[hax_lib::opaque]\n    fn f_s1() {}\n}\n\nstruct S2();\n\n#[hax_lib::opaque]\nimpl S2 {\n    fn f_s2() {}\n}\n"
  },
  {
    "path": "tests/attributes/Cargo.toml",
    "content": "[package]\nname = \"attributes\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../../hax-lib\" }\nhax-bounded-integers = { path = \"../../hax-bounded-integers\" }\nserde = { version = \"1.0\", features = [\"derive\"] }\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { snapshot = \"stdout\" }\n"
  },
  {
    "path": "tests/attributes/src/lib.rs",
    "content": "use hax_lib as hax;\n\n// dummy max value\nconst u32_max: u32 = 90000;\n\n/// A doc comment on `add3`\n#[doc = \"another doc comment on add3\"]\n#[hax::requires(x > 10 && y > 10 && z > 10 && x + y + z < u32_max)]\n#[hax::ensures(|result| hax_lib::implies(true, result > 32))]\nfn add3(x: u32, y: u32, z: u32) -> u32 {\n    x + y + z\n}\n\n#[hax::requires(*x < 40 && *y < 300)]\n#[hax::ensures(|result| *future(x) == *y && *future(y) == *x && result == *x + *y)]\nfn swap_and_mut_req_ens(x: &mut u32, y: &mut u32) -> u32 {\n    let x0 = *x;\n    *x = *y;\n    *y = x0;\n    *x + *y\n}\n\n#[hax_lib::ensures(|_| true)]\nfn issue_844(_x: &mut u8) {}\n\n// From issue #845\nmod ensures_on_arity_zero_fns {\n    #[hax_lib::requires(true)]\n    #[hax_lib::ensures(|_x| true)]\n    fn doing_nothing() {}\n    #[hax_lib::requires(true)]\n    #[hax_lib::ensures(|x| x > 100)]\n    fn basically_a_constant() -> u8 {\n        127\n    }\n}\n\n#[hax::lemma]\nfn add3_lemma(x: u32) -> Proof<{ x <= 10 || x >= u32_max / 3 || add3(x, x, x) == x * 3 }> {}\n\nfn dummy_function(x: u32) -> u32 {\n    x\n}\n\n#[hax::lemma]\n#[hax::fstar::smt_pat(x)]\nfn apply_dummy_function_lemma(x: u32) -> Proof<{ x == dummy_function(x) }> {}\n\nmod postprocess_with {\n    #[hax_lib::fstar::postprocess_with(\"fun _ -> FStar.Tactics.trefl ()\")]\n    fn f() {}\n\n    pub mod somewhere {\n        pub fn some_hypothetical_tactic(some_param: u8) {}\n    }\n    use somewhere::some_hypothetical_tactic;\n\n    #[hax_lib::fstar::postprocess_with(|()| some_hypothetical_tactic(12))]\n    fn g() {}\n}\n\n#[hax::exclude]\npub fn f<'a, T>(c: bool, x: &'a mut T, y: &'a mut T) -> &'a mut T {\n    if c {\n        x\n    } else {\n        y\n    }\n}\n\n#[hax::decreases(x)]\nfn fib(x: usize) -> usize {\n    if x <= 2 {\n        x\n    } else {\n        fib(x - 1).wrapping_add(fib(x - 2))\n    }\n}\n\n#[hax::attributes]\npub struct Foo {\n    pub x: u32,\n    #[refine(y > 3)]\n    pub y: u32,\n    #[refine(y + x + z > 
3)]\n    pub z: u32,\n}\n\n#[hax::exclude]\nimpl Foo {\n    fn g(&self) {}\n}\n\nimpl Foo {\n    #[hax::exclude]\n    fn h(&self) {}\n}\n\nfn props() {\n    hax_lib::assume!(hax_lib::fstar::prop!(\"True\"));\n    hax_lib::assert_prop!(hax_lib::fstar::prop!(\"True\"));\n}\n\n#[hax::attributes]\nmod refined_arithmetic {\n    use core::ops::{Add, Mul};\n\n    struct Foo(u8);\n\n    impl Add for Foo {\n        type Output = Foo;\n        #[requires(self.0 < 255 - rhs.0)]\n        fn add(self, rhs: Foo) -> Foo {\n            Foo(self.0 + rhs.0)\n        }\n    }\n\n    impl Mul for Foo {\n        type Output = Foo;\n        #[requires(rhs.0 == 0 || self.0 < 255 / rhs.0)]\n        fn mul(self, rhs: Foo) -> Foo {\n            Foo(self.0 * rhs.0)\n        }\n    }\n}\n\nmod refined_indexes {\n    use hax_lib as hax;\n    const MAX: usize = 10;\n    struct MyArray(pub [u8; MAX]);\n\n    #[hax::attributes]\n    impl std::ops::Index<usize> for MyArray {\n        type Output = u8;\n        #[requires(index < MAX)]\n        fn index(&self, index: usize) -> &Self::Output {\n            &self[index]\n        }\n    }\n\n    #[hax::exclude]\n    impl std::ops::IndexMut<usize> for MyArray {\n        fn index_mut(&mut self, index: usize) -> &mut Self::Output {\n            &mut self[index]\n        }\n    }\n\n    /// Triple dash comment\n    /** Multiline double star comment Maecenas blandit accumsan feugiat.\n    Done vitae ullamcorper est.\n    Curabitur id dui eget sem viverra interdum. 
*/\n    fn mutation_example(\n        use_generic_update_at: &mut MyArray,\n        use_specialized_update_at: &mut [u8],\n        specialized_as_well: &mut Vec<u8>,\n    ) {\n        use_generic_update_at[2] = 0;\n        use_specialized_update_at[2] = 0;\n        specialized_as_well[2] = 0;\n    }\n}\nmod newtype_pattern {\n    use hax_lib as hax;\n\n    const MAX: usize = 10;\n    #[hax::attributes]\n    struct SafeIndex {\n        #[refine(i < MAX)]\n        i: usize,\n    }\n    impl SafeIndex {\n        fn new(i: usize) -> Option<Self> {\n            if i < MAX {\n                Some(Self { i })\n            } else {\n                None\n            }\n        }\n        fn as_usize(&self) -> usize {\n            self.i\n        }\n    }\n\n    impl<T> std::ops::Index<SafeIndex> for [T; MAX] {\n        type Output = T;\n        fn index(&self, index: SafeIndex) -> &Self::Output {\n            &self[index.i]\n        }\n    }\n}\n\n#[hax::fstar::before(r#\"let before_inlined_code = \"example before\"\"#)]\n#[hax::fstar::after(r#\"let inlined_code_after = \"example after\"\"#)]\nfn inlined_code(foo: Foo) {\n    const V: u8 = 12;\n    let v_a = 13;\n    hax::fstar!(\n        r\"let x = ${foo.x} in\n          let $?{Foo {y, ..}} = $foo in\n          $add3 ((fun _ -> 3ul) $foo) $v_a $V y\n        \"\n    );\n}\n\n#[hax::fstar::before(r#\"let before_1 = \"example before 1\"\"#)]\n#[hax::fstar::before(r#\"let before_2 = \"example before 2\"\"#)]\n#[hax::fstar::before(r#\"let before_3 = \"example before 3\"\"#)]\n#[hax::fstar::after(r#\"let after 1 = \"example after 1\"\"#)]\n#[hax::fstar::after(r#\"let after 2 = \"example after 2\"\"#)]\n#[hax::fstar::after(r#\"let after 3 = \"example after 3\"\"#)]\nfn mutliple_before_after() {}\n\n#[hax::fstar::replace(r#\"unfold let $some_function _ = \"hello from F*\"\"#)]\nfn some_function() -> String {\n    String::from(\"hello from Rust\")\n}\n\nmod future_self {\n    #[derive(Eq, PartialEq)]\n    struct Dummy;\n\n    
#[hax_lib::attributes]\n    impl Dummy {\n        #[hax_lib::ensures(|_| future(self) == self)]\n        fn f(&mut self) {}\n    }\n}\n\nmod replace_body {\n    #[hax_lib::fstar::replace_body(\"magic ${x}\")]\n    fn f(x: u8, y: u8) -> u8 {\n        1 + 2\n    }\n    struct Foo;\n    impl Foo {\n        #[hax_lib::fstar::replace_body(\"(magic (${self} <: $:{Self})) ${x}\")]\n        fn assoc_fn(&self, x: u8) {}\n    }\n    impl ToString for Foo {\n        #[hax_lib::fstar::replace_body(r#\"\"The type was $:{Self}\"\"#)]\n        fn to_string(&self) -> String {\n            \"Hello\".into()\n        }\n    }\n}\n\nmod pre_post_on_traits_and_impls {\n    use hax_lib::*;\n\n    #[hax_lib::attributes]\n    trait Operation {\n        // we allow `hax_lib`, `::hax_lib` or no path at all\n        #[hax_lib::requires(x.lift() <= int!(127))]\n        #[ensures(|result| x.lift() * int!(2) == result.lift())]\n        fn double(x: u8) -> u8;\n    }\n\n    struct ViaAdd;\n    struct ViaMul;\n\n    #[hax_lib::attributes]\n    impl Operation for ViaAdd {\n        #[::hax_lib::requires(x.lift() <= int!(127))]\n        #[ensures(|result| x.lift() * int!(2) == result.lift())]\n        fn double(x: u8) -> u8 {\n            x + x\n        }\n    }\n\n    #[hax_lib::attributes]\n    impl Operation for ViaMul {\n        #[requires(x.lift() <= int!(127))]\n        #[::hax_lib::ensures(|result| x.lift() * int!(2) == result.lift())]\n        fn double(x: u8) -> u8 {\n            x * 2\n        }\n    }\n\n    #[hax_lib::attributes]\n    trait TraitWithRequiresAndEnsures {\n        #[requires(x < 100)]\n        #[ensures(|r| r > 88)]\n        fn method(&self, x: u8) -> u8;\n    }\n\n    fn test<T: TraitWithRequiresAndEnsures>(x: T) -> u8 {\n        x.method(99) - 88\n    }\n}\n\n/// An minimal example of a model of math integers for F*\nmod int_model {\n    use super::hax;\n    #[hax::fstar::replace(r#\"unfold type $:{Int} = int\"#)]\n    #[derive(Copy, Clone)]\n    struct Int(u128);\n\n    
#[hax::fstar::replace(r#\"unfold let ${add} x y = x + y\"#)]\n    fn add(x: Int, y: Int) -> Int {\n        Int(x.0 + y.0)\n    }\n\n    use std::ops::Sub;\n    #[hax::fstar::replace(\n        r#\"\nunfold instance impl: Core.Ops.Arith.t_Sub $:Int $:Int =\n  {\n    f_Output = $:Int;\n    f_sub_pre = (fun (self: $:Int) (other: $:Int) -> true);\n    f_sub_post = (fun (self: $:Int) (other: $:Int) (out: $:Int) -> true);\n    f_sub = fun (self: $:Int) (other: $:Int) -> self + other\n  }\n\"#\n    )]\n    impl Sub for Int {\n        type Output = Self;\n\n        fn sub(self, other: Self) -> Self::Output {\n            Self(self.0 + other.0)\n        }\n    }\n}\n\n/// Illustration of the `refinement_type` macro that helps creating refinement types via thin newtype wrappers.\nmod refinement_types {\n    use hax_lib::*;\n\n    #[hax_lib::refinement_type(|x| x >= MIN && x <= MAX)]\n    pub struct BoundedU8<const MIN: u8, const MAX: u8>(u8);\n\n    pub fn bounded_u8(x: BoundedU8<12, 15>, y: BoundedU8<10, 11>) -> BoundedU8<1, 23> {\n        BoundedU8::new(x.get() + y.get())\n    }\n\n    /// Even `u8` numbers. 
Constructing pub Even values triggers static\n    /// proofs in the extraction.\n    #[hax_lib::refinement_type(|x| x % 2 == 0)]\n    pub struct Even(u8);\n\n    #[hax_lib::requires(x < 127)]\n    pub fn double(x: u8) -> Even {\n        Even::new(x + x)\n    }\n\n    #[hax_lib::requires(x < 127)]\n    pub fn double_refine(x: u8) -> Even {\n        (x + x).into_checked()\n    }\n\n    /// A string that contains no space.\n    #[hax_lib::refinement_type(|x| !x.chars().any(|ch| ch == ' '))]\n    pub struct NoE(String);\n\n    /// A modular mutliplicative inverse\n    #[hax_lib::refinement_type(|n| (n as u128 * MOD as u128) % MOD as u128 == 1)]\n    pub struct ModInverse<const MOD: u32>(u32);\n\n    /// A field element\n    #[hax_lib::refinement_type(|x| x <= 2347)]\n    pub struct FieldElement(u16);\n\n    /// Example of a specific constraint on a value\n    #[hax_lib::refinement_type(|x| x == 4 || x == 5 || x == 10 || x == 11)]\n    pub struct CompressionFactor(u8);\n\n    use hax_lib::int::*;\n    /// Example of a refined int, that derives all common arithmetic operations\n    hax_bounded_integers::refinement_int!(\n        BoundedAbsI16<const B: usize>(i16, 2, |x| B.lift() < int!(32768) && x.lift() >= -B.lift() && x.lift() <= B.lift())\n    );\n\n    #[hax_lib::requires(M.lift() < int!(32768) && M.lift() == N.lift() * int!(2))]\n    fn double_abs_i16<const N: usize, const M: usize>(x: BoundedAbsI16<N>) -> BoundedAbsI16<M> {\n        (x * 2).into_checked()\n    }\n}\nmod nested_refinement_elim {\n    use hax_lib::*;\n    #[refinement_type(|x| true)]\n    pub struct DummyRefinement(u16);\n\n    fn elim_twice(x: DummyRefinement) -> u16 {\n        (DummyRefinement::new(x.get())).get()\n    }\n}\n\n/// `ensures` and `requires` with inlined code (issue #825)\nmod inlined_code_ensures_requires {\n    #[hax_lib::requires(fstar!(\"forall i. FStar.Seq.index $v i <. ${254u8}\"))]\n    #[hax_lib::ensures(|()| {\n        let future_v = future(v);\n        fstar!(\"forall i. 
FStar.Seq.index ${future_v} i >. ${0u8}\")\n    })]\n    fn increment_array(v: &mut [u8; 4]) {\n        v[0] += 1;\n        v[1] += 1;\n        v[2] += 1;\n        v[3] += 1;\n    }\n}\n\nmod verifcation_status {\n    #[hax_lib::fstar::verification_status(lax)]\n    fn a_function_which_only_laxes() {\n        assert!(/*very complicated stuff*/ false)\n    }\n\n    #[hax_lib::fstar::verification_status(panic_free)]\n    #[hax_lib::ensures(|x|/*very complicated stuff*/false)]\n    fn a_panicfree_function() -> u8 {\n        let a = 3;\n        let b = 6;\n        a + b\n    }\n\n    #[hax_lib::fstar::verification_status(panic_free)]\n    #[hax_lib::ensures(|x|/*very complicated stuff*/false)]\n    fn another_panicfree_function() {\n        let not_much = 0;\n        let nothing = 0;\n        let still_not_much = not_much + nothing;\n    }\n}\n\nmod requires_mut {\n    use hax_lib::*;\n\n    #[hax_lib::attributes]\n    trait Foo {\n        #[hax_lib::requires(x.lift() + y.lift() < int!(254))]\n        #[hax_lib::ensures(|output_variable| output_variable == *future(y))]\n        fn f(x: u8, y: &mut u8) -> u8;\n\n        fn g(x: u8, y: u8) -> u8;\n        fn h(x: u8, y: u8);\n        fn i(x: u8, y: &mut u8);\n    }\n\n    #[hax_lib::attributes]\n    impl Foo for () {\n        #[hax_lib::requires(x.lift() + y.lift() < int!(254))]\n        #[hax_lib::ensures(|output_variable| output_variable == *future(y))]\n        fn f(x: u8, y: &mut u8) -> u8 {\n            *y += x;\n            *y\n        }\n\n        #[hax_lib::requires(true)]\n        #[hax_lib::ensures(|output_variable| output_variable == y)]\n        fn g(x: u8, y: u8) -> u8 {\n            y\n        }\n\n        #[hax_lib::requires(true)]\n        #[hax_lib::ensures(|output_variable| output_variable == ())]\n        fn h(x: u8, y: u8) {\n            ()\n        }\n\n        #[hax_lib::requires(true)]\n        #[hax_lib::ensures(|out| *future(y) == *y)]\n        fn i(x: u8, y: &mut u8) {\n            ()\n        
}\n    }\n}\n\nmod issue_1266 {\n    #[hax_lib::attributes]\n    trait T {\n        #[hax_lib::ensures(|_|true)]\n        fn v(x: &mut Self);\n    }\n}\n\nmod props {\n    use hax_lib::*;\n\n    fn f(x: Prop, y: bool) -> Prop {\n        let xprop: Prop = y.into();\n        let p = y.lift() & xprop & y & y.to_prop();\n        !(p | y).implies(forall(|x: u8| x <= u8::MAX) & exists(|x: u16| x > 300))\n    }\n}\n\nmod reorder {\n    #[hax_lib::attributes]\n    struct Foo {\n        #[order(40)]\n        pub field_1: u8,\n        #[hax_lib::order(31)]\n        pub field_2: u8,\n        pub field_3: u8,\n        pub field_4: u8,\n    }\n\n    #[hax_lib::attributes]\n    enum Bar {\n        A {\n            a_field_1: u8,\n            a_field_2: u8,\n            #[hax_lib::order(-42)]\n            a_field_3: u8,\n        },\n        B {\n            b_field_1: u8,\n            #[hax_lib::order(42)]\n            b_field_2: u8,\n            b_field_3: u8,\n        },\n    }\n}\n\nmod issue_1276 {\n    struct S(pub u8);\n\n    #[hax_lib::attributes]\n    impl S {\n        #[hax_lib::requires(self.0 == 0 && self_ == self_1 && self_2 == 9)]\n        fn f(&self, self_: u8, self_0: u8, self_1: u8, self_2: u8) {}\n    }\n}\n\nmod issue_evit_57 {\n    struct Foo;\n\n    #[hax_lib::attributes]\n    impl Foo {\n        #[hax_lib::requires(true)]\n        fn f(mut self) {}\n    }\n}\n"
  },
  {
    "path": "tests/cli/include-flag/Cargo.toml",
    "content": "[package]\nname = \"include-flag\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar+coq\" = { snapshot = \"stdout\" }\n"
  },
  {
    "path": "tests/cli/include-flag/src/lib.rs",
    "content": "#![allow(dead_code)]\n#![allow(non_camel_case_types)]\n\n/// Entrypoint\nfn main() {\n    main_a(Foo);\n    main_b();\n    main_c();\n}\n\n/// Direct dependencies\nfn main_a<T: Trait>(x: T) {\n    main_a_a();\n    main_a_b();\n    main_a_c();\n}\nfn main_b() {\n    main_b_a();\n    main_b_b();\n    main_b_c();\n}\nfn main_c() {\n    main_c_a();\n    main_c_b();\n    main_c_c();\n}\nstruct Foo;\n\ntrait Trait {}\nimpl Trait for Foo {}\n\n/// Indirect dependencies\nfn main_a_a() {}\nfn main_b_a() {}\nfn main_c_a() {}\n\nfn main_a_b() {}\nfn main_b_b() {}\nfn main_c_b() {}\n\nfn main_a_c() {}\nfn main_b_c() {}\nfn main_c_c() {}\n"
  },
  {
    "path": "tests/cli/interface-only/Cargo.toml",
    "content": "[package]\nname = \"interface-only\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../../../hax-lib\" }\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { include-flag = \"+:** -interface_only::Foo\" }\n"
  },
  {
    "path": "tests/cli/interface-only/src/lib.rs",
    "content": "#![allow(dead_code)]\n\n/// This item contains unsafe blocks and raw references, two features\n/// not supported by hax. Thanks to the `-i` flag and the `+:`\n/// modifier, `f` is still extractable as an interface.\n///\n/// Expressions within type are still extracted, as well as pre- and\n/// post-conditions.\n#[hax_lib::requires(x < 254)]\n#[hax_lib::ensures(|r| r[0] > x)]\nfn f(x: u8) -> [u8; 4] {\n    let y = x as *const i8;\n\n    unsafe {\n        println!(\"{}\", *y);\n    }\n\n    [x + 1, x, x, x]\n}\n\n/// This struct contains a field which uses raw pointers, which are\n/// not supported by hax. This item cannot be extracted at all: we\n/// need to exclude it with `-i '-*::Foo'`.\nstruct Foo {\n    unsupported_field: *const u8,\n}\n\nstruct Bar;\n\n/// Non-inherent implementations are extracted, their bodies are not\n/// dropped. This might be a bit surprising: see\n/// https://github.com/hacspec/hax/issues/616.\nimpl From<()> for Bar {\n    fn from((): ()) -> Self {\n        Bar\n    }\n}\n\n/// If you need to drop the body of a method, please hoist it:\nimpl From<u8> for Bar {\n    fn from(x: u8) -> Self {\n        fn from(_: u8) -> Bar {\n            Bar\n        }\n        from(x)\n    }\n}\n\npub struct Holder<T> {\n    pub(crate) value: Vec<T>,\n}\n\nimpl<T> From<()> for Holder<T> {\n    fn from((): ()) -> Self {\n        Holder { value: Vec::new() }\n    }\n}\n\npub struct Param<const SIZE: usize> {\n    pub(crate) value: [u8; SIZE],\n}\n\nimpl<const SIZE: usize> From<()> for Param<SIZE> {\n    fn from((): ()) -> Self {\n        Param { value: [0; SIZE] }\n    }\n}\n\nfn f_generic<const X: usize, U>(_x: U) -> Param<X> {\n    Param { value: [0; X] }\n}\n\ntrait T {\n    type Assoc;\n    fn d();\n}\n\n/// Impls with associated types are not erased\nimpl T for u8 {\n    type Assoc = u8;\n    fn d() {}\n}\ntrait T2 {\n    fn d();\n}\n\n/// Items can be forced to be transparent\n#[hax_lib::transparent]\n#[hax_lib::attributes]\nimpl T2 for 
u8 {\n    #[hax_lib::requires(false)]\n    fn d() {}\n}\n\n#[hax_lib::requires(b.len() >= n)]\n#[hax_lib::ensures(|out| out <= n)]\nfn padlen(b: &[u8], n: usize) -> usize {\n    if n > 0 && b[n - 1] == 0 {\n        1 + padlen(b, n - 1)\n    } else {\n        0\n    }\n}\n"
  },
  {
    "path": "tests/constructor-as-closure/Cargo.toml",
    "content": "[package]\nname = \"constructor-as-closure\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { broken = false, snapshot = \"stdout\", issue_id = \"914\" }\n"
  },
  {
    "path": "tests/constructor-as-closure/src/lib.rs",
    "content": "struct Test(i32);\nimpl Test {\n    pub fn test(x: Option<i32>) -> Option<Test> {\n        x.map(Self)\n    }\n}\npub enum Context {\n    A(i32),\n    B(i32),\n}\nimpl Context {\n    pub fn test(x: Option<i32>) -> Option<Context> {\n        x.map(Self::B)\n    }\n}\n"
  },
  {
    "path": "tests/cyclic-modules/Cargo.toml",
    "content": "[package]\nname = \"cyclic-modules\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../../hax-lib\" }\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { broken = false, snapshot = \"stdout\", issue_id = \"396\" }\ninto.\"lean\" = { broken = false, snapshot = \"stdout\" }\n"
  },
  {
    "path": "tests/cyclic-modules/src/lib.rs",
    "content": "mod typ_a {\n    pub enum TRec {\n        T(super::typ_b::T1Rec),\n        Empty,\n    }\n    pub enum T {\n        T(super::typ_b::T1),\n    }\n}\nmod typ_b {\n    pub enum T1Rec {\n        T1(Box<T2Rec>),\n    }\n    pub enum T2Rec {\n        T2(super::typ_a::TRec),\n    }\n\n    pub enum T1 {\n        T1,\n    }\n    pub enum T2 {\n        T2(super::typ_a::T),\n    }\n}\n\nfn f() {}\nmod b {\n    pub fn g() {\n        super::f()\n    }\n}\nfn h() {\n    b::g();\n    c::i()\n}\nfn h2() {\n    c::i()\n}\nmod c {\n    pub fn i() {}\n}\nmod d {\n    pub fn d1() {}\n    pub fn d2() {\n        super::de::de1()\n    }\n}\nmod e {\n    pub fn e1() {\n        super::d::d1()\n    }\n}\nmod de {\n    pub fn de1() {\n        super::e::e1()\n    }\n}\n\nmod rec {\n    enum T {\n        t1,\n        t2,\n    }\n    pub fn g1(x: T) -> T {\n        match x {\n            T::t1 => g2(x),\n            T::t2 => T::t1,\n        }\n    }\n    pub fn g2(x: T) -> T {\n        match x {\n            T::t1 => g1(x),\n            T::t2 => hf(x),\n        }\n    }\n    pub fn hf(x: T) -> T {\n        match x {\n            T::t1 => hf(T::t2),\n            T::t2 => x,\n        }\n    }\n}\n\nmod rec1_same_name {\n    pub fn f(x: i32) -> i32 {\n        super::rec2_same_name::f(x)\n    }\n}\nmod rec2_same_name {\n    pub fn f(x: i32) -> i32 {\n        if x > 0 {\n            super::rec1_same_name::f(x - 1)\n        } else {\n            0\n        }\n    }\n}\nmod enums_a {\n    pub enum T {\n        A,\n        B,\n        C(Vec<super::enums_b::U>),\n        D(Vec<super::enums_b::T>),\n    }\n}\nmod enums_b {\n    pub enum U {\n        A,\n        B,\n        C(Vec<super::enums_a::T>),\n    }\n    pub enum T {\n        A,\n        B,\n        C(Vec<super::enums_a::T>),\n    }\n    pub fn f() -> T {\n        T::A\n    }\n}\n\nmod m1 {\n    pub fn a() {\n        super::m2::c()\n    }\n}\n\nmod m2 {\n    pub fn d() {}\n    pub fn b() {\n        super::m1::a();\n        d()\n    
}\n    pub fn c() {}\n}\n\npub mod disjoint_cycle_a {\n    pub fn f() {\n        super::disjoint_cycle_b::h()\n    }\n    pub fn g() {}\n}\npub mod disjoint_cycle_b {\n    pub fn h() {}\n    pub fn i() {\n        super::disjoint_cycle_a::g()\n    }\n}\n\npub mod variant_constructor_a {\n    pub enum Context {\n        A(i32),\n        B(i32),\n    }\n    pub fn f() -> Context {\n        super::variant_constructor_b::h()\n    }\n    impl Context {\n        pub fn test(x: Option<i32>) -> Option<Context> {\n            x.map(Self::A)\n        }\n    }\n}\npub mod variant_constructor_b {\n    pub fn h() -> super::variant_constructor_a::Context {\n        super::variant_constructor_a::Context::A(1)\n    }\n}\n\npub mod late_skip_a {\n    pub fn f() {\n        super::late_skip_b::f()\n    }\n}\npub mod late_skip_b {\n    #[hax_lib::requires(true)]\n    pub fn f() {\n        super::late_skip_a::f()\n    }\n}\n\nmod issue_1823 {\n    mod first_example {\n        pub mod a {\n            pub struct A {}\n\n            impl A {\n                pub fn mkb(self) -> super::b::B {\n                    super::b::B {}\n                }\n            }\n        }\n        pub mod b {\n            pub struct B {}\n\n            impl B {\n                pub fn mka(self) -> super::a::A {\n                    super::a::A {}\n                }\n            }\n        }\n    }\n    mod second_example {\n        pub mod a {\n            pub fn call_b() {\n                super::b::b()\n            }\n            pub fn a() {}\n        }\n        pub mod b {\n            pub fn call_a() {\n                super::a::a()\n            }\n            pub fn b() {}\n        }\n    }\n}\n"
  },
  {
    "path": "tests/dyn/Cargo.toml",
    "content": "[package]\nname = \"dyn\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { broken = false, snapshot = \"stdout\", issue_id = \"296\" }\n"
  },
  {
    "path": "tests/dyn/src/lib.rs",
    "content": "#![allow(dead_code)]\n\npub trait Printable<S> {\n    fn stringify(&self) -> S;\n}\n\nimpl Printable<String> for i32 {\n    fn stringify(&self) -> String {\n        self.to_string()\n    }\n}\n\npub fn print(a: Box<dyn Printable<String>>) {\n    println!(\"{}\", a.stringify());\n}\n"
  },
  {
    "path": "tests/enum-repr/Cargo.toml",
    "content": "[package]\nname = \"enum-repr\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar+coq+ssprove\" = { broken = false, issue_id = \"162\" }\n"
  },
  {
    "path": "tests/enum-repr/src/lib.rs",
    "content": "#![allow(dead_code)]\n\n#[repr(u16)]\nenum EnumWithRepr {\n    ExplicitDiscr1 = 1,\n    ExplicitDiscr2 = 5,\n    ImplicitDiscrEmptyTuple(),\n    ImplicitDiscrEmptyStruct {},\n}\n\n#[repr(u64)]\nenum ImplicitReprs {\n    A,\n    B(),\n    C {},\n    D,\n    E = 30,\n    F,\n    G,\n    H {},\n    I(),\n}\n\nfn f() -> u32 {\n    const CONST: u16 = EnumWithRepr::ExplicitDiscr1 as u16;\n    let _x = EnumWithRepr::ExplicitDiscr2 as u16;\n    EnumWithRepr::ImplicitDiscrEmptyTuple() as u32\n        + EnumWithRepr::ImplicitDiscrEmptyStruct {} as u32\n}\n\nfn get_repr(x: EnumWithRepr) -> u16 {\n    x as u16\n}\n\nfn get_casted_repr(x: EnumWithRepr) -> u64 {\n    x as u64\n}\n"
  },
  {
    "path": "tests/enum-struct-variant/Cargo.toml",
    "content": "[package]\nname = \"enum-struct-variant\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar+coq\" = {broken = false, snapshot = \"none\"}\ninto.\"ssprove\" = {broken = true, snapshot = \"none\"}"
  },
  {
    "path": "tests/enum-struct-variant/src/lib.rs",
    "content": "#![allow(dead_code)]\n\n#[derive(Debug)]\npub struct Money {\n    value: u64,\n}\n\n#[derive(Debug)]\npub enum EnumWithStructVariant {\n    Funds { balance: Money },\n}\n"
  },
  {
    "path": "tests/even/Cargo.toml",
    "content": "[package]\nname = \"even\"\nversion = \"0.0.1\"\nedition = \"2021\"\n\n[dependencies]\n"
  },
  {
    "path": "tests/even/src/lib.rs",
    "content": "#![allow(dead_code)]\n\npub fn even(n: usize) -> bool {\n    n % 2 == 0\n}\n"
  },
  {
    "path": "tests/functions/Cargo.toml",
    "content": "[package]\nname = \"functions\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../../hax-lib\" }\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { snapshot = \"stdout\" }\n"
  },
  {
    "path": "tests/functions/src/lib.rs",
    "content": "/// Issue #757\nfn calling_function_pointer() {\n    fn f<T>() {}\n    let f_ptr = f::<i32>;\n    f_ptr();\n}\n\nmod issue_1048 {\n    pub struct CallableViaDeref;\n\n    impl core::ops::Deref for CallableViaDeref {\n        type Target = fn() -> bool;\n\n        fn deref(&self) -> &Self::Target {\n            &((|| true) as fn() -> bool)\n        }\n    }\n\n    pub fn call_via_deref() -> bool {\n        CallableViaDeref()\n    }\n}\n"
  },
  {
    "path": "tests/generics/Cargo.toml",
    "content": "[package]\nname = \"generics\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { broken = false, issue_id = \"21\" }\n"
  },
  {
    "path": "tests/generics/src/lib.rs",
    "content": "#![allow(dead_code)]\n\nfn dup<T: Clone>(x: T) -> (T, T) {\n    (x.clone(), x.clone())\n}\n\nfn foo<const LEN: usize>(arr: [usize; LEN]) -> usize {\n    let mut acc = LEN + 9;\n    for i in 0..LEN {\n        acc += arr[i];\n    }\n    acc\n}\n\nfn repeat<const LEN: usize, T: Copy>(x: T) -> [T; LEN] {\n    [x; LEN]\n}\n\nfn call_f() -> usize {\n    f::<10>(3) + 3\n}\nfn f<const N: usize>(x: usize) -> usize {\n    N + N + x\n}\n\nfn call_g() -> usize {\n    g::<3, [usize; 3]>([42, 3, 49]) + 3\n}\nfn g<const N: usize, T: Into<[usize; N]>>(arr: T) -> usize {\n    arr.into().into_iter().max().unwrap_or(N) + N\n}\n\ntrait Foo {\n    fn const_add<const N: usize>(self) -> usize;\n}\n\nimpl Foo for usize {\n    fn const_add<const N: usize>(self) -> usize {\n        self + N\n    }\n}\n\nstruct Bar;\n\nimpl Bar {\n    fn inherent_impl_generics<T, const N: usize>(x: [T; N]) {}\n}\n\n/// Test defaults types and constants\nmod defaults_generics {\n    struct Defaults<T = (), const N: usize = 2>([T; N]);\n    fn f(_: Defaults) {}\n}\n\n/// See https://github.com/hacspec/hax/issues/1176\nmod impl_generics {\n    struct Test();\n\n    impl Test {\n        fn set_ciphersuites<S>(&self, ciphers: impl IntoIterator<Item = S>) -> Result<(), ()>\n        where\n            S: AsRef<str>,\n        {\n            Ok(())\n        }\n\n        fn set_alpn_protocols<S>(&self, _protocols: impl IntoIterator<Item = S>) -> Result<(), ()>\n        where\n            S: AsRef<str>,\n        {\n            Ok(())\n        }\n    }\n}\n\n/// See https://github.com/cryspen/hax/issues/1289\nmod assoc_const_param {\n    struct Test<const N: usize>();\n\n    impl<const N: usize> Test<N> {\n        const A: Self = Self();\n    }\n\n    fn test() -> Test<1> {\n        Test::<1>::A\n    }\n}\n"
  },
  {
    "path": "tests/guards/Cargo.toml",
    "content": "[package]\nname = \"guards\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../../hax-lib\" }\n\n[package.metadata.hax-tests]\ninto.\"fstar+coq+ssprove\" = { broken = false, snapshot = \"stdout\", issue_id = \"814\" }\n"
  },
  {
    "path": "tests/guards/src/lib.rs",
    "content": "#![feature(if_let_guard)]\n#![allow(dead_code)]\n\npub fn if_let_guard(x: Option<Result<i32, i32>>) -> i32 {\n    match x {\n        None => 0,\n        Some(v) if let Ok(y) = v => y,\n        Some(Err(y)) => y,\n        _ => 1,\n    }\n}\n\npub fn equivalent(x: Option<Result<i32, i32>>) -> i32 {\n    match x {\n        None => 0,\n        _ => match match x {\n            Some(v) => match v {\n                Ok(y) => Some(y),\n                _ => None,\n            },\n            _ => None,\n        } {\n            Some(y) => y,\n            None => match x {\n                Some(Err(y)) => y,\n                _ => 1,\n            },\n        },\n    }\n}\n\npub fn multiple_guards(x: Option<Result<i32, i32>>) -> i32 {\n    match x {\n        None => 0,\n        Some(Ok(v)) if let Some(1) = Some(v + 1) => 0,\n        Some(v) if let Ok(y) = v => y,\n        Some(Err(y)) => y,\n        _ => 1,\n    }\n}\n\npub fn if_guard(x: Option<i32>) -> i32 {\n    match x {\n        Some(v) if v > 0 => v,\n        _ => 0,\n    }\n}\n"
  },
  {
    "path": "tests/if-let/Cargo.toml",
    "content": "[package]\nname = \"if-let\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar+coq+ssprove\" = { broken = false, snapshot = \"none\", issue_id = \"85\" }\n"
  },
  {
    "path": "tests/if-let/src/lib.rs",
    "content": "#![allow(dead_code)]\n\npub fn fun_with_if_let() -> u8 {\n    let x = Some(5);\n    if let Some(x) = x {\n        x\n    } else {\n        7\n    }\n}\n"
  },
  {
    "path": "tests/lean-core-models/Cargo.toml",
    "content": "[package]\nname = \"lean-core-models\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../../hax-lib\" }\n\n[package.metadata.hax-tests]\ninto.\"lean\" = {}"
  },
  {
    "path": "tests/lean-core-models/src/default.rs",
    "content": "// Tests for core models in lean\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\n// Default on struct\nmod structs {\n    struct S {\n        f1: usize,\n    }\n\n    impl Default for S {\n        fn default() -> Self {\n            S { f1: 0 }\n        }\n    }\n\n    fn test() -> S {\n        S::default()\n    }\n}\n\n// Default on enum\nmod enums {\n    enum E<T> {\n        C1(u32),\n        C2(T),\n    }\n\n    impl<T: Default> Default for E<T> {\n        fn default() -> Self {\n            E::C2(T::default())\n        }\n    }\n}\n"
  },
  {
    "path": "tests/lean-core-models/src/function.rs",
    "content": "#![allow(dead_code)]\n#![allow(unused_variables)]\n\nfn test() -> u32 {\n    let f_1 = |_: u32| 9;\n    let f_2 = |x: u32, y: u32| x + y;\n    let f_2_tuple = |(x, y): (u32, u32)| x + y;\n    f_1(0) + f_2(1, 2) + f_2_tuple((1, 2))\n}\n"
  },
  {
    "path": "tests/lean-core-models/src/lib.rs",
    "content": "// Tests for core models in lean\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\npub mod default;\npub mod function;\npub mod option;\npub mod phantom;\npub mod result;\n"
  },
  {
    "path": "tests/lean-core-models/src/option.rs",
    "content": "// Tests for core models in lean\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\nstruct S {\n    f1: u32,\n}\n\nenum E {\n    C(u32),\n}\n\nimpl Default for S {\n    fn default() -> Self {\n        S { f1: 42 }\n    }\n}\n\nfn test() {\n    let o1 = Option::Some(4);\n    let o2: Option<i32> = None;\n\n    let o3 = o1.clone().is_some_and(|x| x == 0);\n    let o3 = o1.clone().is_none_or(|x| x == 0);\n\n    let o4 = Some(0).unwrap();\n    let o5 = Some(0).unwrap_or(9);\n    let o6 = Some(0).unwrap_or_else(|| 9);\n    let o7 = Option::None::<S>.unwrap_or_default();\n\n    // maps\n    let o8 = Some(0).map(|x| x + 1);\n    let o9 = Some(1).map_or(9, |x| x + 1);\n    let o10 = Some(2).map_or_else(|| 9, |x| x + 1);\n\n    // options and  results\n    let o11 = Some(3).ok_or(E::C(0));\n    let o12 = Some(1).ok_or_else(|| E::C(1));\n\n    let o13 = None.and_then(|x: u32| Some(x));\n    let o14 = Some(S { f1: 9 }).take();\n\n    // tests\n    let o15 = Some(1).is_some();\n    let o16 = Some(2).is_none();\n    let o17 = Some(3).expect(\"Should be Some\");\n    let o18 = Some(4).unwrap();\n}\n"
  },
  {
    "path": "tests/lean-core-models/src/phantom.rs",
    "content": "// Tests for core models in lean\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\nuse core::marker::PhantomData;\n\ntrait Foo {}\n\nstruct Bar<F: Foo> {\n    _phantom: PhantomData<F>,\n}\n\nimpl<F: Foo> Bar<F> {\n    fn new() -> Self {\n        Self {_phantom : PhantomData}\n    }\n}\n"
  },
  {
    "path": "tests/lean-core-models/src/result.rs",
    "content": "// Tests for core models in lean\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\n#[derive(Clone)]\nenum E1 {\n    C1,\n    C2(u32),\n}\n\nenum E2 {\n    C1,\n    C2(u32),\n}\n\nfn tests() -> Result<u32, E1> {\n    // Constructors\n    let v1 = Result::<u32, E1>::Ok(1);\n    let v2 = Result::<u32, _>::Err(E1::C1);\n\n    let f = |x: u32| x + 1;\n\n    // map\n    let v5 = Ok::<_, E1>(1).map(|v| v + 1);\n    let v6 = Ok::<_, E1>(1).map_or(9, f);\n    let v7 = Ok::<_, E1>(1).map_or_else(|_| 10, f);\n    let v8 = Ok(0).map_err(|e: E1| match e {\n        E1::C1 => E2::C1,\n        E1::C2(x) => E2::C2(x + 1),\n    });\n\n    let v9 = v1.is_ok();\n    let v10 = v1.is_err();\n    let v11 = v1.clone().and_then(|x| Ok::<_, E1>(x + 1));\n\n    let v12 = Ok::<u32, u32>(0).clone().unwrap();\n    let v13 = Ok::<u32, u32>(0).clone().expect(\"Should be Ok\");\n\n    // ? notation\n    let v3 = v1.map(f)? + v2?;\n\n    Ok(v3)\n}\n"
  },
  {
    "path": "tests/lean-tests/Cargo.toml",
    "content": "[package]\nname = \"lean-tests\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../../hax-lib\" }\n\n[package.metadata.hax-tests]\ninto.\"lean\" = { broken = false }\n"
  },
  {
    "path": "tests/lean-tests/src/array.rs",
    "content": "// Arrays with const generic sizes\n\nfn f<const N: usize>(x: [u8; N]) {}\n\nfn g<const N: usize>(x: [u8; N]) {\n    f(x);\n    f([0u8; 10]);\n}\n"
  },
  {
    "path": "tests/lean-tests/src/associated_types.rs",
    "content": "mod basic {\n    trait Iterable {\n        type Item;\n        fn first(&self) -> Self::Item;\n    }\n\n    fn just_the_first<I: Iterable>(iter: I) -> I::Item {\n        iter.first()\n    }\n\n    fn first_plus_1<I: Iterable<Item = i32>>(iter: I) -> i32 {\n        iter.first() + 1\n    }\n\n    impl Iterable for bool {\n        type Item = i32;\n        fn first(&self) -> i32 {\n            3\n        }\n    }\n\n    fn a() {\n        first_plus_1(true);\n    }\n}\n\nmod projection {\n    trait T1 {\n        type A1;\n    }\n\n    trait T2 {\n        type A2: T1;\n        fn f() -> <Self::A2 as T1>::A1;\n    }\n}\n\nmod multiple_associated_types {\n    trait Pair {\n        type First;\n        type Second;\n        fn first(&self) -> Self::First;\n        fn second(&self) -> Self::Second;\n    }\n\n    fn get_both<P: Pair>(pair: P) -> (P::First, P::Second) {\n        (pair.first(), pair.second())\n    }\n\n    impl Pair for (i32, bool) {\n        type First = i32;\n        type Second = bool;\n        fn first(&self) -> i32 {\n            self.0\n        }\n        fn second(&self) -> bool {\n            self.1\n        }\n    }\n\n    fn b() {\n        let pair = (42, true);\n        let both = get_both(pair);\n    }\n\n    fn get_first_as_i32<P: Pair<First = i32>>(pair: P) -> i32 {\n        pair.first()\n    }\n}\n\nmod multiple_projections {\n    trait FnOnce<T> {\n        type Output;\n    }\n\n    pub fn func<T, U, D, F>(d: D, f: F, u: U)\n    where\n        F: FnOnce<T, Output = U>,\n        D: FnOnce<T, Output = U>,\n    {\n    }\n}\n"
  },
  {
    "path": "tests/lean-tests/src/binops.rs",
    "content": "//! Tests known binops\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\nfn noop (x: i32) -> i32 { x }\n\n/////////////////////\n// UNARY FUNCTIONS //\n/////////////////////\n\nfn neg_int (x: i32) -> i32 { -x }\n\nfn not_int (x: i32) -> i32 { !x }\n\nfn not_bool (x: bool) -> bool { !x }\n\nfn index (x: [i32; 1]) -> i32 { x[0] }\n\n//////////////////////\n// BINARY FUNCTIONS //\n//////////////////////\n\nfn add_int (x: i32, y: i32) -> i32 { x + y }\n\nfn sub_int (x: i32, y: i32) -> i32 { x - y }\n\nfn mul_int (x: i32, y: i32) -> i32 { x * y }\n\nfn div_int (x: i32, y: i32) -> i32 { x / y }\n\nfn rem_int (x: i32, y: i32) -> i32 { x % y }\n\nfn shr_int (x: i32, y: i32) -> i32 { x >> y }\n\nfn shl_int (x: i32, y: i32) -> i32 { x << y }\n\nfn bitand_int (x: i32, y: i32) -> i32 { x & y }\n\nfn bitand_bool (x : bool, y: bool) -> bool { x & y }\n\nfn bitor_int (x: i32, y: i32) -> i32 { x | y }\n\nfn bitor_bool (x : bool, y: bool) -> bool { x | y }\n\nfn bitxor_int (x: i32, y: i32) -> i32 { x ^ y }\n\nfn bitxor_bool (x: bool, y: bool) -> bool { x ^ y }\n\nfn logical_op_and (x: bool, y: bool) -> bool { x && y }\n\nfn logical_op_or (x: bool, y: bool) -> bool { x || y }\n\nfn eq_int(x : i32, y: i32) -> bool { x == y }\n\nfn eq_bool(x : bool, y: bool) -> bool { x == y }\n\nfn neq_int (x : i32, y: i32) -> bool { x != y }\n\nfn neq_bool(x : bool, y: bool) -> bool { x != y }\n\nfn lt_int(x : i32, y: i32) -> bool { x < y }\n\nfn le_int(x : i32, y: i32) -> bool { x <= y }\n\nfn gt_int(x : i32, y: i32) -> bool { x > y }\n\nfn ge_int(x : i32, y: i32) -> bool { x >= y }\n\n//////////////////////\n// NON BOOL AND INT //\n//////////////////////\n\n// Known binops with non boolean or integer arguments should not be pretty printed\n\nstruct S;\n\nimpl std::ops::Not for S {\n    type Output = S;\n    fn not(self) -> S {\n        self\n    }\n}\n\nimpl std::ops::Add for S {\n    type Output = S;\n    fn add(self, rhs: Self) -> Self::Output {\n        self\n    }\n}\n\nfn 
not_s(x: S) -> S { !x }\n\nfn add_s(x: S, y: S) -> S { x + y }\n"
  },
  {
    "path": "tests/lean-tests/src/casts.rs",
    "content": "use hax_lib::*;\n\n/// Returns true if all casting edge cases behave as expected.\n#[ensures(|result| result)]\npub fn casting_edge_cases(_dummy: bool) -> bool {\n    // 1. Truncation: u16 to u8 (256 -> 0)\n    // 256 is 0x0100. Truncating to lower 8 bits gives 0x00.\n    let case1 = (256u16 as u8) == 0;\n\n    // 2. Truncation of negative: i16 to u8 (-1 -> 255)\n    // -1 in i16 is 0xFFFF. Truncating to u8 gives 0xFF (255).\n    let case2 = (-1i16 as u8) == 255;\n\n    // 3. Sign extension: i8 to i16 (-1 -> -1)\n    // -1 in i8 is 0xFF. Sign extending to i16 gives 0xFFFF (-1).\n    let case3 = (-1i8 as i16) == -1;\n\n    // 4. Reinterpretation of bits: u8 to i8 (128 -> -128)\n    // 128 in u8 is 0x80. In i8 (two's complement), 0x80 is -128.\n    let case4 = (128u8 as i8) == -128;\n\n    // 5. Large u32 to i32 (0xFFFFFFFF -> -1)\n    // 0xFFFFFFFF in u32. In i32 (two's complement), this is -1.\n    let case5 = (0xFFFFFFFFu32 as i32) == -1;\n\n    case1 && case2 && case3 && case4 && case5\n}\n\n/// https://github.com/cryspen/hax/issues/1912\npub fn shift_after_cast(x: u16, n: u8) -> u32 {\n    (x as u32) << (n as u32)\n}\n\n/// https://github.com/cryspen/hax/issues/1911\npub fn add_after_cast(a: u8, b: u8, c: u8) -> u16 {\n    (a as u16) + (b as u16) + (c as u16)\n}\n"
  },
  {
    "path": "tests/lean-tests/src/comments.rs",
    "content": "#![allow(dead_code)]\n#![allow(unused_variables)]\n\n/// Single line doc comment\nfn f() {}\n\n/** Block doc-comment : Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vestibulum rutrum\norci ac tellus ullamcorper sollicitudin. Sed fringilla mi id arcu suscipit rhoncus. Pellentesque et\nmetus a ante feugiat lobortis. Nam a mauris eget nisl congue egestas. Duis et gravida\nnulla. Curabitur mattis leo vel molestie posuere. Etiam malesuada et augue eget\nvarius. Pellentesque quis tincidunt erat. Vestibulum id consectetur turpis. Cras elementum magna id\nurna volutpat fermentum. In vel erat quis nunc rhoncus porta. Aliquam sed pellentesque\ntellus. Quisque odio diam, mollis ut venenatis non, scelerisque at nulla. Nunc urna ante, tristique\nquis nisi quis, congue maximus nisl. Curabitur non efficitur odio. */\nfn heavily_documented() -> u32 {\n    4\n}\n"
  },
  {
    "path": "tests/lean-tests/src/constants.rs",
    "content": "// Tests on constants\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\nconst C1: u32 = 5678;\nconst C2: u32 = C1 + 1;\nconst C3: u32 = if true { 890 } else { 9 / 0 };\n\nconst fn computation(x: u32) -> u32 {\n    x + x + 1\n}\n\nconst C4: u32 = computation(C1) + C2;\nconst C5: (u32, u32) = (0 + 0, 0);\nconst C6: [u32; 1] = [0];\n\nfn test() {\n    let x = C1 + 1;\n    let y = C2 + C3;\n    let z = C4 - C3;\n}\n\nmod const_parameters {\n\n    /// Function with const parameter\n    fn f<const N: usize>() -> usize {\n        N\n    }\n\n    const N0: usize = 1;\n    const N1: usize = 10;\n\n    fn test() {\n        let _ = f::<9>() + f::<N1>();\n    }\n\n    /// Trait definition\n    trait T<const N_TRAIT: usize> {\n        fn f<const N_FIELD: usize>(&self) -> usize;\n    }\n\n    /// Struct definition\n    struct S<const N: usize>(u32);\n\n    impl<const N_TRAIT: usize> T<N_TRAIT> for S<N_TRAIT> {\n        fn f<const N_FIELD: usize>(&self) -> usize {\n            N_TRAIT - N_FIELD\n        }\n    }\n\n    fn test2<const N2: usize, A: T<N2>>(x: A) -> usize {\n        let s = S::<N1>(9);\n        let _ = s.f::<1>() + x.f::<{ 1 + N1 }>();\n        let s = S::<{ 1 + 2 }>(9);\n        x.f::<{ 2 + 2 }>()\n    }\n}\n"
  },
  {
    "path": "tests/lean-tests/src/enums.rs",
    "content": "//! Tests on enums\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\n// 1. Type definition\nenum E {\n    // unit-like\n    V1,\n    V2,\n    // with positional arguments\n    V3(usize),\n    V4(usize, usize, usize),\n    // with named arguments\n    V5 { f1: usize, f2: usize },\n    V6 { f1: usize, f2: usize },\n}\n\nenum MyList<T> {\n    Nil,\n    Cons { hd: T, tl: Box<MyList<T>> },\n}\n\nfn enums() -> () {\n    // 2. Expressions\n    let e_v1 = E::V1;\n    let e_v2 = E::V2;\n    let e_v3 = E::V3(23);\n    let e_v4 = E::V4(23, 12, 1);\n    let e_v5 = E::V5 { f1: 23, f2: 43 };\n    let e_v6 = E::V6 { f1: 12, f2: 13 };\n    let nil: MyList<usize> = MyList::Nil;\n    let cons_1 = MyList::Cons {\n        hd: 1,\n        tl: Box::new(nil),\n    };\n    let cons_2_1 = MyList::Cons {\n        hd: 2,\n        tl: Box::new(cons_1),\n    };\n\n    // 3. Pattern matching\n    match e_v1 {\n        E::V1 => (),\n        E::V2 => (),\n        E::V3(_) => (),\n        E::V4(x1, x2, x3) => {\n            let y1 = x1 + x2;\n            let y2 = y1 - x2;\n            let y3 = y2 + x3;\n            ()\n        }\n        E::V5 { f1, f2 } => (),\n        E::V6 {\n            f1,\n            f2: other_name_for_f2,\n        } => (),\n    }\n}\n"
  },
  {
    "path": "tests/lean-tests/src/floats.rs",
    "content": "// Tests on floats\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\nconst N: f32 = 1.0;\n\nfn test() {\n    let l0 = 1.0;\n    let l1 = 0.9;\n    let l2 = 5.0f32;\n    let l5 = N;\n}\n\nfn f(x: f64, y: f32) -> f32 {\n    y\n}\n"
  },
  {
    "path": "tests/lean-tests/src/ite.rs",
    "content": "//! Tests on if-then-else\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\nfn test1() -> i32 {\n    let x = if true { 0 } else { 1 };\n    if false { 2 } else { 3 }\n}\n\nfn test2(b: bool) -> i32 {\n    let x = if b { 0 } else { 9 };\n    let mut y = 0;\n    if true {\n        y = y + x + 1\n    } else {\n        y = y - x - 1\n    };\n    if b {\n        let z = y + y;\n        z + y + x\n    } else {\n        let z = y - x;\n        z + y + x\n    }\n}\n"
  },
  {
    "path": "tests/lean-tests/src/lib.rs",
    "content": "#![allow(dead_code)]\n#![allow(unused_variables)]\n\npub mod array;\npub mod associated_types;\npub mod binops;\npub mod casts;\npub mod comments;\npub mod constants;\npub mod enums;\npub mod floats;\npub mod ite;\npub mod loops;\npub mod matching;\npub mod monadic;\npub mod nested_control_flow;\npub mod opaque;\npub mod recursion;\npub mod specs;\npub mod structs;\npub mod traits;\npub mod types;\n\nconst FORTYTWO: usize = 42;\nconst MINUS_FORTYTWO: isize = -42;\n\nfn returns42() -> usize {\n    FORTYTWO\n}\n\nfn add_two_numbers(x: usize, y: usize) -> usize {\n    x + y\n}\n\nfn letBinding(x: usize, y: usize) -> usize {\n    let useless = ();\n    let result1 = x + y;\n    let result2 = result1 + 2;\n    result2 + 1\n}\n\nfn closure() -> i32 {\n    let x = 41;\n    let f1 = |y| y + x;\n    let f2 = |y, z| y + x + z;\n    let res1 = f1(1);\n    let res2 = f2(2, 3);\n    res1 + res2\n}\n\n#[hax_lib::lean::before(\"example : Nat := 42\")]\nfn test_before_verbatime_single_line(x: u8) -> u8 {\n    42\n}\n\n#[hax_lib::lean::before(\n    \"\ndef multiline : Unit := ()\n\n\"\n)]\nfn test_before_verbatim_multi_line(x: u8) -> u8 {\n    32\n}\n\nconst NULL_CHAR: char = '\\0';\n\n/// Test string literals with escape sequences\nfn string_escapes() {\n    let _empty = \"\";\n    let _plain = \"hello world\";\n    let _with_quotes = \"she said \\\"hello\\\"\";\n    let _with_single_quote = \"it's fine\";\n    let _with_backslash = \"path\\\\to\\\\file\";\n    let _with_newline = \"line1\\nline2\";\n    let _with_tab = \"col1\\tcol2\";\n    let _with_carriage_return = \"before\\rafter\";\n    let _mixed = \"say \\\"hello\\\"\\nand\\t'goodbye'\\\\end\";\n    let _carriage_return = \"carriage\\rreturn\";\n    let _control_chars = \"null\\x00byte bell\\x07char font\\x1b[0mreset\";\n}\n"
  },
  {
    "path": "tests/lean-tests/src/loops.rs",
    "content": "//! Tests on loops\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\n/// Simple for-loop\nfn loop1() -> u32 {\n    let mut x: u32 = 0;\n    for i in 1..10 {\n        x = x + i\n    }\n    x\n}\n\n/// For-loop with a return\nfn loop2() -> u32 {\n    let mut x: u32 = 0;\n    for i in 1..10 {\n        if i == 5 {\n            return x;\n        }\n        x = x + i;\n    }\n    x\n}\n\n/// For-loop with a spec\n#[hax_lib::requires(y > 0)]\n#[hax_lib::ensures(|res| res > 0)]\nfn for_loop_with_spec(y: u64) -> u64 {\n    let mut x: u64 = y;\n    for i in 0..y {\n        hax_lib::loop_invariant!(|i: u64| x > 0);\n        if x % 5 == 0 {\n            x = 200;\n        } else {\n            x = x % 5;\n        }\n    }\n    x\n}\n\n/// while-loop\n#[hax_lib::ensures(|r| r == 0)]\n#[hax_lib::lean::proof_method::grind]\nfn while_loop1(s: u32) -> u32 {\n    let mut x: u32 = s;\n    while x > 0 {\n        hax_lib::loop_decreases!(x);\n        x = x - 1;\n    }\n    x\n}\n\nmod errors {\n    enum Error {\n        Foo,\n        Bar(u32),\n    }\n\n    fn loop3() -> Result<u32, Error> {\n        let mut x = 0;\n        let end: u32 = 10;\n        for i in 1..end {\n            if i == 5 {\n                return Err(Error::Foo);\n            }\n            x = x + 5\n        }\n        Ok(x)\n    }\n\n    fn loop4() -> Result<(u32, u32), Error> {\n        let mut e = 0;\n        let f = |()| 42;\n\n        for i in 0..(f(())) {\n            // verify degree bound\n            if i > 10 {\n                return Err(Error::Bar(e));\n            }\n            e = e + i\n        }\n\n        Ok((e, e))\n    }\n}\n"
  },
  {
    "path": "tests/lean-tests/src/matching.rs",
    "content": "fn test_const_matching(x: u32, c: char, s: &str, b: bool) -> u32 {\n    let x = match x {\n        0 => 42,\n        _ => 0,\n    };\n    let c = match c {\n        'a' => 42,\n        _ => 0,\n    };\n    let s = match s {\n        \"Hello\" => 42,\n        _ => 0,\n    };\n    let b = match b {\n        true => 42,\n        false => 0,\n    };\n    return x + c + s + b;\n}\n\nfn test_binding_subpattern_matching(x: (u8, (u8, u8))) -> u8 {\n    match x {\n        (0, pair @ (a, b)) => a + b + pair.0 + pair.1,\n        _ => 0,\n    }\n}\n\nfn test_ellipsis_records() {\n    enum E {\n        C { f1: u8, f2: u8, f3: u8, f4: u8 },\n    }\n\n    let c = E::C {\n        f1: 1,\n        f2: 2,\n        f3: 3,\n        f4: 4,\n    };\n\n    match c {\n        E::C { .. } => assert!(true),\n    };\n    match c {\n        E::C { f1, .. } => assert!(f1 == 1),\n    };\n    match c {\n        E::C { f1, f2, .. } => assert!(f1 == 1 && f2 == 2),\n    };\n    match c {\n        E::C { f2, f4, .. } => assert!(f2 == 2 && f4 == 4),\n    };\n    match c {\n        E::C { f1, f2, f3, f4 } => assert!(f1 == 1 && f2 == 2 && f3 == 3 && f4 == 4),\n    };\n}\n\nfn test_ellipsis_structs() {\n    struct S {\n        f1: u8,\n        f2: u8,\n        f3: u8,\n        f4: u8,\n    }\n\n    let c = S {\n        f1: 1,\n        f2: 2,\n        f3: 3,\n        f4: 4,\n    };\n\n    match c {\n        S { .. } => assert!(true),\n    };\n    match c {\n        S { f1, .. } => assert!(f1 == 1),\n    };\n    match c {\n        S { f1, f2, .. } => assert!(f1 == 1 && f2 == 2),\n    };\n    match c {\n        S { f2, f4, .. } => assert!(f2 == 2 && f4 == 4),\n    };\n    match c {\n        S { f1, f2, f3, f4 } => assert!(f1 == 1 && f2 == 2 && f3 == 3 && f4 == 4),\n    };\n}\n\nfn test_ellipsis_bare_tuples() {\n    let t = (1u8, 2u8, 3u8, 4u8);\n\n    match t {\n        (..) => assert!(true),\n    };\n    match t {\n        (a, ..) 
=> assert!(a == 1),\n    };\n    match t {\n        (a, b, ..) => assert!(a == 1 && b == 2),\n    };\n    match t {\n        (.., d) => assert!(d == 4),\n    };\n    match t {\n        (.., c, d) => assert!(c == 3 && d == 4),\n    };\n    match t {\n        (a, .., d) => assert!(a == 1 && d == 4),\n    };\n    match t {\n        (a, b, c, d) => assert!(a == 1 && b == 2 && c == 3 && d == 4),\n    };\n}\n\nfn test_ellipsis_tuples() {\n    enum F {\n        D(u8, u8, u8, u8),\n    }\n\n    let d = F::D(1, 2, 3, 4);\n\n    match d {\n        F::D(..) => assert!(true),\n    };\n    match d {\n        F::D(a, ..) => assert!(a == 1),\n    };\n    match d {\n        F::D(a, b, ..) => assert!(a == 1 && b == 2),\n    };\n    match d {\n        F::D(.., d) => assert!(d == 4),\n    };\n    match d {\n        F::D(.., c, d) => assert!(c == 3 && d == 4),\n    };\n    match d {\n        F::D(a, .., d) => assert!(a == 1 && d == 4),\n    };\n    match d {\n        F::D(a, b, c, d) => assert!(a == 1 && b == 2 && c == 3 && d == 4),\n    };\n}\n"
  },
  {
    "path": "tests/lean-tests/src/monadic.rs",
    "content": "//! Tests on monadic encoding\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\nstruct S {\n    f: u32,\n}\n\nfn test() {\n    let _ = 9; // value\n    let _ = 9 + 9; // computation\n    let _ = S { f: 9 }; // constructors are values\n    let _ = S { f: 9 + 9 }; // computation within a value\n    let _ = (S { f: 9 + 9 }).f; // projections are values\n    let _ = (S { f: 9 + 9 }).f + 9; // projections are values\n    let _ = if true { 3 + 4 } else { 3 - 4 }; // ite expects value for condition\n    let _ = if 9 + 9 == 0 { 3 + 4 } else { 3 - 4 }; // ite expects value for condition\n    let _ = if true {\n        let x = 9;\n        3 + x;\n    } else {\n        let y = 19;\n        3 + y - 4;\n    };\n}\n\nmod trait_constants {\n    // https://github.com/cryspen/hax/issues/1928\n    trait Foo {\n        const F : u32;\n    }\n\n    trait Bar {\n        const B : u32;\n    }\n\n    struct Baz;\n\n    impl Foo for Baz {\n        const F : u32 = 1;\n    }                                                                                                                                      \n                                                                                                                                        \n    impl Bar for Baz {\n        const B : u32 = Self::F - 1;\n    }\n}\n"
  },
  {
    "path": "tests/lean-tests/src/nested_control_flow.rs",
    "content": "//! Tests for nested control flow in expressions\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\nfn nested_control_flow() {\n    let x1 = 1 + (if true { 0 } else { 1 });\n    let x2 = 1\n        + (match (1, 2) {\n            _ => 0,\n        });\n    let x3 = 1 + {\n        let x = 9;\n        x + 1\n    };\n}\n\nfn explicit_hoisting() {\n    let x1_tmp = if true { 0 } else { 1 };\n    let x1 = 1 + x1_tmp;\n    let x2_tmp = match (1, 2) {\n        _ => 0,\n    };\n    let x2 = 1 + x2_tmp;\n    let x3_tmp_x = 9;\n    let x3_tmp = x3_tmp_x + 1;\n    let x3 = 1 + x3_tmp;\n}\n\nfn complex_nesting() {\n    let mut x1 = if true {\n        let mut y = if false {\n            let mut z = match () {\n                _ => 9,\n            };\n            z = 1 + z;\n            z + 1\n        } else {\n            let mut z = 9;\n            z = z + 1;\n            z\n        };\n        y = y + 1;\n        y + 1\n    } else {\n        0\n    };\n    x1 = x1 + 1;\n    let mut x2 = match Some(89) {\n        Some(a) => {\n            let mut y = 1 + a;\n            y = y + 1;\n            if y == 0 {\n                let mut z = 9;\n                z = z + y + 1;\n                z\n            } else {\n                10\n            }\n        }\n        None => {\n            let mut y = if false {\n                9\n            } else {\n                let mut z = 9;\n                z = z + 1;\n                z + 9\n            };\n            y = y + 1;\n            y\n        }\n    };\n    x2 = x1 + 1 + x2\n}\n"
  },
  {
    "path": "tests/lean-tests/src/opaque.rs",
    "content": "#[hax_lib::opaque]\npub fn an_opaque_fn() {}\n\ntrait T {\n    type A;\n    fn f();\n}\n\nstruct S;\n\n#[hax_lib::opaque]\nimpl T for S {\n    type A = usize;\n    fn f() {}\n}\n\n#[hax_lib::opaque]\nstruct OpaqueStruct;\n"
  },
  {
    "path": "tests/lean-tests/src/recursion.rs",
    "content": "fn factorial(n: u32) -> u32 {\n    if n == 0 { 1 } else { n * factorial(n - 1) }\n}\n"
  },
  {
    "path": "tests/lean-tests/src/specs.rs",
    "content": "#[hax_lib::requires(x > 0)]\n#[hax_lib::ensures(|r| r == x)]\nfn test(x: u8) -> u8 {\n    x\n}\n\n#[hax_lib::requires(x > 0)]\n#[hax_lib::ensures(|r| r == x)]\nfn use_previous_result(x: u8) -> u8 {\n    test(x)\n}\n\n#[hax_lib::requires(x > 0)]\n#[hax_lib::ensures(|r| r == x)]\n#[hax_lib::lean::proof(\"by unfold lean_tests.specs.test_proof; hax_bv_decide\")]\nfn test_proof(x: u8) -> u8 {\n    x\n}\n\n#[hax_lib::requires(x < 16)]\n#[hax_lib::ensures(|res| res >= x)]\nfn square(x: u8) -> u8 {\n    x * x\n}\n\n#[hax_lib::requires(hax_lib::forall(|i:u8| hax_lib::implies(i < 20, x > i)))]\n#[hax_lib::ensures(|r| !hax_lib::exists(|i:u8| !hax_lib::implies(i < 20, r > i)))]\n#[hax_lib::lean::proof_method::grind]\nfn forall_and_exists(x: u8) -> u8 {\n    x\n}\n\n/// Test function without arguments\n/// https://github.com/cryspen/hax/issues/1856\n#[hax_lib::ensures(|_| true)]\nfn fn_without_args() {}\n\n/// The Lean backend used to produce `self_` instead of `self` in annotations in\n/// impl blocks. See https://github.com/cryspen/hax/issues/1852.\nmod issue_1852 {\n    struct T {}\n\n    #[hax_lib::attributes]\n    impl T {\n        pub fn test(self) -> bool {\n            true\n        }\n\n        #[hax_lib::requires(T::test(self))]\n        pub fn func(self) {}\n    }\n}\n\n#[hax_lib::requires(true)]\n#[hax_lib::ensures(|r| true)]\n#[hax_lib::lean::pure_requires_proof(\"⟨True, by mvcgen⟩\")]\n#[hax_lib::lean::pure_ensures_proof(\"⟨fun _ => True, by intros; mvcgen⟩\")]\nfn custom_pure_proofs(x: u8) {}\n\n/// Resugarings need to be apply also to linked items\n/// https://github.com/cryspen/hax/issues/1945\nmod issue_1945 {\n    #[hax_lib::requires({let x = a; a == 0})]\n    fn mktuple(a: i32) -> bool {\n        {let x = a; a == 0}\n    }\n}\n"
  },
  {
    "path": "tests/lean-tests/src/structs.rs",
    "content": "//! Tests on structs\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\n// # Tuple Structs\n\n// 1. Type definitions\nstruct T0();\nstruct T1<A>(A);\nstruct T2<A, B>(A, B);\nstruct T3<A, B, C>(A, B, C);\nstruct T3p<A, B, C>(A, T2<B, C>);\n\nfn tuple_structs() -> () {\n    // 2. Expressions\n    let t0 = T0();\n    let t1 = T1(1);\n    let t2 = T2(1, 2);\n    let t3 = T3(T0(), T1(1), T2(1, 2));\n    let t3p = T3p(T0(), T2(T1(1), T2(1, 2)));\n\n    // 3. Patterns\n    let T0() = t0;\n    let T1(u1) = t1;\n    let T2(u2, u3) = t2;\n    let T3(T0(), T1(_), T2(_, _)) = t3;\n    let T3p(T0(), T2(T1(_), T2(_, _))) = t3p;\n\n    // 4. Accessors\n    let _ = t1.0;\n    let _ = t2.0;\n    let _ = t2.1;\n    let _ = t3.0;\n    let _ = t3.1;\n    let _ = t3.2;\n    let _ = t3.2.1;\n    let _ = t3p.0;\n    let _ = t3p.1;\n    let _ = t3p.1.1.0;\n    let _ = t3p.1.0;\n    let _ = t3p.1.1;\n\n    // 5. Pattern matching\n    let _ = match t0 {\n        T0() => {}\n    };\n    let _ = match t1 {\n        T1(u1) => {}\n    };\n    let _ = match t2 {\n        T2(u2, u3) => {}\n    };\n    let _ = match t3 {\n        T3(T0(), T1(u1), T2(u2, u3)) => {}\n    };\n    let _ = match t3p {\n        T3p(T0(), T2(T1(u1), T2(u2, u3))) => {}\n    };\n}\n\n// # Normal Structs\n\n// 1. Type definitions\nstruct S1 {\n    f1: usize,\n    f2: usize,\n}\n\nstruct S2 {\n    // Nested structs\n    f1: S1, // possible shadowing between fields\n    f2: usize,\n}\n\nstruct S3 {\n    // Reserved keywords in Lean\n    end: usize,\n    def: usize,\n    theorem: usize,\n    structure: usize,\n    inductive: usize,\n}\n\nfn normal_structs() -> () {\n    // 2. Expressions\n    let s1 = S1 { f1: 0, f2: 1 };\n    let s2 = S2 {\n        f1: S1 { f1: 2, f2: 3 },\n        f2: 4,\n    };\n    let s3 = S3 {\n        end: 0,\n        def: 0,\n        theorem: 0,\n        structure: 0,\n        inductive: 0,\n    };\n\n    // 3. 
Patterns\n    let S1 { f1, f2 } = s1;\n    let S1 {\n        f1,\n        f2: other_name_for_f2,\n    } = s1;\n    let S2 {\n        f1: S1 { f1, f2 },\n        f2: other_name_for_f2,\n    } = s2;\n    let S3 {\n        end,\n        def,\n        theorem,\n        structure,\n        inductive,\n    } = s3;\n\n    // 4. Accessors\n    let _ = (s1.f1, s1.f2);\n    let _ = (\n        s1.f1, s1.f2, s2.f1.f1, s2.f1.f2, s2.f2, s3.end, s3.def, s3.theorem,\n    );\n\n    // 5. Pattern-matching\n    match s1 {\n        S1 { f1, f2 } => {}\n    };\n    match s2 {\n        S2 {\n            f1: S1 {\n                f1,\n                f2: other_name_for_f2,\n            },\n            f2,\n        } => {}\n    }\n    match s3 {\n        S3 {\n            end,\n            def,\n            theorem,\n            structure,\n            inductive,\n        } => {}\n    }\n}\n\nmod miscellaneous {\n    struct S {\n        f: i32,\n    }\n\n    fn test_tuples() -> (i32, i32) {\n        let lit = 1;\n        let constr = S { f: 42 };\n        let proj = constr.f;\n        let ite = if true {\n            (1, 2)\n        } else {\n            let z = 1 + 2;\n            (z, z)\n        };\n        (1, 2)\n    }\n}\n\nmod base_expressions {\n\n    struct S {\n        f1: u32,\n        f2: u32,\n        f3: u32,\n    }\n\n    fn test() {\n        let s1 = S {\n            f1: 1,\n            f2: 2,\n            f3: 3,\n        };\n        let _ = S { f1: 0, ..s1 };\n        let _ = S { f2: 0, ..s1 };\n        let _ = S { f3: 0, ..s1 };\n        let _ = S { f1: 0, f2: 1, ..s1 };\n        let _ = S { f2: 0, f3: 1, ..s1 };\n        let _ = S { f3: 0, f1: 2, ..s1 };\n        let _ = S {\n            f1: 0,\n            f2: 1,\n            f3: 0,\n            ..s1\n        };\n    }\n}\n"
  },
  {
    "path": "tests/lean-tests/src/traits.rs",
    "content": "// Tests on traits\n#![allow(dead_code)]\n#![allow(unused_variables)]\n\n// Simple trait\nmod basic {\n    trait T1 {\n        fn f1(&self) -> usize;\n        fn f2(&self, other: &Self) -> usize;\n    }\n\n    // Simple Impl\n    struct S;\n\n    impl T1 for S {\n        fn f1(&self) -> usize {\n            42\n        }\n\n        fn f2(&self, other: &Self) -> usize {\n            43\n        }\n    }\n\n    // Simple ImplExpr\n    fn f<T: T1>(x: T) -> usize {\n        x.f1() + x.f2(&x)\n    }\n}\n\n// Bounds on parameters and on self\nmod bounds {\n    trait T1 {\n        fn f1(&self) -> usize;\n    }\n    trait T2 {\n        fn f2(&self) -> usize;\n    }\n\n    trait Test<T: T1>: T2 {\n        fn f_test(&self, x: &T) -> usize;\n    }\n\n    struct S1;\n    impl T1 for S1 {\n        fn f1(&self) -> usize {\n            0\n        }\n    }\n\n    struct S2;\n    impl T2 for S2 {\n        fn f2(&self) -> usize {\n            1\n        }\n    }\n\n    impl Test<S1> for S2 {\n        fn f_test(&self, x: &S1) -> usize {\n            x.f1() + self.f2() + 1\n        }\n    }\n\n    fn test(x1: S1, x2: S2) -> usize {\n        x2.f_test(&x1) + x1.f1()\n    }\n}\n\nmod associated_types {\n    trait T1 {\n        type T;\n        fn f(&self, x: Self::T) -> Self::T;\n    }\n\n    trait T2 {\n        type T: T1;\n        fn f(&self, x: Self::T) -> usize;\n    }\n\n    trait Foo<T> {}\n    trait Bar {}\n\n    trait T3 {\n        type T: Bar;\n        type Tp<A: Bar>: Foo<Self::T>;\n        fn f<A: Bar>(&self, x: Self::T, y: Self::Tp<A>) -> usize;\n    }\n\n    struct S {}\n    impl T1 for S {\n        type T = i32;\n\n        fn f(&self, x: Self::T) -> Self::T {\n            2121\n        }\n    }\n    impl T2 for S {\n        type T = S;\n\n        fn f(&self, x: Self::T) -> usize {\n            21\n        }\n    }\n\n    impl Bar for i16 {}\n    impl<A> Foo<i16> for (u32, A) {}\n\n    // impl T3 for S {\n    //     type T = i16;\n\n    //     type Tp<A: 
Bar> = (u32, A);\n\n    //     fn f<A: Bar>(&self, x: Self::T, y: Self::Tp<A>) -> usize {\n    //         12\n    //     }\n    // }\n\n    trait Chain0 {}\n\n    trait Chain1 {\n        type A: Chain0;\n        type B: Chain0;\n    }\n\n    trait Chain2: Chain1 {}\n\n    trait Chain3: Chain2 {\n        fn f() -> Self::A;\n    }\n\n    impl Chain0 for u8 {}\n    impl Chain1 for u8 {\n        type A = u8;\n        type B = u8;\n    }\n    impl Chain2 for u8 {}\n    impl Chain3 for u8 {\n        fn f() -> u8 {\n            0\n        }\n    }\n}\n\nmod overlapping_methods {\n\n    trait T1 {\n        fn f(&self) -> usize;\n    }\n    trait T2 {\n        fn f(&self) -> usize;\n    }\n    trait T3 {\n        fn f(&self) -> usize;\n    }\n    impl T1 for u32 {\n        fn f(&self) -> usize {\n            0\n        }\n    }\n    impl T2 for u32 {\n        fn f(&self) -> usize {\n            1\n        }\n    }\n    impl T3 for u32 {\n        fn f(&self) -> usize {\n            2\n        }\n    }\n    fn test() -> usize {\n        let x: u32 = 9;\n        T1::f(&x) + T2::f(&x) + T3::f(&x)\n    }\n}\n\nmod inheritance {\n    trait T1 {\n        fn f1(&self) -> usize;\n    }\n    trait T2 {\n        fn f2(&self) -> usize;\n    }\n    trait T3: T2 + T1 {\n        fn f3(&self) -> usize;\n    }\n    trait Tp1 {\n        fn f1(&self) -> usize;\n    }\n    trait Tp2: Tp1 + T3 {\n        fn fp2(&self) -> usize;\n    }\n\n    struct S {}\n    impl T1 for S {\n        fn f1(&self) -> usize {\n            1\n        }\n    }\n    impl T2 for S {\n        fn f2(&self) -> usize {\n            2\n        }\n    }\n    impl T3 for S {\n        fn f3(&self) -> usize {\n            3\n        }\n    }\n\n    impl Tp1 for S {\n        fn f1(&self) -> usize {\n            10\n        }\n    }\n\n    impl Tp2 for S {\n        fn fp2(&self) -> usize {\n            Tp1::f1(self) + T1::f1(self) + T2::f2(self) + T3::f3(self)\n        }\n    }\n    fn test() -> usize {\n        let s = S {};\n  
      s.f3() + 1\n    }\n}\n\nmod default {\n\n    trait Easy {\n        fn dft(&self) -> usize {\n            32\n        }\n    }\n\n    impl Easy for usize {\n        fn dft(&self) -> usize {\n            self + 1\n        }\n    }\n\n    impl Easy for u32 {}\n\n    trait T1 {\n        fn f1(&self) -> usize;\n        fn f2(&self) -> usize {\n            1\n        }\n        fn f3<A>(&self, x: &A) -> usize {\n            1\n        }\n        fn f4<A: Easy>(&self, x: &A) -> usize {\n            x.dft() + 1\n        }\n    }\n\n    struct S<A>(usize, A);\n\n    // Override\n    impl T1 for S<usize> {\n        fn f1(&self) -> usize {\n            self.0 + self.1\n        }\n\n        fn f2(&self) -> usize {\n            self.1\n        }\n    }\n\n    impl T1 for S<bool> {\n        fn f1(&self) -> usize {\n            if self.1 { self.0 } else { 9 }\n        }\n\n        fn f2(&self) -> usize {\n            self.0 + 1\n        }\n    }\n\n    // No override\n    impl T1 for S<String> {\n        fn f1(&self) -> usize {\n            0\n        }\n    }\n}\n\nmod trait_level_args {\n    trait T1<A, B> {\n        fn f1<C, D>(&self) -> (); // A and B do not appear\n        fn f2<C, D>(&self, x: &A) -> (); // A appears\n        fn f3<C, D>(&self, x: &A, y: &B) -> (); // Both appear\n    }\n\n    impl T1<u32, u64> for usize {\n        fn f1<C, D>(&self) {}\n        fn f2<C, D>(&self, x: &u32) {}\n        fn f3<C, D>(&self, x: &u32, y: &u64) {}\n    }\n\n    fn test<A, B, C, D, U: T1<A, B>>(x: U, a: &A, b: &B) -> () {\n        x.f1::<C, D>();\n        x.f2::<C, D>(a);\n        x.f3::<C, D>(a, b);\n    }\n}\n\nmod trait_with_constraints {\n\n    trait T1 {}\n\n    trait T2 {\n        fn func(&self) -> bool\n        where\n            Self: T1;\n    }\n\n    impl<A: T1> T2 for A {\n        fn func(&self) -> bool\n        where\n            A: T1,\n        {\n            true\n        }\n    }\n}\n\nmod associated_constant {\n    pub trait Foo {\n        const f: bool;\n     
   const x: u8 = 0;\n    }\n\n    pub struct Bar;\n\n    impl Foo for Bar {\n        const f: bool = true;\n        const x: u8 = 1 + 1;\n    }\n\n    // https://github.com/cryspen/hax/issues/1940\n    trait Baz {\n        const One: u32 = 1;\n    }\n\n    fn foo<F: Baz>(n: u32) -> u32 {\n        n + F::One\n    }\n}\n"
  },
  {
    "path": "tests/lean-tests/src/types.rs",
    "content": "#![allow(dead_code)]\n#![allow(unused_variables)]\n// Tests on type aliases\n\ntype UsizeAlias = usize;\ntype MyOption<A> = Option<A>;\ntype MyResult<A, B> = Result<Option<A>, B>;\n\ntype ErrorMonad<A, E> = Result<A, E>;\ntype StateMonad<A, S> = (A, S);\ntype ESMonad<A, S, E> = StateMonad<ErrorMonad<A, E>, S>;\n"
  },
  {
    "path": "tests/let-else/Cargo.toml",
    "content": "[package]\nname = \"let-else\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar+coq+ssprove\" = { broken = false, snapshot = \"stdout\", issue_id = \"155\" }\n"
  },
  {
    "path": "tests/let-else/src/lib.rs",
    "content": "#![allow(dead_code)]\n\npub fn let_else(opt: Option<u32>) -> bool {\n    let Some(x) = opt else { return false };\n    true\n}\n\npub fn let_else_different_type(opt: Option<u32>) -> bool {\n    let_else({\n        let Some(x) = opt else { return false };\n        Some(x + 1)\n    })\n}\n"
  },
  {
    "path": "tests/literals/Cargo.toml",
    "content": "[package]\nname = \"literals\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../../hax-lib\" }\n\n[package.metadata.hax-tests]\ninto.\"lean\" = { broken = false, issue_id = \"85\" }\ninto.\"fstar\" = { broken = false, issue_id = \"85\" }\ninto.\"coq\" = { broken = false, issue_id = \"85\" }\ninto.\"ssprove\" = { broken = true, snapshot = \"none\", issue_id = \"85\" }\n"
  },
  {
    "path": "tests/literals/src/lib.rs",
    "content": "#![allow(dead_code)]\nuse hax_lib::*;\n\n#[hax_lib::requires(x > int!(0) && x < int!(16))]\nfn math_integers(x: Int) -> u8 {\n    let _: Int = 3usize.lift();\n    let _neg_dec = int!(-340282366920938463463374607431768211455000);\n    let _pos_dec = int!(340282366920938463463374607431768211455000);\n    let _neg_hex = int!(-0x3E7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFC18);\n    let _pos_hex = int!(0x3E7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFC18);\n    let _neg_octal = int!(-0o7637777777777777777777777777777777777777776030);\n    let _pos_octal = int!(0o7637777777777777777777777777777777777777776030);\n    let _neg_bin = int!(-0b111110011111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111110000011000);\n    let _pos_bin = int!(0b111110011111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111110000011000);\n    let _ = int!(-340282366920938463463374607431768211455000)\n        > int!(340282366920938463463374607431768211455000);\n    let _ = x < x;\n    let _ = x >= x;\n    let _ = x <= x;\n    let _ = x != x;\n    let _ = x == x;\n    let _ = x + x;\n    let _ = x - x;\n    let _ = x * x;\n    let _ = x / x;\n    let _: i16 = x.to_i16();\n    let _: i32 = x.to_i32();\n    let _: i64 = x.to_i64();\n    let _: i128 = x.to_i128();\n    let _: isize = x.to_isize();\n    let _: u16 = x.to_u16();\n    let _: u32 = x.to_u32();\n    let _: u64 = x.to_u64();\n    let _: u128 = x.to_u128();\n    let _: usize = x.to_usize();\n    (x + x * x).to_u8()\n}\n\npub fn panic_with_msg() {\n    panic!(\"with msg\")\n}\n\n#[derive(PartialEq, Eq)]\nstruct Foo {\n    field: u8,\n}\n\nconst CONSTANT: Foo = Foo { field: 3 };\n\nfn numeric() {\n    let _: usize = 123;\n    let _: isize = -42;\n    let _: isize = 42;\n    let _: i32 = -42;\n    let _: u128 = 22222222222222222222;\n}\n\npub fn patterns() {\n    match 1u8 {\n        2 => (),\n        _ => (),\n    };\n    
match (\"hello\", (123, [\"a\", \"b\"])) {\n        (\"hello\", (123, _todo)) => (),\n        _ => (),\n    };\n    match (Foo { field: 4 }) {\n        CONSTANT => (), // Note [CONSTANT] is not a free variable here, we're really matching against the *value* of CONSTANT\n        _ => (),\n    };\n}\n\nfn casts(x8: u8, x16: u16, x32: u32, x64: u64, xs: usize) {\n    let _: u64 = x8 as u64 + x16 as u64 + x32 as u64 + x64 as u64 + xs as u64;\n    let _: u32 = x8 as u32 + x16 as u32 + x32 as u32 + x64 as u32 + xs as u32;\n    let _: u16 = x8 as u16 + x16 as u16 + x32 as u16 + x64 as u16 + xs as u16;\n    let _: u8 = x8 as u8 + x16 as u8 + x32 as u8 + x64 as u8 + xs as u8;\n    let _: i64 = x8 as i64 + x16 as i64 + x32 as i64 + x64 as i64 + xs as i64;\n    let _: i32 = x8 as i32 + x16 as i32 + x32 as i32 + x64 as i32 + xs as i32;\n    let _: i16 = x8 as i16 + x16 as i16 + x32 as i16 + x64 as i16 + xs as i16;\n    let _: i8 = x8 as i8 + x16 as i8 + x32 as i8 + x64 as i8 + xs as i8;\n}\n\npub fn empty_array() {\n    let _: &[u8] = &[];\n}\n\n/// https://github.com/hacspec/hax/issues/500\nfn fn_pointer_cast() {\n    let f: fn(&u32) -> &u32 = |x| x;\n}\n\nfn strings() {\n    let _: &str = \"hello\";\n    let _: &str = \"hello\\\"world\";\n    let _: &str = \"it's\";\n    let _: &str = \"back\\\\slash\";\n    let _: &str = \"line\\nbreak\";\n    let _: &str = \"carriage\\rreturn\";\n    let _: &str = \"tab\\there\";\n    let _: &str = \"null\\x00byte\";\n    let _: &str = \"bell\\x07char\";\n    let _: &str = \"\\x1b[0m\";\n    let _: &str = \"🦀\";\n}\n"
  },
  {
    "path": "tests/loops/Cargo.toml",
    "content": "[package]\nname = \"loops\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../../hax-lib\" }\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { }\ninto.\"coq\" = { broken = true, snapshot = \"none\", issue_id = \"137\" }\ninto.\"ssprove\" = { broken = true, snapshot = \"none\", issue_id = \"137\" }\n"
  },
  {
    "path": "tests/loops/src/lib.rs",
    "content": "mod recognized_loops {\n    fn range() {\n        let mut count = 0u64;\n        for i in 0u8..10u8 {\n            hax_lib::loop_invariant!(|i: u8| i <= 10);\n            count += 1;\n        }\n    }\n    fn range_step_by() {\n        let mut count = 0u64;\n        for i in (0u8..10u8).step_by(2) {\n            hax_lib::loop_invariant!(|i: u8| i <= 10);\n            count += 1;\n        }\n    }\n    fn enumerated_slice<T>(slice: &[T]) {\n        let mut count = 0u64;\n        for i in slice.into_iter().enumerate() {\n            hax_lib::loop_invariant!(|i: usize| i <= 10);\n            count += 2;\n        }\n    }\n    fn enumerated_chunked_slice<T>(slice: &[T]) {\n        let mut count = 0u64;\n        for i in slice.chunks_exact(3).enumerate() {\n            hax_lib::loop_invariant!(|i: usize| { fstar!(\"$i <= ${slice.len()}\") });\n            count += 3;\n        }\n    }\n}\n\nmod for_loops {\n    fn range1() -> usize {\n        let mut acc = 0;\n        for i in 0..15 {\n            acc = acc + i;\n        }\n        acc\n    }\n\n    fn range2(n: usize) -> usize {\n        let mut acc = 0;\n        for i in 0..(n + 10) {\n            acc = acc + i + 1;\n        }\n        acc\n    }\n\n    fn composed_range(n: usize) -> usize {\n        let mut acc = 0;\n        for i in (0..n).chain((n + 10)..(n + 50)) {\n            acc = acc + i + 1;\n        }\n        acc\n    }\n\n    fn rev_range(n: usize) -> usize {\n        let mut acc = 0;\n        for i in (0..n).rev() {\n            acc = acc + i + 1;\n        }\n        acc\n    }\n\n    fn chunks<const CHUNK_LEN: usize>(arr: Vec<usize>) -> usize {\n        let mut acc = 0;\n        let chunks = arr.chunks_exact(CHUNK_LEN);\n        for chunk in chunks.clone() {\n            let mut mean = 0;\n            for item in chunk {\n                mean = mean + item;\n            }\n            acc = acc + mean / CHUNK_LEN;\n        }\n        for item in chunks.remainder() {\n            acc = acc 
- item;\n        }\n        acc\n    }\n\n    fn iterator(arr: Vec<usize>) -> usize {\n        let mut acc = 0;\n        for item in arr.iter() {\n            acc = acc + item;\n        }\n        acc\n    }\n\n    fn nested(arr: Vec<usize>) -> usize {\n        let mut acc = 0;\n        for item in arr.iter() {\n            for i in (0..*item).rev() {\n                acc = acc + 1;\n                for j in arr.iter().zip(4..i) {\n                    acc = acc + item + i + j.0 + j.1;\n                }\n            }\n        }\n        acc\n    }\n\n    fn pattern(arr: Vec<(usize, usize)>) -> usize {\n        let mut acc = 0;\n        for (x, y) in arr {\n            acc = acc + x * y;\n        }\n        acc\n    }\n\n    fn enumerate_chunks(arr: Vec<usize>) -> usize {\n        let mut acc = 0;\n        for (i, chunk) in arr.chunks(4).enumerate() {\n            for (j, x) in chunk.iter().enumerate() {\n                acc = i + j + x;\n            }\n        }\n        acc\n    }\n\n    fn bool_returning(x: u8) -> bool {\n        x < 10\n    }\n\n    fn f() {\n        let mut acc = 0;\n        for i in 1..10 {\n            acc += i;\n            bool_returning(i);\n        }\n    }\n}\n\nmod while_loops {\n    fn f() -> u8 {\n        let mut x = 0;\n        while x < 10 {\n            x = x + 3;\n        }\n        x + 12\n    }\n    fn while_invariant_decr() -> u8 {\n        let mut x = 0;\n        while x < 10 {\n            hax_lib::loop_invariant!(x <= 10);\n            hax_lib::loop_decreases!(10 - x);\n            x = x + 3;\n        }\n        x + 12\n    }\n    fn while_invariant_decr_rev() -> u8 {\n        let mut x = 0;\n        while x < 10 {\n            hax_lib::loop_decreases!(10 - x);\n            hax_lib::loop_invariant!(x <= 10);\n            x = x + 3;\n        }\n        x + 12\n    }\n}\n\nmod control_flow {\n    fn double_sum() -> i32 {\n        let mut sum = 0;\n        for i in 1..10 {\n            if i < 0 {\n                break;\n      
      }\n            sum += i;\n        }\n        sum *= 2;\n        sum\n    }\n    fn double_sum2() -> i32 {\n        let mut sum = 0;\n        let mut sum2 = 0;\n        for i in 1..10 {\n            if i < 0 {\n                break;\n            }\n            sum += i;\n            sum2 += i\n        }\n        sum + sum2\n    }\n    fn double_sum_return(v: &[i32]) -> i32 {\n        let mut sum = 0;\n        for i in v {\n            if *i < 0 {\n                return 0;\n            }\n            sum += *i;\n        }\n        sum *= 2;\n        sum\n    }\n    fn double_sum2_return(v: &[i32]) -> i32 {\n        let mut sum = 0;\n        let mut sum2 = 0;\n        for i in v {\n            if *i < 0 {\n                return 0;\n            }\n            sum += *i;\n            sum2 += *i\n        }\n        sum + sum2\n    }\n    fn bigger_power_2(x: i32) -> i32 {\n        let mut pow = 1;\n        while pow < 1000000 {\n            pow *= 2;\n            if pow < x {\n                pow *= 3;\n                if true {\n                    break;\n                }\n            }\n            pow *= 2\n        }\n        pow\n    }\n    struct M {\n        m: Vec<u8>,\n    }\n\n    impl M {\n        fn decoded_message(&self) -> Option<Vec<u8>> {\n            for i in 0..self.m.len() {\n                if i > 5 {\n                    return None;\n                }\n            }\n            return Some(self.m.clone());\n        }\n    }\n    fn nested() -> i32 {\n        let mut sum = 0;\n        for i in 1..10 {\n            for j in 1..10 {\n                if j < 0 {\n                    break;\n                }\n                sum += j;\n            }\n            sum += i;\n        }\n        sum *= 2;\n        sum\n    }\n    fn nested_return() -> i32 {\n        let mut sum = 0;\n        for i in 1..10 {\n            for j in 1..10 {\n                if j < 0 {\n                    return 0;\n                }\n                sum += j;\n      
      }\n            sum += i;\n        }\n        sum *= 2;\n        sum\n    }\n    fn continue_only(x: &[i32]) {\n        let mut product = 1;\n        for i in x {\n            if *i == 0 {\n                continue;\n            }\n            product *= i\n        }\n    }\n    fn continue_and_break(x: &[i32]) {\n        let mut product = 1;\n        for i in x {\n            if *i == 0 {\n                continue;\n            }\n            if *i < 0 {\n                break;\n            }\n            product *= i\n        }\n    }\n}\n\nmod and_mut_side_effect_loop {\n    // https://github.com/hacspec/hax/issues/720\n    fn looping(array: &mut [u8; 5]) {\n        for i in 0..array.len() {\n            array[i] = i as u8;\n        }\n    }\n\n    #[hax_lib::fstar::verification_status(panic_free)]\n    fn looping_2(array: &mut [u8; 5]) {\n        for i in 0..array.len() {\n            array[i] = i as u8;\n        }\n    }\n}\n"
  },
  {
    "path": "tests/mut-ref-functionalization/Cargo.toml",
    "content": "[package]\nname = \"mut-ref-functionalization\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.fstar = {broken = false, issue_id = 90, snapshot = \"stdout\"}\n\n"
  },
  {
    "path": "tests/mut-ref-functionalization/src/lib.rs",
    "content": "#![allow(dead_code)]\n\nstruct S {\n    b: [u8; 5],\n}\n\nfn foo(mut lhs: S, rhs: &S) -> S {\n    for i in 0..1 {\n        lhs.b[i] += rhs.b[i];\n    }\n\n    lhs\n}\n\nimpl S {\n    fn update(&mut self, x: u8) {\n        self.b[0] = x;\n    }\n}\n\nfn index_mutation(x: core::ops::Range<usize>, a: &'static [u8]) {\n    let mut v = vec![1];\n    v[x].copy_from_slice(a);\n    v[1] = 3;\n}\n\nfn index_mutation_unsize(mut x: [u8; 12]) -> u8 {\n    x[4..5].copy_from_slice(&[1, 2]);\n    42\n}\n\nfn build_vec() -> Vec<u8> {\n    vec![1, 2, 3]\n}\n\nfn test_append() -> Vec<u8> {\n    let mut vec1 = Vec::new();\n    let mut vec2 = vec![1u8, 2, 3];\n    vec1.append(&mut vec2);\n    vec1.append(&mut build_vec());\n    vec1\n}\n\nfn f() -> Vec<u8> {\n    let mut vec = Vec::new();\n    vec.push(1);\n    vec.push(2);\n    vec.swap(0, 1);\n\n    // `vec.swap(0, 1)` is desugared into:\n    use std::ops::DerefMut;\n    (&mut *(vec.deref_mut())).swap(0, 1);\n\n    vec\n}\n\nstruct Foo {\n    field: Vec<u8>,\n}\nstruct Pair<T> {\n    a: T,\n    b: Foo,\n}\n\nfn g(x: Pair<Vec<u8>>) -> Vec<u8> {\n    let mut x = x;\n    for i in 1..10 {\n        x.a.push(i);\n    }\n    x.a.swap(0, 1);\n    x.b.field.swap(0, 1);\n    x.a\n}\n\nfn h(x: &mut u8) {\n    *x += 10;\n}\n\nstruct Bar {\n    a: u8,\n    b: u8,\n}\n\nfn i(bar: &mut Bar) -> u8 {\n    (*bar).b += bar.a;\n    h(&mut bar.a);\n    bar.a + bar.b\n}\n\nfn j(x: &mut Bar) -> u8 {\n    let out = 123;\n    i(x) + out\n}\n\nfn k(\n    vec: &mut Vec<u8>,\n    _: &mut u16,\n    /*test var shadowing*/ arg_1_wild: u8,\n    _: &mut (),\n) -> u64 {\n    // test variable shadowing\n    let arg_1_wild2 = vec[1];\n    let arg_3_wild = vec[2];\n    let arg_1_wild1 = vec[3];\n    let arg_3_wild1 = vec[4];\n    vec[0] = arg_1_wild + arg_3_wild + arg_1_wild1 + arg_3_wild1 + arg_1_wild;\n    12345\n}\n\ntrait FooTrait {\n    fn z(&mut self);\n}\nimpl FooTrait for Foo {\n    fn z(&mut self) {}\n}\n\nfn array(x: &mut [u8; 10]) {\n    x[1] 
= x[2];\n}\n"
  },
  {
    "path": "tests/naming/Cargo.toml",
    "content": "[package]\nname = \"naming\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { snapshot = \"stdout\" }\n"
  },
  {
    "path": "tests/naming/src/lib.rs",
    "content": "#![allow(dead_code)]\n#![allow(non_camel_case_types)]\n\nenum Foo {\n    A,\n    B { x: usize },\n}\nenum Foo2 {\n    A,\n    B { x: usize },\n}\nstruct B;\n\nstruct C {\n    x: usize,\n}\n\nstruct X {}\n\nfn mk_c() -> C {\n    let _ = Foo::B { x: 3 };\n    let _ = X {};\n    C { x: 3 }\n}\n\nimpl Foo {\n    fn f(self) -> Foo {\n        Foo::A\n    }\n}\nimpl B {\n    fn f(self) -> B {\n        B\n    }\n}\n\nstruct Foobar {\n    a: Foo,\n}\n\nfn f(x: Foobar) -> usize {\n    fn g() {\n        impl B {\n            fn g(self) -> usize {\n                enum Foo {\n                    A,\n                    B { x: usize },\n                }\n                0usize\n            }\n        }\n        impl Foo {\n            fn g(self) -> usize {\n                mod hello {\n                    fn h() {}\n                }\n                1usize\n            }\n        }\n    }\n    x.a.g()\n}\n\nfn reserved_names(val: u8, noeq: u8, of: u8) -> u8 {\n    val + noeq + of\n}\n\nstruct Arity1<T>(T);\n\ntrait T1 {}\nimpl T1 for Foo {}\nimpl T1 for (Foo, u8) {}\n\ntrait T2_for_a {}\nimpl T2_for_a for Arity1<(Foo, u8)> {}\ntrait T3_e_for_a {}\nimpl T3_e_for_a for Foo {}\n\nstruct StructA {\n    a: usize,\n}\nstruct StructB {\n    a: usize,\n    b: usize,\n}\nstruct StructC {\n    a: usize,\n}\nstruct StructD {\n    a: usize,\n    b: usize,\n}\nfn construct_structs(a: usize, b: usize) {\n    let _ = StructA { a };\n    let _ = StructB { a, b };\n    let _ = StructC { a };\n    let _ = StructD { a, b };\n}\n\nconst INHERENT_CONSTANT: usize = 3;\ntrait FooTrait {\n    const ASSOCIATED_CONSTANT: usize;\n}\n\nfn constants<T: FooTrait>() -> usize {\n    <T as FooTrait>::ASSOCIATED_CONSTANT + INHERENT_CONSTANT\n}\n\n/// Test for ambiguous local names renaming: when two local vars are\n/// ambiguous by name but not by their internal IDs.\n/// Such situation can occur playing with *hygenic* macros.\n/// Also, this happens with some internal Rustc rewrite. (e.g. 
assignment of tuples)\nmod ambiguous_names {\n    fn debug(label: u32, value: u32) {\n        println!(\"[{}] a={}\", label, value)\n    }\n\n    /// This macro surround a given expression with a let binding for\n    /// an identifier `a` and a print of that `a`.\n    macro_rules! introduce_binding_to_new_name_a {\n        ($label:expr, $value:expr, $($e:tt)*) => {\n            let a = $value;\n            $($e)*\n            debug($label, a)\n        };\n    }\n\n    /// `f` stacks mutliple let bindings declaring different `a`s.\n    fn f() {\n        introduce_binding_to_new_name_a!(1, 104,\n               introduce_binding_to_new_name_a!(2, 205,\n                      introduce_binding_to_new_name_a!(3, 306, let a = 123;);\n               );\n        );\n        debug(4, a)\n    }\n\n    /// `f` is expanded into `f_expand` below, while the execution of `f` gives:\n    ///\n    /// ```plaintext\n    ///  [3] a=306\n    ///  [2] a=205\n    ///  [1] a=104\n    ///  [last] a=123\n    /// ```\n    #[allow(unused)]\n    fn f_expand() {\n        let a = 104;\n        let a = 205;\n        let a = 306;\n        let a = 123;\n        debug(3, a);\n        debug(2, a);\n        debug(1, a);\n        debug(0, a)\n    }\n}\n\n/// From issue https://github.com/hacspec/hax/issues/839\nfn string_shadows(string: &str, n: &str) {}\n\n/// From issue https://github.com/cryspen/hax/issues/1411\nmod functions_defined_in_trait_impls {\n    struct A;\n\n    impl PartialEq for A {\n        fn eq(&self, other: &Self) -> bool {\n            panic!()\n        }\n    }\n\n    struct B;\n\n    impl PartialEq for B {\n        fn eq(&self, other: &Self) -> bool {\n            panic!()\n        }\n    }\n}\n\n/// From issue https://github.com/cryspen/hax/issues/1450\nfn items_under_closures() {\n    let _: fn() -> () = || {\n        fn nested_function() {}\n        struct NestedStruct;\n    };\n    fn nested_function() {}\n    struct NestedStruct;\n}\n"
  },
  {
    "path": "tests/nested-derefs/Cargo.toml",
    "content": "[package]\nname = \"nested-derefs\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[package.metadata.hax-tests]\ninto.\"fstar+coq+ssprove\" = { snapshot = \"none\" }\n"
  },
  {
    "path": "tests/nested-derefs/src/lib.rs",
    "content": "fn f(x: &usize) -> usize {\n    *x\n}\nfn g(x: &&usize) -> usize {\n    f(*x)\n}\n"
  },
  {
    "path": "tests/never-type/Cargo.toml",
    "content": "[package]\nname = \"never-type\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { snapshot = \"none\" }\n"
  },
  {
    "path": "tests/never-type/src/lib.rs",
    "content": "#![allow(dead_code)]\n#![feature(never_type)]\n\nenum False {}\n\nfn never(h: False) -> ! {\n    match h {}\n}\n\nfn test(b: bool) -> u8 {\n    if b {\n        panic!();\n    };\n    3\n}\n\nfn any<T>() -> T {\n    panic!()\n}\n"
  },
  {
    "path": "tests/odd/Cargo.toml",
    "content": "[package]\nname = \"odd\"\nversion = \"0.0.1\"\nedition = \"2021\"\n\n[dependencies]\neven = { path = \"../even\" }\n"
  },
  {
    "path": "tests/odd/src/lib.rs",
    "content": "#![allow(dead_code)]\n\npub fn odd(n: usize) -> bool {\n    !even::even(n)\n}\n"
  },
  {
    "path": "tests/pattern-or/Cargo.toml",
    "content": "[package]\nname = \"pattern-or\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"coq\" = { issue_id = \"161\" }\ninto.\"fstar\" = { }\n# into.\"ssprove\" = { broken = true, snapshot = \"none\" }"
  },
  {
    "path": "tests/pattern-or/src/lib.rs",
    "content": "#![allow(dead_code)]\n\npub enum E {\n    A,\n    B,\n}\n\npub fn bar(x: E) {\n    match x {\n        E::A | E::B => (),\n    }\n}\npub fn nested(x: Option<i32>) -> i32 {\n    match x {\n        Some(1 | 2) => 1,\n        Some(x) => x,\n        None => 0,\n    }\n}\n\npub fn deep(x: (i32, Option<i32>)) -> i32 {\n    match x {\n        (1 | 2, Some(3 | 4)) => 0,\n        (x, _) => x,\n    }\n}\n\npub fn equivalent(x: (i32, Option<i32>)) -> i32 {\n    match x {\n        (1, Some(3)) | (1, Some(4)) | (2, Some(3)) | (2, Some(4)) => 0,\n        (x, _) => x,\n    }\n}\n\npub fn deep_capture(x: Result<(i32, i32), (i32, i32)>) -> i32 {\n    match x {\n        Ok((1 | 2, x)) | Err((3 | 4, x)) => x,\n        Ok((x, _)) | Err((x, _)) => x,\n    }\n}\n"
  },
  {
    "path": "tests/patterns/Cargo.toml",
    "content": "[package]\nname = \"patterns\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { issue_id = \"1170\" }"
  },
  {
    "path": "tests/patterns/src/lib.rs",
    "content": "#![allow(dead_code)]\n\nstruct Other<'a>(&'a i32);\n\nenum Test<'a> {\n    C1(Other<'a>),\n}\n\nimpl<'a> Test<'a> {\n    fn test(&self) -> i32 {\n        match self {\n            Self::C1(c) => *c.0,\n        }\n    }\n}\n"
  },
  {
    "path": "tests/proverif-basic-structs/Cargo.toml",
    "content": "[package]\nname = \"basic-structs\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"pro-verif\" = { broken = false, snapshot = \"none\" }\n"
  },
  {
    "path": "tests/proverif-basic-structs/src/lib.rs",
    "content": "// Record struct with single field\nstruct Ainitial {\n    x: u8,\n}\n\n// Record struct with multiple fields\nstruct A {\n    one: usize,\n    two: usize,\n}\n\n// Non-record struct\nstruct B(usize);\n"
  },
  {
    "path": "tests/proverif-fn-to-letfun/Cargo.toml",
    "content": "[package]\nname = \"fn-to-letfun\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\n"
  },
  {
    "path": "tests/proverif-fn-to-letfun/src/lib.rs",
    "content": "struct A {\n    x: usize,\n    y: u8,\n}\nstruct B {\n    b: bool,\n}\n\nfn some_function() -> bool {\n    true\n}\n\nfn some_other_function(b: bool) -> u8 {\n    5\n}\n\nfn longer_function(x: &str) -> A {\n    let b = some_function();\n    let d = some_other_function(b);\n\n    A { x: 12usize, y: 9u8 }\n}\n\nfn another_longer_function() -> B {\n    let b = some_function();\n    let d = some_other_function(b);\n\n    B { b: false }\n}\n\nfn void_function() {\n    let b = some_function();\n    let d = some_other_function(b);\n}\n"
  },
  {
    "path": "tests/proverif-minimal/Cargo.toml",
    "content": "[package]\nname = \"minimal\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"pro-verif\" = { broken = false, snapshot = \"none\" }\n"
  },
  {
    "path": "tests/proverif-minimal/src/lib.rs",
    "content": "pub fn add(left: usize, right: usize) -> usize {\n    left + right\n}\n\n#[cfg(test)]\nmod tests {\n    use super::*;\n\n    #[test]\n    fn it_works() {\n        let result = add(2, 2);\n        assert_eq!(result, 4);\n    }\n}\n"
  },
  {
    "path": "tests/proverif-noise/Cargo.toml",
    "content": "[package]\nname = \"noise-kkpsk0\"\nversion = \"0.1.0\"\nauthors = [\"Karthik Bhargavan <karthik@cryspen.com>\"]\nedition = \"2018\"\nlicense = \"MIT OR Apache-2.0\"\ndescription = \"hacspec chacha20 poly1305 authenticated encryption\"\nreadme = \"README.md\"\n\n[dependencies]\nhax-lib-protocol = { path = \"../../hax-lib-protocol\" }\nhax-lib-protocol-macros = { path = \"../../hax-lib-protocol-macros\" }\nhax-lib = { path = \"../../hax-lib\" }\n\n\n[dev-dependencies]\nserde_json = \"1.0\"\nserde = { version = \"1.0\", features = [\"derive\"] }\nrayon = \"1.3.0\"\ncriterion = \"0.4\"\nrand = \"0.8\"\nhacspec-dev = { git = \"https://github.com/hacspec/hacspec.git\" }\n\n[package.metadata.hax-tests]\ninto.\"pro-verif\" = { broken = false, snapshot = \"none\" }\n"
  },
  {
    "path": "tests/proverif-noise/src/lib.rs",
    "content": "pub mod noise_crypto;\npub mod noise_kkpsk0;\npub mod noise_lib;\n"
  },
  {
    "path": "tests/proverif-noise/src/noise_crypto.rs",
    "content": "// Import hacspec and all needed definitions.\nuse hax_lib_protocol::crypto::{DHGroup, *};\n\n/// This file formalizes the Crypto Functions from the Noise Specification\n/// Section 4: Crypto Functions\n/// https://noiseprotocol.org/noise.html#crypto-functions\n\npub enum Error {\n    CryptoError,\n}\n\n/// Section 4.1 and 12.1: Diffie-Hellman Functions for Curve25519\npub struct KeyPair {\n    private_key: DHScalar,\n    pub public_key: Vec<u8>,\n}\n\npub const DHLEN: usize = 32;\n\npub fn generate_keypair(sk: &[u8]) -> KeyPair {\n    let sk = DHScalar::from_bytes(sk);\n    let pk = dh_scalar_multiply_base(DHGroup::X25519, sk.clone());\n    KeyPair {\n        private_key: sk,\n        public_key: pk,\n    }\n}\n\npub fn dh(sk: &KeyPair, pk: &[u8]) -> Vec<u8> {\n    let pk = DHElement::from_bytes(pk);\n\n    dh_scalar_multiply(DHGroup::X25519, sk.private_key.clone(), pk)\n}\n\n/// Section 4.2 and 12.3: Cipher functions for ChaCha20-Poly1305\n\npub fn encrypt(key: &[u8], counter: u64, aad: &[u8], plain: &[u8]) -> Vec<u8> {\n    let mut chacha_iv = vec![0u8; 4];\n    chacha_iv.extend_from_slice(&counter.to_le_bytes());\n    let (mut cipher, tag) = aead_encrypt(\n        AEADKey::from_bytes(AEADAlgorithm::Chacha20Poly1305, key),\n        AEADIV::from_bytes(&chacha_iv),\n        aad,\n        plain,\n    );\n    cipher.extend_from_slice(&tag);\n    cipher\n}\n\npub fn decrypt(key: &[u8], counter: u64, aad: &[u8], cipher: &[u8]) -> Result<Vec<u8>, Error> {\n    let mut chacha_iv = vec![0u8; 4];\n    chacha_iv.extend_from_slice(&counter.to_le_bytes());\n    let cipher_len = cipher.len() - 16;\n    let cip = &cipher[0..cipher_len];\n    let tag = &cipher[cipher_len..cipher.len()];\n    aead_decrypt(\n        AEADKey::from_bytes(AEADAlgorithm::Chacha20Poly1305, key),\n        AEADIV::from_bytes(&chacha_iv),\n        aad,\n        cip,\n        AEADTag::from_bytes(tag),\n    )\n    .map_err(|_| Error::CryptoError)\n}\n\npub fn rekey(key: &[u8]) -> Vec<u8> 
{\n    encrypt(key, 0xffffffffffffffffu64, &Vec::new(), &[0u8; 32])\n}\n\n/// Section 4.3 and 12.5: Hash functions for SHA-256\n\npub const HASHLEN: usize = 32;\npub const BLOCKLEN: usize = 64;\n\npub fn hash(input: &[u8]) -> Vec<u8> {\n    hax_lib_protocol::crypto::hash(HashAlgorithm::Sha256, input)\n}\n\npub fn hmac_hash(key: &[u8], input: &[u8]) -> Vec<u8> {\n    hmac(HMACAlgorithm::Sha256, key, input)\n}\n\n/// HKDF spec as per Noise\n/// Alternative would be to directly use HKDF\n\npub fn kdf_next(secret: &[u8], prev: &[u8], counter: u8) -> Vec<u8> {\n    hmac_hash(secret, &[prev, &[counter]].concat())\n}\n\npub fn hkdf1(key: &[u8], ikm: &[u8]) -> Vec<u8> {\n    let secret = hmac_hash(key, ikm);\n    kdf_next(&secret, &Vec::new(), 1)\n}\n\npub fn hkdf2(key: &[u8], ikm: &[u8]) -> (Vec<u8>, Vec<u8>) {\n    let secret = hmac_hash(key, ikm);\n    let k1 = kdf_next(&secret, &Vec::new(), 1);\n    let k2 = kdf_next(&secret, &k1, 2);\n    (k1, k2)\n}\n\npub fn hkdf3(key: &[u8], ikm: &[u8]) -> (Vec<u8>, Vec<u8>, Vec<u8>) {\n    let secret = hmac_hash(key, ikm);\n    let k1 = kdf_next(&secret, &Vec::new(), 1);\n    let k2 = kdf_next(&secret, &k1, 2);\n    let k3 = kdf_next(&secret, &k1, 3);\n    (k1, k2, k3)\n}\n"
  },
  {
    "path": "tests/proverif-noise/src/noise_kkpsk0.rs",
    "content": "// Import hacspec and all needed definitions.\nuse crate::*;\nuse noise_crypto::*;\nuse noise_lib::*;\n\npub struct HandshakeStateI0 {\n    st: SymmetricState,\n    psk: Vec<u8>,\n    s: KeyPair,\n    e: KeyPair,\n    rs: Vec<u8>,\n}\n\npub struct HandshakeStateI1 {\n    st: SymmetricState,\n    s: KeyPair,\n    e: KeyPair,\n}\n\npub struct HandshakeStateR0 {\n    st: SymmetricState,\n    psk: Vec<u8>,\n    s: KeyPair,\n    e: KeyPair,\n    rs: Vec<u8>,\n}\n\npub struct HandshakeStateR1 {\n    st: SymmetricState,\n    e: KeyPair,\n    rs: Vec<u8>,\n    re: Vec<u8>,\n}\n\npub struct Transport {\n    send: CipherState,\n    recv: CipherState,\n    handshake_hash: Vec<u8>,\n}\n\nstruct ProtocolName([u8; 36]);\n#[allow(non_upper_case_globals)]\nconst Noise_KKpsk0_25519_ChaChaPoly_SHA256: ProtocolName = ProtocolName([\n    78u8, 111u8, 105u8, 115u8, 101u8, 95u8, 75u8, 75u8, 112u8, 115u8, 107u8, 48u8, 95u8, 50u8,\n    53u8, 53u8, 49u8, 57u8, 95u8, 67u8, 104u8, 97u8, 67u8, 104u8, 97u8, 80u8, 111u8, 108u8, 121u8,\n    95u8, 83u8, 72u8, 65u8, 50u8, 53u8, 54u8,\n]);\n\n///  KKpsk0:\n///    -> s\n///    <- s\n///    ...\npub fn initialize_initiator(\n    prologue: &[u8],\n    psk: Vec<u8>,\n    s: KeyPair,\n    e: KeyPair,\n    rs: &[u8],\n) -> HandshakeStateI0 {\n    let st = initialize_symmetric(&Noise_KKpsk0_25519_ChaChaPoly_SHA256.0);\n    let st = mix_hash(st, prologue);\n    let st = mix_hash(st, &s.public_key);\n    let st = mix_hash(st, rs);\n    HandshakeStateI0 {\n        psk,\n        st,\n        s,\n        e,\n        rs: rs.to_vec(),\n    }\n}\n\npub fn initialize_responder(\n    prologue: &[u8],\n    psk: Vec<u8>,\n    s: KeyPair,\n    e: KeyPair,\n    rs: &[u8],\n) -> HandshakeStateR0 {\n    let st = initialize_symmetric(&Noise_KKpsk0_25519_ChaChaPoly_SHA256.0);\n    let st = mix_hash(st, prologue);\n    let st = mix_hash(st, rs);\n    let st = mix_hash(st, &s.public_key);\n    HandshakeStateR0 {\n        st,\n        psk,\n        s,\n        
e,\n        rs: rs.to_vec(),\n    }\n}\n\n///  KKpsk0:\n///    ...\n///    -> psk, e, es, ss\npub fn write_message1(\n    hs: HandshakeStateI0,\n    payload: &[u8],\n) -> Result<(HandshakeStateI1, Vec<u8>), Error> {\n    let HandshakeStateI0 { st, psk, s, e, rs } = hs;\n    let st = mix_key_and_hash(st, &psk);\n    let st = mix_hash(st, &e.public_key);\n    let st = mix_key(st, &e.public_key);\n    let es = dh(&e, &rs);\n    let st = mix_key(st, &es);\n    let ss = dh(&s, &rs);\n    let st = mix_key(st, &ss);\n    let (st, ciphertext) = encrypt_and_hash(st, payload)?;\n    let hs = HandshakeStateI1 { st, s, e };\n    Ok((hs, ciphertext))\n}\n\npub fn read_message1(\n    hs: HandshakeStateR0,\n    ciphertext: &[u8],\n) -> Result<(HandshakeStateR1, Vec<u8>), Error> {\n    let HandshakeStateR0 { st, psk, s, e, rs } = hs;\n    let re = &ciphertext[0..DHLEN];\n    let ciphertext = &ciphertext[DHLEN..ciphertext.len()];\n    let st = mix_key_and_hash(st, &psk);\n    let st = mix_hash(st, re);\n    let st = mix_key(st, re);\n    let es = dh(&s, re);\n    let st = mix_key(st, &es);\n    let ss = dh(&s, &rs);\n    let st = mix_key(st, &ss);\n    let (st, plaintext) = decrypt_and_hash(st, ciphertext)?;\n    let hs = HandshakeStateR1 {\n        st,\n        e,\n        rs,\n        re: re.to_vec(),\n    };\n    Ok((hs, plaintext))\n}\n\n///  KKpsk0:\n///    ...\n///     <- e, ee, se\npub fn write_message2(hs: HandshakeStateR1, payload: &[u8]) -> Result<(Transport, Vec<u8>), Error> {\n    let HandshakeStateR1 { st, e, rs, re } = hs;\n    let st = mix_hash(st, &e.public_key);\n    let st = mix_key(st, &e.public_key);\n    let ee = dh(&e, &re);\n    let st = mix_key(st, &ee);\n    let se = dh(&e, &rs);\n    let st = mix_key(st, &se);\n    let (st, ciphertext) = encrypt_and_hash(st, payload)?;\n    let (c1, c2, h) = split(st);\n    let tx = Transport {\n        send: c2,\n        recv: c1,\n        handshake_hash: h,\n    };\n    Ok((tx, ciphertext))\n}\n\npub fn read_message2(\n  
  hs: HandshakeStateI1,\n    ciphertext: &[u8],\n) -> Result<(Transport, Vec<u8>), Error> {\n    let HandshakeStateI1 { st, s, e } = hs;\n    let re = &ciphertext[0..DHLEN];\n    let ciphertext = &ciphertext[DHLEN..ciphertext.len()];\n    let st = mix_hash(st, re);\n    let st = mix_key(st, re);\n    let ee = dh(&e, re);\n    let st = mix_key(st, &ee);\n    let se = dh(&s, re);\n    let st = mix_key(st, &se);\n    let (st, plaintext) = decrypt_and_hash(st, ciphertext)?;\n    let (c1, c2, h) = split(st);\n    let tx = Transport {\n        send: c1,\n        recv: c2,\n        handshake_hash: h,\n    };\n    Ok((tx, plaintext))\n}\n\n///  KKpsk0:\n///    ->\n///    <-\npub fn write_transport(\n    tx: Transport,\n    ad: &[u8],\n    payload: &[u8],\n) -> Result<(Transport, Vec<u8>), Error> {\n    let Transport {\n        send,\n        recv,\n        handshake_hash,\n    } = tx;\n    let (send, ciphertext) = encrypt_with_ad(send, ad, payload)?;\n    let tx = Transport {\n        send,\n        recv,\n        handshake_hash,\n    };\n    Ok((tx, ciphertext))\n}\n\npub fn read_transport(\n    tx: Transport,\n    ad: &[u8],\n    ciphertext: &[u8],\n) -> Result<(Transport, Vec<u8>), Error> {\n    let Transport {\n        send,\n        recv,\n        handshake_hash,\n    } = tx;\n    let (recv, payload) = decrypt_with_ad(recv, ad, ciphertext)?;\n    let tx = Transport {\n        send,\n        recv,\n        handshake_hash,\n    };\n    Ok((tx, payload))\n}\n"
  },
  {
    "path": "tests/proverif-noise/src/noise_lib.rs",
    "content": "// Import hacspec and all needed definitions.\n\nuse crate::*;\nuse noise_crypto::*;\n\n/// This module defines the generic Noise processing rules\n/// Section 5: https://noiseprotocol.org/noise.html#processing-rules\n\npub struct CipherState {\n    k: Option<Vec<u8>>,\n    n: u64,\n}\n\npub struct SymmetricState {\n    cs: CipherState,\n    ck: Vec<u8>,\n    h: Vec<u8>,\n}\n\n/// 5.1: The CipherState Object\n\npub fn initialize_key(key: Option<Vec<u8>>) -> CipherState {\n    CipherState { k: key, n: 0u64 }\n}\n\npub fn has_key(cs: &CipherState) -> bool {\n    cs.k.is_some()\n}\n\npub fn set_nonce(cs: CipherState, n: u64) -> CipherState {\n    let CipherState { k, n: _ } = cs;\n    CipherState { k, n }\n}\n\npub fn encrypt_with_ad(\n    cs: CipherState,\n    ad: &[u8],\n    plaintext: &[u8],\n) -> Result<(CipherState, Vec<u8>), Error> {\n    let CipherState { k, n } = cs;\n    if n == 0xffffffffffffffffu64 {\n        Err(Error::CryptoError)\n    } else {\n        match k {\n            Some(k) => {\n                let cip = encrypt(&k, n, ad, plaintext);\n                Ok((\n                    CipherState {\n                        k: Some(k),\n                        n: n + 1,\n                    },\n                    cip,\n                ))\n            }\n            None => Ok((CipherState { k, n }, plaintext.to_vec())),\n        }\n    }\n}\n\npub fn decrypt_with_ad(\n    cs: CipherState,\n    ad: &[u8],\n    ciphertext: &[u8],\n) -> Result<(CipherState, Vec<u8>), Error> {\n    let CipherState { k, n } = cs;\n    if n == 0xffffffffffffffffu64 {\n        Err(Error::CryptoError)\n    } else {\n        match k {\n            Some(k) => {\n                let plain = decrypt(&k, n, ad, ciphertext)?;\n                Ok((\n                    CipherState {\n                        k: Some(k),\n                        n: n + 1,\n                    },\n                    plain,\n                ))\n            }\n            None => 
Ok((CipherState { k, n }, ciphertext.to_vec())),\n        }\n    }\n}\n\npub fn rekey(cs: CipherState) -> Result<CipherState, Error> {\n    let CipherState { k, n } = cs;\n    match k {\n        Some(k) => {\n            let new_k = noise_crypto::rekey(&k);\n            Ok(CipherState { k: Some(new_k), n })\n        }\n        None => Err(Error::CryptoError),\n    }\n}\n\n/// 5.2: The SymmetricState Object\n\npub fn initialize_symmetric(protocol_name: &[u8]) -> SymmetricState {\n    let pnlen = protocol_name.len();\n    let hv: Vec<u8> = if pnlen < HASHLEN {\n        [protocol_name, &vec![0u8; 32 - pnlen]].concat()\n    } else {\n        hash(protocol_name)\n    };\n    let ck = hv.clone();\n    SymmetricState {\n        cs: initialize_key(None),\n        ck,\n        h: hv,\n    }\n}\n\npub fn mix_key(st: SymmetricState, input_key_material: &[u8]) -> SymmetricState {\n    let SymmetricState { cs: _, ck, h } = st;\n    let (ck, mut temp_k) = hkdf2(&ck, input_key_material);\n    if HASHLEN == 64 {\n        temp_k.truncate(32);\n    }\n    SymmetricState {\n        cs: initialize_key(Some(temp_k)),\n        ck,\n        h,\n    }\n}\n\npub fn mix_hash(st: SymmetricState, data: &[u8]) -> SymmetricState {\n    let SymmetricState { cs, ck, h } = st;\n    SymmetricState {\n        cs,\n        ck,\n        h: hash(&[&h, data].concat()),\n    }\n}\n\npub fn mix_key_and_hash(st: SymmetricState, input_key_material: &[u8]) -> SymmetricState {\n    let SymmetricState { cs: _, ck, h } = st;\n    let (ck, temp_h, mut temp_k) = hkdf3(&ck, input_key_material);\n    let mut new_h = h;\n    new_h.extend_from_slice(&temp_h);\n    let new_h = hash(&new_h);\n    if HASHLEN == 64 {\n        temp_k.truncate(32);\n    }\n    SymmetricState {\n        cs: initialize_key(Some(temp_k)),\n        ck,\n        h: new_h,\n    }\n}\n\n/// Unclear if we need a special function for psk or we can reuse mix_key_and_hash above\n//pub fn mix_psk(st:SymmetricState,psk:&[u8]) -> 
(Vec<u8>,Vec<u8>,Vec<u8>) {\n//    let (ck,temp_hash,cs_k) = kdf3(key,psk);\n//    let next_hash = mix_hash(prev_hash,&temp_hash);\n//    (ck,cs_k,next_hash)\n//}\n\npub fn encrypt_and_hash(\n    st: SymmetricState,\n    plaintext: &[u8],\n) -> Result<(SymmetricState, Vec<u8>), Error> {\n    let (new_cs, ciphertext) = encrypt_with_ad(st.cs, &st.h, plaintext)?;\n    let mut new_h = st.h.clone();\n    new_h.extend_from_slice(&ciphertext);\n    let new_h = hash(&new_h);\n    Ok((\n        SymmetricState {\n            cs: new_cs,\n            ck: st.ck,\n            h: new_h,\n        },\n        ciphertext,\n    ))\n}\n\npub fn decrypt_and_hash(\n    st: SymmetricState,\n    ciphertext: &[u8],\n) -> Result<(SymmetricState, Vec<u8>), Error> {\n    let (new_cs, plaintext) = decrypt_with_ad(st.cs, &st.h, ciphertext)?;\n    let mut new_h = st.h.clone();\n    new_h.extend_from_slice(ciphertext);\n    let new_h = hash(&new_h);\n    Ok((\n        SymmetricState {\n            cs: new_cs,\n            ck: st.ck,\n            h: new_h,\n        },\n        plaintext,\n    ))\n}\n\npub fn split(st: SymmetricState) -> (CipherState, CipherState, Vec<u8>) {\n    let (mut temp_k1, mut temp_k2) = hkdf2(&st.ck, &Vec::new());\n    if HASHLEN == 64 {\n        temp_k1.truncate(32);\n        temp_k2.truncate(32);\n    }\n    (\n        initialize_key(Some(temp_k1)),\n        initialize_key(Some(temp_k2)),\n        st.h,\n    )\n}\n"
  },
  {
    "path": "tests/proverif-ping-pong/Cargo.toml",
    "content": "[package]\nname = \"ping-pong\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n[dependencies]\nhax-lib-protocol = { path = \"../../hax-lib-protocol\" }\nhax-lib-protocol-macros = { path = \"../../hax-lib-protocol-macros\" }\nhax-lib = { path = \"../../hax-lib\" }\n"
  },
  {
    "path": "tests/proverif-ping-pong/pingpong.pv",
    "content": "set attacker = passive.\nchannel c.\n\ntype ping_t.\ntype pong_t.\n\nfun new_ping(): ping_t.\nfun ping2pong(ping_t): pong_t.\n\nevent PingSent(ping_t).\nevent PingReceived(ping_t).\nevent PongSent(pong_t).\nevent PongReceived(pong_t).\n\nquery p: ping_t;\n    event(PingReceived(p)) ==> event(PingSent(p)).\n\n\n\nlet A =\n    (\n        let ping = new_ping() in\n        event PingSent(ping);\n        out(c, ping)\n    ) | (\n        in(c, pong: pong_t);\n        event PongReceived(pong)\n    ).\n\nlet B = \n    in(c, ping: ping_t);\n    event PingReceived(ping);\n    let pong = ping2pong(ping) in\n    event PongSent(pong);\n    out(c, pong);\n    0.\n\nprocess\n    A | B\n    "
  },
  {
    "path": "tests/proverif-ping-pong/src/a.rs",
    "content": "use hax_lib_protocol::{state_machine::*, ProtocolError, ProtocolResult};\n\nuse crate::Message;\n\n// ==== A states ====\npub struct A0 {\n    data: u8,\n}\n\npub struct A1 {}\n\npub struct A2 {\n    #[allow(dead_code)]\n    received: u8,\n}\n\n// ==== A initialization ====\n#[hax_lib_protocol_macros::init(A0)]\nfn init_a(prologue: Vec<u8>) -> ProtocolResult<A0> {\n    if prologue.len() < 1 {\n        Err(ProtocolError::InvalidPrologue)\n    } else {\n        Ok(A0 { data: prologue[0] })\n    }\n}\n\n// The following generated by macro:\n/* #[hax_lib_macros::exclude]\nimpl TryFrom<Vec<u8>> for A0 {\n    type Error = ProtocolError;\n\n    fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {\n        init_a(value)\n    }\n}\n\n#[hax_lib_macros::exclude]\nimpl InitialState for A0 {\n    fn init(prologue: Option<Vec<u8>>) -> ProtocolResult<Self> {\n        if let Some(prologue) = prologue {\n            prologue.try_into()\n        } else {\n            Err(ProtocolError::InvalidPrologue)\n        }\n    }\n} */\n\n// ==== A state transistion functions ====\n#[hax_lib_protocol_macros::write(A0, A1, Message)]\nfn write_ping(state: A0) -> ProtocolResult<(A1, Message)> {\n    Ok((A1 {}, Message::Ping(state.data)))\n}\n\n// The following generated by macro:\n/*#[hax_lib_macros::exclude]\nimpl TryFrom<A0> for (A1, Message) {\n    type Error = ProtocolError;\n\n    fn try_from(value: A0) -> Result<Self, Self::Error> {\n        write_ping(value)\n    }\n}\n\n#[hax_lib_macros::exclude]\nimpl WriteState for A0 {\n    type NextState = A1;\n    type Message = Message;\n\n    fn write(self) -> ProtocolResult<(Self::NextState, Message)> {\n        self.try_into()\n    }\n}*/\n\n#[hax_lib_protocol_macros::read(A1, A2, Message)]\nfn read_pong(_state: A1, msg: Message) -> ProtocolResult<A2> {\n    match msg {\n        Message::Ping(_) => Err(ProtocolError::InvalidMessage),\n        Message::Pong(received) => Ok(A2 { received }),\n    }\n}\n\n// The following 
generated by macro:\n/*#[hax_lib_macros::exclude]\nimpl TryFrom<(A1, Message)> for A2 {\n    type Error = ProtocolError;\n\n    fn try_from((state, msg): (A1, Message)) -> Result<Self, Self::Error> {\n        read_pong(state, msg)\n    }\n}\n#[hax_lib_macros::exclude]\nimpl ReadState<A2> for A1 {\n    type Message = Message;\n    fn read(self, msg: Message) -> ProtocolResult<A2> {\n        A2::try_from((self, msg))\n    }\n}*/\n"
  },
  {
    "path": "tests/proverif-ping-pong/src/b.rs",
    "content": "use hax_lib_protocol::{state_machine::*, ProtocolError, ProtocolResult};\n\nuse crate::Message;\n\n// ==== B states ====\npub struct B0 {}\n\npub struct B1 {\n    received: u8,\n}\n\n// An alternative successor of B0 to show read alternatives\npub struct B1alt {}\n\npub struct B2 {}\n\n// ==== B initialization ====\n#[hax_lib_protocol_macros::init_empty(B0)]\nfn init_b() -> ProtocolResult<B0> {\n    Ok(B0 {})\n}\n\n// The following generated by macro:\n// #[hax_lib_macros::exclude]\n// impl InitialState for B0 {\n//     fn init(prologue: Option<Vec<u8>>) -> ProtocolResult<Self> {\n//         if let Some(_) = prologue {\n//             Err(ProtocolError::InvalidPrologue)\n//         } else {\n//             init_b()\n//         }\n//     }\n// }\n\n// ==== B state transistion functions ====\n#[hax_lib_protocol_macros::read(B0, B1, Message)]\nfn read_ping(_state: B0, msg: Message) -> ProtocolResult<B1> {\n    match msg {\n        Message::Ping(received) => Ok(B1 { received }),\n        Message::Pong(_) => Err(ProtocolError::InvalidMessage),\n    }\n}\n\n// The following generated by macro:\n/*#[hax_lib_macros::exclude]\nimpl TryFrom<(B0, Message)> for B1 {\n    type Error = ProtocolError;\n\n    fn try_from((state, msg): (B0, Message)) -> Result<Self, Self::Error> {\n        read_ping(state, msg)\n    }\n}\n\n#[hax_lib_macros::exclude]\nimpl ReadState<B1> for B0 {\n    type Message = Message;\n    fn read(self, msg: Message) -> Result<B1, ProtocolError> {\n        B1::try_from((self, msg))\n    }\n}*/\n\n#[hax_lib_protocol_macros::read(B0, B1alt, Message)]\nfn read_ping_alt(_state: B0, msg: Message) -> ProtocolResult<B1alt> {\n    match msg {\n        Message::Ping(received) if received == 42 => Ok(B1alt {}),\n        _ => Err(ProtocolError::InvalidMessage),\n    }\n}\n\n// The following generated by macro:\n/*#[hax_lib_macros::exclude]\nimpl TryFrom<(B0, Message)> for B1alt {\n    type Error = ProtocolError;\n\n    fn try_from((state, msg): (B0, 
Message)) -> Result<Self, Self::Error> {\n        read_ping_alt(state, msg)\n    }\n}\n\n#[hax_lib_macros::exclude]\nimpl ReadState<B1alt> for B0 {\n    type Message = Message;\n    fn read(self, msg: Message) -> Result<B1alt, ProtocolError> {\n        B1alt::try_from((self, msg))\n    }\n}*/\n\n#[hax_lib_protocol_macros::write(B1, B2, Message)]\nfn write_pong(state: B1) -> ProtocolResult<(B2, Message)> {\n    Ok((B2 {}, Message::Pong(state.received)))\n}\n\n// The following generated by macro:\n/*#[hax_lib_macros::exclude]\nimpl TryFrom<B1> for (B2, Message) {\n    type Error = ProtocolError;\n\n    fn try_from(value: B1) -> Result<Self, Self::Error> {\n        write_pong(value)\n    }\n}\n\n#[hax_lib_macros::exclude]\nimpl WriteState for B1 {\n    type Message = Message;\n    type NextState = B2;\n\n    fn write(self) -> Result<(Self::NextState, Message), ProtocolError> {\n        self.try_into()\n    }\n}*/\n"
  },
  {
    "path": "tests/proverif-ping-pong/src/lib.rs",
    "content": "mod a;\nmod b;\n\n#[hax_lib::protocol_messages]\npub enum Message {\n    Ping(u8),\n    Pong(u8),\n}\n\n#[test]\nfn run() {\n    use a::A0;\n    use b::{B0, B1};\n    use hax_lib_protocol::state_machine::{InitialState, ReadState, WriteState};\n    let a = A0::init(Some(vec![1])).unwrap();\n    let b = B0::init(None).unwrap();\n\n    let (a, msg) = a.write().unwrap();\n    let b: B1 = b.read(msg).unwrap();\n\n    let (_b, msg) = b.write().unwrap();\n    let _a = a.read(msg).unwrap();\n}\n"
  },
  {
    "path": "tests/raw-attributes/Cargo.toml",
    "content": "[package]\nname = \"raw-attributes\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../../hax-lib\" }\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { snapshot = \"none\" }\n"
  },
  {
    "path": "tests/raw-attributes/README.md",
    "content": "This example is more interesting with the debug mode enabled, to see all the attributes:\n - `cargo hax into --debug-engine SOME_EMPTY_EXISTING_DIR fstar`\n - `cd engine/utils/phase_debug_webapp && PORT=8989 node server.js SOME_EMPTY_EXISTING_DIR`\n - browse `http://localhost:8989/`, and observe the attributes\n"
  },
  {
    "path": "tests/raw-attributes/src/lib.rs",
    "content": "#![allow(dead_code)]\n#![feature(register_tool)]\n#![register_tool(hax)]\n\n/** TypeAlias:BlockDocComment Lorem ipsum dolor sit amet, consectetur\nadipiscing elit. Integer bibendum, massa quis facilisis aliquam,\ndui libero auctor sem, aliquet dignissim urna magna ac turpis. */\n#[hax::a::path(TypeAlias attr)]\ntype TypeAlias<#[hax::a::path(TypeAlias:T attr)] T: Clone> = (T, u8);\n\n/// f:LineBlockComment\n#[hax::a::path(f attr)]\nfn f<#[hax::a::path(f:T attr)] T, #[hax::a::path(f:Y attr)] const Y: usize>(_x: T) -> usize {\n    Y\n}\n\n#[hax::a::path(Foo attr)]\nenum Foo {\n    #[hax::a::path(Foo:A attr)]\n    A(\n        #[hax::a::path(Foo:A:u8 attr)] u8,\n        #[hax::a::path(Foo:A:u16 attr)] u16,\n    ),\n    #[hax::a::path(Foo:B attr)]\n    B {\n        /// some Foo::B::x comment\n        #[hax::a::path(Foo:B:x attr)]\n        x: u8,\n        /// some Foo::B::y comment\n        #[hax::a::path(Foo:B:y attr)]\n        y: u16,\n    },\n}\n\n#[hax::a::path(Bar attr)]\nstruct Bar {\n    #[hax::a::path(Bar:field1 attr)]\n    field1: u64,\n    /** some Bar::field2 comment Quisque et purus lacinia, venenatis\n       risus eu, hendrerit arcu. Nunc posuere iaculis mattis. Sed at\n       enim justo. Praesent aliquet ipsum in enim mollis\n       faucibus. Morbi eu diam molestie, posuere quam eget, pulvinar\n       diam.\n    */\n    #[hax::a::path(Bar:field2 attr)]\n    field2: u32,\n}\n"
  },
  {
    "path": "tests/recursion/Cargo.toml",
    "content": "[package]\nname = \"recursion\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { }\n"
  },
  {
    "path": "tests/recursion/src/lib.rs",
    "content": "#![allow(dead_code)]\n\npub fn f(n: u8) -> u8 {\n    if n == 0 {\n        0\n    } else {\n        n + f(n - 1)\n    }\n}\n"
  },
  {
    "path": "tests/reordering/Cargo.toml",
    "content": "[package]\nname = \"reordering\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar+coq+ssprove\" = { snapshot = \"stdout\" }\n"
  },
  {
    "path": "tests/reordering/src/lib.rs",
    "content": "#![allow(dead_code)]\n\nfn no_dependency_1() {}\n\nfn g() -> Bar {\n    Bar(f(32))\n}\n\nfn no_dependency_2() {}\n\nfn f(_: u32) -> Foo {\n    Foo::A\n}\n\nstruct Bar(Foo);\nenum Foo {\n    A,\n    B,\n}\n\nmod mut_rec {\n    fn f() {\n        g()\n    }\n\n    fn f_2() {\n        f()\n    }\n\n    fn g() {\n        f()\n    }\n}\n\nmod independent_cycles {\n    fn a() {\n        c()\n    }\n    fn b() {\n        d()\n    }\n    fn c() {\n        a()\n    }\n    fn d() {\n        b()\n    }\n}\n"
  },
  {
    "path": "tests/side-effects/Cargo.toml",
    "content": "[package]\nname = \"side-effects\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.fstar = {}\ninto.coq = {broken = true, snapshot = \"none\", issue_id = 134}\ninto.ssprove = {broken = true, snapshot = \"none\"}\n"
  },
  {
    "path": "tests/side-effects/src/lib.rs",
    "content": "#![allow(dead_code)]\n\n/// Helper function\nfn add3(x: u32, y: u32, z: u32) -> u32 {\n    x.wrapping_add(y).wrapping_add(z)\n}\n\n/// Exercise local mutation with control flow and loops\nfn local_mutation(mut x: u32) -> u32 {\n    let mut y = 0;\n    if {\n        x = x.wrapping_add(1);\n        x > 3\n    } {\n        x = x.wrapping_sub(3);\n        let mut y = x / 2;\n        for i in {\n            y = y.wrapping_add(2);\n            0\n        }..10\n        {\n            y = x.wrapping_add(i);\n        }\n        x.wrapping_add(y)\n    } else {\n        x = match x {\n            12 => {\n                y = x.wrapping_add(y);\n                3\n            }\n            13 => add3(\n                x,\n                {\n                    x = x.wrapping_add(1);\n                    123u32.wrapping_add(x)\n                },\n                x,\n            ),\n            _ => 0,\n        };\n        x.wrapping_add(y)\n    }\n}\n\n/// Exercise early returns with control flow and loops\nfn early_returns(mut x: u32) -> u32 {\n    return (123u32.wrapping_add(\n        if {\n            if x > 3 {\n                return 0;\n            };\n            x > 30\n        } {\n            match true {\n                true => return 34,\n                _ => 3,\n            }\n        } else {\n            x = x + 9;\n            x + 1\n        },\n    ))\n    .wrapping_add(x);\n}\n\nfn simplifiable_return(c1: bool, c2: bool, c3: bool) -> i32 {\n    let mut x = 0;\n    if c1 {\n        if c2 {\n            x += 10;\n            if c3 {\n                return 1;\n            }\n        }\n        x += 1;\n    }\n    x\n}\n\nfn simplifiable_question_mark(c: bool, x: Option<i32>) -> Option<i32> {\n    let a = if c { x? 
+ 10 } else { 0 };\n    let b = 20;\n    Some(a + b)\n}\n\n/// Question mark without error coercion\nfn direct_result_question_mark(y: Result<(), u32>) -> Result<i8, u32> {\n    y?;\n    Ok(0)\n}\n\n/// Question mark with an error coercion\nfn direct_result_question_mark_coercion(y: Result<i8, u16>) -> Result<i8, u32> {\n    Ok(y?)\n}\n\n/// Test question mark on `Option`s with some control flow\nfn options(x: Option<u8>, y: Option<u8>, z: Option<u64>) -> Option<u8> {\n    let v = match (if x? > 10 {\n        Some(x?.wrapping_add(3))\n    } else {\n        Some(x?.wrapping_add(y?))\n    })? {\n        3 => None?,\n        4 => 4 + (if z? > 4 { 0 } else { 3 }),\n        _ => 12u8,\n    };\n    Some(v.wrapping_add(x?).wrapping_add(y?))\n}\n\n/// Test question mark on `Result`s with local mutation\nfn question_mark(mut x: u32) -> Result<u32, u32> {\n    if x > 40u32 {\n        let mut y = 0;\n        x = x.wrapping_add(3);\n        y = x.wrapping_add(y);\n        if {\n            x = x.wrapping_add(y);\n            x > 90u32\n        } {\n            Err(12u8)?\n        }\n    };\n    Ok(3u32.wrapping_add(x))\n}\n\nstruct A;\nstruct B;\n\n/// Combine `?` and early return\nfn monad_lifting(x: u8) -> Result<A, B> {\n    if x > 123 {\n        return Ok(Err(B)?);\n    } else {\n        Ok(A)\n    }\n}\n\nstruct Bar {\n    a: bool,\n    b: ([(bool, bool); 6], bool),\n}\nstruct Foo {\n    x: bool,\n    y: (bool, Vec<Bar>),\n    z: [Bar; 6],\n    bar: Bar,\n}\n\n/// Test assignation on non-trivial places\nfn assign_non_trivial_lhs(mut foo: Foo) -> Foo {\n    foo.x = true;\n    foo.bar.a = true;\n    foo.bar.b.0[3].1 = true;\n    foo.z[3].a = true;\n    foo.y.1[3].b.0[5].0 = true;\n    foo\n}\n\nmod issue_1083 {\n    trait MyFrom<T> {\n        fn my_from(x: T) -> Self;\n    }\n\n    impl MyFrom<u8> for u16 {\n        fn my_from(x: u8) -> u16 {\n            x as u16\n        }\n    }\n\n    fn f(x: u8) -> Result<u16, u16> {\n        Err(1u8)?;\n        Ok(u16::my_from(x))\n   
 }\n}\n\nmod issue_1089 {\n    fn test(x: Option<i32>, y: Option<i32>) -> Option<i32> {\n        x.map(|i| Some(i + y?))?\n    }\n}\n\n/// issue 1175\nmod nested_return {\n    fn other_fun(rng: &mut i8) -> Result<(), ()> {\n        Ok(())\n    }\n\n    fn fun(rng: &mut i8) -> Result<(), ()> {\n        return Ok(other_fun(rng)?);\n    }\n}\n\nmod issue_1300 {\n    fn fun() -> Result<(), u8> {\n        let val = [0u8; 5]\n            .iter()\n            // Removing the inner Result/? below makes this pass\n            .map(|&prev| Ok::<(u8, [u8; 32]), u8>((prev, Ok::<[u8; 32], u8>([0u8; 32])?)))\n            // Removing the ? below makes this pass\n            .collect::<Result<Vec<_>, _>>()?;\n        Ok(())\n    }\n}\n\nmod issue_1299 {\n    pub struct S {\n        pub g: Foo,\n    }\n\n    pub struct OtherS {\n        pub g: Option<Foo>,\n    }\n\n    pub struct Foo {\n        y: u8,\n    }\n\n    impl Foo {\n        pub fn from(i: &Foo) -> Self {\n            Self { y: i.y.clone() }\n        }\n    }\n    struct Error();\n    impl S {\n        pub fn from(i: &OtherS) -> Result<Self, Error> {\n            Ok(Self {\n                g: Foo::from(i.g.as_ref().ok_or(Error())?),\n            })\n        }\n    }\n}\n"
  },
  {
    "path": "tests/slices/Cargo.toml",
    "content": "[package]\nname = \"slices\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar+coq\" = { broken = false, issue_id = \"85\" }\n"
  },
  {
    "path": "tests/slices/src/lib.rs",
    "content": "#![allow(dead_code)]\n\n// The issue here is probably both, pointer and slice. We first run into the slice.\nconst VERSION: &[u8] = b\"v1\";\n\n// This panics\n// thread 'rustc' panicked at 'hax-engine exited with non-zero code', cli/driver/src/exporter.rs:217:2\npub fn do_something(_: &[u8]) {}\n\npub fn sized(x: &[&[u8; 4]; 1]) {\n    r#unsized(&[(x[0] as &[u8])])\n}\n\npub fn r#unsized(_: &[&[u8]; 1]) {}\n"
  },
  {
    "path": "tests/statics/Cargo.toml",
    "content": "[package]\nname = \"statics\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { snapshot = \"stdout\" }\n"
  },
  {
    "path": "tests/statics/src/lib.rs",
    "content": "static FOO: usize = 0;\n\nfn get_foo() -> usize {\n    FOO\n}\n"
  },
  {
    "path": "tests/traits/Cargo.toml",
    "content": "[package]\nname = \"traits\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\ntypenum = \"1.18.0\"\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { snapshot = \"stdout\" }\n\n"
  },
  {
    "path": "tests/traits/src/lib.rs",
    "content": "#![allow(dead_code)]\n\npub trait SuperTrait: Clone {\n    fn function_of_super_trait(self) -> u32;\n}\n\npub trait Foo {\n    type AssocType: SuperTrait;\n    const N: usize;\n    fn assoc_f() -> ();\n    fn method_f(&self) -> ();\n    fn assoc_type(_: Self::AssocType) -> ()\n    where\n        Self::AssocType: Copy;\n}\n\nimpl SuperTrait for i32 {\n    fn function_of_super_trait(self) -> u32 {\n        self.abs() as u32\n    }\n}\n\nimpl Foo for () {\n    type AssocType = i32;\n    const N: usize = 32;\n    fn assoc_f() {\n        ()\n    }\n    fn method_f(&self) {\n        Self::assoc_f()\n    }\n    fn assoc_type(_: Self::AssocType) -> () {}\n}\n\nfn f<T: Foo>(x: T) {\n    T::assoc_f();\n    x.method_f()\n}\n\nfn g<T: Foo>(x: T::AssocType) -> u32 {\n    x.function_of_super_trait()\n}\n\nstruct Struct;\n\ntrait Bar<'a> {\n    fn bar(self);\n}\n\nimpl<'a> Struct {\n    fn method<T: Bar<'a>>(x: T) {\n        x.bar()\n    }\n}\n\npub fn closure_impl_expr<I: Iterator<Item = ()>>(it: I) -> Vec<()> {\n    it.map(|x| x).collect()\n}\n\npub fn closure_impl_expr_fngen<I: Iterator<Item = ()>, F: FnMut(()) -> ()>(it: I, f: F) -> Vec<()> {\n    it.map(f).collect()\n}\n\n// From issue #523\npub trait Lang: Sized {\n    type Var;\n    fn s(self, _: i32) -> (Self, Self::Var);\n}\n\npub enum Error {\n    Fail,\n}\n\n// From issue #474\nimpl Error {\n    pub fn for_application_callback() -> impl FnOnce() -> Self {\n        || Self::Fail\n    }\n}\n\n// Trickier case.\nfn iter_option<'a, T>(x: &'a Option<T>) -> impl Iterator<Item = &'a T> {\n    x.as_ref().into_iter()\n}\n\n// Issue #684\nfn use_impl_trait() {\n    let mut iter = iter_option(&Some(false));\n    let _ = iter.next();\n}\n\nmod for_clauses {\n    trait Foo<T> {\n        fn to_t(&self) -> T;\n    }\n\n    fn _f<X: for<'a> Foo<&'a u8>>(x: X) {\n        x.to_t();\n    }\n\n    // From issue #495\n    mod issue_495 {\n        use core::iter::Filter;\n        use core::ops::Range;\n\n        fn 
original_function_from_495(list: Vec<u8>) {\n            let _indices: Vec<_> = (0..5).filter(|i| list.iter().any(|n| n == i)).collect();\n        }\n\n        fn minimized_1(list: Vec<u8>) -> Vec<u8> {\n            (0..5).filter(|_| true).collect()\n        }\n        fn minimized_2(it: Filter<Range<u8>, for<'a> fn(&'a u8) -> bool>) {\n            let _indices: Vec<_> = it.collect();\n        }\n        mod minimized_3 {\n            pub trait Trait {}\n            impl<P: FnMut(&u8) -> bool> Trait for P {}\n        }\n    }\n}\n\nmod unconstrainted_types_issue_677 {\n    trait PolyOp {\n        fn op(x: u32, y: u32) -> u32;\n    }\n    struct Plus;\n    impl PolyOp for Plus {\n        fn op(x: u32, y: u32) -> u32 {\n            x + y\n        }\n    }\n\n    struct Times;\n    impl PolyOp for Times {\n        fn op(x: u32, y: u32) -> u32 {\n            x * y\n        }\n    }\n\n    fn twice<OP: PolyOp>(x: u32) -> u32 {\n        OP::op(x, x)\n    }\n\n    fn both(x: u32) -> (u32, u32) {\n        (twice::<Plus>(x), twice::<Times>(x))\n    }\n\n    #[test]\n    fn test() {\n        assert!(both(10) == (20, 100));\n    }\n}\n\n// From issue_667\nmod implicit_dependencies_issue_667 {\n    mod trait_definition {\n        pub trait MyTrait {\n            fn my_method(self);\n        }\n    }\n    mod define_type {\n        pub struct MyType;\n    }\n    mod impl_type {\n        impl super::trait_definition::MyTrait for super::define_type::MyType {\n            fn my_method(self) {}\n        }\n    }\n    mod use_type {\n        fn some_function(x: super::define_type::MyType) {\n            use super::trait_definition::MyTrait;\n            x.my_method()\n        }\n    }\n}\n\n// Related to issue 719\nmod interlaced_consts_types {\n    struct Bar<const FooConst: usize, FooType>([FooType; FooConst]);\n\n    trait Foo<const FooConst: usize, FooType> {\n        fn fun<const FunConst: usize, FunType>(x: [FooType; FooConst], y: [FunType; FunConst]);\n    }\n\n    impl<const 
FooConst: usize, FooType, SelfType> Foo<FooConst, FooType> for SelfType {\n        fn fun<const FunConst: usize, FunType>(x: [FooType; FooConst], y: [FunType; FunConst]) {}\n    }\n}\n\n// Related to issue #719 (after reopen)\nmod implicit_explicit_calling_conventions {\n    struct Type<TypeArg, const ConstArg: usize> {\n        field: [TypeArg; ConstArg],\n    }\n\n    trait Trait<TypeArg, const ConstArg: usize> {\n        fn method<MethodTypeArg, const MethodConstArg: usize>(\n            self,\n            value_TypeArg: TypeArg,\n            value_Type: Type<TypeArg, ConstArg>,\n        );\n        fn associated_function<MethodTypeArg, const MethodConstArg: usize>(\n            _self: Self,\n            value_TypeArg: TypeArg,\n            value_Type: Type<TypeArg, ConstArg>,\n        );\n    }\n\n    impl<TypeArg, const ConstArg: usize> Trait<TypeArg, ConstArg> for () {\n        fn method<MethodTypeArg, const MethodConstArg: usize>(\n            self,\n            value_TypeArg: TypeArg,\n            value_Type: Type<TypeArg, ConstArg>,\n        ) {\n        }\n        fn associated_function<MethodTypeArg, const MethodConstArg: usize>(\n            _self: Self,\n            value_TypeArg: TypeArg,\n            value_Type: Type<TypeArg, ConstArg>,\n        ) {\n        }\n    }\n\n    trait SubTrait<TypeArg, const ConstArg: usize>: Trait<TypeArg, ConstArg> {\n        type AssocType: Trait<TypeArg, ConstArg>;\n    }\n\n    fn method_caller<\n        MethodTypeArg,\n        TypeArg,\n        const ConstArg: usize,\n        const MethodConstArg: usize,\n        ImplTrait: Trait<TypeArg, ConstArg>,\n    >(\n        x: ImplTrait,\n        value_TypeArg: TypeArg,\n        value_Type: Type<TypeArg, ConstArg>,\n    ) {\n        x.method::<MethodTypeArg, MethodConstArg>(value_TypeArg, value_Type);\n    }\n\n    fn associated_function_caller<\n        MethodTypeArg,\n        TypeArg,\n        const ConstArg: usize,\n        const MethodConstArg: usize,\n        
ImplTrait: Trait<TypeArg, ConstArg>,\n    >(\n        x: ImplTrait,\n        value_TypeArg: TypeArg,\n        value_Type: Type<TypeArg, ConstArg>,\n    ) {\n        ImplTrait::associated_function::<MethodTypeArg, MethodConstArg>(\n            x,\n            value_TypeArg,\n            value_Type,\n        );\n    }\n}\n\nmod type_alias_bounds_issue_707 {\n    struct StructWithGenericBounds<T: Clone>(T);\n    type SynonymA<T> = StructWithGenericBounds<T>;\n    type SynonymB<T> = StructWithGenericBounds<(T, T)>;\n}\n\n// Related to PR 730\nmod block_size {\n    pub trait BlockSizeUser {\n        type BlockSize;\n    }\n    pub trait ParBlocksSizeUser: BlockSizeUser {}\n\n    pub trait BlockBackend: ParBlocksSizeUser {\n        fn proc_block(block: Vec<<Self as BlockSizeUser>::BlockSize>);\n    }\n}\n\n// issue 692\nmod recursive_trait_with_assoc_type {\n    pub trait Trait1 {\n        type T: Trait1;\n    }\n\n    pub trait Trait2: Trait1 {\n        type U;\n    }\n}\n\n// issue 310\nmod default_traits_parameters {\n    trait Foo: Bar {\n        type U;\n    }\n    trait Bar<T = <Self as Foo>::U> {}\n}\n\n// issue 1218\nmod impl_expr_in_goal {\n    trait T1 {\n        type Assoc;\n    }\n\n    trait T2 {}\n\n    impl<U: T1> T2 for U where U::Assoc: T2 {}\n}\n\n// issue 1290\nmod implement_arithmetic_trait {\n    struct Wrapped(i32);\n\n    impl std::ops::Add for Wrapped {\n        type Output = Wrapped;\n        fn add(self, rhs: Self) -> Self::Output {\n            Wrapped(self.0 + rhs.0)\n        }\n    }\n\n    fn test(x: Wrapped, y: Wrapped) -> Wrapped {\n        x + y\n    }\n}\n\n// issue 1566\nmod typenum_perf {\n    use typenum::{IsLess, UInt, UTerm, B1};\n\n    type I20 = UInt<I19, B1>;\n    type I19 = UInt<I18, B1>;\n    type I18 = UInt<I17, B1>;\n    type I17 = UInt<I16, B1>;\n    type I16 = UInt<I15, B1>;\n    type I15 = UInt<I14, B1>;\n    type I14 = UInt<I13, B1>;\n    type I13 = UInt<I12, B1>;\n    type I12 = UInt<I11, B1>;\n    type I11 = UInt<I10, 
B1>;\n    type I10 = UInt<I9, B1>;\n    type I9 = UInt<I8, B1>;\n    type I8 = UInt<I7, B1>;\n    type I7 = UInt<I6, B1>;\n    type I6 = UInt<I5, B1>;\n    type I5 = UInt<I4, B1>;\n    type I4 = UInt<I3, B1>;\n    type I3 = UInt<I2, B1>;\n    type I2 = UInt<I1, B1>;\n    type I1 = UInt<I0, B1>;\n    type I0 = UTerm;\n\n    fn _f<T: IsLess<I20>>() {}\n}\n"
  },
  {
    "path": "tests/tuples/Cargo.toml",
    "content": "[package]\nname = \"tuples\"\nversion = \"0.0.1\"\nedition = \"2021\"\n\n[dependencies]\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { snapshot = \"stdout\" }\n"
  },
  {
    "path": "tests/tuples/src/lib.rs",
    "content": "#![allow(dead_code)]\n\npub fn project_tuple1() -> u8 {\n    let tuple1: (u8,) = (3,);\n    tuple1.0\n}\n"
  },
  {
    "path": "tests/unsafe/Cargo.toml",
    "content": "[package]\nname = \"unsafe\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nhax-lib = { path = \"../../hax-lib\" }\n\n[package.metadata.hax-tests]\ninto.\"fstar\" = { broken = false }\n"
  },
  {
    "path": "tests/unsafe/src/lib.rs",
    "content": "#![allow(dead_code)]\n\nenum Impossible {}\n\n#[hax_lib::requires(false)]\npub fn impossible() -> Impossible {\n    unsafe { std::hint::unreachable_unchecked() }\n}\n\n#[hax_lib::requires(slice.len() > 10)]\npub fn get_unchecked_example(slice: &[u8]) -> u8 {\n    unsafe { *slice.get_unchecked(6) }\n}\n"
  }
]